File: parse.rs

Package: rust-json-event-parser 0.2.2-1
#![no_main]
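// Differential fuzz target: the input is parsed both split into chunks (at 0xFF bytes)
// and as a single buffer; both runs must produce the same serialized output and the
// same error, and error-free output must round-trip through the parser unchanged.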

use json_event_parser::{
    JsonEvent, JsonSyntaxError, LowLevelJsonParser, LowLevelJsonParserResult, WriterJsonSerializer,
};
use libfuzzer_sys::fuzz_target;

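// Feeds `chunks` to the low-level parser one at a time, re-serializes every event,
// and returns the serialized JSON together with the first syntax error, if any.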
fn parse_chunks(chunks: &[&[u8]]) -> (String, Option<JsonSyntaxError>) {
    let mut input_buffer = Vec::new();
    let mut input_cursor = 0;
    let mut output_buffer = Vec::new();
    let mut reader = LowLevelJsonParser::new();
    let mut writer = WriterJsonSerializer::new(&mut output_buffer);
    let mut error = None;
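    // Feed the input chunk by chunk, draining every event the parser can produce
    // from the bytes buffered so far before moving on to the next chunk.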
    for (i, chunk) in chunks.iter().enumerate() {
        input_buffer.extend_from_slice(chunk);
        loop {
            let LowLevelJsonParserResult {
                event,
                consumed_bytes,
            } = reader.parse_next(&input_buffer[input_cursor..], i == chunks.len() - 1);
            input_cursor += consumed_bytes;
            match event {
                Some(Ok(JsonEvent::Eof)) => {
                    if error.is_none() {
                        writer.finish().unwrap();
                    }
                    return (String::from_utf8(output_buffer).unwrap(), error);
                }
                Some(Ok(event)) => {
                    if error.is_none() {
                        writer.serialize_event(event).unwrap();
                    } else {
                        let _ = writer.serialize_event(event); // after an error the events may not form a valid structure, so ignore write failures
                    }
                }
                Some(Err(e)) => {
                    if error.is_none() {
                        error = Some(e)
                    }
                }
                None => break,
            }
        }
    }
    unreachable!("parse_next always returns an Eof event for the last chunk")
}

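// Concatenates the given byte slices into a single contiguous buffer.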
fn merge<'a>(slices: impl IntoIterator<Item = &'a [u8]>) -> Vec<u8> {
    let mut buf = Vec::new();
    for slice in slices {
        buf.extend_from_slice(slice);
    }
    buf
}

fuzz_target!(|data: &[u8]| {
    // Parse the input split into chunks at each 0xFF byte (split drops the separator bytes themselves)
    let (with_separators, with_separators_error) =
        parse_chunks(&data.split(|c| *c == 0xFF).collect::<Vec<_>>());
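    // Parse the same chunks again, concatenated into a single buffer; both runs must agree.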
    let (without_separators, without_separators_error) =
        parse_chunks(&[&merge(data.split(|c| *c == 0xFF))]);
    assert_eq!(
        with_separators_error
            .as_ref()
            .map_or_else(String::new, |e| e.to_string()),
        without_separators_error
            .as_ref()
            .map_or_else(String::new, |e| e.to_string()),
        "{with_separators_error:?} vs {without_separators_error:?}"
    );
    assert_eq!(with_separators, without_separators);

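    // When parsing succeeded, the serialized output must itself parse without error
    // and round-trip to the same string.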
    if with_separators_error.is_none() {
        let (again, again_error) = parse_chunks(&[with_separators.as_bytes()]);
        assert!(
            again_error.is_none(),
            "Failed to parse '{with_separators}' with error {}",
            again_error.unwrap()
        );
        assert_eq!(with_separators, again);
    }
});