File: noop-tokenize.rs

// Copyright 2014-2017 The html5ever Project Developers. See the
// COPYRIGHT file at the top-level directory of this distribution.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.

// Run a single benchmark once.  For use with profiling tools.
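//
// A typical invocation, assuming this file is built as a Cargo example
// target named `noop-tokenize` (the exact target name and path may differ
// in this packaging), would be something like:
//
//     cargo build --release --example noop-tokenize
//     ./target/release/examples/noop-tokenize < page.html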

extern crate html5ever;

use std::default::Default;
use std::io;

use html5ever::tendril::*;
use html5ever::tokenizer::{BufferQueue, Token, TokenSink, TokenSinkResult, Tokenizer};

struct Sink(Vec<Token>);

impl TokenSink for Sink {
    type Handle = ();

    fn process_token(&mut self, token: Token, _line_number: u64) -> TokenSinkResult<()> {
        // Don't use the token, but make sure we don't get
        // optimized out entirely.
        self.0.push(token);
        TokenSinkResult::Continue
    }
}

fn main() {
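    // Read all of standard input into a byte tendril, then reinterpret the
    // bytes as UTF-8 for the tokenizer (the unwrap panics on invalid UTF-8).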
    let mut chunk = ByteTendril::new();
    io::stdin().read_to_tendril(&mut chunk).unwrap();
    let mut input = BufferQueue::new();
    input.push_back(chunk.try_reinterpret().unwrap());

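    // Tokenize the whole input in one pass with default options; the sink
    // above only collects tokens so the work is not optimized away.
    // `end()` flushes any tokenizer state remaining at end of input.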
    let mut tok = Tokenizer::new(Sink(Vec::new()), Default::default());
    let _ = tok.feed(&mut input);
    assert!(input.is_empty());
    tok.end();
}