File: bin.rs

//! Standalone version of rust-peg used for bootstrapping the meta-grammar.
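//!
//! Reads a grammar from the file named as the sole argument (or from stdin
//! when no argument is given) and writes the generated parser to stdout.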

extern crate proc_macro;
extern crate proc_macro2;
extern crate quote;

use std::env;
use std::fs::File;
use std::io::{stderr, stdin, stdout};
use std::io::{Read, Write};
use std::path::Path;
use std::process;

// This can't use the `peg` crate as it would be a circular dependency, but the generated code in grammar.rs
// requires `::peg` paths.
extern crate peg_runtime as peg;

mod analysis; // static checks on the parsed grammar
mod ast; // AST types describing a parsed grammar
mod grammar; // bootstrapped parser for the meta-grammar (generated)
mod tokens; // FlatTokenStream wrapper over proc-macro2 tokens
mod translate; // code generation from the grammar AST

fn main() {
    let args = env::args_os().collect::<Vec<_>>();
    let progname = &args[0];
    let mut log = stderr();

    let mut source = String::new();

    if args.len() == 2 && &args[1] != "-h" {
        File::open(Path::new(&args[1]))
            .unwrap()
            .read_to_string(&mut source)
            .unwrap();
    } else if args.len() == 1 {
        stdin().read_to_string(&mut source).unwrap();
    } else {
        writeln!(log, "Usage: {} [file]", progname.to_string_lossy()).unwrap();
        process::exit(0);
    }

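    // Lex the grammar text into a proc-macro2 token stream and wrap it
    // in a FlatTokenStream for the bootstrap parser.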
    let source_tokens = source.parse().expect("Error tokenizing input");
    let input_tokens = tokens::FlatTokenStream::new(source_tokens);
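    // Parse with the bootstrapped meta-grammar parser from grammar.rs.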
    let grammar = match grammar::peg::peg_grammar(&input_tokens) {
        Ok(g) => g,
        Err(err) => {
            eprintln!("Failed to parse grammar: expected {}", err.expected);
            process::exit(1);
        }
    };
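    // Generate the Rust source for the parser and emit it on stdout.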
    let parser_tokens = translate::compile_grammar(&grammar);
    let mut out = stdout();
    writeln!(&mut out, "// Generated by rust-peg. Do not edit.").unwrap();
    write!(&mut out, "{}", parser_tokens).unwrap();
}