File: main.py

Package: python-pegen 0.3.0-2
#!/usr/bin/env python3.8

import sys
from tokenize import generate_tokens

from story3.grammar import GrammarParser
from story3.tokenizer import Tokenizer
from story3.generator3 import generate
from story3.visualizer import Visualizer

def main():
    file = "story3/toy.gram"
    print("Reading", file)
    with open(file) as f:
        tokengen = generate_tokens(f.readline)
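        # The visualizer is optional; it is enabled by passing -v on the command line.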
        vis = None
        if "-v" in sys.argv:
            vis = Visualizer()
        tok = Tokenizer(tokengen, vis)
        p = GrammarParser(tok)
        try:
            rules = p.grammar()
            if vis:
                vis.wait()
        finally:
            if vis:
                vis.close()
    if not rules:
        sys.exit("Fail")
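    # Show the parsed rules: a Python-style dump on stdout, a grammar-style summary on stderr.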
    print("[")
    for rule in rules:
        print(f"  {rule},")
    print("]")
    for rule in rules:
        print(rule.name, end=": ", file=sys.stderr)
        print(*(" ".join(alt) for alt in rule.alts), sep=" | ", file=sys.stderr)
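    # Regenerate the parser module for the toy grammar.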
    outfile = "story3/toy.py"
    print("Updating", outfile, file=sys.stderr)
    with open(outfile, "w") as stream:
        generate(rules, stream)

if __name__ == '__main__':
    main()
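
A rough usage sketch, assuming the script is run from the repository root so that the relative paths story3/toy.gram and story3/toy.py resolve; passing -v steps through parsing in the visualizer:

    python3.8 story3/main.py
    python3.8 story3/main.py -v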