File: __init__.py

package info (click to toggle)
pypy3 7.0.0+dfsg-3
  • links: PTS, VCS
  • area: main
  • in suites: buster
  • size: 111,848 kB
  • sloc: python: 1,291,746; ansic: 74,281; asm: 5,187; cpp: 3,017; sh: 2,533; makefile: 544; xml: 243; lisp: 45; csh: 21; awk: 4
file content (45 lines) | stat: -rw-r--r-- 1,389 bytes parent folder | download
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
from pypy.interpreter.mixedmodule import MixedModule
from pypy.interpreter.gateway import unwrap_spec
from pypy.interpreter.pyparser import pytoken, pygram


class Module(MixedModule):
    """Mixed-module definition for the app-level ``token`` module.

    All entries are interp-level: the numeric token constants are filled
    in at import time by ``_init_tokens`` below; here we declare only the
    fixed names (``NT_OFFSET`` and the three predicate functions, which
    live in this ``__init__`` module).
    """

    # No pure app-level definitions for this module.
    appleveldefs = {}

    interpleveldefs = {
        "NT_OFFSET": "space.newint(256)",
        "ISTERMINAL": "__init__.isterminal",
        "ISNONTERMINAL": "__init__.isnonterminal",
        "ISEOF": "__init__.iseof",
    }


def _init_tokens():
    """Fill ``Module.interpleveldefs`` with the token constants.

    Mirrors CPython's ``token`` module: one integer constant per token
    name, plus ``tok_name`` (id -> name mapping), ``N_TOKENS`` and
    ``__all__``.  The values are spelled as interp-level definition
    strings, as MixedModule expects.
    """
    id_to_name = {}
    for name, tok_id in pytoken.python_tokens.items():
        Module.interpleveldefs[name] = "space.wrap(%d)" % (tok_id,)
        id_to_name[tok_id] = name
    Module.interpleveldefs["tok_name"] = "space.wrap(%r)" % (id_to_name,)
    Module.interpleveldefs["N_TOKENS"] = "space.wrap(%d)" % len(id_to_name)
    exported = list(Module.interpleveldefs.keys())
    # obscure, but these names are not in CPython's token module
    # so we remove them from 'token.__all__' otherwise they end up
    # twice in 'tokenize.__all__'
    exported.remove('COMMENT')
    exported.remove('NL')
    Module.interpleveldefs["__all__"] = "space.wrap(%r)" % (exported,)

_init_tokens()


@unwrap_spec(tok=int)
def isterminal(space, tok):
    """token.ISTERMINAL(tok): app-level True iff *tok* is a terminal
    token code, i.e. below NT_OFFSET (256)."""
    terminal = tok < 256
    return space.newbool(terminal)

@unwrap_spec(tok=int)
def isnonterminal(space, tok):
    """token.ISNONTERMINAL(tok): app-level True iff *tok* is a
    non-terminal code, i.e. at or above NT_OFFSET (256)."""
    nonterminal = tok >= 256
    return space.newbool(nonterminal)

@unwrap_spec(tok=int)
def iseof(space, tok):
    """token.ISEOF(tok): app-level True iff *tok* is the ENDMARKER
    token that terminates a token stream."""
    eof_id = pygram.tokens.ENDMARKER
    return space.newbool(tok == eof_id)