File: tokenize.pyi

Package: python-libcst 1.8.6-1 (Debian suites: forky, sid)
File content (95 lines):

from token import (
    AMPER,
    AMPEREQUAL,
    AT,
    ATEQUAL,
    CIRCUMFLEX,
    CIRCUMFLEXEQUAL,
    COLON,
    COLONEQUAL,
    COMMA,
    COMMENT,
    DEDENT,
    DOT,
    DOUBLESLASH,
    DOUBLESLASHEQUAL,
    DOUBLESTAR,
    DOUBLESTAREQUAL,
    ELLIPSIS,
    ENCODING,
    ENDMARKER,
    EQEQUAL,
    EQUAL,
    ERRORTOKEN,
    EXACT_TOKEN_TYPES,
    GREATER,
    GREATEREQUAL,
    INDENT,
    LBRACE,
    LEFTSHIFT,
    LEFTSHIFTEQUAL,
    LESS,
    LESSEQUAL,
    LPAR,
    LSQB,
    MINEQUAL,
    MINUS,
    N_TOKENS,
    NAME,
    NEWLINE,
    NL,
    NOTEQUAL,
    NT_OFFSET,
    NUMBER,
    OP,
    PERCENT,
    PERCENTEQUAL,
    PLUS,
    PLUSEQUAL,
    RARROW,
    RBRACE,
    RIGHTSHIFT,
    RIGHTSHIFTEQUAL,
    RPAR,
    RSQB,
    SEMI,
    SLASH,
    SLASHEQUAL,
    STAR,
    STAREQUAL,
    STRING,
    TILDE,
    TYPE_COMMENT,
    TYPE_IGNORE,
    VBAR,
    VBAREQUAL,
)
from typing import Callable, Generator, Sequence, Tuple

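# Regular-expression source fragments mirroring the token patterns defined
# in CPython's Lib/tokenize.py (numbers, whitespace, comments, names).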
Hexnumber: str = ...
Binnumber: str = ...
Octnumber: str = ...
Decnumber: str = ...
Intnumber: str = ...
Exponent: str = ...
Pointfloat: str = ...
Expfloat: str = ...
Floatnumber: str = ...
Imagnumber: str = ...
Number: str = ...
Whitespace: str = ...
Comment: str = ...
Ignore: str = ...
Name: str = ...

class TokenInfo(Tuple[int, str, Tuple[int, int], Tuple[int, int], int]):
    exact_type: int = ...
    type: int = ...
    string: str = ...
    start: Tuple[int, int] = ...
    end: Tuple[int, int] = ...
    line: str = ...  # physical source line containing the token
    def __repr__(self) -> str: ...

def detect_encoding(readline: Callable[[], bytes]) -> Tuple[str, Sequence[bytes]]: ...
def tokenize(readline: Callable[[], bytes]) -> Generator[TokenInfo, None, None]: ...
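
For reference, a minimal usage sketch of the standard-library tokenize API that this stub describes; the sample source bytes and variable names below are illustrative only, not part of the stub.

import io
import tokenize

source = b"x = 1  # comment\n"

# detect_encoding() reads at most two lines looking for a BOM or a coding
# cookie; it returns the encoding name and the raw lines it consumed.
encoding, consumed = tokenize.detect_encoding(io.BytesIO(source).readline)
print(encoding)  # typically "utf-8"

# tokenize() yields TokenInfo tuples of the shape declared above:
# (type, string, start, end, line), starting with an ENCODING token.
for tok in tokenize.tokenize(io.BytesIO(source).readline):
    print(tok.type, repr(tok.string), tok.start, tok.end)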