File: base.pyi

package info (click to toggle)
python-django-stubs 5.2.9-2
  • links: PTS, VCS
  • area: main
  • in suites: forky, sid
  • size: 4,832 kB
  • sloc: python: 5,185; makefile: 15; sh: 8
file content (167 lines) | stat: -rw-r--r-- 5,531 bytes parent folder | download
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
from collections.abc import Callable, Iterable, Iterator, Mapping, Sequence
from enum import Enum
from logging import Logger
from re import Pattern
from typing import Any, cast

from django.template.context import Context
from django.template.engine import Engine
from django.template.library import Library
from django.template.loaders.base import Loader
from django.utils.safestring import SafeString

# Delimiter constants used by the template lexer/parser.  The stub only
# commits to their types; the concrete values live in django.template.base.
FILTER_SEPARATOR: str           # separates a variable from its filter chain
FILTER_ARGUMENT_SEPARATOR: str  # separates a filter name from its argument
VARIABLE_ATTRIBUTE_SEPARATOR: str  # separates attribute lookups in a variable
BLOCK_TAG_START: str            # opening marker of a block tag
BLOCK_TAG_END: str              # closing marker of a block tag
VARIABLE_TAG_START: str         # opening marker of a variable tag
VARIABLE_TAG_END: str           # closing marker of a variable tag
COMMENT_TAG_START: str          # opening marker of a comment tag
COMMENT_TAG_END: str            # closing marker of a comment tag
SINGLE_BRACE_START: str         # single opening brace, for partial-tag handling
SINGLE_BRACE_END: str           # single closing brace, for partial-tag handling
UNKNOWN_SOURCE: str             # placeholder name for templates with no origin
tag_re: Pattern[str]            # compiled pattern that splits template text into tags
logger: Logger                  # module-level logger for template errors

class TokenType(Enum):
    """Kind of token emitted by the template :class:`Lexer`."""

    # Members are declared via cast(int, ...) so the stub promises int-valued
    # members without pinning the exact numbers here.
    TEXT = cast(int, ...)
    VAR = cast(int, ...)
    BLOCK = cast(int, ...)
    COMMENT = cast(int, ...)

class VariableDoesNotExist(Exception):
    """Raised when a template variable cannot be resolved against a context."""

    # %-style message template, formatted with ``params``.
    msg: str
    # NOTE(review): annotated as a fixed 1-tuple; confirm whether a variadic
    # ``tuple[dict[str, str] | str, ...]`` would better match the implementation.
    params: tuple[dict[str, str] | str]
    def __init__(self, msg: str, params: tuple[dict[str, str] | str] = ...) -> None: ...

class Origin:
    """Describes where a template came from (loader + name), for debugging."""

    # Display name of the template source (may be UNKNOWN_SOURCE).
    name: str
    # Name the template was requested by, if loaded by name.
    template_name: bytes | str | None
    # The loader that produced this template, or None for directly-built templates.
    loader: Loader | None
    def __init__(self, name: str, template_name: bytes | str | None = None, loader: Loader | None = None) -> None: ...
    @property
    def loader_name(self) -> str | None: ...

class Template:
    """A compiled template: source text plus the node tree built from it."""

    name: str | None        # template name, if any
    origin: Origin          # provenance of the template source
    engine: Engine          # engine whose settings govern compilation/rendering
    source: str             # raw template text
    nodelist: NodeList      # compiled node tree rendered by render()
    def __init__(
        self,
        template_string: Template | str,
        origin: Origin | None = None,
        name: str | None = None,
        engine: Engine | None = None,
    ) -> None: ...
    def render(self, context: Context) -> SafeString: ...
    def compile_nodelist(self) -> NodeList: ...
    # Builds the debug-page info dict for an exception raised at *token*.
    def get_exception_info(self, exception: Exception, token: Token) -> dict[str, Any]: ...

# Yields character offsets of line boundaries in *template_source* —
# presumably used to map an error's string position to a line number
# (see Template.get_exception_info); confirm against the implementation.
def linebreak_iter(template_source: str) -> Iterator[int]: ...

class Token:
    """A single lexed chunk of template source."""

    contents: str                     # text between the tag markers
    token_type: TokenType             # TEXT, VAR, BLOCK or COMMENT
    lineno: int | None                # 1-based source line, when tracked
    position: tuple[int, int] | None  # (start, end) offsets, when tracked
    def __init__(
        self,
        token_type: TokenType,
        contents: str,
        position: tuple[int, int] | None = None,
        lineno: int | None = None,
    ) -> None: ...
    # Splits contents into tag bits, keeping quoted strings intact.
    def split_contents(self) -> list[str]: ...

class Lexer:
    """Turns raw template text into a list of :class:`Token` objects."""

    template_string: str
    # False outside a verbatim block; the closing tag string while inside one.
    verbatim: bool | str
    def __init__(self, template_string: str) -> None: ...
    def tokenize(self) -> list[Token]: ...
    # in_tag distinguishes tag text (between markers) from literal text.
    def create_token(self, token_string: str, position: tuple[int, int] | None, lineno: int, in_tag: bool) -> Token: ...

class DebugLexer(Lexer):
    """Lexer variant that records token positions for debug error pages."""

    template_string: str
    verbatim: bool | str
    def tokenize(self) -> list[Token]: ...

class Parser:
    """Consumes a token stream and builds a :class:`NodeList`."""

    # Remaining tokens to parse (consumed from the end).
    tokens: list[Token] | str
    # NOTE(review): bare Callable — parameterizing these (e.g. compile
    # functions / filter functions) would be more precise; confirm upstream.
    tags: dict[str, Callable]
    filters: dict[str, Callable]
    # Stack of (command, token) for currently-open block tags.
    command_stack: list[tuple[str, Token]]
    libraries: dict[str, Library]
    origin: Origin | None
    def __init__(
        self,
        tokens: list[Token] | str,
        libraries: dict[str, Library] | None = None,
        builtins: list[Library] | None = None,
        origin: Origin | None = None,
    ) -> None: ...
    # Parses until one of *parse_until* block commands is hit (or EOF).
    def parse(self, parse_until: Iterable[str] | None = None) -> NodeList: ...
    def skip_past(self, endtag: str) -> None: ...
    def extend_nodelist(self, nodelist: NodeList, node: Node, token: Token) -> None: ...
    # Returns (rather than raises) the exception annotated with the token.
    def error(self, token: Token, e: Exception | str) -> Exception: ...
    def invalid_block_tag(self, token: Token, command: str, parse_until: Iterable[str] | None = None) -> Any: ...
    def unclosed_block_tag(self, parse_until: Iterable[str]) -> Any: ...
    def next_token(self) -> Token: ...
    def prepend_token(self, token: Token) -> None: ...
    def delete_first_token(self) -> None: ...
    def add_library(self, lib: Library) -> None: ...
    def compile_filter(self, token: str) -> FilterExpression: ...
    def find_filter(self, filter_name: str) -> Callable: ...

# Building blocks of the filter-parsing grammar: two raw pattern fragments
# and the compiled regex used by FilterExpression to tokenize "var|filter:arg".
constant_string: str
filter_raw_string: str
filter_re: Pattern[str]

class FilterExpression:
    """A parsed ``variable|filter:arg`` expression from a template tag."""

    token: str          # original expression text
    filters: list[Any]  # parsed (filter_func, args) entries
    var: Any            # the base Variable or constant being filtered
    def __init__(self, token: str, parser: Parser) -> None: ...
    # With ignore_failures=True, resolution errors yield a fallback value
    # instead of raising.
    def resolve(self, context: Context, ignore_failures: bool = False) -> Any: ...
    @staticmethod
    def args_check(name: str, func: Callable, provided: list[tuple[bool, Any]]) -> bool: ...

class Variable:
    """A template variable: either a literal or a dotted lookup path."""

    var: dict[Any, Any] | str
    # Set when the variable parses as a literal (number or safe string).
    literal: SafeString | float | None
    # Dotted-path components for context lookup; None for literals.
    lookups: tuple[str, ...] | None
    translate: bool             # True for _("...")-style translated literals
    message_context: str | None # translation context, if any
    def __init__(self, var: dict[Any, Any] | str) -> None: ...
    def resolve(self, context: Mapping[str, Mapping[str, Any]] | Context | int | str) -> Any: ...

class Node:
    """Base class for all compiled template nodes."""

    # True for nodes that must appear first in a template (e.g. extends).
    must_be_first: bool
    # Names of attributes holding child NodeLists, walked by get_nodes_by_type.
    child_nodelists: Any
    origin: Origin
    token: Token | None
    def render(self, context: Context) -> str: ...
    # Wraps render() with debug/exception annotation support.
    def render_annotated(self, context: Context) -> int | str: ...
    def get_nodes_by_type(self, nodetype: type[Node]) -> list[Node]: ...

class NodeList(list[Node]):
    """A list of nodes that renders to the concatenation of its children."""

    # True when the list holds anything other than TextNodes.
    contains_nontext: bool
    def render(self, context: Context) -> SafeString: ...
    def get_nodes_by_type(self, nodetype: type[Node]) -> list[Node]: ...

class TextNode(Node):
    """A node holding literal template text, rendered verbatim."""

    s: str  # the literal text
    def __init__(self, s: str) -> None: ...

# Converts *value* to output text for *context* — presumably applying
# localization and autoescaping per context settings; confirm in implementation.
def render_value_in_context(value: Any, context: Context) -> str: ...

class VariableNode(Node):
    """A node that renders the result of a {{ variable|filter }} expression."""

    filter_expression: FilterExpression
    def __init__(self, filter_expression: FilterExpression) -> None: ...

# Compiled regex used by token_kwargs() to recognise "name=value" tag bits.
kwarg_re: Pattern[str]

# Parses leading "key=value" entries from a tag's split bits into compiled
# FilterExpressions; support_legacy presumably also accepts Django's older
# keyword syntax — confirm against the implementation.
def token_kwargs(bits: Sequence[str], parser: Parser, support_legacy: bool = False) -> dict[str, FilterExpression]: ...