File: pyproject.toml

Package: python-scrapli 2023.7.30-5 (Debian sid, area: main)
[build-system]
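# PEP 517 build configuration; setuptools (via setuptools.build_meta) builds the sdist and wheel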
build-backend = "setuptools.build_meta"
requires = [
  "setuptools",
  "wheel",
]

[project]
name = "scrapli"
description = "Fast, flexible, sync/async, Python 3.7+ screen scraping client specifically for network devices"
readme = "README.md"
keywords = [
  "arista",
  "automation",
  "cisco",
  "eos",
  "iosxe",
  "iosxr",
  "juniper",
  "junos",
  "netconf",
  "network",
  "nxos",
  "ssh",
  "telnet",
]
license = { file = "LICENSE" }
authors = [
    { name = "Carl Montanari", email = "carl.r.montanari@gmail.com" },
]
requires-python = ">=3.7"
classifiers = [
    "License :: OSI Approved :: MIT License",
    "Operating System :: POSIX :: Linux",
    "Operating System :: MacOS",
    "Programming Language :: Python",
    "Programming Language :: Python :: 3.7",
    "Programming Language :: Python :: 3.8",
    "Programming Language :: Python :: 3.9",
    "Programming Language :: Python :: 3.10",
    "Programming Language :: Python :: 3.11",
    "Programming Language :: Python :: 3 :: Only",
    "Topic :: Software Development :: Libraries :: Python Modules",
]
dynamic = [
  "dependencies",
  "optional-dependencies",
  "version",
]

[project.urls]
Changelog = "https://carlmontanari.github.io/scrapli/changelog"
Docs = "https://carlmontanari.github.io/scrapli/"
Homepage = "https://github.com/carlmontanari/scrapli"

[tool.setuptools.dynamic]
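# resolves the fields declared as dynamic in [project]: the version is read from
# scrapli.__version__ and dependency lists come from the requirements*.txt files below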
version = { attr = "scrapli.__version__" }
dependencies = { file = "requirements.txt" }
optional-dependencies.dev = { file = [
    "requirements-dev.txt",
    "requirements-textfsm.txt",
    "requirements-genie.txt",
    "requirements-ttp.txt",
    "requirements-paramiko.txt",
    "requirements-ssh2.txt",
    "requirements-asyncssh.txt",
    "requirements-community.txt",
] }
optional-dependencies.docs = { file = "requirements-docs.txt" }
optional-dependencies.textfsm = { file = "requirements-textfsm.txt" }
optional-dependencies.genie = { file = "requirements-genie.txt" }
optional-dependencies.ttp = { file = "requirements-ttp.txt" }
optional-dependencies.paramiko = { file = "requirements-paramiko.txt" }
optional-dependencies.ssh2 = { file = "requirements-ssh2.txt" }
optional-dependencies.asyncssh = { file = "requirements-asyncssh.txt" }
optional-dependencies.community = { file = "requirements-community.txt" }
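# each extra resolves to its requirements file, so installing e.g. "scrapli[paramiko]"
# would pull in requirements-paramiko.txt; the dev extra aggregates everything except docs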

[tool.setuptools.package-data]
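# include the PEP 561 marker file so type checkers use scrapli's inline type hints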
scrapli = [
    "py.typed"
]

[tool.black]
line-length = 100
target-version = [
    "py311",
]

[tool.isort]
profile = "black"
line_length = 100
multi_line_output = 3
include_trailing_comma = true
known_first_party = "scrapli"
known_third_party = "asyncssh,pytest"

[tool.pytest.ini_options]
asyncio_mode = "auto"
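# auto mode lets pytest-asyncio collect and run "async def" tests without explicit asyncio markers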

[tool.coverage.run]
source = [
    "scrapli/"
]
omit = [
    "scrapli/transport/plugins/system/ptyprocess.py"
]

[tool.coverage.report]
sort = "cover"
omit = ["scrapli/transport/plugins/system/ptyprocess.py"]

[tool.mypy]
python_version = "3.11"
pretty = true
ignore_missing_imports = true
warn_redundant_casts = true
warn_unused_configs = true
strict_optional = true

[[tool.mypy.overrides]]
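# ptyprocess is vendored third-party code, so it is exempted here just as it is from coverage and pylama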
module = "scrapli.transport.plugins.system.ptyprocess"
ignore_errors = true

[tool.pylama]
linters = "mccabe,pycodestyle,pylint"
skip = ".nox/*,.private/*,build/*,docs/*,private/*,scrapli/transport/plugins/system/ptyprocess.py,site/*,tests/*,venv/*"

[tool.pylama.pycodestyle]
max_line_length = 100

[tool.pylama.pylint]
rcfile = ".pylintrc"

[tool.pydocstyle]
match-dir = "^scrapli/*"
ignore = "D101,D202,D203,D212,D400,D406,D407,D408,D409,D415"
# D101: Missing docstring in public class
# D202: No blank lines allowed after function docstring
# D203: 1 blank line required before class docstring
# D212: Multi-line docstring summary should start at the first line
# D400: First line should end with a period
# D406: Section name should end with a newline
# D407: Missing dashed underline after section
# D408: Section underline should be in the line following the section's name
# D409: Section underline should match the length of its name
# D415: First line should end with a period, question mark, or exclamation point

[tool.setuptools.package-dir]
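# the "scrapli" import package lives in the scrapli/ directory at the repository root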
scrapli = "scrapli"