# -*- coding: UTF-8 -*-
"""
Invoke test tasks.
"""
from __future__ import print_function
import os.path
import sys
from invoke import task, Collection
# -- TASK-LIBRARY:
from invoke_cleanup import cleanup_tasks, cleanup_dirs, cleanup_files
# ---------------------------------------------------------------------------
# CONSTANTS:
# ---------------------------------------------------------------------------
# -- FEATURE-TOGGLE: Enable/disable the behave-related tasks and config.
USE_BEHAVE = False
# ---------------------------------------------------------------------------
# TASKS
# ---------------------------------------------------------------------------
@task(name="all", help={
    "args": "Command line args for test run.",
})
def test_all(ctx, args="", options=""):
    """Run all tests (default).

    Splits ``args`` into pytest/behave scopes and dispatches each runner
    that either has matching args or runs unscoped (no args given).
    """
    pytest_selection = select_by_prefix(args, ctx.pytest.scopes)
    if not args or pytest_selection:
        pytest(ctx, pytest_selection, options=options)
    if USE_BEHAVE:
        behave_selection = select_by_prefix(args, ctx.behave_test.scopes)
        if not args or behave_selection:
            behave(ctx, behave_selection, options=options)
@task
def clean(ctx, dry_run=False):
    """Cleanup (temporary) test artifacts."""
    # -- Directories/files to remove come from the "test.clean" config section.
    cleanup_dirs(ctx.test.clean.directories or [], dry_run=dry_run)
    cleanup_files(ctx.test.clean.files or [], dry_run=dry_run)
@task(name="unit")
def unittest(ctx, args="", options=""):
    """Run unit tests (alias that delegates to the pytest task)."""
    pytest(ctx, args=args, options=options)
@task
def pytest(ctx, args="", options=""):
    """Run unit tests with pytest.

    Empty args/options fall back to the "pytest" config section.
    """
    final_args = args or ctx.pytest.args
    final_options = options or ctx.pytest.options
    command = "pytest {options} {args}".format(
        options=final_options, args=final_args)
    ctx.run(command)
@task(help={
    "args": "Command line args for behave",
    "format": "Formatter to use (progress, pretty, ...)",
})
def behave(ctx, args="", format="", options=""):
    """Run behave tests.

    Args are grouped by directory scope; one behave run per group.
    """
    use_format = format or ctx.behave_test.format
    use_options = options or ctx.behave_test.options
    use_args = args or ctx.behave_test.args
    # -- Prefer a local "bin/behave" script; otherwise use the behave module.
    if os.path.exists("bin/behave"):
        behave_cmd = "{python} bin/behave".format(python=sys.executable)
    else:
        behave_cmd = "{python} -m behave".format(python=sys.executable)
    for scoped_args in grouped_by_prefix(use_args, ctx.behave_test.scopes):
        command = "{behave} -f {format} {options} {args}".format(
            behave=behave_cmd, format=use_format,
            options=use_options, args=scoped_args)
        ctx.run(command)
@task(help={
    "args": "Tests to run (empty: all)",
    "report": "Coverage report format to use (report, html, xml)",
})
def coverage(ctx, args="", report="report", append=False):
    """Determine test coverage (run pytest, behave).

    :param args:   Optional test args, scoped by pytest/behave prefixes.
    :param report: Primary coverage report format (report, html, xml).
    :param append: If true, append to existing coverage data.
    """
    append = append or ctx.coverage.append
    # -- BUGFIX: Copy the configured list; insert() below would otherwise
    #    mutate the shared config object in ctx across task invocations.
    report_formats = list(ctx.coverage.report_formats or [])
    if report not in report_formats:
        report_formats.insert(0, report)
    opts = []
    if append:
        opts.append("--append")

    # -- SELECT TESTS: Split args into pytest/behave scopes.
    pytest_args = select_by_prefix(args, ctx.pytest.scopes)
    behave_args = select_by_prefix(args, ctx.behave_test.scopes)
    pytest_should_run = not args or (args and pytest_args)
    # -- BUGFIX: Parenthesize explicitly; "and" binds tighter than "or",
    #    so the unparenthesized form did not gate the "not args" case on
    #    USE_BEHAVE (previously masked by a redundant guard below).
    behave_should_run = (not args or (args and behave_args)) and USE_BEHAVE
    if not args:
        behave_args = ctx.behave_test.args or "features"
    if isinstance(pytest_args, list):
        pytest_args = " ".join(pytest_args)
    if isinstance(behave_args, list):
        behave_args = " ".join(behave_args)

    # -- RUN TESTS WITH COVERAGE:
    if pytest_should_run:
        ctx.run("coverage run {options} -m pytest {args}".format(
            args=pytest_args, options=" ".join(opts)))
    if behave_should_run:
        behave_options = ctx.behave_test.coverage_options or ""
        # -- Enable coverage measurement in behave subprocesses.
        os.environ["COVERAGE_PROCESS_START"] = os.path.abspath(".coveragerc")
        try:
            behave(ctx, args=behave_args, options=behave_options)
        finally:
            # -- ROBUSTNESS: Always unset, even if the behave run fails,
            #    so the env var does not leak into later commands.
            del os.environ["COVERAGE_PROCESS_START"]

    # -- POST-PROCESSING:
    ctx.run("coverage combine")
    for report_format in report_formats:
        ctx.run("coverage {report_format}".format(report_format=report_format))
# ---------------------------------------------------------------------------
# UTILITIES:
# ---------------------------------------------------------------------------
def select_prefix_for(arg, prefixes):
    """Return the first prefix that ``arg`` starts with.

    Falls back to the arg's directory part when no prefix matches.
    """
    matched = next((p for p in prefixes if arg.startswith(p)), None)
    if matched is not None:
        return matched
    return os.path.dirname(arg)
def select_by_prefix(args, prefixes):
    """Return (as one string) the args that fall into a known prefix scope."""
    chosen = []
    for candidate in args.strip().split():
        assert not candidate.startswith("-"), "REQUIRE: arg, not options"
        if select_prefix_for(candidate, prefixes):
            chosen.append(candidate)
    return " ".join(chosen)
def grouped_by_prefix(args, prefixes):
    """Group behave args by (directory) scope into multiple test-runs.

    Yields one space-joined arg group per run of consecutive args that
    share the same scope.
    """
    pending = []
    last_scope = None
    for token in args.strip().split():
        assert not token.startswith("-"), "REQUIRE: arg, not options"
        token_scope = select_prefix_for(token, prefixes)
        if token_scope != last_scope and pending:
            # -- SCOPE CHANGED: Emit the finished group.
            yield " ".join(pending)
            pending = []
        last_scope = token_scope
        pending.append(token)
    if pending:
        yield " ".join(pending)
# ---------------------------------------------------------------------------
# TASK MANAGEMENT / CONFIGURATION
# ---------------------------------------------------------------------------
# -- NAMESPACE: Register tasks; "test all" is the default task of this module.
namespace = Collection(clean, unittest, pytest, coverage)
namespace.add_task(test_all, default=True)
if USE_BEHAVE:
    namespace.add_task(behave)
# -- DEFAULT CONFIG: May be overridden via config file (e.g. "invoke.yaml").
namespace.configure({
    "test": {
        "clean": {
            "directories": [
                ".cache", "assets",  # -- TEST RUNS
                # -- BEHAVE-SPECIFIC:
                "__WORKDIR__", "reports", "test_results",
            ],
            "files": [
                ".coverage", ".coverage.*",
                # -- BEHAVE-SPECIFIC:
                "report.html",
                "rerun*.txt", "rerun*.featureset", "testrun*.json",
            ],
        },
    },
    "pytest": {
        "scopes": ["tests"],
        "args": "",
        "options": "",  # -- NOTE: Override in configfile "invoke.yaml"
    },
    # "behave_test": behave.namespace._configuration["behave_test"],
    "behave_test": {
        "scopes": ["features"],
        "args": "features",
        "format": "progress",
        "options": "",  # -- NOTE: Override in configfile "invoke.yaml"
        "coverage_options": "",
    },
    "coverage": {
        "append": False,
        "report_formats": ["report", "html"],
    },
})
# -- ADD CLEANUP TASK: Hook "clean" into the shared cleanup task collection.
cleanup_tasks.add_task(clean, "clean_test")
cleanup_tasks.configure(namespace.configuration())