from __future__ import annotations
import pytest
import pyvista
pytest_plugins = 'pytester'
from typing import TYPE_CHECKING
from tests.test_conftest import PytesterStdoutParser
from tests.test_conftest import RunResultsReport
from tests.test_conftest import results_parser # noqa: F401
if TYPE_CHECKING:
from unittest.mock import MagicMock
from pytest_mock import MockerFixture
@pytest.fixture(autouse=True)
def _load_current_config(
    pytestconfig: pytest.Config,
    pytester: pytest.Pytester,
):
    """Copy the project's real ``pyproject.toml`` and the plotting
    ``conftest.py`` into the pytester's isolated directory for the
    duration of each test, so sub-runs pick up the real configuration.

    Teardown removes both copies.  Nested ``try``/``finally`` guarantees
    cleanup even when the second copy fails during setup or the first
    ``unlink`` raises during teardown (the original code would leak the
    copied files in those cases).
    """
    toml = pytester.makepyprojecttoml(
        (pytestconfig.rootpath / 'pyproject.toml').read_text()
    )
    try:
        conftest = pytester.makeconftest(
            (pytestconfig.rootpath / 'tests/plotting/conftest.py').read_text()
        )
        try:
            yield
        finally:
            conftest.unlink()
    finally:
        toml.unlink()
@pytest.mark.parametrize('support_plotting', [True, False])
def test_skip_plotting_mark(
    support_plotting: bool,
    pytester: pytest.Pytester,
    results_parser: PytesterStdoutParser,  # noqa: F811
    mocker: MockerFixture,
):
    """``@pytest.mark.skip_plotting`` skips a test exactly when plotting
    is unsupported, and leaves unmarked tests untouched.
    """
    test_source = """
    import pytest
    @pytest.mark.skip_plotting
    def test_plotting():
        ...
    def test_no_plotting():
        ...
    """
    # Force the plotting-support probe to the parametrized value.
    supports_mock: MagicMock = mocker.patch.object(
        pyvista.plotting, 'system_supports_plotting'
    )
    supports_mock.return_value = support_plotting

    test_file = pytester.makepyfile(test_source)
    run = pytester.runpytest(test_file)

    # Exactly one test is skipped when plotting is unavailable.
    expected_skipped = 0 if support_plotting else 1
    run.assert_outcomes(
        skipped=expected_skipped,
        passed=2 - expected_skipped,
    )

    # Verify per-test outcomes, not just the counts.
    report = RunResultsReport(results_parser.parse(results=run))
    assert 'test_no_plotting' in report.passed
    expected_bucket = report.passed if support_plotting else report.skipped
    assert 'test_plotting' in expected_bucket