"""Tests for advanced_alchemy.utils.fixtures module."""
import gzip
import json
import tempfile
import zipfile
from collections.abc import Generator
from pathlib import Path
from typing import Any
import pytest
from advanced_alchemy.utils.fixtures import open_fixture, open_fixture_async
@pytest.fixture
def sample_data() -> "list[dict[str, Any]]":
    """Provide a small set of user records shared by the fixture-loading tests."""
    users = [(1, "Alice"), (2, "Bob"), (3, "Charlie")]
    return [
        {"id": uid, "name": name, "email": f"{name.lower()}@example.com"}
        for uid, name in users
    ]
@pytest.fixture
def temp_fixtures_dir(sample_data: "list[dict[str, Any]]") -> "Generator[Path, None, None]":
    """Yield a temporary directory populated with fixtures in every supported format."""
    with tempfile.TemporaryDirectory() as temp_dir:
        root = Path(temp_dir)
        # Single serialized payload reused by every format below.
        pretty = json.dumps(sample_data, indent=2)

        # Plain JSON fixture.
        (root / "users.json").write_text(pretty, encoding="utf-8")

        # Gzip-compressed JSON fixture.
        with gzip.open(root / "users_gz.json.gz", "wt", encoding="utf-8") as fh:
            fh.write(pretty)

        # Zip archive with a single JSON member.
        with zipfile.ZipFile(root / "users_zip.json.zip", "w", zipfile.ZIP_DEFLATED) as archive:
            archive.writestr("users_zip.json", pretty)

        # Zip archive with several members; the loader should pick the matching one.
        with zipfile.ZipFile(root / "users_multi.json.zip", "w", zipfile.ZIP_DEFLATED) as archive:
            archive.writestr("other.json", json.dumps([{"other": "data"}]))
            archive.writestr("users_multi.json", pretty)

        # Zip archive where one member name matches the fixture name exactly.
        with zipfile.ZipFile(root / "users_preferred.json.zip", "w", zipfile.ZIP_DEFLATED) as archive:
            archive.writestr("other.json", json.dumps([{"other": "data"}]))
            archive.writestr("users_preferred.json", pretty)

        # Completely empty zip archive (the loader should reject it).
        with zipfile.ZipFile(root / "empty.json.zip", "w", zipfile.ZIP_DEFLATED):
            pass

        # Zip archive containing no JSON members at all.
        with zipfile.ZipFile(root / "no_json.json.zip", "w", zipfile.ZIP_DEFLATED) as archive:
            archive.writestr("readme.txt", "No JSON files here")

        yield root
class TestOpenFixture:
    """Test cases for the synchronous ``open_fixture`` function."""

    def test_open_plain_json_fixture(self, temp_fixtures_dir: Path, sample_data: "list[dict[str, Any]]") -> None:
        """A plain .json fixture is loaded as-is."""
        assert open_fixture(temp_fixtures_dir, "users") == sample_data

    def test_open_gzipped_fixture(self, temp_fixtures_dir: Path, sample_data: "list[dict[str, Any]]") -> None:
        """A .json.gz fixture is transparently decompressed."""
        assert open_fixture(temp_fixtures_dir, "users_gz") == sample_data

    def test_open_zipped_fixture(self, temp_fixtures_dir: Path, sample_data: "list[dict[str, Any]]") -> None:
        """A .json.zip fixture with a single member is loaded."""
        assert open_fixture(temp_fixtures_dir, "users_zip") == sample_data

    def test_open_zipped_fixture_multiple_files(
        self, temp_fixtures_dir: Path, sample_data: "list[dict[str, Any]]"
    ) -> None:
        """With several zip members, the member matching the fixture name wins."""
        assert open_fixture(temp_fixtures_dir, "users_multi") == sample_data

    def test_open_zipped_fixture_preferred_name(
        self, temp_fixtures_dir: Path, sample_data: "list[dict[str, Any]]"
    ) -> None:
        """The zip member whose name matches the fixture name is preferred."""
        assert open_fixture(temp_fixtures_dir, "users_preferred") == sample_data

    def test_case_insensitive_support(self, temp_fixtures_dir: Path, sample_data: "list[dict[str, Any]]") -> None:
        """Uppercase files are found, outrank lowercase, and lowercase is the fallback."""
        upper_path = temp_fixtures_dir / "TESTCASE.json.gz"
        with gzip.open(upper_path, "wt", encoding="utf-8") as fh:
            json.dump(sample_data, fh)

        # The uppercase variant is discovered for a lowercase lookup.
        assert open_fixture(temp_fixtures_dir, "testcase") == sample_data

        # Add a lowercase sibling holding different content.
        alternate = [{"different": "data"}]
        with gzip.open(temp_fixtures_dir / "testcase.json.gz", "wt", encoding="utf-8") as fh:
            json.dump(alternate, fh)

        # Uppercase still takes priority over lowercase.
        assert open_fixture(temp_fixtures_dir, "testcase") == sample_data

        # Removing the uppercase file falls back to the lowercase variant.
        upper_path.unlink()
        assert open_fixture(temp_fixtures_dir, "testcase") == alternate

    def test_file_format_priority(self, temp_fixtures_dir: Path, sample_data: "list[dict[str, Any]]") -> None:
        """Plain JSON is preferred, then gzip, then zip."""
        plain_path = temp_fixtures_dir / "priority.json"
        gz_path = temp_fixtures_dir / "priority.json.gz"
        zip_path = temp_fixtures_dir / "priority.json.zip"

        # Distinct payload per format so the loaded source is identifiable.
        plain_payload = [{"format": "plain"}]
        gz_payload = [{"format": "gzip"}]
        zip_payload = [{"format": "zip"}]

        with open(plain_path, "w", encoding="utf-8") as fh:
            json.dump(plain_payload, fh)
        with gzip.open(gz_path, "wt", encoding="utf-8") as fh:
            json.dump(gz_payload, fh)
        with zipfile.ZipFile(zip_path, "w", zipfile.ZIP_DEFLATED) as archive:
            archive.writestr("priority.json", json.dumps(zip_payload))

        # All three present: plain JSON wins.
        assert open_fixture(temp_fixtures_dir, "priority") == plain_payload

        # Without the plain file, gzip is next.
        plain_path.unlink()
        assert open_fixture(temp_fixtures_dir, "priority") == gz_payload

        # Without gzip, the zip archive is used.
        gz_path.unlink()
        assert open_fixture(temp_fixtures_dir, "priority") == zip_payload

    def test_fixture_not_found(self, temp_fixtures_dir: Path) -> None:
        """A missing fixture raises FileNotFoundError with a descriptive message."""
        with pytest.raises(FileNotFoundError) as exc_info:
            open_fixture(temp_fixtures_dir, "nonexistent")
        message = str(exc_info.value)
        assert "Could not find the nonexistent fixture" in message
        assert "(tried .json, .json.gz, .json.zip with case variations)" in message

    def test_empty_zip_file(self, temp_fixtures_dir: Path) -> None:
        """An empty zip archive raises ValueError."""
        with pytest.raises(ValueError) as exc_info:
            open_fixture(temp_fixtures_dir, "empty")
        assert "No JSON files found in zip archive" in str(exc_info.value)

    def test_zip_with_no_json_files(self, temp_fixtures_dir: Path) -> None:
        """A zip archive without any JSON member raises ValueError."""
        with pytest.raises(ValueError) as exc_info:
            open_fixture(temp_fixtures_dir, "no_json")
        assert "No JSON files found in zip archive" in str(exc_info.value)

    def test_corrupted_gzip_file(self, temp_fixtures_dir: Path) -> None:
        """A gzip file with invalid contents raises OSError."""
        (temp_fixtures_dir / "corrupted.json.gz").write_bytes(b"not a gzip file")
        with pytest.raises(OSError) as exc_info:
            open_fixture(temp_fixtures_dir, "corrupted")
        assert "Error reading fixture file" in str(exc_info.value)

    def test_corrupted_zip_file(self, temp_fixtures_dir: Path) -> None:
        """A zip file with invalid contents raises OSError."""
        (temp_fixtures_dir / "corrupted_zip.json.zip").write_bytes(b"not a zip file")
        with pytest.raises(OSError) as exc_info:
            open_fixture(temp_fixtures_dir, "corrupted_zip")
        assert "Error reading fixture file" in str(exc_info.value)

    def test_invalid_json_content(self, temp_fixtures_dir: Path) -> None:
        """Malformed JSON content raises whichever exception the decoder emits."""
        (temp_fixtures_dir / "invalid.json").write_text("{ invalid json content", encoding="utf-8")
        with pytest.raises(Exception):  # decode_json will raise an appropriate exception
            open_fixture(temp_fixtures_dir, "invalid")
class TestOpenFixtureAsync:
    """Test cases for the asynchronous ``open_fixture_async`` function."""

    @pytest.mark.asyncio
    async def test_open_plain_json_fixture_async(
        self, temp_fixtures_dir: Path, sample_data: "list[dict[str, Any]]"
    ) -> None:
        """A plain .json fixture loads asynchronously."""
        assert await open_fixture_async(temp_fixtures_dir, "users") == sample_data

    @pytest.mark.asyncio
    async def test_open_gzipped_fixture_async(
        self, temp_fixtures_dir: Path, sample_data: "list[dict[str, Any]]"
    ) -> None:
        """A .json.gz fixture loads asynchronously."""
        assert await open_fixture_async(temp_fixtures_dir, "users_gz") == sample_data

    @pytest.mark.asyncio
    async def test_open_zipped_fixture_async(
        self, temp_fixtures_dir: Path, sample_data: "list[dict[str, Any]]"
    ) -> None:
        """A .json.zip fixture loads asynchronously."""
        assert await open_fixture_async(temp_fixtures_dir, "users_zip") == sample_data

    @pytest.mark.asyncio
    async def test_open_zipped_fixture_multiple_files_async(
        self, temp_fixtures_dir: Path, sample_data: "list[dict[str, Any]]"
    ) -> None:
        """Multi-member zip fixtures resolve to the matching member asynchronously."""
        assert await open_fixture_async(temp_fixtures_dir, "users_multi") == sample_data

    @pytest.mark.asyncio
    async def test_case_insensitive_support_async(
        self, temp_fixtures_dir: Path, sample_data: "list[dict[str, Any]]"
    ) -> None:
        """Uppercase fixture files are discovered asynchronously."""
        with gzip.open(temp_fixtures_dir / "ASYNCCASE.json.gz", "wt", encoding="utf-8") as fh:
            json.dump(sample_data, fh)
        assert await open_fixture_async(temp_fixtures_dir, "asynccase") == sample_data

    @pytest.mark.asyncio
    async def test_file_format_priority_async(self, temp_fixtures_dir: Path) -> None:
        """Plain JSON is preferred over compressed formats in the async version."""
        # Distinct payload per format so the loaded source is identifiable.
        plain_payload = [{"format": "plain"}]
        gz_payload = [{"format": "gzip"}]
        zip_payload = [{"format": "zip"}]

        with open(temp_fixtures_dir / "priority_async.json", "w", encoding="utf-8") as fh:
            json.dump(plain_payload, fh)
        with gzip.open(temp_fixtures_dir / "priority_async.json.gz", "wt", encoding="utf-8") as fh:
            json.dump(gz_payload, fh)
        with zipfile.ZipFile(temp_fixtures_dir / "priority_async.json.zip", "w", zipfile.ZIP_DEFLATED) as archive:
            archive.writestr("priority_async.json", json.dumps(zip_payload))

        # With all three present, plain JSON wins.
        assert await open_fixture_async(temp_fixtures_dir, "priority_async") == plain_payload

    @pytest.mark.asyncio
    async def test_fixture_not_found_async(self, temp_fixtures_dir: Path) -> None:
        """A missing fixture raises FileNotFoundError in the async version."""
        with pytest.raises(FileNotFoundError) as exc_info:
            await open_fixture_async(temp_fixtures_dir, "nonexistent")
        message = str(exc_info.value)
        assert "Could not find the nonexistent fixture" in message
        assert "(tried .json, .json.gz, .json.zip with case variations)" in message

    @pytest.mark.asyncio
    async def test_empty_zip_file_async(self, temp_fixtures_dir: Path) -> None:
        """An empty zip archive raises ValueError in the async version."""
        with pytest.raises(ValueError) as exc_info:
            await open_fixture_async(temp_fixtures_dir, "empty")
        assert "No JSON files found in zip archive" in str(exc_info.value)

    @pytest.mark.asyncio
    async def test_corrupted_gzip_file_async(self, temp_fixtures_dir: Path) -> None:
        """A corrupted gzip file raises OSError in the async version."""
        (temp_fixtures_dir / "corrupted_async.json.gz").write_bytes(b"not a gzip file")
        with pytest.raises(OSError) as exc_info:
            await open_fixture_async(temp_fixtures_dir, "corrupted_async")
        assert "Error reading fixture file" in str(exc_info.value)

    @pytest.mark.skip(reason="Import mocking is complex and anyio is required by the project")
    @pytest.mark.asyncio
    async def test_missing_anyio_dependency(self, temp_fixtures_dir: Path) -> None:
        """Document the expected MissingDependencyError when anyio is unavailable."""
        # Placeholder only: anyio is a required dependency of this project, so the
        # missing-dependency path cannot be exercised without import mocking.
class TestIntegration:
    """Integration tests ensuring compatibility with existing usage patterns."""

    def test_backward_compatibility_sync(self, temp_fixtures_dir: Path, sample_data: "list[dict[str, Any]]") -> None:
        """The synchronous loader keeps working as existing callers expect."""
        # Mirrors the usage pattern in test_sqlquery_service.py.
        loaded = open_fixture(temp_fixtures_dir, "users")
        assert loaded == sample_data
        assert len(loaded) == 3
        assert loaded[0]["name"] == "Alice"

    @pytest.mark.asyncio
    async def test_backward_compatibility_async(
        self, temp_fixtures_dir: Path, sample_data: "list[dict[str, Any]]"
    ) -> None:
        """The asynchronous loader keeps working as existing callers expect."""
        # Mirrors the usage pattern in test_sqlquery_service.py.
        loaded = await open_fixture_async(temp_fixtures_dir, "users")
        assert loaded == sample_data
        assert len(loaded) == 3
        assert loaded[0]["name"] == "Alice"

    def test_compression_efficiency(self, temp_fixtures_dir: Path, sample_data: "list[dict[str, Any]]") -> None:
        """Compressed fixtures are smaller than plain JSON yet decode identically."""
        big = sample_data * 100  # enough repetition for meaningful compression
        payload = json.dumps(big, indent=2)

        plain_path = temp_fixtures_dir / "large.json"
        plain_path.write_text(payload, encoding="utf-8")

        gz_path = temp_fixtures_dir / "large.json.gz"
        with gzip.open(gz_path, "wt", encoding="utf-8") as fh:
            fh.write(payload)

        zip_path = temp_fixtures_dir / "large.json.zip"
        with zipfile.ZipFile(zip_path, "w", zipfile.ZIP_DEFLATED) as archive:
            archive.writestr("large.json", payload)

        # Both compressed formats must beat the plain file on size.
        plain_size = plain_path.stat().st_size
        assert gz_path.stat().st_size < plain_size
        assert zip_path.stat().st_size < plain_size

        # Each format must decode to the same data; unlink to force fallback order.
        from_plain = open_fixture(temp_fixtures_dir, "large")
        plain_path.unlink()
        from_gz = open_fixture(temp_fixtures_dir, "large")
        gz_path.unlink()
        from_zip = open_fixture(temp_fixtures_dir, "large")
        assert from_plain == from_gz == from_zip == big