# test_benchmark_create.py — from Debian package ormar 0.20.2-1 (benchmarks suite)
import random
import string

import pytest

from benchmarks.conftest import Author, Book, Publisher

pytestmark = pytest.mark.asyncio


@pytest.mark.parametrize("num_models", [10, 20, 40])
async def test_creating_models_individually(aio_benchmark, num_models: int):
    """Benchmark creating ``num_models`` authors, one ``create()`` call each."""

    @aio_benchmark
    async def create(num_models: int):
        # One INSERT per author; the comprehension awaits sequentially, so the
        # RNG draw order and query order match a plain for-loop exactly.
        return [
            await Author.objects.create(
                name="".join(random.sample(string.ascii_letters, 5)),
                score=int(random.random() * 100),
            )
            for _ in range(num_models)
        ]

    created_authors = create(num_models)
    # Every persisted row must have been assigned a primary key.
    assert all(a.id is not None for a in created_authors)


@pytest.mark.parametrize("num_models", [10, 20, 40])
async def test_creating_individually_with_related_models(
    aio_benchmark, num_models: int, author: Author, publisher: Publisher
):
    """Benchmark per-row creation of books linked to pre-existing FK targets."""

    @aio_benchmark
    async def create_with_related_models(
        author: Author, publisher: Publisher, num_models: int
    ):
        # Each book reuses the same author/publisher fixtures; sequential
        # awaits keep the insert and RNG order identical to a plain loop.
        return [
            await Book.objects.create(
                author=author,
                publisher=publisher,
                title="".join(random.sample(string.ascii_letters, 5)),
                year=random.randint(0, 2000),
            )
            for _ in range(num_models)
        ]

    created_books = create_with_related_models(
        author=author, publisher=publisher, num_models=num_models
    )
    # Every persisted row must have been assigned a primary key.
    assert all(b.id is not None for b in created_books)


@pytest.mark.parametrize("num_models", [10, 20, 40])
async def test_get_or_create_when_create(aio_benchmark, num_models: int):
    """Benchmark ``get_or_create`` on keys that never pre-exist (create path)."""

    @aio_benchmark
    async def get_or_create(num_models: int):
        results = []
        for _ in range(num_models):
            instance, was_created = await Author.objects.get_or_create(
                name="".join(random.sample(string.ascii_letters, 5)),
                score=int(random.random() * 100),
            )
            # Randomized names make a pre-existing match effectively
            # impossible, so each call should take the "create" branch.
            assert was_created
            results.append(instance)
        return results

    created_authors = get_or_create(num_models)
    # Every persisted row must have been assigned a primary key.
    assert all(a.id is not None for a in created_authors)


@pytest.mark.parametrize("num_models", [10, 20, 40])
async def test_update_or_create_when_create(aio_benchmark, num_models: int):
    """Benchmark ``update_or_create`` hitting the create path every call."""

    @aio_benchmark
    async def update_or_create(num_models: int):
        # NOTE: unlike get_or_create, ormar's update_or_create returns the
        # model instance directly (no `created` flag). Sequential awaits in
        # the comprehension preserve the original loop's RNG/query ordering.
        return [
            await Author.objects.update_or_create(
                name="".join(random.sample(string.ascii_letters, 5)),
                score=int(random.random() * 100),
            )
            for _ in range(num_models)
        ]

    created_authors = update_or_create(num_models)
    # Every persisted row must have been assigned a primary key.
    assert all(a.id is not None for a in created_authors)