File: models.py

Package: python-moto 5.1.18-3
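"""Models for moto's Amazon Forecast mock.

Defines the DatasetGroup resource and the ForecastBackend that stores dataset
groups in memory, exposed per account and region through the
forecast_backends BackendDict at the bottom of this module.
"""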
import re
from datetime import datetime
from typing import Optional

from moto.core.base_backend import BackendDict, BaseBackend
from moto.core.utils import iso_8601_datetime_without_milliseconds
from moto.utilities.utils import get_partition

from .exceptions import (
    InvalidInputException,
    ResourceAlreadyExistsException,
    ResourceNotFoundException,
    ValidationException,
)


class DatasetGroup:
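    """In-memory model of an Amazon Forecast dataset group.

    The resource ARN has the form
    arn:<partition>:forecast:<region>:<account-id>:dataset-group/<name>.
    The name, name length, and domain are validated on construction,
    mirroring the validation errors returned by the Forecast
    CreateDatasetGroup API.
    """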
    accepted_dataset_group_name_format = re.compile(r"^[a-zA-Z][a-zA-Z0-9_]*")
    accepted_dataset_group_arn_format = re.compile(r"^[a-zA-Z0-9\-\_\.\/\:]+$")
    accepted_dataset_types = [
        "INVENTORY_PLANNING",
        "METRICS",
        "RETAIL",
        "EC2_CAPACITY",
        "CUSTOM",
        "WEB_TRAFFIC",
        "WORK_FORCE",
    ]

    def __init__(
        self,
        account_id: str,
        region_name: str,
        dataset_arns: list[str],
        dataset_group_name: str,
        domain: str,
        tags: Optional[list[dict[str, str]]] = None,
    ):
        self.creation_date = iso_8601_datetime_without_milliseconds(datetime.now())
        self.modified_date = self.creation_date

        self.arn = f"arn:{get_partition(region_name)}:forecast:{region_name}:{account_id}:dataset-group/{dataset_group_name}"
        self.dataset_arns = dataset_arns if dataset_arns else []
        self.dataset_group_name = dataset_group_name
        self.domain = domain
        self.tags = tags
        self._validate()

    def update(self, dataset_arns: list[str]) -> None:
        self.dataset_arns = dataset_arns
        self.modified_date = iso_8601_datetime_without_milliseconds(datetime.now())

    def _validate(self) -> None:
        errors = []

        errors.extend(self._validate_dataset_group_name())
        errors.extend(self._validate_dataset_group_name_len())
        errors.extend(self._validate_dataset_group_domain())

        if errors:
            err_count = len(errors)
            plural = "s" if err_count > 1 else ""
            message = (
                f"{err_count} validation error{plural} detected: " + "; ".join(errors)
            )
            raise ValidationException(message)

    def _validate_dataset_group_name(self) -> list[str]:
        errors = []
        if not re.match(
            self.accepted_dataset_group_name_format, self.dataset_group_name
        ):
            errors.append(
                "Value '"
                + self.dataset_group_name
                + "' at 'datasetGroupName' failed to satisfy constraint: Member must satisfy regular expression pattern "
                + self.accepted_dataset_group_name_format.pattern
            )
        return errors

    def _validate_dataset_group_name_len(self) -> list[str]:
        errors = []
        if len(self.dataset_group_name) >= 64:
            errors.append(
                "Value '"
                + self.dataset_group_name
                + "' at 'datasetGroupName' failed to satisfy constraint: Member must have length less than or equal to 63"
            )
        return errors

    def _validate_dataset_group_domain(self) -> list[str]:
        errors = []
        if self.domain not in self.accepted_dataset_types:
            errors.append(
                "Value '"
                + self.domain
                + "' at 'domain' failed to satisfy constraint: Member must satisfy enum value set "
                + str(self.accepted_dataset_types)
            )
        return errors


class ForecastBackend(BaseBackend):
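    """In-memory backend for the dataset-group operations of the Amazon Forecast API."""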
    def __init__(self, region_name: str, account_id: str):
        super().__init__(region_name, account_id)
        self.dataset_groups: dict[str, DatasetGroup] = {}
        self.datasets: dict[str, str] = {}

    def create_dataset_group(
        self,
        dataset_group_name: str,
        domain: str,
        dataset_arns: list[str],
        tags: list[dict[str, str]],
    ) -> DatasetGroup:
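        """Create and register a DatasetGroup.

        Any referenced dataset ARN must already exist in the backend
        (InvalidInputException otherwise), and re-using an existing dataset
        group ARN raises ResourceAlreadyExistsException.
        """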
        dataset_group = DatasetGroup(
            account_id=self.account_id,
            region_name=self.region_name,
            dataset_group_name=dataset_group_name,
            domain=domain,
            dataset_arns=dataset_arns,
            tags=tags,
        )

        if dataset_arns:
            for dataset_arn in dataset_arns:
                if dataset_arn not in self.datasets:
                    raise InvalidInputException(
                        "Dataset arns: [" + dataset_arn + "] are not found"
                    )

        if self.dataset_groups.get(dataset_group.arn):
            raise ResourceAlreadyExistsException(
                "A dataset group already exists with the arn: " + dataset_group.arn
            )

        self.dataset_groups[dataset_group.arn] = dataset_group
        return dataset_group

    def describe_dataset_group(self, dataset_group_arn: str) -> DatasetGroup:
        try:
            return self.dataset_groups[dataset_group_arn]
        except KeyError:
            raise ResourceNotFoundException("No resource found " + dataset_group_arn)

    def delete_dataset_group(self, dataset_group_arn: str) -> None:
        try:
            del self.dataset_groups[dataset_group_arn]
        except KeyError:
            raise ResourceNotFoundException("No resource found " + dataset_group_arn)

    def update_dataset_group(
        self, dataset_group_arn: str, dataset_arns: list[str]
    ) -> None:
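        """Replace the dataset ARNs of an existing dataset group.

        The supplied ARNs are validated against the group's current dataset
        ARNs before replacement; unknown ARNs raise InvalidInputException and
        a missing group raises ResourceNotFoundException.
        """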
        try:
            dsg = self.dataset_groups[dataset_group_arn]
        except KeyError:
            raise ResourceNotFoundException("No resource found " + dataset_group_arn)

        for dataset_arn in dataset_arns:
            if dataset_arn not in dsg.dataset_arns:
                raise InvalidInputException(
                    "Dataset arns: [" + dataset_arn + "] are not found"
                )

        dsg.update(dataset_arns)

    def list_dataset_groups(self) -> list[DatasetGroup]:
        return list(self.dataset_groups.values())


forecast_backends = BackendDict(ForecastBackend, "forecast")
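

# Illustrative usage sketch (not part of the upstream moto module): this
# exercises ForecastBackend directly with example values; in real use the
# backend is reached through moto's mocking layer and the forecast_backends
# BackendDict above. Direct instantiation is assumed to work here because
# ForecastBackend only needs a region name and account id.
if __name__ == "__main__":
    backend = ForecastBackend(region_name="us-east-1", account_id="123456789012")

    # Create a dataset group with no datasets attached yet (an empty
    # dataset_arns list skips the dataset-existence check).
    group = backend.create_dataset_group(
        dataset_group_name="my_dataset_group",
        domain="CUSTOM",
        dataset_arns=[],
        tags=[],
    )
    print(group.arn)
    # -> arn:aws:forecast:us-east-1:123456789012:dataset-group/my_dataset_group

    # Describe and list read back from the in-memory store.
    assert backend.describe_dataset_group(group.arn) is group
    assert backend.list_dataset_groups() == [group]

    # Deleting removes the group; a later describe_dataset_group on the same
    # ARN would raise ResourceNotFoundException.
    backend.delete_dataset_group(group.arn)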