File: data.py

package info (click to toggle)
python-pyflume 0.8.7-2
  • links: PTS, VCS
  • area: main
  • in suites: forky, sid, trixie
  • size: 332 kB
  • sloc: python: 857; makefile: 6
file content (218 lines) | stat: -rw-r--r-- 7,361 bytes parent folder | download
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
"""Retrieve data from Flume API."""

from datetime import datetime, timedelta, timezone

from ratelimit import limits, sleep_and_retry
from requests import Session

from .constants import (  # noqa: WPS300
    API_LIMIT,
    API_QUERY_URL,
    CONST_OPERATION,
    CONST_UNIT_OF_MEASUREMENT,
    DEFAULT_TIMEOUT,
)
from .utils import (  # noqa: WPS300
    configure_logger,
    flume_response_error,
    format_start_month,
    format_start_today,
    format_start_week,
    format_time,
)

try:
    from zoneinfo import ZoneInfo  # noqa: WPS433
except ImportError:  # Python < 3.9
    from backports.zoneinfo import ZoneInfo  # noqa: WPS433,WPS440

# Configure logging
LOGGER = configure_logger(__name__)


class FlumeData:
    """Get the latest data and update the states."""

    def __init__(  # noqa: WPS211
        self,
        flume_auth,
        device_id,
        device_tz,
        scan_interval,
        update_on_init=True,
        http_session=None,
        timeout=DEFAULT_TIMEOUT,
        query_payload=None,
    ):
        """Initialize the data object.

        Args:
            flume_auth: Authentication object.
            device_id: flume device id.
            device_tz: timezone of device
            scan_interval: duration of scan, ex: 60 minutes.
            update_on_init: update on initialization.
            http_session: Requests Session()
            timeout: Requests timeout for throttling.
            query_payload: Specific query_payload to request for device.
        """
        self._timeout = timeout
        self._flume_auth = flume_auth
        self._scan_interval = scan_interval
        self.device_id = device_id
        self.device_tz = device_tz
        self.values = {}  # noqa: WPS110
        if query_payload is None:
            self.query_payload = self._generate_api_query_payload(
                self._scan_interval,
                device_tz,
            )
        else:
            # NOTE(review): a caller-supplied payload is only used until the
            # first refresh -- update_force() regenerates the payload and
            # discards this value. Confirm this is the intended contract.
            self.query_payload = query_payload
        if http_session is None:
            self._http_session = Session()
        else:
            self._http_session = http_session
        # Remember the request ids so responses can be mapped back to queries.
        self._query_keys = [
            query["request_id"] for query in self.query_payload["queries"]
        ]
        if update_on_init:
            self.update()

    @sleep_and_retry
    @limits(calls=2, period=API_LIMIT)
    def update(self):
        """
        Return updated value for session.

        Rate-limited to 2 calls per API_LIMIT period; sleeps and retries
        when the limit is hit instead of raising.

        Returns:
            Returns status of update

        """
        return self.update_force()

    def update_force(self):
        """Return updated value for session without auto retry or limits."""
        # Regenerate the payload so the since/until windows track "now".
        self.query_payload = self._generate_api_query_payload(
            self._scan_interval,
            self.device_tz,
        )

        url = API_QUERY_URL.format(
            user_id=self._flume_auth.user_id,
            device_id=self.device_id,
        )
        response = self._http_session.post(
            url,
            json=self.query_payload,
            headers=self._flume_auth.authorization_header,
            timeout=self._timeout,
        )

        LOGGER.debug("Update URL: %s", url)  # noqa: WPS323
        LOGGER.debug("Update query_payload: %s", self.query_payload)  # noqa: WPS323
        LOGGER.debug("Update Response: %s", response.text)  # noqa: WPS323

        # Check for response errors.
        flume_response_error(
            "Can't update flume data for user id {0}".format(self._flume_auth.user_id),
            response,
        )

        responses = response.json()["data"][0]

        # Map each request id to its single bucket's value; a query that did
        # not return exactly one bucket (e.g. no data yet) maps to None.
        self.values = {  # noqa: WPS110
            key: responses[key][0]["value"] if len(responses[key]) == 1 else None
            for key in self._query_keys
        }

    def _generate_api_query_payload(self, scan_interval, device_tz):
        """Generate API Query payload to support getting data from Flume API.

        Args:
            scan_interval (_type_): Interval to scan.
            device_tz (_type_): Time Zone of Flume device.

        Returns:
            JSON: API Query to retrieve API details.
        """
        # The Flume API expects timestamps in the device's local timezone.
        datetime_localtime = datetime.now(timezone.utc).astimezone(ZoneInfo(device_tz))

        queries = [
            {
                "request_id": "current_interval",
                "bucket": "MIN",
                "since_datetime": format_time(
                    (datetime_localtime - scan_interval).replace(second=0),
                ),
                "until_datetime": format_time(datetime_localtime.replace(second=0)),
                "operation": CONST_OPERATION,
                "units": CONST_UNIT_OF_MEASUREMENT,
            },
            {
                "request_id": "today",
                "bucket": "DAY",
                "since_datetime": format_start_today(datetime_localtime),
                "until_datetime": format_time(datetime_localtime),
                "operation": CONST_OPERATION,
                "units": CONST_UNIT_OF_MEASUREMENT,
            },
            {
                "request_id": "week_to_date",
                "bucket": "DAY",
                "since_datetime": format_start_week(datetime_localtime),
                "until_datetime": format_time(datetime_localtime),
                "operation": CONST_OPERATION,
                "units": CONST_UNIT_OF_MEASUREMENT,
            },
            {
                "request_id": "month_to_date",
                "bucket": "MON",
                "since_datetime": format_start_month(datetime_localtime),
                "until_datetime": format_time(datetime_localtime),
                # Was missing before; every other query specifies the
                # aggregation operation, so month_to_date should too.
                "operation": CONST_OPERATION,
                "units": CONST_UNIT_OF_MEASUREMENT,
            },
            {
                "request_id": "last_60_min",
                "bucket": "MIN",
                "since_datetime": format_time(
                    datetime_localtime - timedelta(minutes=60),
                ),
                "until_datetime": format_time(datetime_localtime),
                "operation": CONST_OPERATION,
                "units": CONST_UNIT_OF_MEASUREMENT,
            },
            {
                "request_id": "last_24_hrs",
                "bucket": "HR",
                "since_datetime": format_time(datetime_localtime - timedelta(hours=24)),
                "until_datetime": format_time(datetime_localtime),
                "operation": CONST_OPERATION,
                "units": CONST_UNIT_OF_MEASUREMENT,
            },
            {
                "request_id": "last_30_days",
                "bucket": "DAY",
                "since_datetime": format_time(
                    datetime_localtime - timedelta(days=30),  # noqa: WPS432
                ),
                "until_datetime": format_time(datetime_localtime),
                "operation": CONST_OPERATION,
                "units": CONST_UNIT_OF_MEASUREMENT,
            },
        ]
        return {"queries": queries}