File: timeoutlib.py

#!/usr/bin/python3

# This file is part of Cockpit.
#
# Copyright (C) 2016 Red Hat, Inc.
#
# Cockpit is free software; you can redistribute it and/or modify it
# under the terms of the GNU Lesser General Public License as published by
# the Free Software Foundation; either version 2.1 of the License, or
# (at your option) any later version.
#
# Cockpit is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with Cockpit; If not, see <http://www.gnu.org/licenses/>.
# Author: Miloš Prchlík (https://gist.github.com/happz/d50897af8a2e90cce8c7)

import signal
import time


class TimeoutError(RuntimeError):
    pass


class Timeout(object):
    def __init__(self, retry, timeout):
        self.retry = retry
        self.timeout = timeout

    def __enter__(self):
        def timeout_handler(signum, frame):
            if __debug__:
                self.retry.timeouts_triggered += 1

            raise TimeoutError("%is timeout reached" % self.timeout)

        self.orig_sighand = signal.signal(signal.SIGALRM, timeout_handler)
        signal.alarm(self.timeout)

    def __exit__(self, type, value, traceback):
        signal.alarm(0)
        signal.signal(signal.SIGALRM, self.orig_sighand)


class NOPTimeout(object):
    def __init__(self, *args, **kwargs):
        pass

    def __enter__(self):
        pass

    def __exit__(self, *args, **kwargs):
        pass


class Retry(object):
    def __init__(self, attempts=1, timeout=None, exceptions=(), error=None, inverse=False, delay=None):
        """
        Try to run things ATTEMPTS times, at max, each attempt must not exceed TIMEOUT seconds.
        Restart only when one of EXCEPTIONS is raised, all other exceptions will just bubble up.
        When the maximal number of attempts is reached, raise ERROR. Wait DELAY seconds between
        attempts.
        When INVERSE is True, successful return of wrapped code is considered as a failure.
        """

        self.attempts = attempts
        self.timeout = timeout
        self.exceptions = exceptions
        self.error = error or Exception('Too many retries!')
        self.inverse = inverse
        self.timeout_wrapper = Timeout if timeout is not None else NOPTimeout
        self.delay = delay if delay is not None else timeout

        # some accounting, for testing purposes
        if __debug__:
            self.failed_attempts = 0
            self.timeouts_triggered = 0

    def handle_failure(self, start_time):
        if __debug__:
            self.failed_attempts += 1

        self.attempts -= 1
        if self.attempts == 0:
            raise self.error

        # Before the next iteration, sleep for whatever is left of $delay when
        # measured from $start_time.  Since it makes no sense to feed time.sleep()
        # a negative delay, return None in that case.

        if self.delay is None:
            return None

        delay = self.delay - (time.time() - start_time)
        return delay if delay > 0 else None

    def __call__(self, fn):
        def __wrap(*args, **kwargs):
            # This is not an endless loop.  It will be broken by
            # 1) the first "successful" return of fn() - taking self.inverse into account, of course - or
            # 2) decrementing self.attempts to zero, or
            # 3) an unexpected exception raised by fn().

            output = None
            delay = None  # no delay yet

            while True:
                if delay is not None:
                    time.sleep(delay)

                with self.timeout_wrapper(self, self.timeout):
                    start_time = time.time()

                    try:
                        output = fn(*args, **kwargs)
                        if not self.inverse:
                            return output

                    except (self.exceptions + (TimeoutError,)):
                        if self.inverse:
                            return True

                        # Handle the exceptions we are expected to catch by recording a
                        # failed attempt and checking the number of remaining attempts.
                        delay = self.handle_failure(start_time)
                        continue

                    except Exception:
                        # Handle all other exceptions by recording a failed attempt and
                        # re-raising the exception, effectively killing the loop.
                        if __debug__:
                            self.failed_attempts += 1
                        raise

                delay = self.handle_failure(start_time)

        return __wrap


def wait(func, msg=None, delay=1, tries=60):
    """
    Wait for FUNC to return something truthy, and return that.

    FUNC is called repeatedly until it returns a true value or until a
    timeout occurs.  In the latter case, an exception is raised that
    describes the situation.  The exception is either the last one
    raised by FUNC, or a TimeoutError carrying MSG or a default message.

    Arguments:
      func: The function to call.
      msg: An error message to use when the timeout occurs.  Defaults
        to a generic message.
      delay: How long to wait between calls to FUNC, in seconds.
        Defaults to 1.
      tries: How many times to call FUNC.  Defaults to 60.

    Raises:
      TimeoutError: When a timeout occurs.
    """

    t = 0
    while t < tries:
        try:
            val = func()
            if val:
                return val
        except Exception:
            if t == tries - 1:
                raise
        t += 1
        time.sleep(delay)
    raise TimeoutError(msg or "Condition did not become true.")


if __name__ == '__main__':
    class IFailedError(Exception):
        pass

    white_horse = []

    # Simple "try so many times, and die" case
    @Retry(attempts=5, exceptions=(IFailedError,), error=IFailedError('Too many retries!'))
    def do_something1(a, b, c, d=79):
        white_horse.append(d)
        raise IFailedError()

    try:
        do_something1(2, 4, 6, d=97)

    except IFailedError:
        # the wrapper's closure cells are ('fn', 'self'), so cell 1 holds the Retry instance
        retry = do_something1.__closure__[1].cell_contents

        assert len(white_horse) == 5
        assert retry.failed_attempts == 5
        assert retry.timeouts_triggered == 0

    # Now with timeout
    black_horse = []
    brown_horse = []

    @Retry(attempts=2, timeout=5, error=IFailedError('Too many retries!'))
    def do_something2(a, b):
        black_horse.append(b)
        time.sleep(30)
        brown_horse.append(True)

    try:
        do_something2(1, 2)

    except IFailedError:
        retry = do_something2.__closure__[1].cell_contents

        assert not len(brown_horse)
        assert len(black_horse) == 2
        assert retry.timeouts_triggered == 2
        assert retry.failed_attempts == 2
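
    # Illustrative sketch: Timeout can also be used directly as a context manager.
    # When __debug__ is set it bumps a timeouts_triggered counter on the object it
    # is given, so DummyRetry below is a hypothetical minimal stand-in for a Retry
    # instance, used only for this demonstration.
    class DummyRetry(object):
        timeouts_triggered = 0

    dummy_retry = DummyRetry()

    try:
        with Timeout(dummy_retry, 1):
            time.sleep(5)  # should be interrupted by the 1 second alarm

    except TimeoutError:
        assert dummy_retry.timeouts_triggered == 1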

    # And react only to a set of exceptions
    @Retry(attempts=3, exceptions=(ValueError,))
    def do_something3():
        raise IndexError('This one goes right to the top')

    try:
        do_something3()

    except IndexError:
        retry = do_something3.__closure__[1].cell_contents

        assert retry.failed_attempts == 1
        assert retry.timeouts_triggered == 0

    # Use inverted result of wrapped fn
    @Retry(attempts=1, timeout=1, exceptions=(IFailedError,), error=IFailedError('Too many retries!'), inverse=True)
    def do_something4():
        raise IFailedError('No, I did not!')

    assert do_something4() is True

    # Test delay usage
    red_horse = []

    @Retry(attempts=5, timeout=5, error=IFailedError('Too many retries!'), delay=20)
    def do_something5():
        red_horse.append(time.time())
        time.sleep(10)  # should be enough to get killed by watchdog

    try:
        start_time = time.time()

        do_something5()

    except IFailedError:
        end_time = time.time()

        retry = do_something5.__closure__[1].cell_contents

        assert retry.failed_attempts == 5
        assert retry.timeouts_triggered == 5

        for i in range(1, 5):
            assert red_horse[i] - red_horse[i - 1] >= 20.0, 'Interval #%i was shorter than expected: %f' % (i, red_horse[i] - red_horse[i - 1])

        assert (end_time - start_time) >= (4 * 20.0 + 5.0), 'All attempts took shorter time than expected: %f' % (end_time - start_time)

    # Immediately fail on unexpected exceptions
    @Retry(attempts=3)
    def do_something6():
        raise IndexError('This one goes right to the top')

    try:
        do_something6()

    except IndexError:
        retry = do_something6.__closure__[1].cell_contents

        assert retry.failed_attempts == 1
        assert retry.timeouts_triggered == 0
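
    # Illustrative sketch of wait(): it keeps calling a function until the function
    # returns something truthy and hands that value back.  green_horse and
    # becomes_true are hypothetical helpers used only for this demonstration.
    green_horse = []

    def becomes_true():
        green_horse.append(True)
        return len(green_horse) >= 3

    assert wait(becomes_true, delay=0.1, tries=10) is True
    assert len(green_horse) == 3

    # When the condition never becomes true, wait() raises TimeoutError carrying MSG.
    try:
        wait(lambda: False, msg='never came true', delay=0.1, tries=3)
        raise AssertionError('wait() was expected to raise TimeoutError')

    except TimeoutError:
        pass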