File: backends.py

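"""Backends for tasklib.

This module defines the abstract ``Backend`` interface and its
``TaskWarrior`` implementation, which drives the ``task`` command line
client through subprocess calls.
"""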
import abc
import copy
import datetime
import json
import logging
import os
import re
import subprocess
from functools import lru_cache

from .task import Task, TaskQuerySet, ReadOnlyDictView
from .filters import TaskWarriorFilter

DATE_FORMAT_CALC = '%Y-%m-%dT%H:%M:%S'

logger = logging.getLogger(__name__)


class Backend(object):
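    """
    Abstract interface that concrete task storage backends implement.

    Subclasses provide the query, persistence and synchronization
    operations the rest of tasklib delegates to.  A minimal subclass
    sketch (illustrative only, not shipped with this module)::

        class MyBackend(Backend):
            @property
            def filter_class(self):
                return TaskWarriorFilter  # or a custom filter class

            def filter_tasks(self, filter_obj):
                return []

            # ...implement the remaining abstract methods...
    """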

    @abc.abstractproperty
    def filter_class(self):
        """Returns the TaskFilter class used by this backend"""
        pass

    @abc.abstractmethod
    def filter_tasks(self, filter_obj):
        """Returns a list of Task objects matching the given filter"""
        pass

    @abc.abstractmethod
    def save_task(self, task):
        pass

    @abc.abstractmethod
    def delete_task(self, task):
        pass

    @abc.abstractmethod
    def start_task(self, task):
        pass

    @abc.abstractmethod
    def stop_task(self, task):
        pass

    @abc.abstractmethod
    def complete_task(self, task):
        pass

    @abc.abstractmethod
    def refresh_task(self, task, after_save=False):
        """
        Refreshes the given task. Returns a new data dict with serialized
        attributes.
        """
        pass

    @abc.abstractmethod
    def annotate_task(self, task, annotation):
        pass

    @abc.abstractmethod
    def denotate_task(self, task, annotation):
        pass

    @abc.abstractmethod
    def sync(self):
        """Syncs the backend database with the taskd server"""
        pass

    def convert_datetime_string(self, value):
        """
        Converts a TW-syntax datetime string to a localized datetime
        object. This method is not mandatory.
        """
        raise NotImplementedError


class TaskWarriorException(Exception):
    pass


class TaskWarrior(Backend):
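    """
    Backend built on top of the ``task`` command line client.

    Every invocation passes the configured overrides (confirmation,
    json.array, bulk, ...) as ``rc.<key>=<value>`` arguments.  Illustrative
    usage sketch (the data_location value is an example, not a default)::

        tw = TaskWarrior(data_location='~/.task')
        task = tw.tasks.get(uuid='bd23f69a-a078-48a4-ac11-afba0643eca9')
        tw.sync()
    """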

    VERSION_2_4_0 = '2.4.0'
    VERSION_2_4_1 = '2.4.1'
    VERSION_2_4_2 = '2.4.2'
    VERSION_2_4_3 = '2.4.3'
    VERSION_2_4_4 = '2.4.4'
    VERSION_2_4_5 = '2.4.5'

    def __init__(self, data_location=None, create=True,
                 taskrc_location=None, task_command='task',
                 version_override=None):
        self.taskrc_location = None
        if taskrc_location:
            self.taskrc_location = os.path.expanduser(taskrc_location)

            # If the taskrc does not exist, pass / to use defaults and to
            # keep TaskWarrior from creating a dummy .taskrc file
            if not os.path.exists(self.taskrc_location):
                self.taskrc_location = '/'

        self.task_command = task_command

        self._config = None
        self.version = version_override or self._get_version()
        self.overrides = {
            'confirmation': 'no',
            'dependency.confirmation': 'no',  # See TW-1483 or taskrc man page
            'recurrence.confirmation': 'no',  # Necessary for modifying R tasks

            # Defaults to on since 2.4.5; we expect off during parsing
            'json.array': 'off',

            # 2.4.3 onwards supports 0 as infinite bulk; for older versions
            # set an arbitrarily big number which is likely to be large enough
            'bulk': 0 if self.version >= self.VERSION_2_4_3 else 100000,
        }

        # Set data.location override if passed via kwarg
        if data_location is not None:
            data_location = os.path.expanduser(data_location)
            if create and not os.path.exists(data_location):
                os.makedirs(data_location)
            self.overrides['data.location'] = data_location

        self.tasks = TaskQuerySet(self)

    def _get_task_command(self):
        return self.task_command.split()

    def _get_command_args(self, args, config_override=None):
        command_args = self._get_task_command()
        overrides = self.overrides.copy()
        overrides.update(config_override or dict())
        for item in overrides.items():
            command_args.append('rc.{0}={1}'.format(*item))
        command_args.extend([
            x.decode('utf-8') if isinstance(x, bytes)
            else str(x) for x in args
        ])
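        # Illustrative example (not part of the original source): with the
        # default overrides, self._get_command_args(['next']) yields
        # something like
        #   ['task', 'rc.confirmation=no', 'rc.dependency.confirmation=no',
        #    'rc.recurrence.confirmation=no', 'rc.json.array=off',
        #    'rc.bulk=0', 'next']
        # plus 'rc.data.location=...' when a data_location was given.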
        return command_args

    def _get_version(self):
        p = subprocess.Popen(
            self._get_task_command() + ['--version'],
            stdout=subprocess.PIPE,
            stderr=subprocess.PIPE)
        stdout, stderr = [x.decode('utf-8') for x in p.communicate()]
        return stdout.strip('\n')

    def _get_modified_task_fields_as_args(self, task):
        args = []

        def add_field(field):
            # Add the output of the format_<field> method to the args list
            # (defaults to field:'value')
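            # Illustrative example (not part of the original source): for a
            # 'priority' value of 'H' this appends "priority:'H'"; fields
            # with a format_<field> method (e.g. description, depends) use
            # that method instead.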
            serialized_value = task._serialize(field, task._data[field])

            # Empty values should not be enclosed in quotation marks, see
            # TW-1510
            if serialized_value == '':
                escaped_serialized_value = ''
            else:
                escaped_serialized_value = "'{0}'".format(
                    serialized_value)

            format_default = lambda task: "{0}:{1}".format(
                field, escaped_serialized_value)

            format_func = getattr(self, 'format_{0}'.format(field),
                                  format_default)

            args.append(format_func(task))

        # If we're modifying a saved task, simply pass on all modified fields
        if task.saved:
            for field in task._modified_fields:
                add_field(field)

        # For new tasks, pass all fields that make sense
        else:
            for field in task._data.keys():
                # We cannot set read-only fields (ID, UUID, ...)
                if field in task.read_only_fields:
                    continue
                # We do not want to do field deletion for new tasks
                if task._data[field] is None:
                    continue
                # Otherwise we're fine
                add_field(field)

        return args

    def format_depends(self, task):
        # We need to generate lists of added and removed dependencies,
        # since TaskWarrior does not accept redefining dependencies.

        # This cannot be part of serialize_depends, since we need to keep
        # a list of all dependencies in the _data dictionary, not just the
        # currently added/removed ones.
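        # Illustrative example (not part of the original source): if task A
        # was newly added as a dependency and task B was removed, this
        # returns 'depends:<uuid of A>,-<uuid of B>'.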

        old_dependencies = task._original_data.get('depends', set())

        added = task['depends'] - old_dependencies
        removed = old_dependencies - task['depends']

        # Removed dependencies need to be prefixed with '-'
        return 'depends:' + ','.join(
            [t['uuid'] for t in added] +
            ['-' + t['uuid'] for t in removed]
        )

    def format_description(self, task):
        return "description:'{0}'".format(
            task._data['description'] or '',
        )

    def convert_datetime_string(self, value):
        # For strings, use 'calc' (available since TW 2.4.0) to evaluate
        # the string to a datetime
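        # Illustrative example (not part of the original source):
        # convert_datetime_string('now + 1d') runs ``task ... calc now + 1d``
        # and parses the first output line with DATE_FORMAT_CALC into a
        # timezone-aware local datetime.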
        args = value.split()
        result = self.execute_command(['calc'] + args)
        naive = datetime.datetime.strptime(result[0], DATE_FORMAT_CALC)
        localized = naive.astimezone()
        return localized

    @property
    def filter_class(self):
        return TaskWarriorFilter

    # Public interface

    @property
    def config(self):
        # First, check if memoized information is available
        if self._config:
            return self._config

        # If not, fetch the config using the 'show' command
        raw_output = self.execute_command(
            ['show'],
            config_override={'verbose': 'nothing'}
        )

        config = dict()
        config_regex = re.compile(r'^(?P<key>[^\s]+)\s+(?P<value>[^\s].*$)')
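        # Illustrative example (not part of the original source): a
        # 'task show' output line such as
        #   'data.location /home/user/.task'
        # becomes config['data.location'] == '/home/user/.task'.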

        for line in raw_output:
            match = config_regex.match(line)
            if match:
                config[match.group('key')] = match.group('value').strip()

        # Memoize the config dict
        self._config = ReadOnlyDictView(config)

        return self._config

    def execute_command(self, args, config_override=None, allow_failure=True,
                        return_all=False):
        command_args = self._get_command_args(
            args, config_override=config_override)
        logger.debug(u' '.join(command_args))

        env = os.environ.copy()
        if self.taskrc_location:
            env['TASKRC'] = self.taskrc_location
        p = subprocess.Popen(command_args, stdout=subprocess.PIPE,
                             stderr=subprocess.PIPE, env=env)
        stdout, stderr = [x.decode('utf-8') for x in p.communicate()]
        if p.returncode and allow_failure:
            if stderr.strip():
                error_msg = stderr.strip()
            else:
                error_msg = stdout.strip()
            error_msg += u'\nCommand used: ' + u' '.join(command_args)
            raise TaskWarriorException(error_msg)

        # Return the whole triplet only if explicitly asked for
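        # Illustrative example (not part of the original source):
        # execute_command(['count'], return_all=True) yields a
        # (stdout_lines, stderr_lines, returncode) triplet, while the
        # default returns only the stdout lines.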
        if not return_all:
            return stdout.rstrip().split('\n')
        else:
            return (stdout.rstrip().split('\n'),
                    stderr.rstrip().split('\n'),
                    p.returncode)

    def enforce_recurrence(self):
        # Run an arbitrary report command, which triggers generation
        # of recurring tasks.

        # Only necessary for TW up to 2.4.1, fixed in 2.4.2.
        if self.version < self.VERSION_2_4_2:
            self.execute_command(['next'], allow_failure=False)

    def merge_with(self, path, push=False):
        path = path.rstrip('/') + '/'
        self.execute_command(['merge', path], config_override={
            'merge.autopush': 'yes' if push else 'no',
        })

    def undo(self):
        self.execute_command(['undo'])

    @lru_cache(maxsize=128)
    def get_task(self, uuid):
        return self.tasks.get(uuid=uuid)

    # Backend interface implementation

    def filter_tasks(self, filter_obj):
        self.enforce_recurrence()
        args = filter_obj.get_filter_params() + ["export"]
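        # Illustrative example (not part of the original source): for a
        # filter whose params reduce to ['status:pending'], this runs
        # ``task <rc overrides> status:pending export`` and builds one Task
        # per exported JSON line (json.array is off).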
        tasks = []
        for line in self.execute_command(args):
            if line:
                data = line.strip(',')
                try:
                    filtered_task = Task(self)
                    filtered_task._load_data(json.loads(data))
                    tasks.append(filtered_task)
                except ValueError:
                    raise TaskWarriorException('Invalid JSON: %s' % data)
        return tasks

    def save_task(self, task):
        """Save a task into TaskWarrior database using add/modify call"""

        args = [task['uuid'], 'modify'] if task.saved else ['add']
        args.extend(self._get_modified_task_fields_as_args(task))
        output = self.execute_command(
            args,
            config_override={'verbose': 'new-uuid'}
        )

        # Parse out the new UUID if the task is being added for the first time
        if not task.saved:
            id_lines = [l for l in output if l.startswith('Created task ')]

            # Complain loudly if it seems that more than one task was
            # created; this should not happen.
            # Expected output: Created task bd23f69a-a078-48a4-ac11-afba0643eca9.
            #                  Created task bd23f69a-a078-48a4-ac11-afba0643eca9 (recurrence template).
            if len(id_lines) != 1 or len(id_lines[0].split(' ')) not in (3, 5):
                raise TaskWarriorException(
                    'Unexpected output when creating '
                    'task: %s' % '\n'.join(id_lines),
                )

            # Circumvent the ID storage, since ID is considered read-only
            identifier = id_lines[0].split(' ')[2].rstrip('.')

            # Identifier is UUID, because we used new-uuid verbosity override
            task._data['uuid'] = identifier

        # Refreshing is very important here: not only is the modification
        # time updated, but arbitrary attributes may have changed due to
        # hooks altering the data before saving.
        task.refresh(after_save=True)

    def delete_task(self, task):
        self.execute_command([task['uuid'], 'delete'])

    def start_task(self, task):
        self.execute_command([task['uuid'], 'start'])

    def stop_task(self, task):
        self.execute_command([task['uuid'], 'stop'])

    def complete_task(self, task):
        self.execute_command([task['uuid'], 'done'])

    def annotate_task(self, task, annotation):
        args = [task['uuid'], 'annotate', annotation]
        self.execute_command(args)

    def denotate_task(self, task, annotation):
        args = [task['uuid'], 'denotate', annotation]
        self.execute_command(args)

    def refresh_task(self, task, after_save=False):
        # We need to use ID as a fallback for UUID here when refreshing
        # newly saved tasks. Everywhere else in the code, UUID alone is
        # sufficient.
        args = [task['uuid'] or task['id'], 'export']
        output = self.execute_command(
            args,
            # Suppress GC, which can change ID numbers (undesirable for refresh)
            config_override={'gc': '0'}
        )

        def valid(output):
            return len(output) == 1 and output[0].startswith('{')

        # For older TW versions, attempt to uniquely locate the task
        # using the data we have if it has just been saved.
        # This can happen when adding a completed task on older TW versions.
        if (not valid(output) and self.version < self.VERSION_2_4_5
                and after_save):

            # Make a copy, removing ID and UUID. It's most likely invalid
            # (ID 0) if it failed to match a unique task.
            data = copy.deepcopy(task._data)
            data.pop('id', None)
            data.pop('uuid', None)

            taskfilter = self.filter_class(self)
            for key, value in data.items():
                taskfilter.add_filter_param(key, value)

            output = self.execute_command(taskfilter.get_filter_params() + ['export'])

        # If the output still does not identify exactly one task,
        # raise an exception
        if not valid(output):
            raise TaskWarriorException(
                'Unique identifier {0} with description {1} matches '
                'multiple tasks: {2}'.format(
                    task['uuid'] or task['id'], task['description'], output)
            )

        return json.loads(output[0])

    def sync(self):
        self.execute_command(['sync'])