File: cached_tasks.py

# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.


from collections import deque

import taskgraph
from taskgraph.transforms.base import TransformSequence

from gecko_taskgraph.util.cached_tasks import add_optimization

transforms = TransformSequence()


def order_tasks(config, tasks):
    """Iterate image tasks in an order where parent tasks come first."""
    kind_prefix = config.kind + "-"

    pending = deque(tasks)
    task_labels = {task["label"] for task in pending}
    emitted = set()
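    # Pop tasks off the queue; any task whose same-kind parents have not all
    # been emitted yet is pushed back to the end and retried later.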
    while pending:
        task = pending.popleft()
        parents = {
            dep
            for dep in task.get("dependencies", {}).values()
            if dep.startswith(kind_prefix)
        }
        if parents and not emitted.issuperset(parents & task_labels):
            pending.append(task)
            continue
        emitted.add(task["label"])
        yield task


def format_task_digest(cached_task):
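    """Return the "<type>/<name>/<digest>" string for a cached task."""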
    return "/".join(
        [
            cached_task["type"],
            cached_task["name"],
            cached_task["digest"],
        ]
    )
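
# Tasks opt into caching by carrying a `cache` entry with the keys this
# transform reads ("type", "name" and "digest-data").  A minimal sketch of
# such an entry, with purely illustrative values:
#
#     task["cache"] = {
#         "type": "content.v1",            # hypothetical cache type
#         "name": "example-cached-task",   # hypothetical cache name
#         "digest-data": ["input-hash"],   # strings folded into the digest
#     }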


@transforms.add
def cache_task(config, tasks):
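    """Compute a digest for every task that defines a `cache` entry and
    register it via `add_optimization`, so an equivalent earlier run can be
    reused instead of rebuilding."""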
    if taskgraph.fast:
        for task in tasks:
            yield task
        return

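    # Seed the digest map with already-cached tasks from the kinds this kind
    # depends on, keyed by task label.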
    digests = {}
    for task in config.kind_dependencies_tasks.values():
        if (
            "cached_task" in task.attributes
            and task.attributes["cached_task"] is not False
        ):
            digests[task.label] = format_task_digest(task.attributes["cached_task"])

    for task in order_tasks(config, tasks):
        cache = task.pop("cache", None)
        if cache is None:
            yield task
            continue

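        # Collect the digests of this task's parents: a cached task's parents
        # must themselves be cached, except on the toolchains project, where
        # caching is simply disabled for the task instead.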
        dependency_digests = []
        for p in task.get("dependencies", {}).values():
            if p in digests:
                dependency_digests.append(digests[p])
            elif config.params["project"] == "toolchains":
                # The toolchains repository uses non-cached toolchain artifacts. Allow
                # tasks to use them.
                cache = None
                break
            else:
                raise Exception(
                    "Cached task {} has uncached parent task: {}".format(
                        task["label"], p
                    )
                )

        if cache is None:
            yield task
            continue

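        # Combine the task's own digest data with its parents' digests, sorted
        # so the result does not depend on dependency ordering.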
        digest_data = cache["digest-data"] + sorted(dependency_digests)
        # Ensure we don't re-use cached tasks across repository types; doing
        # so breaks some Chain of Trust (CoT) verifications.
        if config.params["repository_type"] == "git":
            digest_data.append(config.params["repository_type"])

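        # After this call the task carries a `cached_task` attribute, which is
        # read back below so later tasks in this kind can reuse the digest.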
        add_optimization(
            config,
            task,
            cache_type=cache["type"],
            cache_name=cache["name"],
            digest_data=digest_data,
        )
        digests[task["label"]] = format_task_digest(task["attributes"]["cached_task"])

        yield task