# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
"""
Do transforms specific to the l10n kind.
"""
from mozbuild.chunkify import chunkify
from taskgraph.transforms.base import TransformSequence
from taskgraph.util import json
from taskgraph.util.copy import deepcopy
from taskgraph.util.dependencies import get_dependencies, get_primary_dependency
from taskgraph.util.schema import (
Schema,
optionally_keyed_by,
resolve_keyed_by,
taskref_or_string,
)
from taskgraph.util.taskcluster import get_artifact_prefix
from taskgraph.util.treeherder import add_suffix
from voluptuous import Any, Optional, Required
from gecko_taskgraph.transforms.job import job_description_schema
from gecko_taskgraph.transforms.task import task_description_schema
from gecko_taskgraph.util.attributes import (
copy_attributes_from_dependent_job,
sorted_unique_list,
task_name,
)
def _by_platform(arg):
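    """Allow a schema value to be keyed by build-platform."""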
return optionally_keyed_by("build-platform", arg)
l10n_description_schema = Schema(
{
# Name for this job, inferred from the dependent job before validation
Required("name"): str,
# build-platform, inferred from dependent job before validation
Required("build-platform"): str,
# max run time of the task
Required("run-time"): _by_platform(int),
# Locales not to repack for
Required("ignore-locales"): _by_platform([str]),
# All l10n jobs use mozharness
Required("mozharness"): {
# Script to invoke for mozharness
Required("script"): _by_platform(str),
# Config files passed to the mozharness script
Required("config"): _by_platform([str]),
# Additional paths to look for mozharness configs in. These should be
# relative to the base of the source checkout
Optional("config-paths"): [str],
# Options to pass to the mozharness script
Optional("options"): _by_platform([str]),
# Action commands to provide to mozharness script
Required("actions"): _by_platform([str]),
# if true, perform a checkout of a comm-central based branch inside the
# gecko checkout
Optional("comm-checkout"): bool,
},
# Items for the taskcluster index
Optional("index"): {
# Product to identify as in the taskcluster index
Required("product"): _by_platform(str),
# Job name to identify as in the taskcluster index
Required("job-name"): _by_platform(str),
# Type of index
Optional("type"): _by_platform(str),
},
# Description of the localized task
Required("description"): _by_platform(str),
Optional("run-on-projects"): job_description_schema["run-on-projects"],
Optional("run-on-repo-type"): job_description_schema["run-on-repo-type"],
# worker-type to utilize
Required("worker-type"): _by_platform(str),
# File which contains the used locales
Required("locales-file"): _by_platform(str),
# Tooltool visibility required for task.
Required("tooltool"): _by_platform(Any("internal", "public")),
# Docker image required for task. We accept only in-tree images
# -- generally desktop-build or android-build -- for now.
Optional("docker-image"): _by_platform(
# an in-tree generated docker image (from `taskcluster/docker/<name>`)
{"in-tree": str},
),
Optional("fetches"): {
str: _by_platform([str]),
},
# The set of secret names to which the task has access; these are prefixed
# with `project/releng/gecko/{treeherder.kind}/level-{level}/`. Setting
# this will enable any worker features required and set the task's scopes
# appropriately. `true` here means ['*'], all secrets. Not supported on
# Windows
Optional("secrets"): _by_platform(Any(bool, [str])),
# Information for treeherder
Required("treeherder"): {
# Platform to display the task on in treeherder
Required("platform"): _by_platform(str),
# Symbol to use
Required("symbol"): str,
            # Treeherder tier for this task
Required("tier"): _by_platform(int),
},
# Extra environment values to pass to the worker
Optional("env"): _by_platform({str: taskref_or_string}),
        # Max number of locales per chunk
Optional("locales-per-chunk"): _by_platform(int),
# Task deps to chain this task with, added in transforms from primary dependency
# if this is a shippable-style build
Optional("dependencies"): {str: str},
# Run the task when the listed files change (if present).
Optional("when"): {"files-changed": [str]},
# passed through directly to the job description
Optional("attributes"): job_description_schema["attributes"],
Optional("extra"): job_description_schema["extra"],
# Shipping product and phase
Optional("shipping-product"): task_description_schema["shipping-product"],
Optional("shipping-phase"): task_description_schema["shipping-phase"],
Optional("task-from"): task_description_schema["task-from"],
}
)
transforms = TransformSequence()
def parse_locales_file(locales_file, platform=None):
"""Parse the passed locales file for a list of locales."""
locales = []
with open(locales_file) as f:
if locales_file.endswith("json"):
all_locales = json.load(f)
# XXX Only single locales are fetched
locales = {
locale: data["revision"]
for locale, data in all_locales.items()
if platform is None or platform in data["platforms"]
}
else:
all_locales = f.read().split()
# 'default' is the hg revision at the top of hg repo, in this context
locales = {locale: "default" for locale in all_locales}
return locales
def _remove_locales(locales, to_remove=None):
    # ja-JP-mac is a mac-only locale, but there are no mac builds being repacked,
    # so just omit it unconditionally
    to_remove = to_remove or []
    return {
        locale: revision
        for locale, revision in locales.items()
        if locale not in to_remove
    }
@transforms.add
def setup_name(config, jobs):
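    """Name the l10n job after its primary (build) dependency."""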
for job in jobs:
dep = get_primary_dependency(config, job)
assert dep
        # Set the name to the same as the dep task, without the kind name.
        # The label will be set automatically from this kind's name.
job["name"] = job.get("name", task_name(dep))
yield job
@transforms.add
def copy_in_useful_magic(config, jobs):
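    """Copy attributes from the primary dependency onto the l10n job."""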
for job in jobs:
dep = get_primary_dependency(config, job)
assert dep
attributes = copy_attributes_from_dependent_job(dep)
attributes.update(job.get("attributes", {}))
# build-platform is needed on `job` for by-build-platform
job["build-platform"] = attributes.get("build_platform")
job["attributes"] = attributes
yield job
transforms.add_validate(l10n_description_schema)
@transforms.add
def gather_required_signoffs(config, jobs):
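    """Collect required_signoffs from all dependencies into this job's attributes."""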
for job in jobs:
job.setdefault("attributes", {})["required_signoffs"] = sorted_unique_list(
*(
dep.attributes.get("required_signoffs", [])
for dep in get_dependencies(config, job)
)
)
yield job
@transforms.add
def remove_repackage_dependency(config, jobs):
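    """Only keep the repackage dependency for macOS build platforms."""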
for job in jobs:
build_platform = job["attributes"]["build_platform"]
if not build_platform.startswith("macosx"):
del job["dependencies"]["repackage"]
yield job
@transforms.add
def handle_keyed_by(config, jobs):
"""Resolve fields that can be keyed by platform, etc."""
fields = [
"locales-file",
"locales-per-chunk",
"worker-type",
"description",
"run-time",
"docker-image",
"secrets",
"fetches.toolchain",
"fetches.fetch",
"tooltool",
"env",
"ignore-locales",
"mozharness.config",
"mozharness.options",
"mozharness.actions",
"mozharness.script",
"treeherder.tier",
"treeherder.platform",
"index.type",
"index.product",
"index.job-name",
"when.files-changed",
]
for job in jobs:
job = deepcopy(job) # don't overwrite dict values here
for field in fields:
resolve_keyed_by(item=job, field=field, item_name=job["name"])
yield job
@transforms.add
def handle_artifact_prefix(config, jobs):
"""Resolve ``artifact_prefix`` in env vars"""
for job in jobs:
artifact_prefix = get_artifact_prefix(job)
for k1, v1 in job.get("env", {}).items():
if isinstance(v1, str):
job["env"][k1] = v1.format(artifact_prefix=artifact_prefix)
elif isinstance(v1, dict):
for k2, v2 in v1.items():
job["env"][k1][k2] = v2.format(artifact_prefix=artifact_prefix)
yield job
@transforms.add
def all_locales_attribute(config, jobs):
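    """Parse the locales file and record the locale list (with changesets) as task attributes."""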
for job in jobs:
locales_platform = job["attributes"]["build_platform"].replace("-shippable", "")
locales_platform = locales_platform.replace("-pgo", "")
locales_with_changesets = parse_locales_file(
job["locales-file"], platform=locales_platform
)
locales_with_changesets = _remove_locales(
locales_with_changesets, to_remove=job["ignore-locales"]
)
locales = sorted(locales_with_changesets.keys())
attributes = job.setdefault("attributes", {})
attributes["all_locales"] = locales
attributes["all_locales_with_changesets"] = locales_with_changesets
if job.get("shipping-product"):
attributes["shipping_product"] = job["shipping-product"]
yield job
@transforms.add
def chunk_locales(config, jobs):
"""Utilizes chunking for l10n stuff"""
for job in jobs:
locales_per_chunk = job.get("locales-per-chunk")
locales_with_changesets = job["attributes"]["all_locales_with_changesets"]
if locales_per_chunk:
chunks, remainder = divmod(len(locales_with_changesets), locales_per_chunk)
if remainder:
chunks = int(chunks + 1)
for this_chunk in range(1, chunks + 1):
chunked = deepcopy(job)
chunked["name"] = chunked["name"].replace("/", f"-{this_chunk}/", 1)
chunked["mozharness"]["options"] = chunked["mozharness"].get(
"options", []
)
# chunkify doesn't work with dicts
locales_with_changesets_as_list = sorted(
locales_with_changesets.items()
)
chunked_locales = chunkify(
locales_with_changesets_as_list, this_chunk, chunks
)
chunked["mozharness"]["options"].extend(
[
f"locale={locale}:{changeset}"
for locale, changeset in chunked_locales
]
)
chunked["attributes"]["l10n_chunk"] = str(this_chunk)
# strip revision
chunked["attributes"]["chunk_locales"] = [
locale for locale, _ in chunked_locales
]
# add the chunk number to the TH symbol
chunked["treeherder"]["symbol"] = add_suffix(
chunked["treeherder"]["symbol"], this_chunk
)
yield chunked
else:
job["mozharness"]["options"] = job["mozharness"].get("options", [])
job["mozharness"]["options"].extend(
[
f"locale={locale}:{changeset}"
for locale, changeset in sorted(locales_with_changesets.items())
]
)
yield job
transforms.add_validate(l10n_description_schema)
@transforms.add
def stub_installer(config, jobs):
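    """Set USE_STUB_INSTALLER in the worker environment when the dependent build used a stub installer."""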
for job in jobs:
job.setdefault("attributes", {})
job.setdefault("env", {})
if job["attributes"].get("stub-installer"):
job["env"].update({"USE_STUB_INSTALLER": "1"})
yield job
@transforms.add
def set_extra_config(config, jobs):
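    """Pass the branch (project) and update channel to mozharness via extra-config."""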
for job in jobs:
job["mozharness"].setdefault("extra-config", {})["branch"] = config.params[
"project"
]
if "update-channel" in job["attributes"]:
job["mozharness"]["extra-config"]["update_channel"] = job["attributes"][
"update-channel"
]
yield job
@transforms.add
def make_job_description(config, jobs):
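    """Turn the validated l10n description into a job description for the job/task transforms."""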
for job in jobs:
job["mozharness"].update(
{
"using": "mozharness",
"job-script": "taskcluster/scripts/builder/build-l10n.sh",
"secrets": job.get("secrets", False),
}
)
job_description = {
"name": job["name"],
"worker-type": job["worker-type"],
"description": job["description"],
"run": job["mozharness"],
"attributes": job["attributes"],
"treeherder": {
"kind": "build",
"tier": job["treeherder"]["tier"],
"symbol": job["treeherder"]["symbol"],
"platform": job["treeherder"]["platform"],
},
"run-on-projects": (
job.get("run-on-projects") if job.get("run-on-projects") else []
),
"run-on-repo-type": job.get("run-on-repo-type", ["git", "hg"]),
}
if job.get("extra"):
job_description["extra"] = job["extra"]
job_description["run"]["tooltool-downloads"] = job["tooltool"]
job_description["worker"] = {
"max-run-time": job["run-time"],
"chain-of-trust": True,
}
if job["worker-type"] in ["b-win2012", "b-win2022"]:
job_description["worker"]["os"] = "windows"
job_description["run"]["use-simple-package"] = False
job_description["run"]["use-magic-mh-args"] = False
if job.get("docker-image"):
job_description["worker"]["docker-image"] = job["docker-image"]
if job.get("fetches"):
job_description["fetches"] = job["fetches"]
if job.get("index"):
job_description["index"] = {
"product": job["index"]["product"],
"job-name": job["index"]["job-name"],
"type": job["index"].get("type", "generic"),
}
if job.get("dependencies"):
job_description["dependencies"] = job["dependencies"]
if job.get("env"):
job_description["worker"]["env"] = job["env"]
if job.get("when", {}).get("files-changed"):
job_description.setdefault("when", {})
job_description["when"]["files-changed"] = [job["locales-file"]] + job[
"when"
]["files-changed"]
if "shipping-phase" in job:
job_description["shipping-phase"] = job["shipping-phase"]
if "shipping-product" in job:
job_description["shipping-product"] = job["shipping-product"]
yield job_description
@transforms.add
def add_macos_signing_artifacts(config, jobs):
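    """Mirror the macOS build task's public/build/security/ artifacts onto the l10n job's worker."""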
for job in jobs:
if "macosx" not in job["name"]:
yield job
continue
build_dep = None
for dep_job in get_dependencies(config, job):
if dep_job.kind == "build":
build_dep = dep_job
break
assert build_dep, f"l10n job {job['name']} has no build dependency"
for path, artifact in build_dep.task["payload"]["artifacts"].items():
if path.startswith("public/build/security/"):
job["worker"].setdefault("artifacts", []).append(
{
"name": path,
"path": artifact["path"],
"type": "file",
}
)
yield job