File: fetch_util.py

Package: chromium 138.0.7204.157-1
# Copyright 2025 The Chromium Authors
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

import hashlib
import json
import logging
import os
import pathlib
import re
import subprocess
import sys
import zipfile

_SRC_PATH = pathlib.Path(__file__).resolve().parents[2]

sys.path.insert(1, str(_SRC_PATH / 'third_party/depot_tools'))
import gclient_eval


_FETCH_ALL_PATH = _SRC_PATH / 'third_party/android_deps/fetch_all.py'
_HASH_LENGTH = 15
_SKIP_FILES = ('OWNERS', 'cipd.yaml')

_DEFAULT_GENERATED_DISCLAIMER = '''\
// **IMPORTANT**: build.gradle is generated and any changes would be overridden
//                by the autoroller. Please update build.gradle.template
//                instead.
'''

ANDROIDX_CIPD_PACKAGE = 'chromium/third_party/androidx'


def _get_current_cipd_instance():
  with open(os.path.join(_SRC_PATH, 'DEPS'), 'rt') as f:
    gclient_dict = gclient_eval.Exec(f.read())
    return gclient_eval.GetCIPD(gclient_dict, 'src/third_party/androidx/cipd',
                                ANDROIDX_CIPD_PACKAGE)
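# Sketch of the DEPS entry this reads (shape only; the pinned version value
# is illustrative, not the real pin):
#   'src/third_party/androidx/cipd': {
#       'packages': [
#           {'package': 'chromium/third_party/androidx',
#            'version': '<instance-id>'},
#       ],
#       'dep_type': 'cipd',
#   },
# gclient_eval.GetCIPD() returns the 'version' value of that package.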


def _query_cipd_tags(version):
  cipd_output = subprocess.check_output(
      ['cipd', 'describe', ANDROIDX_CIPD_PACKAGE, '-version', version],
      encoding='utf-8')
  # Output looks like:
  # Package:       chromium/third_party/androidx
  # Instance ID:   gUjEawxv5mQO8yfbuC8W-rx4V3zYE-4LTWggXpZHI4sC
  # Registered by: user:chromium-cipd-builder@chops-service-accounts.iam.gserviceaccount.com
  # Registered at: 2025-01-06 17:54:48.034135 +0000 UTC
  # Refs:
  #   latest
  # Tags:
  #   details0:version-cr-012873390
  #   version:cr-012873390
  lines = cipd_output.split('\n')
  tags = {}
  parsing_tags = False
  for line in lines:
    if not line.strip():
      continue
    if line.startswith('Tags:'):
      parsing_tags = True
      continue
    if parsing_tags:
      tag, value = line.strip().split(':', 1)
      tags[tag] = value
  return tags
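# For the sample `cipd describe` output shown in the comment above, this
# returns:
#   {'details0': 'version-cr-012873390', 'version': 'cr-012873390'}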


def get_current_androidx_version():
  cipd_instance = _get_current_cipd_instance()
  cipd_tags = _query_cipd_tags(cipd_instance)
  version_string = cipd_tags['version']
  version = version_string[len('cr-0'):]
  logging.info('Resolved current androidx version to %s', version)
  return version
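# Example, using the tag from the sample output above: for
# cipd_tags['version'] == 'cr-012873390' this returns '12873390'
# (the leading 'cr-0' prefix is stripped).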


def make_androidx_maven_url(version):
  return ('https://androidx.dev/snapshots/builds/' + version +
          '/artifacts/repository')
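# E.g. make_androidx_maven_url('12873390') returns
# 'https://androidx.dev/snapshots/builds/12873390/artifacts/repository'.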


def generate_version_map_str(bom_path, with_hash=False):
  """Generate groovy code to fill the versionCache map.

    Args:
      bom_path: Path to bill_of_materials.json to parse.
      with_hash: Whether to also return a hash of all the packages in the BoM.
    """
  bom = []
  version_map_lines = []
  bom_hash = hashlib.sha256()
  with open(bom_path) as f:
    bom = json.load(f)
  bom.sort(key=lambda x: (x['group'], x['name']))
  for dep in bom:
    group = dep['group']
    name = dep['name']
    version = dep['version']
    bom_hash.update(f'${group}:${name}:${version}'.encode())
    map_line = f"versionCache['{group}:{name}'] = '{version}'"
    version_map_lines.append(map_line)
  version_map_str = '\n'.join(sorted(version_map_lines))
  version_hash = bom_hash.hexdigest()[:_HASH_LENGTH]
  if with_hash:
    return version_map_str, version_hash
  return version_map_str
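# Each line of the returned string has the form below (the group, name and
# version values here are illustrative, not taken from a real BoM):
#   versionCache['androidx.core:core'] = '1.13.0-alpha01'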


def hash_files(files):
  """Returns the sha256 hash of |files| contents."""
  content_hash = hashlib.sha256()
  for filepath in files:
    with open(filepath, 'rb') as f:
      content_hash.update(f.read())
  return content_hash.hexdigest()[:_HASH_LENGTH]
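# Illustrative usage (file names are hypothetical):
#   hash_files(['BUILD.gn', 'build.gradle'])
# returns the first 15 hex characters of the sha256 over both files' bytes,
# read in list order.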


def fill_template(template_path, output_path, **kwargs):
  """Fills in a template.

    Args:
      template_path: Path to <file>.template.
      output_path: Path to <file>.
      **kwargs: each kwarg should be a string to replace in the template.
    """
  content = pathlib.Path(template_path).read_text()
  for key, value in kwargs.items():
    replace_string = '{{' + key + '}}'
    if replace_string not in content:
      raise Exception(f'Replace text {replace_string} '
                      f'not found in {template_path}')
    try:
      content = content.replace(replace_string, value)
    except Exception as e:
      raise Exception(
          f'Failed to replace {repr(replace_string)} with {repr(value)}') from e

  content = content.replace(r'{{generated_disclaimer}}',
                            _DEFAULT_GENERATED_DISCLAIMER)

  unreplaced_variable_re = re.compile(r'\{\{(.+)\}\}')
  if matches := unreplaced_variable_re.findall(content):
    unreplaced_variables = ', '.join(repr(match) for match in matches)
    raise Exception('Found unreplaced variables '
                    f'[{unreplaced_variables}] in {template_path}')

  pathlib.Path(output_path).write_text(content)
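# Illustrative usage (the template path and keyword name are hypothetical):
#   fill_template('build.gradle.template', 'build.gradle',
#                 version_overrides=version_map_str)
# Every passed key must appear as '{{key}}' in the template, and every
# '{{...}}' placeholder must end up replaced; '{{generated_disclaimer}}' is
# always filled with the default disclaimer.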


def write_cipd_yaml(package_root,
                    package_name,
                    version,
                    output_path,
                    experimental=False):
  """Writes cipd.yaml file at the passed-in path."""

  root_libs_dir = package_root / 'libs'
  lib_dirs = os.listdir(root_libs_dir)
  if not lib_dirs:
    raise Exception('No generated libraries in {}'.format(root_libs_dir))

  data_files = [
      'BUILD.gn',
      'VERSION.txt',
      'bill_of_materials.json',
      'additional_readme_paths.json',
      'build.gradle',
      'to_commit.zip',
  ]
  for lib_dir in lib_dirs:
    abs_lib_dir: pathlib.Path = root_libs_dir / lib_dir
    if not abs_lib_dir.is_dir():
      continue

    for lib_file in abs_lib_dir.iterdir():
      if lib_file.name in _SKIP_FILES:
        continue
      data_files.append((abs_lib_dir / lib_file).relative_to(package_root))

  if experimental:
    package_name = (f'experimental/google.com/{os.getlogin()}/{package_name}')
  contents = [
      '# Copyright 2025 The Chromium Authors',
      '# Use of this source code is governed by a BSD-style license that can be',
      '# found in the LICENSE file.',
      f'# version: {version}',
      f'package: {package_name}',
      f'description: CIPD package for {package_name}',
      'data:',
  ]
  contents.extend(f'- file: {str(f)}' for f in data_files)

  with open(output_path, 'w') as out:
    out.write('\n'.join(contents))
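# The resulting cipd.yaml is plain 'key: value' lines plus a 'data:' list,
# roughly as below (the library file path is illustrative):
#   # version: 12873390
#   package: chromium/third_party/androidx
#   description: CIPD package for chromium/third_party/androidx
#   data:
#   - file: BUILD.gn
#   - file: libs/some_lib/README.chromium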


def create_to_commit_zip(output_path, package_root, dirnames,
                         absolute_file_map):
  """Generates a to_commit.zip from useful text files inside |package_root|.

    Args:
      output_path: where to output the zipfile.
      package_root: path to gradle/cipd package.
      dirnames: list of subdirs under |package_root| to walk.
      absolute_file_map: Dict mapping file names under |package_root| to the
        repo-relative paths they are stored at under the prefix CHROMIUM_SRC/.
  """
  to_commit_paths = []
  for directory in dirnames:
    for root, _, files in os.walk(package_root / directory):
      for filename in files:
        # Avoid committing actual artifacts.
        if filename.endswith(('.aar', '.jar')):
          continue
        # TODO(mheikal): stop outputting these from gradle since they are not
        # useful.
        if filename in _SKIP_FILES:
          continue
        file_path = pathlib.Path(root) / filename
        file_path_in_zip = file_path.relative_to(package_root)
        to_commit_paths.append((file_path, file_path_in_zip))

  for filename, path_in_repo in absolute_file_map.items():
    file_path = package_root / filename
    path_in_zip = f'CHROMIUM_SRC/{path_in_repo}'
    to_commit_paths.append((file_path, path_in_zip))

  with zipfile.ZipFile(output_path, 'w') as zip_file:
    for filename, arcname in to_commit_paths:
      zip_file.write(filename, arcname=arcname)
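# Illustrative call (paths are hypothetical):
#   create_to_commit_zip(package_root / 'to_commit.zip', package_root,
#                        ['libs'],
#                        {'BUILD.gn': 'third_party/android_deps/BUILD.gn'})
# Files under libs/ (minus *.aar/*.jar and _SKIP_FILES) are stored at their
# package_root-relative paths; BUILD.gn is stored at
# 'CHROMIUM_SRC/third_party/android_deps/BUILD.gn'.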


def run_fetch_all(android_deps_dir,
                  extra_args,
                  verbose_count=0,
                  output_subdir=None):
  fetch_all_cmd = [_FETCH_ALL_PATH, '--android-deps-dir', android_deps_dir]
  fetch_all_cmd += ['-v'] * verbose_count
  if output_subdir:
    fetch_all_cmd += ['--output-subdir', output_subdir]

  # Filter out -- from the args to pass to fetch_all.py.
  fetch_all_cmd += [a for a in extra_args if a != '--']

  subprocess.run(fetch_all_cmd, check=True)
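# Illustrative call (the extra flag is a placeholder, not a real
# fetch_all.py option):
#   run_fetch_all('third_party/androidx', ['--', '--some-flag'],
#                 verbose_count=1)
# runs roughly:
#   fetch_all.py --android-deps-dir third_party/androidx -v --some-flag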