File: fetch_github_release.py

Package: chromium 139.0.7258.127-1

# Copyright 2023 The Chromium Authors
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

import argparse
import hashlib
import json
import os
import pathlib
import re
import sys
from typing import Dict, List
import urllib.request

import scripthash


def _fetch_json(url):
    return json.load(urllib.request.urlopen(url))


def _find_valid_urls(release, artifact_regex):
    urls = [x['browser_download_url'] for x in release['assets']]
    if artifact_regex:
        urls = [x for x in urls if re.search(artifact_regex, x)]
    return urls


def _latest(api_url, install_scripts=None, artifact_regex=None):
    # Make the version change every time this file changes.
    file_hash = scripthash.compute(extra_paths=install_scripts)

    releases: List[Dict] = _fetch_json(f'{api_url}/releases')
    for release in releases:
        tag_name = release['tag_name']
        urls = _find_valid_urls(release, artifact_regex)
        if len(urls) == 1:
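            # e.g. prints "v1.2.3.<md5>" (tag value illustrative): the GitHub
            # tag plus the script hash computed above.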
            print('{}.{}'.format(tag_name, file_hash))
            return
        print(f'Bad urls={urls} for tag_name={tag_name}, skipping.',
              file=sys.stderr)


def _get_url(api_url,
             artifact_filename=None,
             artifact_extension=None,
             artifact_regex=None):
    # Split off our md5 hash.
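    # _3PP_VERSION is expected to hold the value printed by the "latest"
    # action, i.e. "{tag_name}.{file_hash}"; dropping the hash recovers the
    # GitHub tag.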
    version = os.environ['_3PP_VERSION'].rsplit('.', 1)[0]
    json_dict = _fetch_json(f'{api_url}/releases/tags/{version}')
    urls = _find_valid_urls(json_dict, artifact_regex)

    if len(urls) != 1:
        raise Exception('len(urls) != 1, urls: \n' + '\n'.join(urls))

    partial_manifest = {
        'url': urls,
        'ext': artifact_extension or '',
    }
    if artifact_filename:
        partial_manifest['name'] = [artifact_filename]
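    # The "get_url" action prints this manifest as JSON on stdout for the 3pp
    # recipe to download from; an illustrative (not real) example:
    #   {"url": ["https://github.com/owner/repo/releases/download/v1.2.3/tool.zip"],
    #    "ext": ".zip", "name": ["tool.zip"]}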

    print(json.dumps(partial_manifest))


def main(*,
         project,
         artifact_filename=None,
         artifact_extension=None,
         artifact_regex=None,
         install_scripts=None):
    """The fetch.py script for a 3pp module.

    Args:
      project: GitHub "owner/repo" path for the repository,
          e.g. "google/protobuf".
      artifact_filename: The name for the downloaded file. Required when not
          setting "unpack_archive: true" in 3pp.pb.
      artifact_extension: File extension of the file being downloaded. Required
          when setting "unpack_archive: true" in 3pp.pb.
      artifact_regex: A regex (matched with re.search) used to identify the
          desired artifact from the list of artifacts on the release.
      install_scripts: List of scripts to add to the md5 of the version. The
          main module and this module are always included.
    """
    parser = argparse.ArgumentParser()
    parser.add_argument('action', choices=('latest', 'get_url'))
    args = parser.parse_args()

    api_url = f'https://api.github.com/repos/{project}'
    if args.action == 'latest':
        _latest(api_url,
                install_scripts=install_scripts,
                artifact_regex=artifact_regex)
    else:
        _get_url(api_url,
                 artifact_filename=artifact_filename,
                 artifact_extension=artifact_extension,
                 artifact_regex=artifact_regex)
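

# Example of how a per-package 3pp fetch.py might wrap this helper; this is a
# sketch, and the regex and extension below are assumptions for illustration
# only (not taken from any real 3pp.pb):
#
#   import fetch_github_release
#
#   if __name__ == '__main__':
#       fetch_github_release.main(
#           project='google/protobuf',
#           artifact_regex=r'linux-x86_64\.zip$',
#           artifact_extension='.zip')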