#!/usr/bin/python3
# This file is part of Cockpit.
#
# Copyright (C) 2021 Red Hat, Inc.
#
# Cockpit is free software; you can redistribute it and/or modify it
# under the terms of the GNU Lesser General Public License as published by
# the Free Software Foundation; either version 2.1 of the License, or
# (at your option) any later version.
#
# Cockpit is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with Cockpit; If not, see <http://www.gnu.org/licenses/>.
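
"""Build or download the Cockpit dist tarball for CI and local testing.

The path of the resulting tarball is printed on stdout.  A pre-built tarball is
downloaded from the GitHub Actions artifacts when possible (see download_dist()),
otherwise one is built locally (see build_dist()).

Typical invocation, assuming the script lives in tools/ as the make-bots call
below suggests:

    tools/make_dist.py [-d|--download-only]
"""
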
import argparse
import glob
import io
import multiprocessing
import os
import subprocess
import sys
import urllib.error
import urllib.request
import zipfile


def message(*args):
    print(*args, file=sys.stderr)


def build_dist():
    '''Build a dist tarball for CI testing

    This supports completely clean git trees, unpacked release tarballs, and
    already configured trees.

    Returns path to built tarball.
    '''
    if not os.path.exists("Makefile"):
        if os.path.exists('./configure'):
            # unconfigured release tarball
            subprocess.check_call('./configure')
        else:
            # clean git checkout
            subprocess.check_call('./autogen.sh')

    # this is for a development build, not a release, so we care about speed, not best size
    subprocess.check_call(["make", "--silent", "-j%i" % multiprocessing.cpu_count(),
                           "NO_DIST_CACHE=1", "XZ_COMPRESS_FLAGS=-0", "dist"])
    return subprocess.check_output(["make", "dump-dist"], universal_newlines=True).strip()


def download_dist():
    '''Download the dist tarball for the current git SHA from GitHub

    These are produced by .github/workflows/build-dist.yml for every PR and push.
    This is a lot faster than having to npm install and run webpack.

    Returns the path to the downloaded tarball, or None if it isn't available.
    This can happen because the current directory is not a git checkout, or it is
    a SHA which is not pushed/PRed, or there is no ~/.config/github-token available.
    '''
    try:
        sha = subprocess.check_output(["git", "rev-parse", "HEAD"], stderr=subprocess.DEVNULL).decode().strip()
    except subprocess.CalledProcessError:
        message("make_dist: not a git repository")
        return None
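
    # ':^test' is a git pathspec exclusion: uncommitted changes under test/ are
    # ignored, so they alone do not force a local build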
if subprocess.call(["git", "diff", "--quiet", "--", ":^test"]) > 0:
message("make_dist: uncommitted local changes, skipping download")
return None
dists = glob.glob(f"cockpit-*{sha[:8]}*.tar.xz")
if dists:
message("make_dist: already downloaded", dists[0])
return os.path.abspath(dists[0])
# autogen.sh does that for build_dist()
if not os.path.exists("bots"):
subprocess.check_call(['tools/make-bots'])
sys.path.append(os.path.join(os.path.dirname(os.path.dirname(os.path.abspath(__file__))), "bots"))
import task
# task.api defaults to the current checkout's origin, but the artifacts are on cockpit-project
# except if a developer wants to change the artifact building workflow on their fork, support that with $GITHUB_BASE
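    # (GITHUB_BASE takes an owner/repo slug, e.g. the hypothetical fork "yourname/cockpit")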
    api = task.github.GitHub(repo=os.getenv("GITHUB_BASE", "cockpit-project/cockpit"))

    # downloading GitHub artifacts requires a token
    if not api.token:
        message("make_dist: no GitHub API token available")
        return None

    # iterate artifacts and search our SHA
    page = 1
    batch_size = 100
    download_url = None
    while not download_url and batch_size == 100:
        batch = api.get(f"actions/artifacts?per_page={batch_size}&page={page}")["artifacts"]
        for artifact in batch:
            if artifact["name"] == "dist-" + sha:
                download_url = artifact["archive_download_url"]
                break
        # if the current batch is < 100, we have the last page and can stop
        batch_size = len(batch)
        page += 1

    if not download_url:
        message(f"make_dist: no download available for commit {sha}")
        return None

    sys.stderr.write(f"make_dist: Downloading dist tarball from {download_url} ...\n")
    request = urllib.request.Request(download_url, headers={"Authorization": "token " + api.token})
    zipio = io.BytesIO()
    try:
        with urllib.request.urlopen(request) as response:
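            # show a progress counter only when stderr is an interactive terminal;
            # total_size stays None otherwise, which disables the progress output below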
            if os.isatty(sys.stderr.fileno()):
                total_size = 0
            else:
                total_size = None
            MB = 10**6
            # read the zip into a BytesIO buffer, as the HTTP stream is not seekable and ZipFile needs a seekable file
            while True:
                block = response.read(MB)
                if len(block) == 0:
                    break
                if total_size is not None:
                    total_size += len(block)
                    sys.stderr.write(f"\r{ total_size // MB } MB")
                zipio.write(block)
            # clear the download progress in tty mode
            if total_size is not None:
                sys.stderr.write("\r \r")
    except urllib.error.HTTPError as e:
        message("make_dist: Download failed:", e)
        return None

    with zipfile.ZipFile(zipio) as fzip:
        names = fzip.namelist()
        if len(names) != 1 or not names[0].endswith(".tar.xz"):
            message("make_dist: expected zip artifact with exactly one tar.xz member")
            return None
        tar_path = fzip.extract(names[0])

    # extract node_modules and dist locally to speed up the build and to allow integration tests to run
    unpack_dirs = [d for d in ["dist", "node_modules"] if not os.path.exists(d)]
    if unpack_dirs:
        message("make_dist: Extracting directories from tarball:", ' '.join(unpack_dirs))
        prefix = os.path.basename(tar_path).split('.tar')[0] + '/'
        prefixed_unpack_dirs = [prefix + d for d in unpack_dirs]
        subprocess.check_call(["tar", "--touch", "--strip-components=1", "-xf", tar_path] + prefixed_unpack_dirs)

    return tar_path


def make_dist(download_only=False):
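    '''Return the path to a dist tarball, downloading a pre-built one if possible.

    Falls back to building the tarball locally, unless download_only is True, in
    which case a missing download is a fatal error.
    '''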
    # first try to download a pre-generated dist tarball; this is a lot faster,
    # but these tarballs are built for production NPM mode
    source = None
    if os.getenv("NODE_ENV") != "development":
        source = download_dist()
    if not source:
        if not download_only:
            source = build_dist()
        else:
            message("make_dist: Download failed: artifact does not exist")
            sys.exit(1)
    return source


if __name__ == '__main__':
    parser = argparse.ArgumentParser(description="Download or build release tarball")
    parser.add_argument('-d', '--download-only', action='store_true',
                        help="Fail instead of building locally if a download is not available")
    args = parser.parse_args()
    print(make_dist(args.download_only))