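"""Fetch and extract a platform-specific tarball.

Reads a JSON config file (by default, one named after this script with a .json
extension) containing a download URL with a "{platform}" placeholder, downloads
the matching tarball into a cache directory, and extracts it into the given
destination directory.
"""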
import argparse
import json
import logging
import os
import platform
import subprocess


def get_platform():
    """Return the platform tag substituted for {platform} in the config URL."""
    system = platform.system()
    # platform.machine() may report "ARM64" in uppercase on Windows, so normalize case.
    is_arm64 = platform.machine().lower() in ("arm64", "aarch64")
    if system == "Linux":
        # Distinguish glibc-based distros from musl-based ones such as Alpine.
        if platform.libc_ver()[0] == "glibc":
            return "manylinux-aarch64" if is_arm64 else "manylinux-x86_64"
        else:
            return "musllinux-aarch64" if is_arm64 else "musllinux-x86_64"
    elif system == "Darwin":
        return "macos-arm64" if is_arm64 else "macos-x86_64"
    elif system == "Windows":
        return "windows-aarch64" if is_arm64 else "windows-x86_64"
    else:
        return "unknown"


parser = argparse.ArgumentParser(description="Fetch and extract tarballs")
parser.add_argument("destination_dir")
parser.add_argument("--cache-dir", default="tarballs")
parser.add_argument("--config-file", default=os.path.splitext(__file__)[0] + ".json")
args = parser.parse_args()
logging.basicConfig(level=logging.INFO)
with open(args.config_file) as fp:
    config = json.load(fp)
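# The config is expected to contain a "url" key whose value includes a
# "{platform}" placeholder, e.g. (hypothetical URL, not taken from the project):
#   {"url": "https://example.com/downloads/tool-{platform}.tar.gz"}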
# Ensure the destination directory exists.
if not os.path.exists(args.destination_dir):
    logging.info(f"Creating directory {args.destination_dir}")
    os.makedirs(args.destination_dir)
tarball_url = config["url"].replace("{platform}", get_platform())
# Download the tarball into the cache directory unless it is already cached.
tarball_name = tarball_url.split("/")[-1]
tarball_file = os.path.join(args.cache_dir, tarball_name)
if not os.path.exists(tarball_file):
    logging.info(f"Downloading {tarball_url}")
    if not os.path.exists(args.cache_dir):
        os.mkdir(args.cache_dir)
    subprocess.check_call(
        ["curl", "--location", "--output", tarball_file, "--silent", tarball_url]
    )
logging.info(f"Extracting {tarball_name}")
subprocess.check_call(["tar", "-C", args.destination_dir, "-xf", tarball_file])