from __future__ import absolute_import

import argparse
import gzip
import json
import io
import os
from datetime import datetime, timedelta

from six.moves.urllib.request import urlopen

from .vcs import Git
from . import log

here = os.path.dirname(__file__)
wpt_root = os.path.abspath(os.path.join(here, os.pardir, os.pardir))

logger = log.get_logger()


def abs_path(path):
    return os.path.abspath(os.path.expanduser(path))


def should_download(manifest_path, rebuild_time=timedelta(days=5)):
    # Download if the manifest is missing or older than rebuild_time.
    if not os.path.exists(manifest_path):
        return True
    mtime = datetime.fromtimestamp(os.path.getmtime(manifest_path))
    if mtime < datetime.now() - rebuild_time:
        return True
    logger.info("Skipping manifest download because existing file is recent")
    return False


def merge_pr_tags(repo_root, max_count=50):
    git = Git.get_func(repo_root)
    tags = []
    # git log --format=%D prints the refs attached to each of the last
    # max_count commits; collect any tags named merge_pr_*, stripping the
    # leading "tag: " (five characters).
    for line in git("log", "--format=%D", "--max-count=%s" % max_count).split("\n"):
        for ref in line.split(", "):
            if ref.startswith("tag: merge_pr_"):
                tags.append(ref[5:])
    return tags


def github_url(tags):
    for tag in tags:
        url = "https://api.github.com/repos/web-platform-tests/wpt/releases/tags/%s" % tag
        try:
            resp = urlopen(url)
        except Exception:
            logger.warning("Fetching %s failed" % url)
            continue
        if resp.code != 200:
            logger.warning("Fetching %s failed; got HTTP status %d" % (url, resp.code))
            continue
        try:
            # Pass the response object itself so urllib handles any
            # transfer decoding; reading resp.fp directly would bypass it.
            release = json.load(resp)
        except ValueError:
            logger.warning("Response was not valid JSON")
            return None
        for item in release["assets"]:
            # Accept both ways of naming the manifest asset, even though
            # there's no longer a reason to include the commit sha.
            if item["name"].startswith("MANIFEST-") and item["name"].endswith(".json.gz"):
                return item["browser_download_url"]
            elif item["name"] == "MANIFEST.json.gz":
                return item["browser_download_url"]
    return None


def download_manifest(manifest_path, tags_func, url_func, force=False):
    if not force and not should_download(manifest_path):
        return False

    tags = tags_func()
    url = url_func(tags)
    if not url:
        logger.warning("No generated manifest found")
        return False

    logger.info("Downloading manifest from %s" % url)
    try:
        resp = urlopen(url)
    except Exception:
        logger.warning("Downloading pregenerated manifest failed")
        return False
    if resp.code != 200:
        logger.warning("Downloading pregenerated manifest failed; got HTTP status %d" %
                       resp.code)
        return False

    gzf = gzip.GzipFile(fileobj=io.BytesIO(resp.read()))
    try:
        decompressed = gzf.read()
    except IOError:
        logger.warning("Failed to decompress downloaded file")
        return False

    # The decompressed manifest is bytes, so the file must be opened in
    # binary mode ("w" would fail on Python 3).
    try:
        with open(manifest_path, "wb") as f:
            f.write(decompressed)
    except Exception:
        logger.warning("Failed to write manifest")
        return False

    logger.info("Manifest downloaded")
    return True


def create_parser():
    parser = argparse.ArgumentParser()
    parser.add_argument(
        "-p", "--path", type=abs_path, help="Path to manifest file.")
    parser.add_argument(
        "--tests-root", type=abs_path, default=wpt_root, help="Path to root of tests.")
    parser.add_argument(
        "--force", action="store_true",
        help="Always download, even if the existing manifest is recent")
    return parser


def download_from_github(path, tests_root, force=False):
    return download_manifest(path, lambda: merge_pr_tags(tests_root), github_url,
                             force=force)


def run(**kwargs):
    if kwargs["path"] is None:
        path = os.path.join(kwargs["tests_root"], "MANIFEST.json")
    else:
        path = kwargs["path"]
    success = download_from_github(path, kwargs["tests_root"], kwargs["force"])
    return 0 if success else 1
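

# A minimal sketch of a direct entry point, assuming create_parser()'s
# options map straight onto run()'s expected kwargs. In the wpt repo,
# run() is normally invoked through the project's CLI wrapper, so this
# guard is illustrative rather than part of the module's interface.
if __name__ == "__main__":
    import sys
    sys.exit(run(**vars(create_parser().parse_args())))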