#!/usr/bin/env python3
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this file,
# You can obtain one at http://mozilla.org/MPL/2.0/.
import asyncio
import glob
import logging
import os
import sys
import xml.etree.ElementTree as ET
from os import path

import aiohttp

logging.basicConfig(stream=sys.stdout, level=logging.INFO, format="%(message)s")
log = logging.getLogger(__name__)

UV_CACHE_PATH = os.getenv(
    "UV_CACHE_PATH", os.path.join(path.dirname(__file__), "../release/updates/cache/")
)
# Cast to int: os.getenv returns a string whenever the variable is set
UV_PARALLEL_DOWNLOADS = int(os.getenv("UV_PARALLEL_DOWNLOADS", "20"))
FTP_SERVER_TO = os.getenv("ftp_server_to", "http://stage.mozilla.org/pub/mozilla.org")
FTP_SERVER_FROM = os.getenv(
    "ftp_server_from", "http://stage.mozilla.org/pub/mozilla.org"
)
AUS_SERVER = os.getenv("aus_server", "https://aus5.mozilla.org")
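# All of the settings above are plain environment overrides, so a run can be
# tuned without editing this file. A minimal sketch (invocation illustrative,
# script name hypothetical):
#   UV_PARALLEL_DOWNLOADS=40 UV_CACHE_PATH=/tmp/uv-cache python3 async_download.py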


def create_cache():
    if not os.path.isdir(UV_CACHE_PATH):
        os.mkdir(UV_CACHE_PATH)


def remove_cache():
    """
    Remove all files in the cache folder.

    We don't support subfolders or hidden (dot) files.

    By not deleting the cache directory itself, we can use Docker tmpfs mounts,
    which are the only workaround to poor mount r/w performance on macOS.
    Bug reference:
    https://forums.docker.com/t/file-access-in-mounted-volumes-extremely-slow-cpu-bound/8076/288
    """
    files = glob.glob(f"{UV_CACHE_PATH}/*")
    for f in files:
        os.remove(f)


def _cachepath(i, ext):
    # Helper function: given an index and an extension, return a cache file path
    return path.join(UV_CACHE_PATH, f"obj_{i:0>5}.{ext}")
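# Illustrative only: _cachepath(3, "mar.async.cache") returns something like
# "<UV_CACHE_PATH>/obj_00003.mar.async.cache".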


async def fetch_url(url, path, connector):
    """
    Fetch/download a file to a specific path.

    Parameters
    ----------
    url : str
        URL to be fetched
    path : str
        Path to save the binary to

    Returns
    -------
    dict
        Request result. On error, result["error"] is True
    """

    def _result(response, error=False):
        data = {
            "headers": dict(response.headers),
            "status": response.status,
            "reason": response.reason,
            "_request_info": str(response.request_info),
            "url": url,
            "path": path,
            "error": error,
        }
        return data

    # Total request timeout: 15 minutes
    timeout = aiohttp.ClientTimeout(total=900)
    try:
        async with aiohttp.ClientSession(
            connector=connector, connector_owner=False, timeout=timeout
        ) as session:
            log.info(f"Retrieving {url}")
            async with session.get(
                url, headers={"Cache-Control": "max-stale=0"}
            ) as response:
                # Any response code > 299 means something went wrong
                if response.status > 299:
                    log.info(f"Failed to download {url} with status {response.status}")
                    return _result(response, True)
                # Stream the body to disk in chunks instead of buffering the
                # whole file in memory
                with open(path, "wb") as fd:
                    async for chunk in response.content.iter_chunked(64 * 1024):
                        fd.write(chunk)
                result = _result(response)
                log.info(f'Finished downloading {url}\n{result["headers"]}')
                return result
    except (
        UnicodeDecodeError,  # Data parsing
        asyncio.TimeoutError,  # Async timeout
        aiohttp.ClientError,  # aiohttp error
    ) as e:
        log.error("=============")
        log.error(f"Error downloading {url}")
        log.error(e)
        log.error("=============")
        return {"path": path, "url": url, "error": True}


async def download_multi(targets, sourceFunc):
    """
    Download a list of targets.

    Parameters
    ----------
    targets : list
        List of URLs to download
    sourceFunc : str
        Source name, used as the extension of the cached filenames

    Returns
    -------
    list
        List of results (with headers) for the successful downloads
    """
    targets = set(targets)
    amount = len(targets)
    connector = aiohttp.TCPConnector(
        limit=UV_PARALLEL_DOWNLOADS,  # Total simultaneous connections
        ttl_dns_cache=600,  # Cache DNS for 10 mins
    )
    log.info(f"\nDownloading {amount} files ({UV_PARALLEL_DOWNLOADS} async limit)")
    # Transform targets into {url, path} objects
    payloads = [
        {"url": url, "path": _cachepath(i, sourceFunc)}
        for (i, url) in enumerate(targets)
    ]
    fetches = [fetch_url(t["url"], t["path"], connector) for t in payloads]
    downloads = await asyncio.gather(*fetches)
    await connector.close()
    results = []
    for fetch in downloads:
        if fetch["error"]:
            # The download failed: try to remove the partial file, but keep
            # going if it isn't present
            try:
                os.unlink(fetch.get("path", None))
            except (TypeError, FileNotFoundError) as e:
                log.info(f"Unable to clean up error file: {e}; continuing...")
            continue
        results.append(fetch)
    return results


async def download_builds(verifyConfig):
    """
    Given an UpdateVerifyConfig, download and cache all necessary updater files.

    Includes the "to" and "from"/"updater_package" URLs.

    Parameters
    ----------
    verifyConfig : UpdateVerifyConfig
        Chunked config

    Returns
    -------
    list
        List of file paths and URLs for each updater file
    """
    updaterUrls = set()
    for release in verifyConfig.releases:
        ftpServerFrom = release["ftp_server_from"]
        ftpServerTo = release["ftp_server_to"]
        for locale in release["locales"]:
            toUri = verifyConfig.to
            if toUri is not None and ftpServerTo is not None:
                toUri = toUri.replace("%locale%", locale)
                updaterUrls.add(f"{ftpServerTo}{toUri}")
            for reference in ("updater_package", "from"):
                uri = release.get(reference, None)
                if uri is None:
                    continue
                uri = uri.replace("%locale%", locale)
                # The /ja-JP-mac/ locale is replaced with /ja/ for updater packages
                if reference == "updater_package":
                    uri = uri.replace("ja-JP-mac", "ja")
                updaterUrls.add(f"{ftpServerFrom}{uri}")
    log.info(f"About to download {len(updaterUrls)} updater packages")
    updaterResults = await download_multi(list(updaterUrls), "updater.async.cache")
    return updaterResults


def get_mar_urls_from_update(path):
    """
    Given an update.xml file, return the MAR URLs it references.

    If the update.xml has no URLs, returns an empty list.

    Parameters
    ----------
    path : str
        Path to the update.xml file

    Returns
    -------
    list
        List of URLs
    """
    result = []
    root = ET.parse(path).getroot()
    for patch in root.findall("update/patch"):
        url = patch.get("URL")
        if url:
            result.append(url)
    return result
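# For reference, a minimal update.xml that this function would parse looks
# roughly like the following (element attributes illustrative):
#   <updates>
#     <update type="minor" appVersion="..." buildID="...">
#       <patch type="complete" URL="https://.../firefox-complete.mar" .../>
#       <patch type="partial" URL="https://.../firefox-partial.mar" .../>
#     </update>
#   </updates>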


async def download_mars(updatePaths):
    """
    Given a list of update.xml paths, download the MARs referenced by each.

    Parameters
    ----------
    updatePaths : list
        List of paths to update.xml files

    Returns
    -------
    list
        List of file paths and URLs for each MAR file
    """
    patchUrls = set()
    for updatePath in updatePaths:
        for url in get_mar_urls_from_update(updatePath):
            patchUrls.add(url)
    log.info(f"About to download {len(patchUrls)} MAR packages")
    marResults = await download_multi(list(patchUrls), "mar.async.cache")
    return marResults


async def download_update_xml(verifyConfig):
    """
    Given an UpdateVerifyConfig, download and cache all necessary update.xml files.

    Parameters
    ----------
    verifyConfig : UpdateVerifyConfig
        Chunked config

    Returns
    -------
    list
        List of file paths and URLs for each update.xml file
    """
    xmlUrls = set()
    product = verifyConfig.product
    urlTemplate = (
        "{server}/update/3/{product}/{release}/{build}/{platform}/"
        "{locale}/{channel}/default/default/default/update.xml?force=1"
    )
    for release in verifyConfig.releases:
        for locale in release["locales"]:
            xmlUrls.add(
                urlTemplate.format(
                    server=AUS_SERVER,
                    product=product,
                    release=release["release"],
                    build=release["build_id"],
                    platform=release["platform"],
                    locale=locale,
                    channel=verifyConfig.channel,
                )
            )
    log.info(f"About to download {len(xmlUrls)} update.xml files")
    xmlResults = await download_multi(list(xmlUrls), "update.xml.async.cache")
    return xmlResults
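# As an illustration, the template above expands to an AUS URL of this shape
# (version, build ID, and platform are hypothetical values):
#   https://aus5.mozilla.org/update/3/Firefox/99.0/20990101000000/
#   Linux_x86_64-gcc3/en-US/release/default/default/default/update.xml?force=1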


async def _download_from_config(verifyConfig):
    """
    Given an UpdateVerifyConfig object, download all necessary files to the cache.

    Parameters
    ----------
    verifyConfig : UpdateVerifyConfig
        The config - already chunked
    """
    remove_cache()
    create_cache()

    downloadList = []
    ##################
    # Download files #
    ##################
    xmlFiles = await download_update_xml(verifyConfig)
    downloadList.extend(xmlFiles)
    downloadList += await download_mars(x["path"] for x in xmlFiles)
    downloadList += await download_builds(verifyConfig)

    ####################
    # Create urls.list #
    ####################
    cacheLinks = []
    # Rename the files and record their URLs, so that line N of urls.list
    # corresponds to the cache file obj_0000N.cache
    for download in downloadList:
        cacheLinks.append(download["url"])
        fileIndex = len(cacheLinks)
        os.rename(download["path"], _cachepath(fileIndex, "cache"))

    cacheIndexPath = path.join(UV_CACHE_PATH, "urls.list")
    with open(cacheIndexPath, "w") as cache:
        cache.writelines(f"{link}\n" for link in cacheLinks)

    # Log the cache index
    log.info("Cache index urls.list contents:")
    with open(cacheIndexPath) as cache:
        for ln, url in enumerate(cache, start=1):
            log.info(f"Line {ln}: {url.rstrip()}")


def download_from_config(verifyConfig):
    """
    Given an UpdateVerifyConfig object, download all necessary files to the cache.

    (Sync wrapper around the async implementation.)

    Parameters
    ----------
    verifyConfig : UpdateVerifyConfig
        The config - already chunked
    """
    return asyncio.run(_download_from_config(verifyConfig))
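# Minimal usage sketch. This assumes the caller constructs an
# UpdateVerifyConfig (from the mozrelease update-verify tooling) and loads an
# already-chunked config; the import path and config filename are assumptions:
#
#   from mozrelease.update_verify import UpdateVerifyConfig
#
#   cfg = UpdateVerifyConfig()
#   cfg.read("update-verify.cfg")  # hypothetical config file
#   download_from_config(cfg)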