#!/usr/bin/env python3
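"""Generate the OmniSharp-Roslyn download configuration.

For each supported platform, download the OmniSharp archive for the requested
version (caching it when --cache-dir is given), compute its sha256 checksum
and print the resulting configuration as a Python dict literal.

Example invocation (the script path and version string are only illustrative):
  python3 <this-script> v1.37.6 --cache-dir /tmp/omnisharp-cache
"""
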
import argparse
import contextlib
from os import mkdir
import os.path as p
import shutil
import tempfile
import hashlib
import urllib.error
import urllib.request
DIR_OF_THIS_SCRIPT = p.dirname( p.abspath( __file__ ) )
DIR_OF_THIRD_PARTY = p.join( DIR_OF_THIS_SCRIPT, 'third_party' )
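
# Release archives are served from GitHub releases; prerelease/CI builds
# (version strings containing a '-') are fetched from the OmniSharp blob
# storage feed instead (see GetDownloadUrl below).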
URL_FORMAT = {
  'release': ( "https://github.com/OmniSharp/omnisharp-roslyn/"
               "releases/download/{version}/{file_name}" ),
  'ci': ( "https://roslynomnisharp.blob.core.windows.net/"
          "releases/{version}/{file_name}" ),
}

FILE_NAME = {
  'win32': 'omnisharp.http-win-x86.zip',
  'win64': 'omnisharp.http-win-x64.zip',
  'macos': 'omnisharp.http-osx.tar.gz',
  'linux32': 'omnisharp.http-linux-x86.tar.gz',
  'linux64': 'omnisharp.http-linux-x64.tar.gz',
}


@contextlib.contextmanager
def TemporaryDirectory():
  temp_dir = tempfile.mkdtemp()
  try:
    yield temp_dir
  finally:
    shutil.rmtree( temp_dir )


def Download( url ):
  print( 'Downloading {}'.format( url.rsplit( '/', 1 )[ -1 ] ) )
  with urllib.request.urlopen( url ) as response:
    return response.read()


def ParseArguments():
  parser = argparse.ArgumentParser()
  parser.add_argument( 'version', action='store',
                       help = 'The Omnisharp version' )
  parser.add_argument( '--cache-dir', action='store',
                       help = 'For testing, directory to cache packages.' )
  args = parser.parse_args()
  return args


def GetDownloadUrl( version, file_name ):
  download_url_key = 'ci' if "-" in version else 'release'
  return URL_FORMAT[ download_url_key ].format( version = version,
                                                file_name = file_name )


def FetchAndHash( download_url, output_dir, file_name ):
  """Download the archive into output_dir (unless it is already cached there)
  and return its sha256 hex digest, or None if the download 404s."""
  try:
    archive = p.join( output_dir, file_name )
    if not p.exists( archive ):
      compressed_data = Download( download_url )
      with open( archive, 'wb' ) as f:
        f.write( compressed_data )
  except urllib.error.HTTPError as error:
    # HTTPError.code is available on every Python 3 version, unlike .status.
    if error.code != 404:
      raise
    print( 'Cannot download {}'.format( file_name ) )
    return None

  with open( archive, 'rb' ) as f:
    return hashlib.sha256( f.read() ).hexdigest()


def Process( output_dir, version ):
  result = {}
  for os_name, file_name in FILE_NAME.items():
    download_url = GetDownloadUrl( version, file_name )
    result[ os_name ] = {
      'version': version,
      'download_url': download_url,
      'file_name': file_name,
      'check_sum': FetchAndHash( download_url, output_dir, file_name )
    }
  return result


def MkDirIfMissing( directory ):
  try:
    mkdir( directory )
  except OSError:
    pass


def Main():
  args = ParseArguments()
  version = args.version
  if args.cache_dir:
    MkDirIfMissing( args.cache_dir )
    cache_dir = p.join( args.cache_dir, version )
    MkDirIfMissing( cache_dir )
    output = Process( cache_dir, version )
  else:
    with TemporaryDirectory() as temp_dir:
      output = Process( temp_dir, version )

  # Print the configuration as a Python dict literal, wrapping any entry that
  # would exceed 80 columns onto implicitly-concatenated string continuation
  # lines inside parentheses.
  print( "Omnisharp configuration for {} is:".format( version ) )
  for os_name, os_data in output.items():
    print( " {}: {{".format( repr( os_name ) ) )
    for key, value in os_data.items():
      line = " {}: {},".format( repr( key ), repr( value ) )
      if len( line ) > 80:
        line = " {}: ( {} ),".format( repr( key ), repr( value ) )
        format_index = line.index( '(' ) + 2
        while len( line ) > 80:
          print( line[ 0:78 ] + "'" )
          line = ( ' ' * format_index ) + "'" + line[ 78: ]
      print( line )
    print( " }," )


if __name__ == "__main__":
  Main()