--- a/makeflow/src/makeflow_archive_query
+++ b/makeflow/src/makeflow_archive_query
@@ -88,23 +88,23 @@
self.batch_job_info['exit_signal'] = int(f.readline().rstrip())
def print_immediate_inputs(self):
for f in self.input_files:
- print f.file_path
- print ''
+ print(f.file_path)
+ print('')
def print_all_inputs(self):
queue= [(self, 0)]
while queue:
job, distance = queue.pop()
if job.input_files:
- print distance
+ print(distance)
job.print_immediate_inputs()
for child in job.ancestors:
queue.append((child, distance + 1))
def print_immediate_outputs(self):
for f in self.output_files:
- print f.file_path
- print ''
+ print(f.file_path)
+ print('')
def print_all_outputs(self):
visted = {}
@@ -113,7 +113,7 @@
while queue:
job, distance = queue.pop()
if job.output_files:
- print distance
+ print(distance)
job.print_immediate_outputs()
for child in job.descendants:
if child.archived_path not in visted:
@@ -121,11 +121,11 @@
visted[child.archived_path] = 1
def print_job(self):
- print "file: %s" %(self.local_path)
- print "Created by job archived at path: %s" %(self.archived_path)
- print "Command used to create this file: %s" %(self.wrapped_command)
- print "makeflow file archived at path: %s" %(self.source_makeflow_path)
- print "Inputs:"
+ print("file: %s" %(self.local_path))
+ print("Created by job archived at path: %s" %(self.archived_path))
+ print("Command used to create this file: %s" %(self.wrapped_command))
+ print("makeflow file archived at path: %s" %(self.source_makeflow_path))
+ print("Inputs:")
self.print_immediate_inputs()
def get_makeflow_path(self):
@@ -135,7 +135,7 @@
self.source_makeflow_path = os.path.join(job.archived_path, "source_makeflow")
def usage():
- print """usage: makeflow_recover [options] <file>
+ print("""usage: makeflow_recover [options] <file>
options:
--info print out basic info about the specified file and the associated job
-i, --inputs list immediate input files required to create file
@@ -145,7 +145,7 @@
--outputs-all list both sibling output files and all other files that relied directly or indirectly on the specified file
--path=<path_to_archive> path to search for the makeflow archive (use if when preserving the makeflow you specified a archive path)
- """
+ """)
sys.exit(1)
def parse_args():
@@ -179,7 +179,7 @@
except IndexError:
usage()
if not arg_map['file'] or not os.path.isfile(arg_map['file']):
- print "Cannot find file %s" %(arg_map['file'])
+ print("Cannot find file %s" %(arg_map['file']))
usage()
return arg_map
@@ -201,18 +201,18 @@
resolved_job_path = os.path.realpath(file_path)
job = SimpleDagJob(resolved_job_path, file_name)
if arguments['inputs']:
- print "Inputs"
+ print("Inputs")
job.print_immediate_inputs()
if arguments['outputs']:
- print "Outputs"
+ print("Outputs")
job.print_immediate_outputs()
if arguments['inputs-all']:
- print "Inputs-all"
+ print("Inputs-all")
job.print_all_inputs()
if arguments['outputs-all']:
- print "Outputs-all"
+ print("Outputs-all")
job.print_all_outputs()
if arguments['info']:
job.print_job()
else:
- print "File has not been archived"
+ print("File has not been archived")
--- a/makeflow/src/makeflow_linker_python_driver
+++ b/makeflow/src/makeflow_linker_python_driver
@@ -1,4 +1,4 @@
-#!/usr/bin/env python
+#!/usr/bin/python3
# Copyright (C) 2022 The University of Notre Dame
# This software is distributed under the GNU General Public License.
--- a/makeflow/src/makeflow_monitor
+++ b/makeflow/src/makeflow_monitor
@@ -1,4 +1,4 @@
-#!/usr/bin/env python
+#!/usr/bin/python3
# Copyright (c) 2010- The University of Notre Dame.
# This software is distributed under the GNU General Public License.
@@ -269,7 +269,8 @@
tlist.append(ctime)
break
- return ':'.join(reversed(list(map(lambda d: '%02d' % d, tlist))))
+ return ':'.join(reversed(['%02d' % d for d in tlist]))
+
# Main Execution
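The makeflow_monitor hunk above replaces a map(lambda ...) expression with a list comprehension. For reference, not part of the patch: in Python 3, map() returns a lazy iterator instead of a list, so a comprehension is the simpler way to build the formatted pieces. A sketch with made-up values:

    tlist = [3, 59, 12]
    parts = ['%02d' % d for d in tlist]   # ['03', '59', '12'] on both Python 2 and 3
    ':'.join(reversed(parts))             # '12:59:03'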
--- a/makeflow/src/starch
+++ b/makeflow/src/starch
@@ -1,4 +1,4 @@
-#!/usr/bin/env python
+#!/usr/bin/python3
# Copyright (c) 2010- The University of Notre Dame.
# This software is distributed under the GNU General Public License.
@@ -18,15 +18,9 @@
from tempfile import NamedTemporaryFile
from shutil import copyfile, copyfileobj
-if sys.version_info[0:2] < (2, 6):
- from cStringIO import StringIO as BytesIO
-else:
- from io import BytesIO
-
-if sys.version_info[0:2] < (3, 0):
- from ConfigParser import ConfigParser, NoSectionError, NoOptionError
-else:
- from configparser import ConfigParser, NoSectionError, NoOptionError
+from io import BytesIO
+
+from configparser import ConfigParser, NoSectionError, NoOptionError
# Todo
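The starch hunk above drops the version-conditional imports, since the script now targets Python 3 only. For reference, not part of the patch, the Python 3 module names used here are:

    from io import BytesIO                                                   # replaces Python 2's cStringIO.StringIO for byte buffers
    from configparser import ConfigParser, NoSectionError, NoOptionError     # Python 2's ConfigParser module was renamed

which makes the sys.version_info branches unnecessary.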
--- a/work_queue/src/work_queue_graph_log
+++ b/work_queue/src/work_queue_graph_log
@@ -1,4 +1,4 @@
-#!/usr/bin/env python
+#!/usr/bin/python3
# Copyright (C) 2022 The University of Notre Dame
# This software is distributed under the GNU General Public License.
@@ -76,7 +76,7 @@
prev_line = record
delta = {}
- for key in record.keys():
+ for key in list(record.keys()):
delta[key] = record[key] - prev_line[key];
total_time = reduce( lambda x,y: x+y, [ delta[m] for m in ['time_status_msgs', 'time_internal', 'time_polling', 'time_send', 'time_receive', 'time_application']])
@@ -119,7 +119,7 @@
def sort_time(log_entries):
times = []
- for k in log_entries.keys():
+ for k in list(log_entries.keys()):
times.append(k)
times.sort()
return times
@@ -157,7 +157,7 @@
else:
pout(file, "set key left top\n")
- intervals = [len(log_entries.keys())/x for x in [19,17,13,11,7,5,3]]
+ intervals = [len(log_entries)//x for x in [19,17,13,11,7,5,3]]
pout(file, """
set style line 1 pt 5 lc rgb '#1b9e77' pointinterval {0}
@@ -376,7 +376,7 @@
fields = ['stack_time_other', 'stack_time_application', 'stack_time_receive', 'stack_time_send', 'stack_time_polling', 'stack_time_internal', 'stack_time_status_msgs'],
labels = ['other', 'application', 'receive', 'send', 'polling', 'internal', 'status msgs'])
- for name in plots.keys():
+ for name in list(plots.keys()):
plots[name].plot(prefix + '.' + name + '.' + extension)
except IOError:
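The work_queue_graph_log changes wrap dict.keys() in list(). For reference, not part of the patch: in Python 3, keys() returns a view object rather than a list, so it cannot be sorted in place, indexed, or safely iterated while the dict is being mutated. A sketch with hypothetical data:

    delta = {'time_send': 5, 'time_receive': 7}
    ks = delta.keys()          # Python 3: dict_keys view; has no .sort() and no indexing
    ks = list(delta.keys())    # concrete list; safe to sort, index, or iterate while mutating
    n = len(delta)             # number of keys, without materializing a list at all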
--- a/makeflow/src/makeflow_ec2_estimate
+++ b/makeflow/src/makeflow_ec2_estimate
@@ -516,7 +516,7 @@
print("okay")
else:
print("failed")
- print("{}: The \"aws\" command must be in your path to use this script.").format(sys.argv[0])
+ print(("{}: The \"aws\" command must be in your path to use this script.").format(sys.argv[0]))
exit(1)
sys.stdout.write("Checking for aws configuration...")
@@ -524,7 +524,7 @@
print("okay")
else:
print("failed")
- print("{}: You must run \"aws configure\" before using this script.").format(sys.argv[0])
+ print(("{}: You must run \"aws configure\" before using this script.").format(sys.argv[0]))
# os.echo()
exit(1)
@@ -533,7 +533,7 @@
print("okay\n\n")
else:
print("failed")
- print("{}: Your Amazon credentials are not set up correctly. Try \"aws ec2 describe-instances\" to troubleshoot.").format(sys.argv[0])
+ print(("{}: Your Amazon credentials are not set up correctly. Try \"aws ec2 describe-instances\" to troubleshoot.").format(sys.argv[0]))
exit(1)
if args.config_file is not None:
# run instance
@@ -572,7 +572,7 @@
elif app_num == 2:
print('We took the approach #2 where all vms are reused')
elif app_num == 3:
- print('We took the approach #3 where suitable(utilized percentage >= {}) vms are reused'.format(UTIL_PERCENTAGE))
+ print('We took the approach #3 where suitable(utilized percentage >= {}) vms are reused'.format(UTIL_PERCENTAGE))
print('Workflow executed for {} seconds'.format(time))
print('AWS total cost is {} dollars\n'.format(my_makeflow_aws_module.aws_config.total_price))
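The makeflow_ec2_estimate hunks above correct a conversion slip rather than a syntax change: print() returns None, so chaining .format() onto the call fails at runtime. A minimal sketch, with a hypothetical prog value, not part of the patch:

    prog = "makeflow_ec2_estimate"
    print("{}: example message").format(prog)    # AttributeError: 'NoneType' object has no attribute 'format'
    print("{}: example message".format(prog))    # format the string first, then print it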