From: Bas Couwenberg <sebastic@debian.org>
Date: Tue, 6 Aug 2019 08:25:58 +0000
Subject: Fix support for Python 3.

Port the Python scripts from Python 2 to Python 3: convert print
statements to print() calls, replace dict.iteritems() with items(),
wrap keys() in list() where the result is indexed or mutated, call
next(iterator) instead of iterator.next(), use str instead of
basestring, and make intra-package imports explicit relative imports.

Forwarded: https://github.com/TUDelftGeodesy/Doris/issues/8
---
 bin/csk_dump_data.py                               | 44 +++++-----
 bin/csk_dump_header2doris.py                       | 18 ++--
 bin/hhmmss2sec.py                                  | 22 ++---
 bin/rs2_dump_data.py                               | 52 ++++++------
 bin/rs2_dump_header2doris.py                       | 20 ++---
 bin/sec2hhmmss.py                                  | 16 ++--
 bin/tsx_dump_data.py                               | 30 +++----
 bin/tsx_dump_header2doris.py                       |  8 +-
 bin/tsx_dump_header2doris_noxpath.py               | 14 ++--
 doris_stack/functions/ESD_functions.py             |  4 +-
 doris_stack/functions/burst_metadata.py            |  4 +-
 doris_stack/functions/concatenate_decatenate.py    |  4 +-
 doris_stack/functions/correct_ESD.py               |  2 +-
 doris_stack/functions/create_image.py              |  1 +
 doris_stack/functions/do_deramp_SLC.py             | 34 ++++----
 doris_stack/functions/do_reramp_SLC.py             | 30 +++----
 doris_stack/functions/get_ramp.py                  | 28 +++----
 doris_stack/functions/get_winpos.py                | 50 +++++------
 doris_stack/functions/load_shape_unzip.py          |  2 +-
 doris_stack/functions/orbit_coordinates.py         |  4 +-
 doris_stack/functions/precise_read.py              |  4 +-
 doris_stack/functions/resdata.py                   | 20 ++---
 .../functions/sentinel_dump_data_function.py       |  6 +-
 doris_stack/functions/stack_cleanup.py             | 18 ++--
 doris_stack/functions/xml_query.py                 |  2 +-
 doris_stack/main_code/doris_parameters.py          | 24 +++---
 doris_stack/main_code/doris_parameters_path.py     |  4 +-
 doris_stack/main_code/doris_sentinel_1.py          |  6 +-
 doris_stack/main_code/dorisparameters.py           | 20 ++---
 doris_stack/main_code/jobs.py                      |  4 +-
 doris_stack/main_code/resdata.py                   | 24 +++---
 doris_stack/main_code/single_master_stack.py       | 98 +++++++++++-----------
 doris_stack/main_code/stack.py                     | 14 ++--
 doris_stack/main_code/swath.py                     |  4 +-
 install/init_cfg.py                                | 14 ++--
 prepare_stack/create_datastack_bash.py             |  6 +-
 prepare_stack/create_dem.py                        | 18 ++--
 prepare_stack/create_doris_input_xml.py            | 28 +++----
 prepare_stack/download_sentinel_data_orbits.py     | 48 +++++------
 prepare_stack/prepare_datastack.py                 |  8 +-
 prepare_stack/prepare_datastack_main.py            |  2 +-
 41 files changed, 380 insertions(+), 379 deletions(-)
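
The diff below repeats a small set of Python 2 to 3 conversions; for
reference, here is a minimal, self-contained sketch of those idioms
(the names and values in it are illustrative stand-ins, not taken from
the patched files):

#!/usr/bin/env python3
# Sketch of the Python 2 -> 3 idioms applied throughout this patch.
# The objects below are illustrative stand-ins, not Doris code.
import os

code_revision = 1.0

# print statement  ->  print() function
print('INFO    : revision %s' % code_revision)

# dict.iteritems() ->  dict.items(); wrap keys() in list() before
# indexing or mutating, because Python 3 returns a view object.
params = {'mission': 'RS2', 'sceneBeam': 'S3'}
for key, value in params.items():
    print(key, value)
first_key = list(params.keys())[0]

# iterator.next()  ->  next(iterator)
root, dirs, files = next(os.walk('.'))

# basestring       ->  str
print(isinstance(first_key, str))

# Implicit intra-package imports become explicit relative imports,
# e.g. "import resdata" -> "from . import resdata".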

diff --git a/bin/csk_dump_data.py b/bin/csk_dump_data.py
index ba54ada..f201650 100755
--- a/bin/csk_dump_data.py
+++ b/bin/csk_dump_data.py
@@ -15,26 +15,26 @@ from array import array
 codeRevision=1.0   # this code revision number
 
 def usage():
-    print 'INFO    : @(#)Doris InSAR software, $Revision: %s $, $Author: TUDelft $' % codeRevision
-    print
-    print 'Usage   : csk_dump_data.py <inputfile> <outputfile> [l0 lN p0 pN] [-res RESFILE]'
-    print
-    print '          inputfile         is the input Cosmo-skymed HDF5 filename : master.hd5'
-    print '          outputfile        is the output filename                  : master.slc'
-    print '          l0                is the first azimuth line (starting at 1)'
-    print '          lN                is the last azimuth line'
-    print '          p0                is the first range pixel (starting at 1)'
-    print '          pN                is the last range pixel'
-    print '          RESFILE           DORIS result file that is to be updated for crop metadata (optional)'
-    print
-    print '          This software is part of Doris InSAR software package.\n'
-    print '(c) 1999-2010 Delft University of Technology, the Netherlands.\n'
+    print('INFO    : @(#)Doris InSAR software, $Revision: %s $, $Author: TUDelft $' % codeRevision)
+    print('')
+    print('Usage   : csk_dump_data.py <inputfile> <outputfile> [l0 lN p0 pN] [-res RESFILE]')
+    print()
+    print('          inputfile         is the input Cosmo-skymed HDF5 filename : master.hd5')
+    print('          outputfile        is the output filename                  : master.slc')
+    print('          l0                is the first azimuth line (starting at 1)')
+    print('          lN                is the last azimuth line')
+    print('          p0                is the first range pixel (starting at 1)')
+    print('          pN                is the last range pixel')
+    print('          RESFILE           DORIS result file that is to be updated for crop metadata (optional)')
+    print()
+    print('          This software is part of Doris InSAR software package.\n')
+    print('(c) 1999-2010 Delft University of Technology, the Netherlands.\n')
 
 try:
     inputFileName  = sys.argv[1]
     outputFileName = sys.argv[2]
 except:
-    print '\nError   : Unrecognized input or missing arguments\n\n'
+    print('\nError   : Unrecognized input or missing arguments\n\n')
     usage()
     sys.exit(1)
 
@@ -42,7 +42,7 @@ except:
 f = h5py.File(inputFileName, 'r')
 sbi = f.get('/S01/SBI')
 if f.parent.__contains__('/') == False or f.parent.__contains__('/S01') == False or f.parent.__contains__('/S01/SBI') == False :
-   print 'ERROR: Wrong HDF5 format!'
+   print('ERROR: Wrong HDF5 format!')
 data = array('h')
 data = sbi[:,:,:]
 Number_of_lines_original = sbi.shape[0]
@@ -69,7 +69,7 @@ elif len(sys.argv) > 3 and len(sys.argv) < 9:
     outputWinFirstPix  = int(sys.argv[5])-1    # gdal srcwin starting at 0
     outputWinLastPix   = int(sys.argv[6])      # Lastpix  --> yoff  (later)
 elif len(sys.argv) > 3 and len(sys.argv) < 7:
-    print 'Unrecognized input'
+    print('Unrecognized input')
     usage()
     sys.exit(1)
 else:
@@ -86,13 +86,13 @@ else:
 #    outputWinLastLine  = Number_of_lines_original-1
 #    outputWinFirstPix = 0
 #    outputWinLastPix  = Number_of_pixels_original-1
-#    print 'crop parameters not provided,so cropping the whole image'
+#    print('crop parameters not provided,so cropping the whole image')
 if outputWinFirstLine == None or outputWinLastLine == None or outputWinFirstPix == None or outputWinLastPix == None :
-    print '%s: running failed: crop size unknown !' % (sys.argv[0])
+    print('%s: running failed: crop size unknown !' % (sys.argv[0]))
     sys.exit(1)
 
 if outputWinLastLine-outputWinFirstLine+1 <0  or outputWinLastPix-outputWinFirstPix+1 <=0 :
-    print '%s running failed: crop dimensions are not invalid !' % (sys.argv[0])
+    print('%s running failed: crop dimensions are not valid !' % (sys.argv[0]))
     sys.exit(1)
 
 # compute crop dimensions
@@ -122,7 +122,7 @@ for n in range(outputWinFirstLine, outputWinLastLine):
        sys.stdout.write('%s...'%(temp*10))
        temp = temp+1      
     # read the hdf5 data and write to file
-    data_line = data[n,range(outputWinFirstPix,outputWinLastPix),:]
+    data_line = data[n,list(range(outputWinFirstPix,outputWinLastPix)),:]
     data_line.tofile(fid)
     data_line = None    
 sys.stdout.write("100% - done.\n")
@@ -142,7 +142,7 @@ headerFileStream.write('END\n')
 
 # check whether the resfile exist!!!
 if resFile is not None:
-    print resFile
+    print(resFile)
     # load header
     headerFileStream = open(os.path.splitext(outputFileName)[0]+'.hdr','r')
     for line in headerFileStream:
diff --git a/bin/csk_dump_header2doris.py b/bin/csk_dump_header2doris.py
index 3bae4f1..a0136ef 100755
--- a/bin/csk_dump_header2doris.py
+++ b/bin/csk_dump_header2doris.py
@@ -14,18 +14,18 @@ import numpy, h5py, sys, math, time, string # numpy and h5py  required for HDF5
 codeRevision=1.0   # this code revision number
 
 def usage():
-    print 'INFO    : @(#)Doris InSAR software, $Revision: %s $, $Author: TUDelft $' % codeRevision
-    print
-    print 'Usage   : python csk_dump_header2doris.py csk_HDF5_product > OutputFileName'
-    print '                               where csk_HDF5_product is the input filename'
-    print
-    print '          This software is part of Doris InSAR software package.\n'
-    print '(c) 1999-2010 Delft University of Technology, the Netherlands.\n'
+    print('INFO    : @(#)Doris InSAR software, $Revision: %s $, $Author: TUDelft $' % codeRevision)
+    print()
+    print('Usage   : python csk_dump_header2doris.py csk_HDF5_product > OutputFileName')
+    print('                               where csk_HDF5_product is the input filename')
+    print()
+    print('          This software is part of Doris InSAR software package.\n')
+    print('(c) 1999-2010 Delft University of Technology, the Netherlands.\n')
 
 try:
   inputFileName  = sys.argv[1]
 except:
-    print '\nError   : Unrecognized input or missing arguments\n\n'
+    print('\nError   : Unrecognized input or missing arguments\n\n')
     usage()
     sys.exit(1)
 
@@ -38,7 +38,7 @@ qlk = s01.get('QLK')
 
 
 if f.parent.__contains__('/') == False or f.parent.__contains__('/S01') == False or f.parent.__contains__('/S01/SBI') == False or f.parent.__contains__('/S01/B001') == False or f.parent.__contains__('/S01/QLK') == False :
-   print 'ERROR: Wrong HDF5 format!'
+   print('ERROR: Wrong HDF5 format!')
 
 # reading the attributes  
 VolumeFile = f.attrs.__getitem__('Product Filename')
diff --git a/bin/hhmmss2sec.py b/bin/hhmmss2sec.py
index ad8665b..952f397 100755
--- a/bin/hhmmss2sec.py
+++ b/bin/hhmmss2sec.py
@@ -2,20 +2,20 @@
 import os,sys,time
 
 def usage():
-    print '\nUsage: python hhmmss2sec.py time'
-    print '  where time in the form HH:MM:SS.sss .'
-    print ' '
-    print ' Example '
-    print ' ./hhmmss2sec.py 16:36:40.393 '
-    print '    59800.393000 '
-    print ' '
-    print ' See Also'
-    print ' sec2hhmmss.py'
+    print('\nUsage: python hhmmss2sec.py time')
+    print('  where time in the form HH:MM:SS.sss .')
+    print(' ')
+    print(' Example ')
+    print(' ./hhmmss2sec.py 16:36:40.393 ')
+    print('    59800.393000 ')
+    print(' ')
+    print(' See Also')
+    print(' sec2hhmmss.py')
     
 try:
     timeOfDay  = sys.argv[1] 
 except:
-    print 'Unrecognized input'
+    print('Unrecognized input')
     usage()
     sys.exit(1)
 
@@ -25,4 +25,4 @@ mm  = float(timeOfDay[1]);
 ss  = float(timeOfDay[2]);
 
 secOfDay=hh*3600+mm*60+ss
-print "%f" %(secOfDay)
+print( "%f" %(secOfDay))
diff --git a/bin/rs2_dump_data.py b/bin/rs2_dump_data.py
index e8203b2..ef89abe 100755
--- a/bin/rs2_dump_data.py
+++ b/bin/rs2_dump_data.py
@@ -18,26 +18,26 @@ import xml.etree.ElementTree as etree  # parameters required for cropping and fl
 
 codeRevision=1.1   # this code revision number
 def usage():
-    print 'INFO    : @(#)Doris InSAR software, $Revision: %s $, $Author: TUDelft $' % codeRevision
-    print
-    print 'Usage   : rs2_dump_data.py <inputfile> <outputfile> [l0 lN p0 pN] [-res RESFILE]'
-    print 
-    print '          inputfile         is the input Radarsat-2 geotiff filename : master.tif'
-    print '          outputfile        is the output filename                   : master.slc'
-    print '          l0                is the first azimuth line (starting at 1)'
-    print '          lN                is the last azimuth line'
-    print '          p0                is the first range pixel (starting at 1)'
-    print '          pN                is the last range pixel'
-    print '          RESFILE           DORIS result file that is to be updated for crop metadata (optional)'
-    print
-    print '          This software is part of Doris InSAR software package.\n'
-    print '(c) 1999-2010 Delft University of Technology, the Netherlands.\n'
+    print('INFO    : @(#)Doris InSAR software, $Revision: %s $, $Author: TUDelft $' % codeRevision)
+    print()
+    print('Usage   : rs2_dump_data.py <inputfile> <outputfile> [l0 lN p0 pN] [-res RESFILE]')
+    print()
+    print('          inputfile         is the input Radarsat-2 geotiff filename : master.tif')
+    print('          outputfile        is the output filename                   : master.slc')
+    print('          l0                is the first azimuth line (starting at 1)')
+    print('          lN                is the last azimuth line')
+    print('          p0                is the first range pixel (starting at 1)')
+    print('          pN                is the last range pixel')
+    print('          RESFILE           DORIS result file that is to be updated for crop metadata (optional)')
+    print()
+    print('          This software is part of Doris InSAR software package.\n')
+    print('(c) 1999-2010 Delft University of Technology, the Netherlands.\n')
 
 try:
     inputFileName  = sys.argv[1] 
     outputFileName = sys.argv[2]
 except:
-    print '\nError   : Unrecognized input or missing arguments\n\n'
+    print('\nError   : Unrecognized input or missing arguments\n\n')
     usage()
     sys.exit(1)
 
@@ -49,7 +49,7 @@ for element in range(len(sys.argv)):
     option = sys.argv[element];
     if option == '-res':
         resFile = str(sys.argv[element+1])
-#        print resFile
+#        print(resFile)
         del sys.argv[element+1]
         del sys.argv[element]
         break
@@ -66,7 +66,7 @@ elif len(sys.argv) > 3 and len(sys.argv) < 9:
     outputWinFirstPix  = int(sys.argv[5])    # Firstpix
     outputWinLastPix   = int(sys.argv[6])    # Lastpix
 elif len(sys.argv) > 3 and len(sys.argv) < 7:
-    print '\nError   : Unrecognized input or missing arguments\n\n'
+    print('\nError   : Unrecognized input or missing arguments\n\n')
     usage()
     sys.exit(1)
 else:
@@ -98,8 +98,8 @@ else:
     print('INFO     : Adjusting first line and last line ') 
     outputWinFirstLine = Nlines-in_LastLine+1
     outputWinLastLine  = Nlines-in_FirstLine+1
-    print Nlines, Npixls
-    print outputWinFirstLine,outputWinLastLine,outputWinFirstPix,outputWinLastPix
+    print(Nlines, Npixls)
+    print(outputWinFirstLine,outputWinLastLine,outputWinFirstPix,outputWinLastPix)
 
 if pixl_order == 'Increasing':
     print('INFO     : Detected a imagery %s pixel time order' % pixl_order) 
@@ -108,8 +108,8 @@ else:
     print('INFO     : Adjusting first pixel and last pixel ') 
     outputWinFirstPix = Npixls-in_LastPix+1
     outputWinLastPix  = Npixls-in_FirstPix+1
-    print Nlines, Npixls
-    print outputWinFirstLine,outputWinLastLine,outputWinFirstPix,outputWinLastPix
+    print(Nlines, Npixls)
+    print(outputWinFirstLine,outputWinLastLine,outputWinFirstPix,outputWinLastPix)
 
 
 # GDAL Extract image matrix using gdal_translate
@@ -129,11 +129,11 @@ if outputWinFirstPix is not None:
     #                                         xoff                  yoff                  xsize = width                     ysize= height
     #                                        1 --> 0               1 --> 0
     cmd = cmd + (' -srcwin %s %s %s %s' % (outputWinFirstPix-1,outputWinFirstLine-1,outputWinLastPix-outputWinFirstPix+1,outputWinLastLine-outputWinFirstLine+1))
-    print cmd 
+    print(cmd)
 
 failure = os.system(cmd)
 if failure:
-    print '%s: running %s failed' % (sys.argv[0],cmd)
+    print('%s: running %s failed' % (sys.argv[0],cmd))
     sys.exit(1)
 #else:
 #    os.rename(os.path.splitext(outputFileName)[0]+'.j00',outputFileName)
@@ -161,7 +161,7 @@ else:
 
 failure = os.system(cmd)
 if failure:
-    print '%s: running %s failed' % (sys.argv[0],cmd)
+    print('%s: running %s failed' % (sys.argv[0],cmd))
     sys.exit(1)
 else:
     os.remove(outputFileName+'.noflip')
@@ -172,7 +172,7 @@ else:
 # check whether the file exist!!!
 if resFile is not None:
 
-    print resFile
+    print(resFile)
 
     # load header
     #headerFileStream = open(os.path.splitext(outputFileName)[0]+'.hdr','r')
@@ -181,7 +181,7 @@ if resFile is not None:
         pair = line.split()
         if len(pair) > 1:
             vars()[pair[0]] = pair[2]   # set IMAGE_LINES and LINE_SAMPLES
-#            print vars()[pair[0]]
+#            print(vars()[pair[0]])
 
     # check whether the file exist
     outStream = open(resFile,'a')
diff --git a/bin/rs2_dump_header2doris.py b/bin/rs2_dump_header2doris.py
index 914a3a3..535d5d5 100755
--- a/bin/rs2_dump_header2doris.py
+++ b/bin/rs2_dump_header2doris.py
@@ -30,20 +30,20 @@ from datetime import datetime
 codeRevision=1.2   # this code revision number
 
 def usage():
-    print 'INFO    : @(#)Doris InSAR software, $Revision: %s $, $Author: TUDelft $' % codeRevision
-    print
-    print 'Usage   : python rs2_dump_header2doris.py rs2_XML_product > outputfile'
-    print '                           where rs2_XML_product is the input filename'
-    print
-    print '          This software is part of Doris InSAR software package.\n'
-    print '(c) 1999-2010 Delft University of Technology, the Netherlands.\n'
+    print('INFO    : @(#)Doris InSAR software, $Revision: %s $, $Author: TUDelft $' % codeRevision)
+    print()
+    print('Usage   : python rs2_dump_header2doris.py rs2_XML_product > outputfile')
+    print('                           where rs2_XML_product is the input filename')
+    print()
+    print('          This software is part of Doris InSAR software package.\n')
+    print('(c) 1999-2010 Delft University of Technology, the Netherlands.\n')
 
 try:
     inputFileName  = sys.argv[1]
 #    outputFileName = sys.argv[2]
 #    outStream      = open(outputFileName,'w')
 except:
-    print '\nError   : Unrecognized input or missing arguments\n\n'
+    print('\nError   : Unrecognized input or missing arguments\n\n')
     usage()
     sys.exit(1)
 
@@ -145,7 +145,7 @@ queryList = {
 
 # get variables and parameters from xml
 container = {}
-for key, value in queryList.iteritems():
+for key, value in queryList.items():
     if key.startswith('list_'):
         container[key] = [tag.text for tag in inTree.findall(nsmap_none(value, ns))]
     else:
@@ -184,7 +184,7 @@ if container['sceneBeam'] != 'S3': # Hacky fix for S3 merged images
 
 # ---------------------------------------------------------------------------------------------------------
 
-#print container['mission']
+#print(container['mission'])
 #exit()
 
 dummyVar = 'DUMMY'
diff --git a/bin/sec2hhmmss.py b/bin/sec2hhmmss.py
index 18abb21..6dcc2b4 100755
--- a/bin/sec2hhmmss.py
+++ b/bin/sec2hhmmss.py
@@ -2,17 +2,17 @@
 import os,sys,time
 
 def usage():
-    print '\nUsage: python sec2hhmmss.py time'
-    print '  where time is the time of day in seconds.'
-    print ' '
-    print ' Example '
-    print ' sec2hhmmss.py 59800.445398'
-    print ' 16:36:40.393'
+    print('\nUsage: python sec2hhmmss.py time')
+    print('  where time is the time of day in seconds.')
+    print(' ')
+    print(' Example ')
+    print(' sec2hhmmss.py 59800.445398')
+    print(' 16:36:40.393')
 
 try:
     timeOfDay  = sys.argv[1] 
 except:
-    print 'Unrecognized input'
+    print('Unrecognized input')
     usage()
     sys.exit(1)
 
@@ -22,4 +22,4 @@ hh = timeOfDay//3600
 timeOfDay = timeOfDay%3600
 mm = timeOfDay//60
 ss = timeOfDay%60
-print "%d:%d:%f" %(hh,mm,ss)
+print("%d:%d:%f" %(hh,mm,ss))
diff --git a/bin/tsx_dump_data.py b/bin/tsx_dump_data.py
index b522476..eaccaac 100755
--- a/bin/tsx_dump_data.py
+++ b/bin/tsx_dump_data.py
@@ -2,20 +2,20 @@
 import os,sys,time
 
 def usage():
-    print '\nUsage: python tsx_dump_data.py tsx_COSAR_product outputfile [l0 lN p0 pN] -res RESFILE'
-    print '  where tsx_COSAR_product is the input filename'
-    print '        outputfile        is the output filename'
-    print '        l0                is the first azimuth line (starting at 1)'
-    print '        lN                is the last azimuth line'
-    print '        p0                is the first range pixel (starting at 1)'
-    print '        pN                is the last range pixel'
-    print '        RESFILE           DORIS result file that is to be updated for crop metadata'
+    print('\nUsage: python tsx_dump_data.py tsx_COSAR_product outputfile [l0 lN p0 pN] -res RESFILE')
+    print('  where tsx_COSAR_product is the input filename')
+    print('        outputfile        is the output filename')
+    print('        l0                is the first azimuth line (starting at 1)')
+    print('        lN                is the last azimuth line')
+    print('        p0                is the first range pixel (starting at 1)')
+    print('        pN                is the last range pixel')
+    print('        RESFILE           DORIS result file that is to be updated for crop metadata')
 
 try:
     inputFileName  = sys.argv[1]; 
     outputFileName = sys.argv[2]
 except:
-    print 'Unrecognized input'
+    print('Unrecognized input')
     usage()
     sys.exit(1)
 
@@ -27,7 +27,7 @@ for element in range(len(sys.argv)):
     option = sys.argv[element];
     if option == '-res':
         resFile = str(sys.argv[element+1])
-#        print resFile
+#        print(resFile)
         del sys.argv[element+1]
         del sys.argv[element]
         break
@@ -43,7 +43,7 @@ elif len(sys.argv) > 3 and len(sys.argv) < 9:
     outputWinFirstPix  = int(sys.argv[5])-1    # gdal srcwin starting at 0
     outputWinLastPix   = int(sys.argv[6])      # Lastpix  --> yoff  (later)
 elif len(sys.argv) > 3 and len(sys.argv) < 7:
-    print 'Unrecognized input'
+    print('Unrecognized input')
     usage()
     sys.exit(1)
 else:
@@ -63,11 +63,11 @@ cmd = '%s %s -ot %s -of %s %s' % (gdalCall,inputFileName,outputDataType,outputDa
 
 if outputWinFirstPix is not None:
     cmd = cmd + (' -srcwin %s %s %s %s' % (outputWinFirstPix,outputWinFirstLine,outputWinLastPix-outputWinFirstPix,outputWinLastLine-outputWinFirstLine))
-    #print cmd 
+    #print(cmd)
 
 failure = os.system(cmd)
 if failure:
-    print '%s: running %s failed' % (sys.argv[0],cmd)
+    print('%s: running %s failed' % (sys.argv[0],cmd))
     sys.exit(1)
 else:
     os.rename(os.path.splitext(outputFileName)[0]+'.j00',outputFileName)
@@ -76,7 +76,7 @@ else:
 # check whether the file exist!!!
 if resFile is not None:
 
-    print resFile
+    print(resFile)
 
     # load header
     headerFileStream = open(os.path.splitext(outputFileName)[0]+'.hdr','r')
@@ -84,7 +84,7 @@ if resFile is not None:
         pair = line.split()
         if len(pair) > 1:
             vars()[pair[0]] = pair[2]   # set IMAGE_LINES and LINE_SAMPLES
-#            print vars()[pair[0]]
+#            print(vars()[pair[0]])
 
     # check whether the file exist
     outStream = open(resFile,'a')
diff --git a/bin/tsx_dump_header2doris.py b/bin/tsx_dump_header2doris.py
index bb06314..ddeb705 100755
--- a/bin/tsx_dump_header2doris.py
+++ b/bin/tsx_dump_header2doris.py
@@ -12,16 +12,16 @@ import string, time, sys
 #import types
 
 def usage():
-    print '\nUsage: python tsx_dump_header2doris.py tsx_XML_product > outputfile'
-    print '  where tsx_XML_product is the input filename'
-#    print '        outputfile      is the output DORIS resultfile'
+    print('\nUsage: python tsx_dump_header2doris.py tsx_XML_product > outputfile')
+    print('  where tsx_XML_product is the input filename')
+#    print('        outputfile      is the output DORIS resultfile')
 
 try:
     inputFileName  = sys.argv[1]
 #    outputFileName = sys.argv[2]
 #    outStream      = open(outputFileName,'w')
 except:
-    print 'Unrecognized input'
+    print('Unrecognized input')
     usage()
     sys.exit(1)
 
diff --git a/bin/tsx_dump_header2doris_noxpath.py b/bin/tsx_dump_header2doris_noxpath.py
index 4c5e641..ef6f0d7 100755
--- a/bin/tsx_dump_header2doris_noxpath.py
+++ b/bin/tsx_dump_header2doris_noxpath.py
@@ -12,16 +12,16 @@ import string, time, sys
 #import types
 
 def usage():
-    print '\nUsage: python tsx_dump_header2doris.py tsx_XML_product > outputfile'
-    print '  where tsx_XML_product is the input filename'
-#    print '        outputfile      is the output DORIS resultfile'
+    print('\nUsage: python tsx_dump_header2doris.py tsx_XML_product > outputfile')
+    print('  where tsx_XML_product is the input filename')
+#    print('        outputfile      is the output DORIS resultfile')
 
 try:
     inputFileName  = sys.argv[1]
 #    outputFileName = sys.argv[2]
 #    outStream      = open(outputFileName,'w')
 except:
-    print 'Unrecognized input'
+    print('Unrecognized input')
     usage()
     sys.exit(1)
 
@@ -123,7 +123,7 @@ for key in queryList.keys():
 
             vars()[key].append(nodes.text)
             
-            if nodes.attrib.values()[0] == '0':
+            if list(nodes.attrib.values())[0] == '0':
                 keyTemp = 'dopplerCoeff0' # reset key
                 try:
                     vars()[keyTemp];
@@ -131,7 +131,7 @@ for key in queryList.keys():
                     vars()[keyTemp] = [];
                 vars()[keyTemp].append(nodes.text)
 
-            elif nodes.attrib.values()[0] == '1':
+            elif list(nodes.attrib.values())[0] == '1':
                 keyTemp = 'dopplerCoeff1' # reset key
                 try:
                     vars()[keyTemp];
@@ -139,7 +139,7 @@ for key in queryList.keys():
                     vars()[keyTemp] = [];
                 vars()[keyTemp].append(nodes.text)
 
-            elif nodes.attrib.values()[0] == '2':
+            elif list(nodes.attrib.values())[0] == '2':
                 keyTemp = 'dopplerCoeff2' # reset key
                 try:
                     vars()[keyTemp];
diff --git a/doris_stack/functions/ESD_functions.py b/doris_stack/functions/ESD_functions.py
index e3daaa8..301a280 100755
--- a/doris_stack/functions/ESD_functions.py
+++ b/doris_stack/functions/ESD_functions.py
@@ -262,7 +262,7 @@ def apply_ESD_Nida(diffBursts, Df_DC, PRF, threshold = 0.0001):
             ph_res = ph_esd - ph_est
 
             ph_test[k] = np.nanmean(np.angle(exp(1j * ph_res[:]))) # should be ph_test(k) = np.nanmean(exp(1i*ph_res[:]))
-            #print ph_test
+            #print(ph_test)
 
         ind = np.argmin(abs(ph_test))
         D_az_min.append(D_azs[ind])
@@ -278,7 +278,7 @@ def apply_ESD_Nida(diffBursts, Df_DC, PRF, threshold = 0.0001):
         D_azs = np.linspace(D_azs[ind]-D_az_span, D_azs[ind]+D_az_span,num=7)
         del ph_test
 
-    #print 'amount of loops in iteration ' + str(c)
+    #print('amount of loops in iteration ' + str(c))
 
     pix_offset = offset / (PRF/(2*np.pi*np.nanmean(Df_DC[:])))
 
diff --git a/doris_stack/functions/burst_metadata.py b/doris_stack/functions/burst_metadata.py
index e90dfd6..caae0a9 100755
--- a/doris_stack/functions/burst_metadata.py
+++ b/doris_stack/functions/burst_metadata.py
@@ -1,6 +1,6 @@
 # Based on the orbit of the swath the orbits of the individual burst is calculated.
 
-from orbit_coordinates import lph2xyz, xyz2ell, intrp_orbit
+from .orbit_coordinates import lph2xyz, xyz2ell, intrp_orbit
 import os
 import numpy as np
 import collections
@@ -150,4 +150,4 @@ def center_shape_from_res(resfile):
 
     coverage = Polygon([ul, ur, lr, ll])
 
-    return center, coverage
\ No newline at end of file
+    return center, coverage
diff --git a/doris_stack/functions/concatenate_decatenate.py b/doris_stack/functions/concatenate_decatenate.py
index 13efee4..61a414b 100755
--- a/doris_stack/functions/concatenate_decatenate.py
+++ b/doris_stack/functions/concatenate_decatenate.py
@@ -17,7 +17,7 @@ def decatenate(date_folder, image_file, burst_file, datatype, multilooked='none'
     image_res, burst_res = read_res(date_folder, type=res_type)
 
     # Read image size
-    bursts = burst_res.keys()
+    bursts = list(burst_res.keys())
     if multilooked != 'none':
         try:
             no_lines = int(burst_res[bursts[0]].processes['readfiles']['Number_of_ml_lines_output_image'])
@@ -57,7 +57,7 @@ def concatenate(date_folder, image_file, burst_file, datatype, multilooked='none
     image_res, burst_res = read_res(date_folder, type=res_type)
 
     # Read image size
-    bursts = burst_res.keys()
+    bursts = list(burst_res.keys())
     if multilooked != 'none':
         try:
             no_lines = int(burst_res[bursts[0]].processes['readfiles']['Number_of_ml_lines_output_image'])
diff --git a/doris_stack/functions/correct_ESD.py b/doris_stack/functions/correct_ESD.py
index dbf8fae..39a65cc 100755
--- a/doris_stack/functions/correct_ESD.py
+++ b/doris_stack/functions/correct_ESD.py
@@ -9,7 +9,7 @@ if __name__ == "__main__":
     print(folder)
     sys.path.extend([folder])
 
-from resdata import ResData
+from .resdata import ResData
 
 
 def remove_ramp(file, angle_pixel):
diff --git a/doris_stack/functions/create_image.py b/doris_stack/functions/create_image.py
index a3b77f1..eb1fc86 100644
--- a/doris_stack/functions/create_image.py
+++ b/doris_stack/functions/create_image.py
@@ -11,3 +11,4 @@
 # If you want to do multilooking first apply the multilook.py script.
 
 def create_image():
+    pass
diff --git a/doris_stack/functions/do_deramp_SLC.py b/doris_stack/functions/do_deramp_SLC.py
index c63a2e5..7686ece 100755
--- a/doris_stack/functions/do_deramp_SLC.py
+++ b/doris_stack/functions/do_deramp_SLC.py
@@ -1,32 +1,32 @@
 #!/usr/bin/env python
 import numpy as np
 from numpy import *
-from get_ramp import get_ramp
+from .get_ramp import get_ramp
 from doris.doris_stack.functions.ESD_functions import freadbk
 from doris.doris_stack.main_code.resdata import ResData
 import sys
 
 
 def usage():
-    print '\nUsage: python do_deramp_SLC_nom.py dataFilename  resFilename plotFlag'
-    print '  where dataFilename     is the name of burst you want to deramp'
-    print '        resFilename      is the .res file of burst'
-    print '        plotFlag         is a boolean var, to plot only'
-    print '                         default of doPlot is false'
-    print ' This function removes the phase ramp (Doppler centroid variations) from single burst of' 
-    print ' RS2 or S1 TOPS acquisition. The original binary image at path '
-    print " DATAFILENAME is saved in 'DATAFILENAME'.orig, whereas the new instance " 
-    print ' will be characterized by baseband spectrum. The function also requires '
-    print ' the .res file RESFILENAME.                                             '     
-    print '  for example                                                           '
-    print ' python  do_deramp_SLC.py   20140821_iw_2_burst_1.raw slave.res False   '
-    print ' created by Gert Mulder'
-    print ' Part of code adapted from Lorenzo Iannini and Wu Wenhao'
+    print('\nUsage: python do_deramp_SLC_nom.py dataFilename  resFilename plotFlag')
+    print('  where dataFilename     is the name of burst you want to deramp')
+    print('        resFilename      is the .res file of burst')
+    print('        plotFlag         is a boolean var, to plot only')
+    print('                         default of doPlot is false')
+    print(' This function removes the phase ramp (Doppler centroid variations) from single burst of')
+    print(' RS2 or S1 TOPS acquisition. The original binary image at path ')
+    print(" DATAFILENAME is saved in 'DATAFILENAME'.orig, whereas the new instance " )
+    print(' will be characterized by baseband spectrum. The function also requires ')
+    print(' the .res file RESFILENAME.                                             ')
+    print('  for example                                                           ')
+    print(' python  do_deramp_SLC.py   20140821_iw_2_burst_1.raw slave.res False   ')
+    print(' created by Gert Mulder')
+    print(' Part of code adapted from Lorenzo Iannini and Wu Wenhao')
 try:
     dataFilename = sys.argv[1]
     resFilename = sys.argv[2]
 except:
-    print 'Unrecognized input'
+    print('Unrecognized input')
     usage()
     sys.exit(1)
 
@@ -78,4 +78,4 @@ else:  # cpxint16
 fid.write(slc_dat)
 fid.close()
 
-print "\nDeramp operation completed\n"
+print("\nDeramp operation completed\n")
diff --git a/doris_stack/functions/do_reramp_SLC.py b/doris_stack/functions/do_reramp_SLC.py
index a7e266e..1afa0cd 100755
--- a/doris_stack/functions/do_reramp_SLC.py
+++ b/doris_stack/functions/do_reramp_SLC.py
@@ -7,24 +7,24 @@ from doris.doris_stack.main_code.resdata import ResData
 import sys
 
 def usage():
-    print '\nUsage: python  do_reramp_SLC.py dataFilename resFilename resampled'
-    print '  where dataFilename        is the name of burst you want to deramp'
-    print '        resFilename         is the .res file of burst              '
-    print ' This python applies the inverse phase ramp to the burst pointed by DATAFILENAME (slc)'
-    print ' and RESFILENAME (res) that was deramped by deramp_SLC.m. The phase screen'
-    print ' must account for the new resampled grids PIXRGGRID and PIXAZGRID    '
-    print ' [Nlines_mst x Nsamples_mst] that contain the time coordinates of the'
-    print ' resampled image into the master grid:                               '
-    print '  for example                                                        '
-    print ' python   do_reramp_SLC.py slave_rsmp.raw slave.res False            '
-    print ' created by Gert Mulder'
-    print ' Part of code adapted from Lorenzo Iannini and Wu Wenhao'
+    print('\nUsage: python  do_reramp_SLC.py dataFilename resFilename resampled')
+    print('  where dataFilename        is the name of burst you want to deramp')
+    print('        resFilename         is the .res file of burst              ')
+    print(' This python applies the inverse phase ramp to the burst pointed by DATAFILENAME (slc)')
+    print(' and RESFILENAME (res) that was deramped by deramp_SLC.m. The phase screen')
+    print(' must account for the new resampled grids PIXRGGRID and PIXAZGRID    ')
+    print(' [Nlines_mst x Nsamples_mst] that contain the time coordinates of the')
+    print(' resampled image into the master grid:                               ')
+    print('  for example                                                        ')
+    print(' python   do_reramp_SLC.py slave_rsmp.raw slave.res False            ')
+    print(' created by Gert Mulder')
+    print(' Part of code adapted from Lorenzo Iannini and Wu Wenhao')
 try:
     dataFilename = sys.argv[1]
     resFilename = sys.argv[2]
      
 except:
-    print 'Unrecognized input'
+    print('Unrecognized input')
     usage()
     sys.exit(1)
 if len(sys.argv) == 3:
@@ -32,7 +32,7 @@ if len(sys.argv) == 3:
 elif len(sys.argv) == 4:
     resampled = sys.argv[3]
 else:
-    print 'Unrecognized input'
+    print('Unrecognized input')
     usage()
     sys.exit(1)
 
@@ -96,4 +96,4 @@ else:  # cpxint16
 fid.write(slc_dat)
 fid.close()
 
-print "\nReramp operation completed\n"
+print("\nReramp operation completed\n")
diff --git a/doris_stack/functions/get_ramp.py b/doris_stack/functions/get_ramp.py
index 923a512..c6f872c 100755
--- a/doris_stack/functions/get_ramp.py
+++ b/doris_stack/functions/get_ramp.py
@@ -67,7 +67,7 @@ def get_ramp(res_file, resampled=0, type='chirp'):
             os.remove(Link_DATA)
 
         RAW_DATA_ABSOLUTE_PATH=os.path.abspath(Link_rsmp_orig_slave_pixel)
-        print "RAW_DATA_ABSOLUTE_PATH=", RAW_DATA_ABSOLUTE_PATH
+        print("RAW_DATA_ABSOLUTE_PATH=", RAW_DATA_ABSOLUTE_PATH)
         os.symlink(RAW_DATA_ABSOLUTE_PATH,Link_DATA)
 
         outStream      = open(Path_MFF_HDR,'w')
@@ -99,7 +99,7 @@ def get_ramp(res_file, resampled=0, type='chirp'):
 
 
         RAW_DATA_ABSOLUTE_PATH=os.path.abspath(Link_rsmp_orig_slave_line)
-        print "RAW_DATA_ABSOLUTE_PATH=", RAW_DATA_ABSOLUTE_PATH
+        print("RAW_DATA_ABSOLUTE_PATH=", RAW_DATA_ABSOLUTE_PATH)
         os.symlink(RAW_DATA_ABSOLUTE_PATH,Link_DATA)
 
         outStream      = open(Path_MFF_HDR,'w')
@@ -137,7 +137,7 @@ def get_ramp(res_file, resampled=0, type='chirp'):
         TazGrid = np.tile(Tvect_az, (1, Nrg_res))
         
     else:
-        print 'variable resampled can only be 0 or 1!'
+        print('variable resampled can only be 0 or 1!')
         return
 
     #%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
@@ -200,7 +200,7 @@ def get_ramp(res_file, resampled=0, type='chirp'):
     elif type == 'DC':
         data = Df_AzCtr + Taz_vec * DR_est 
     else:
-        print 'Choose either chirp or DC for type'
+        print('Choose either chirp or DC for type')
         return
 
     return data
@@ -260,23 +260,23 @@ def freadbk(path_file,line_start=1, pixels_start=1,nofLines1=None,nofPixels1=Non
     gdal.AllRegister()
     thisBurstData_file=gdal.Open(path_file,GA_ReadOnly)
     if thisBurstData_file is None:
-        print 'Could not open'+Path_MFF_HDR
+        print('Could not open'+Path_MFF_HDR)
         sys.exit(1)
-    #print 'Driver: ', thisBurstData_file.GetDriver().ShortName,'/', \
-    #      thisBurstData_file.GetDriver().LongName
-    #print 'Size is ',thisBurstData_file.RasterXSize,'x',thisBurstData_file.RasterYSize, \
-    #      'x',thisBurstData_file.RasterCount
-    #print 'Projection is ',thisBurstData_file.GetProjection()
+    #print('Driver: ', thisBurstData_file.GetDriver().ShortName,'/', \
+    #      thisBurstData_file.GetDriver().LongName)
+    #print('Size is ',thisBurstData_file.RasterXSize,'x',thisBurstData_file.RasterYSize, \
+    #      'x',thisBurstData_file.RasterCount)
+    #print('Projection is ',thisBurstData_file.GetProjection())
     geotransform = thisBurstData_file.GetGeoTransform()
     if not geotransform is None:
-        print 'Origin = (',geotransform[0], ',',geotransform[3],')'
-        print 'Pixel Size = (',geotransform[1], ',',geotransform[5],')'
+        print('Origin = (',geotransform[0], ',',geotransform[3],')')
+        print('Pixel Size = (',geotransform[1], ',',geotransform[5],')')
 
     cint_srd=thisBurstData_file.GetRasterBand(1)
-    #print 'Band Type=',gdal.GetDataTypeName(cint_srd.DataType)
+    #print('Band Type=',gdal.GetDataTypeName(cint_srd.DataType))
 
     if cint_srd.GetOverviewCount() > 0:
-            print 'Band has ', cint_srd.GetOverviewCount(), ' overviews.'
+            print('Band has ', cint_srd.GetOverviewCount(), ' overviews.')
     thisBurstData= cint_srd.ReadAsArray(int(pixels_start-1),int(line_start-1),nofPixels1,nofLines1)
     return thisBurstData
 ##################################################################################
diff --git a/doris_stack/functions/get_winpos.py b/doris_stack/functions/get_winpos.py
index 9cf09d3..719fe95 100755
--- a/doris_stack/functions/get_winpos.py
+++ b/doris_stack/functions/get_winpos.py
@@ -6,15 +6,15 @@ from gdalconst import *
 from scipy import ndimage
 
 def usage():
-    print '\nUsage: python get_winpos.py dataFile resFile Nwin outFile                        '
-    print 'where   dataFile           is the name of burst you want to deramp                 '
-    print '        resFile            is the .res file of burst                               '
-    print '        Nwin               number of windows to be distributed over the total image'
-    print '        outFile            output file name                                        '
-    print '  for example                                                                      '
-    print ' python get_winpos.py 20141003_iw_1_burst_1.raw 20141003_iw_1_burst_1.res 2001 winpos_fine.asc'
-    print ' matlab: TU Delft                                                                  '
-    print ' Python: Wu Wenhao   Wuhan QQ:460249274                                            '
+    print('\nUsage: python get_winpos.py dataFile resFile Nwin outFile                        ')
+    print('where   dataFile           is the name of burst you want to deramp                 ')
+    print('        resFile            is the .res file of burst                               ')
+    print('        Nwin               number of windows to be distributed over the total image')
+    print('        outFile            output file name                                        ')
+    print('  for example                                                                      ')
+    print(' python get_winpos.py 20141003_iw_1_burst_1.raw 20141003_iw_1_burst_1.res 2001 winpos_fine.asc')
+    print(' matlab: TU Delft                                                                  ')
+    print(' Python: Wu Wenhao   Wuhan QQ:460249274                                            ')
 try:
     dataFile   = sys.argv[1]
     resFile    = sys.argv[2]
@@ -22,7 +22,7 @@ try:
     outFile    = sys.argv[4]
   
 except:
-    print 'Unrecognized input'
+    print('Unrecognized input')
     usage()
     sys.exit(1)
 
@@ -79,23 +79,23 @@ def freadbk(path_file,line_start=1, Pixels_start=1,nofLines1=None,nofPixels1=Non
     gdal.AllRegister()
     thisBurstData_file=gdal.Open(path_file,GA_ReadOnly)
     if thisBurstData_file is None:
-        print 'Could not open'+Path_MFF_HDR
+        print('Could not open'+Path_MFF_HDR)
         sys.exit(1)
-    #print 'Driver: ', thisBurstData_file.GetDriver().ShortName,'/', \
-    #      thisBurstData_file.GetDriver().LongName
-    #print 'Size is ',thisBurstData_file.RasterXSize,'x',thisBurstData_file.RasterYSize, \
-    #      'x',thisBurstData_file.RasterCount
-    #print 'Projection is ',thisBurstData_file.GetProjection()
+    #print('Driver: ', thisBurstData_file.GetDriver().ShortName,'/', \
+    #      thisBurstData_file.GetDriver().LongName)
+    #print('Size is ',thisBurstData_file.RasterXSize,'x',thisBurstData_file.RasterYSize, \
+    #      'x',thisBurstData_file.RasterCount)
+    #print('Projection is ',thisBurstData_file.GetProjection())
     geotransform = thisBurstData_file.GetGeoTransform()
     #if not geotransform is None:
-    #    print 'Origin = (',geotransform[0], ',',geotransform[3],')'
-    #    print 'Pixel Size = (',geotransform[1], ',',geotransform[5],')'
+    #    print('Origin = (',geotransform[0], ',',geotransform[3],')')
+    #    print('Pixel Size = (',geotransform[1], ',',geotransform[5],')')
 
     cint_srd=thisBurstData_file.GetRasterBand(1)
-    #print 'Band Type=',gdal.GetDataTypeName(cint_srd.DataType)
+    #print('Band Type=',gdal.GetDataTypeName(cint_srd.DataType))
 
     if cint_srd.GetOverviewCount() > 0:
-            print 'Band has ', cint_srd.GetOverviewCount(), ' overviews.'
+            print('Band has ', cint_srd.GetOverviewCount(), ' overviews.')
     thisBurstData= cint_srd.ReadAsArray(int(Pixels_start-1),int(line_start-1),nofPixels1,nofLines1)
     return thisBurstData
 ###############################################################################
@@ -135,8 +135,8 @@ else:#original data
 # Image size
 Nlines = lN-l0+1;
 Npixels = pN-p0+1;
-print "Nlines =",Nlines 
-print "Npixels =",Npixels
+print("Nlines =",Nlines)
+print("Npixels =",Npixels)
 
 
 Ngrid = float(Nwin)/NwinGrid;
@@ -180,7 +180,7 @@ outStream.close()
 if (os.path.exists(Link_CINT_SRD)):
     os.remove(Link_CINT_SRD)
 RAW_CINT_SRD_ABSOLUTE_PATH=os.path.abspath(RAW_CINT_SRD)
-print "RAW_CINT_SRD_ABSOLUTE_PATH=", RAW_CINT_SRD_ABSOLUTE_PATH
+print("RAW_CINT_SRD_ABSOLUTE_PATH=", RAW_CINT_SRD_ABSOLUTE_PATH)
 os.symlink(RAW_CINT_SRD_ABSOLUTE_PATH,Link_CINT_SRD)
 
 
@@ -211,8 +211,8 @@ for v in range(1,int(Ngrid_az)+1):
 fidRes        = open(outFile,'w')
 cols = winpos.shape[1]
 rows = winpos.shape[0]        
-#print "cols = ",cols
-print "rows = ", rows
+#print("cols = ",cols)
+print("rows = ", rows)
 for i_temp in range(0,rows):       
     fidRes.write( '%d   %d\n' % (winpos[i_temp,0]+1,winpos[i_temp,1]+1))      
 fidRes.close()
diff --git a/doris_stack/functions/load_shape_unzip.py b/doris_stack/functions/load_shape_unzip.py
index e192d56..5f60c24 100755
--- a/doris_stack/functions/load_shape_unzip.py
+++ b/doris_stack/functions/load_shape_unzip.py
@@ -170,7 +170,7 @@ def load_shape(shapefile, buffer=0.02):
         if isinstance(shapefile, list):  # If the coordinates are already loaded. (for example bounding box)
             shp = Polygon(shapefile)
         else:  # It should be a shape file. We always select the first shape.
-            sh = fiona.open(shapefile).next()
+            sh = next(fiona.open(shapefile))
             shp = shape(sh['geometry'])
 
         # Now we have the shape we add a buffer and simplify first to save computation time.
diff --git a/doris_stack/functions/orbit_coordinates.py b/doris_stack/functions/orbit_coordinates.py
index 1a3e598..e7278fa 100755
--- a/doris_stack/functions/orbit_coordinates.py
+++ b/doris_stack/functions/orbit_coordinates.py
@@ -181,7 +181,7 @@ def intrp_orbit(line,container,burst_number):
     acc_x = np.kron(np.ones(len(container['orbitTime'])),np.polyder(np.polyder(coef_x)))
     acc_y = np.kron(np.ones(len(container['orbitTime'])),np.polyder(np.polyder(coef_y)))
     acc_z = np.kron(np.ones(len(container['orbitTime'])),np.polyder(np.polyder(coef_z)))
-    #print 'acc_x.shape=',acc_x.shape
+    #print('acc_x.shape=',acc_x.shape)
 
     # interpolated orbit
     norm_orbit = np.array([orbit_time, orbit_x,orbit_y,orbit_z,vel_x,  vel_y,  vel_z,acc_x,  acc_y,  acc_z]);
@@ -223,4 +223,4 @@ def hms2sec(hmsString,convertFlag='float'):
     elif convertFlag == 'float' :
         return float(secString)
     else:
-        return int(secString)
\ No newline at end of file
+        return int(secString)
diff --git a/doris_stack/functions/precise_read.py b/doris_stack/functions/precise_read.py
index bcb8ffe..9f90367 100755
--- a/doris_stack/functions/precise_read.py
+++ b/doris_stack/functions/precise_read.py
@@ -19,7 +19,7 @@ def orbit_read(input_EOF_FileName):
             from lxml import etree
         except:
             #import xml.etree.ElementTree as etree
-            print 'Failed to load lxml.etree or xml.etree.cElementTree'
+            print('Failed to load lxml.etree or xml.etree.cElementTree')
             sys.exit(1)
 
     inTree = etree.parse(input_EOF_FileName)
@@ -152,4 +152,4 @@ def hms2sec(hmsString,convertFlag='float'):
     elif convertFlag == 'float' :
         return float(secString)
     else:
-        return int(secString)
\ No newline at end of file
+        return int(secString)
diff --git a/doris_stack/functions/resdata.py b/doris_stack/functions/resdata.py
index 507a746..1612fd5 100755
--- a/doris_stack/functions/resdata.py
+++ b/doris_stack/functions/resdata.py
@@ -76,7 +76,7 @@ class ResData(object):
                         temp[name] = [line]
 
                 except:
-                    print 'Error occurred at line: ' + line
+                    print('Error occurred at line: ' + line)
 
     def process_reader(self,processes = ''):
         # This function reads random processes based on standard buildup of processes in res files.
@@ -84,7 +84,7 @@ class ResData(object):
         # If loc is true, it will only return the locations where different processes start.
 
         if not processes:
-            processes = self.process_control.keys()
+            processes = list(self.process_control.keys())
 
         processes.append('leader_datapoints')
         process = ''
@@ -171,7 +171,7 @@ class ResData(object):
                             row += 1
 
                 except:
-                    print 'Error occurred at line: ' + line
+                    print('Error occurred at line: ' + line)
 
     def process_spacing(self,process=''):
 
@@ -195,7 +195,7 @@ class ResData(object):
     def del_process(self,process=''):
         # function deletes one or multiple processes from the corresponding res file
 
-        if isinstance(process, basestring): # one process
+        if isinstance(process, str): # one process
             if not process in self.process_control.keys():
                 warnings.warn('The requested process does not exist! (or processes are not read jet, use self.process_reader): ' + str(process))
                 return
@@ -208,7 +208,7 @@ class ResData(object):
             warnings.warn('process should contain either a string of one process or a list of multiple processes: ' + str(process))
 
         # Now remove the process and write the file again.
-        if isinstance(process, basestring): # Only one process should be removed
+        if isinstance(process, str): # Only one process should be removed
             self.process_control[process] = '0'
             del self.processes[process]
         else:
@@ -283,10 +283,10 @@ class ResData(object):
                 elif process == 'coarse_orbits':  # the coarse orbits output is different from the others.
                     if 'Control point' in line_key: # Special case coarse orbits...
                         f.write((line_key + ' =').ljust(spacing[0]) + str(self.processes[process][line_key]) + '\n')
-                    elif not isinstance(data[line_key], basestring): # Another special case
+                    elif not isinstance(data[line_key], str): # Another special case
                         f.write(line_key.ljust(spacing_row[0]) + (data[line_key][0]).ljust(spacing_row[1]) +
                                 data[line_key][1].ljust(spacing_row[2]) + ' '.join(data[line_key][2:]) + '\n')
-                    elif isinstance(data[line_key], basestring): # Handle as in normal cases
+                    elif isinstance(data[line_key], str): # Handle as in normal cases
                         f.write((line_key + ':').ljust(spacing[0]) + str(self.processes[process][line_key]) + '\n')
                 else: # If it consists out of two parts
                     f.write((line_key + ':').ljust(spacing[0]) + str(self.processes[process][line_key]) + '\n')
@@ -301,7 +301,7 @@ class ResData(object):
 
     def insert(self,data,process,variable=''):
         # This function inserts a variable or a process which does not exist at the moment
-        processes = self.process_control.keys()
+        processes = list(self.process_control.keys())
         processes.extend(['header','leader_datapoints'])
 
         if process not in processes:
@@ -328,7 +328,7 @@ class ResData(object):
 
     def delete(self,process,variable=''):
         # This function deletes a variable or a process which does exist at the moment
-        processes = self.process_control.keys()
+        processes = list(self.process_control.keys())
         processes.extend(['header','leader_datapoints'])
 
         if process not in processes:
@@ -401,4 +401,4 @@ class ResData(object):
                 warnings.warn('This variable does not exist: ' + str(variable))
                 return
 
-        return data
\ No newline at end of file
+        return data
diff --git a/doris_stack/functions/sentinel_dump_data_function.py b/doris_stack/functions/sentinel_dump_data_function.py
index b4b1688..0f09c93 100755
--- a/doris_stack/functions/sentinel_dump_data_function.py
+++ b/doris_stack/functions/sentinel_dump_data_function.py
@@ -7,7 +7,7 @@ if __name__ == "__main__":
     print(folder)
     sys.path.extend([folder])
 
-import resdata as resdata
+from . import resdata as resdata
 
 
 def dump_data(input_file,res_file, output_file='', coordinates=[]):
@@ -22,7 +22,7 @@ def dump_data(input_file,res_file, output_file='', coordinates=[]):
 
     if not coordinates:
         if res_vars.process_control['crop'] == '0':
-            print 'There is no information available about how to crop this file!'
+            print('There is no information available about how to crop this file!')
             return
         else:
             outputWinFirstPix = int(res_vars.processes['crop']['First_pixel (w.r.t. original_image)'])
@@ -58,7 +58,7 @@ def dump_data(input_file,res_file, output_file='', coordinates=[]):
 
     failure = os.system(cmd)
     if failure:
-        print '%s: running %s failed' % (sys.argv[0],cmd)
+        print('%s: running %s failed' % (sys.argv[0],cmd))
         sys.exit(1)
     else:
         os.rename(os.path.splitext(output_file)[0]+'.j00',output_file)
diff --git a/doris_stack/functions/stack_cleanup.py b/doris_stack/functions/stack_cleanup.py
index 3e1b4d9..7482796 100755
--- a/doris_stack/functions/stack_cleanup.py
+++ b/doris_stack/functions/stack_cleanup.py
@@ -89,8 +89,8 @@ def cleanup(stack_folder, cleanup_ps=True, cleanup_ds=False, full_swath_rm=[], f
     burst_endings = {'b_folder': '', 'b_raw': '.raw', 'b_ras': '.ras', 'b_res': '.res'}
 
     # Finally, make a list of which endings should be deleted
-    swath_remove = [dat for key, dat in swath_endings.iteritems() if swath_clean[key]]
-    burst_remove = [dat for key, dat in burst_endings.iteritems() if burst_clean[key]]
+    swath_remove = [dat for key, dat in swath_endings.items() if swath_clean[key]]
+    burst_remove = [dat for key, dat in burst_endings.items() if burst_clean[key]]
 
     # Check the total ifgs in the stack
     swath_folders = scan_stack(stack_folder)
@@ -118,10 +118,10 @@ def scan_stack(stack_folder):
     # exist.
 
     swath_folders = []
-    root, dirs, files = os.walk(stack_folder).next()
+    root, dirs, files = next(os.walk(stack_folder))
 
     for folder in dirs:
-        r, folders, files = os.walk(os.path.join(root, folder)).next()
+        r, folders, files = next(os.walk(os.path.join(root, folder)))
 
         if 'swath_1' in folders and 'master.res' in files and 'ifgs.res' in files and 'cint.raw' in files:
             swath_folders.append(os.path.join(root, folder))
@@ -133,7 +133,7 @@ def remove_burst_folders(swath_folder, remove):
     # Remove all burst folders from swath folder
 
     folder_names = []
-    root, dirs, files = os.walk(swath_folder).next()
+    root, dirs, files = next(os.walk(swath_folder))
 
     for folder in dirs:
         if folder.startswith('swath'):
@@ -148,7 +148,7 @@ def remove_file(swath_folder, file_endings, remove):
     # Remove the files in the main folder.
 
     file_names = []
-    root, dirs, files = os.walk(swath_folder).next()
+    root, dirs, files = next(os.walk(swath_folder))
 
     for filename in files:
         for end in file_endings:
@@ -164,16 +164,16 @@ def remove_burst_files(swath_folder, file_endings, remove):
     # Remove the files in the burst folders.
 
     file_names = []
-    root1, swaths, files = os.walk(swath_folder).next()
+    root1, swaths, files = next(os.walk(swath_folder))
 
     if len(swaths) == 0:
         'Files seems to be deleted already!'
         return file_names
 
     for swath in swaths:
-        root2, bursts, files = os.walk(os.path.join(root1, swath)).next()
+        root2, bursts, files = next(os.walk(os.path.join(root1, swath)))
         for burst in bursts:
-            root3, burst_fold, files = os.walk(os.path.join(root2, burst)).next()
+            root3, burst_fold, files = next(os.walk(os.path.join(root2, burst)))
             for filename in files:
                 for end in file_endings:
                     if filename.endswith(end) and remove:
diff --git a/doris_stack/functions/xml_query.py b/doris_stack/functions/xml_query.py
index bffe924..d33af13 100755
--- a/doris_stack/functions/xml_query.py
+++ b/doris_stack/functions/xml_query.py
@@ -13,7 +13,7 @@ def xml_query(input_xml):
             from lxml import etree
         except:
             #import xml.etree.ElementTree as etree
-            print 'Failed to load lxml.etree or xml.etree.cElementTree'
+            print('Failed to load lxml.etree or xml.etree.cElementTree')
             sys.exit(1)
 
     inTree = etree.parse(input_xml)
diff --git a/doris_stack/main_code/doris_parameters.py b/doris_stack/main_code/doris_parameters.py
index 266d7aa..59b5d92 100644
--- a/doris_stack/main_code/doris_parameters.py
+++ b/doris_stack/main_code/doris_parameters.py
@@ -1,6 +1,6 @@
 from datetime import datetime
 import os
-from doris_config import DorisConfig
+from .doris_config import DorisConfig
 import xml.etree.ElementTree as ET
 
 
@@ -81,28 +81,28 @@ class DorisParameters():
         # Print parameters, check if paths exist
         #
 
-        print 'self.shape_dat: ' + self.shape_dat
+        print('self.shape_dat: ' + self.shape_dat)
         # self._check_path_exists(self.shape_dat)
-        print 'self.track_dir:	' + self.track_dir
+        print('self.track_dir:	' + self.track_dir)
         self._check_path_exists(self.track_dir)
-        print 'self.stack_path:	' + self.stack_path
+        print('self.stack_path:	' + self.stack_path)
         # self._check_path_exists(self.stack_path)
-        print 'self.precise_orbits:	' + self.precise_orbits
+        print('self.precise_orbits:	' + self.precise_orbits)
         self._check_path_exists(self.precise_orbits)
-        print 'self.input_files:	' + self.input_files
+        print('self.input_files:	' + self.input_files)
         # self._check_path_exists(self.input_files)
-#        print 'self.main_code_folder:	' + self.main_code_folder
+#        print('self.main_code_folder:	' + self.main_code_folder)
 #        self._check_path_exists(self.main_code_folder)
-#        print 'self.script_folder:	' + self.script_folder
+#        print('self.script_folder:	' + self.script_folder)
 #        self._check_path_exists(self.script_folder)
-        print 'self.nr_of_jobs:	' + str(self.nr_of_jobs)
-        print 'self.initialize_flag:	' + str(self.initialize_flag)
-        print 'self.jobHandlerScript:	' + self.job_handler_script
+        print('self.nr_of_jobs:	' + str(self.nr_of_jobs))
+        print('self.initialize_flag:	' + str(self.initialize_flag))
+        print('self.jobHandlerScript:	' + self.job_handler_script)
         self._check_path_exists(self.job_handler_script)
 
     def _check_path_exists(self, path):
         if not(os.path.exists(path)):
-            print 'Error Doris_Parameters: path ' + path + ' does not exist'
+            print('Error Doris_Parameters: path ' + path + ' does not exist')
             
     def _settings_get(self, string):
         return self.settings.find('*/' + string).text
diff --git a/doris_stack/main_code/doris_parameters_path.py b/doris_stack/main_code/doris_parameters_path.py
index 782536a..9b87644 100644
--- a/doris_stack/main_code/doris_parameters_path.py
+++ b/doris_stack/main_code/doris_parameters_path.py
@@ -7,6 +7,6 @@ class DorisParameters_Path(object):
     def set(self, doris_parameters_path):
             if(os.path.exists(doris_parameters_path)):
                 sys.path.append(os.path.split(doris_parameters_path)[0])
-                print 'dorisparameter path: ' + doris_parameters_path
+                print('dorisparameter path: ' + doris_parameters_path)
             else:
-                print 'dorisparameter path: ' + doris_parameters_path + ' not a valid path'
+                print('dorisparameter path: ' + doris_parameters_path + ' not a valid path')
diff --git a/doris_stack/main_code/doris_sentinel_1.py b/doris_stack/main_code/doris_sentinel_1.py
index a94a438..01cbca0 100644
--- a/doris_stack/main_code/doris_sentinel_1.py
+++ b/doris_stack/main_code/doris_sentinel_1.py
@@ -14,7 +14,7 @@ class DorisSentinel1(object):
 
     def run(self, doris_parameters_path, start_date, end_date, master_date):
 
-        print 'start sentinel 1 processing'
+        print('start sentinel 1 processing')
 
         #Set your input variables here. You should use absolute paths.
         dorisParameters = DorisParameters(doris_parameters_path)
@@ -75,7 +75,7 @@ class DorisSentinel1(object):
         # Finally delete unzipped images
         stack.del_unpacked_image()
 
-        import single_master_stack
+        from . import single_master_stack
 
         # Now we import the script to create a single master interferogram
         processing = SingleMaster(master_date=master_date, start_date=start_date,
@@ -175,5 +175,5 @@ class DorisSentinel1(object):
 
         profile.log_time_stamp('end')
 
-        print 'end sentinel 1 processing'
+        print('end sentinel 1 processing')
 
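The import changes in this file and in doris_parameters.py swap Python 2's implicit relative imports (import single_master_stack, from doris_config import DorisConfig) for the explicit dotted form, because Python 3 treats the bare names as absolute imports. A sketch of the rule, assuming a hypothetical package layout; the dotted form only resolves when the module is imported as part of its package:

    # pkg/__init__.py
    # pkg/doris_config.py          defines DorisConfig
    # pkg/doris_parameters.py      wants DorisConfig
    #
    # Python 2 accepted        import doris_config                      (implicit relative import)
    # Python 3 requires        from .doris_config import DorisConfig    (explicit relative import)
    # or the absolute form     from pkg.doris_config import DorisConfig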
diff --git a/doris_stack/main_code/dorisparameters.py b/doris_stack/main_code/dorisparameters.py
index ba8cadc..339ee91 100644
--- a/doris_stack/main_code/dorisparameters.py
+++ b/doris_stack/main_code/dorisparameters.py
@@ -88,27 +88,27 @@ class DorisParameters():
         # Print parameters, check if paths exist
         #
 
-        print 'self.shape_dat: ' + self.shape_dat
+        print('self.shape_dat: ' + self.shape_dat)
         # self._check_path_exists(self.shape_dat)
-        print 'self.track_dir:	' + self.track_dir
+        print('self.track_dir:	' + self.track_dir)
         self._check_path_exists(self.track_dir)
-        print 'self.stack_path:	' + self.stack_path
+        print('self.stack_path:	' + self.stack_path)
         # self._check_path_exists(self.stack_path)
-        print 'self.precise_orbits:	' + self.precise_orbits
+        print('self.precise_orbits:	' + self.precise_orbits)
         self._check_path_exists(self.precise_orbits)
-        print 'self.input_files:	' + self.input_files
+        print('self.input_files:	' + self.input_files)
         # self._check_path_exists(self.input_files)
-#        print 'self.main_code_folder:	' + self.main_code_folder
+#        print('self.main_code_folder:	' + self.main_code_folder)
 #        self._check_path_exists(self.main_code_folder)
-#        print 'self.script_folder:	' + self.script_folder
+#        print('self.script_folder:	' + self.script_folder)
 #        self._check_path_exists(self.script_folder)
-        print 'self.nr_of_jobs:	' + str(self.nr_of_jobs)
-        print 'self.jobHandlerScript:	' + self.job_handler_script
+        print('self.nr_of_jobs:	' + str(self.nr_of_jobs))
+        print('self.jobHandlerScript:	' + self.job_handler_script)
         self._check_path_exists(self.job_handler_script)
 
     def _check_path_exists(self, path):
         if not(os.path.exists(path)):
-            print 'Error Doris_Parameters: path ' + path + ' does not exist'
+            print('Error Doris_Parameters: path ' + path + ' does not exist')
             
     def _settings_get(self, string):
         return self.settings.find('*/' + string).text
diff --git a/doris_stack/main_code/jobs.py b/doris_stack/main_code/jobs.py
index 828fab9..bdb74ef 100644
--- a/doris_stack/main_code/jobs.py
+++ b/doris_stack/main_code/jobs.py
@@ -109,11 +109,11 @@ class Jobs(object):
         self._start_jobs()
         while len(self.jobs_active):
             if(self.verbose):
-                print time.strftime("%a, %d %b %Y %H:%M:%S +0000", time.gmtime()) + "jobs busy"
+                print(time.strftime("%a, %d %b %Y %H:%M:%S +0000", time.gmtime()) + "jobs busy")
             time.sleep(self.between_sleep_time)
             self._check_active_jobs()
             self._start_jobs()
         if (self.verbose):
-            print time.strftime("%a, %d %b %Y %H:%M:%S +0000", time.gmtime()) + "jobs finished"
+            print(time.strftime("%a, %d %b %Y %H:%M:%S +0000", time.gmtime()) + "jobs finished")
         time.sleep(self.end_sleep_time)
         self._cleanup_flag_dir()
diff --git a/doris_stack/main_code/resdata.py b/doris_stack/main_code/resdata.py
index cd28c96..38a321d 100644
--- a/doris_stack/main_code/resdata.py
+++ b/doris_stack/main_code/resdata.py
@@ -76,7 +76,7 @@ class ResData(object):
                         temp[name] = [line]
 
                 except:
-                    print 'Error occurred at line: ' + line
+                    print('Error occurred at line: ' + line)
 
     def process_reader(self,processes = ''):
         # This function reads random processes based on standard buildup of processes in res files.
@@ -84,7 +84,7 @@ class ResData(object):
         # If loc is true, it will only return the locations where different processes start.
 
         if not processes:
-            processes = self.process_control.keys()
+            processes = list(self.process_control.keys())
 
         processes.append('leader_datapoints')
         process = ''
@@ -171,7 +171,7 @@ class ResData(object):
                             row += 1
 
                 except:
-                    print 'Error occurred at line: ' + line
+                    print('Error occurred at line: ' + line)
 
     def process_spacing(self,process=''):
 
@@ -195,7 +195,7 @@ class ResData(object):
     def del_process(self,process=''):
         # function deletes one or multiple processes from the corresponding res file
 
-        if isinstance(process, basestring): # one process
+        if isinstance(process, str): # one process
             if not process in self.process_control.keys():
                 warnings.warn('The requested process does not exist! (or processes are not read jet, use self.process_reader): ' + str(process))
                 return
@@ -208,7 +208,7 @@ class ResData(object):
             warnings.warn('process should contain either a string of one process or a list of multiple processes: ' + str(process))
 
         # Now remove the process and write the file again.
-        if isinstance(process, basestring): # Only one process should be removed
+        if isinstance(process, str): # Only one process should be removed
             self.process_control[process] = '0'
             del self.processes[process]
         else:
@@ -283,10 +283,10 @@ class ResData(object):
                 elif process == 'coarse_orbits':  # the coarse orbits output is different from the others.
                     if 'Control point' in line_key: # Special case coarse orbits...
                         f.write((line_key + ' =').ljust(spacing[0]) + str(self.processes[process][line_key]) + '\n')
-                    elif not isinstance(data[line_key], basestring): # Another special case
+                    elif not isinstance(data[line_key], str): # Another special case
                         f.write(line_key.ljust(spacing_row[0]) + (data[line_key][0]).ljust(spacing_row[1]) +
                                 data[line_key][1].ljust(spacing_row[2]) + ' '.join(data[line_key][2:]) + '\n')
-                    elif isinstance(data[line_key], basestring): # Handle as in normal cases
+                    elif isinstance(data[line_key], str): # Handle as in normal cases
                         f.write((line_key + ':').ljust(spacing[0]) + str(self.processes[process][line_key]) + '\n')
                 else: # If it consists out of two parts
                     f.write((line_key + ':').ljust(spacing[0]) + str(self.processes[process][line_key]) + '\n')
@@ -301,7 +301,7 @@ class ResData(object):
 
     def insert(self,data,process,variable=''):
         # This function inserts a variable or a process which does not exist at the moment
-        processes = self.process_control.keys()
+        processes = list(self.process_control.keys())
         processes.extend(['header','leader_datapoints'])
 
         if process not in processes:
@@ -328,7 +328,7 @@ class ResData(object):
 
     def delete(self,process,variable=''):
         # This function deletes a variable or a process which does exist at the moment
-        processes = self.process_control.keys()
+        processes = list(self.process_control.keys())
         processes.extend(['header','leader_datapoints'])
 
         if process not in processes:
@@ -355,7 +355,7 @@ class ResData(object):
 
     def update(self,data,process,variable=''):
         # This function updates a variable or a process which does exist at the moment
-        processes = self.process_control.keys()
+        processes = list(self.process_control.keys())
         processes.extend(['header','leader_datapoints'])
 
         if not process in processes:
@@ -379,7 +379,7 @@ class ResData(object):
 
     def request(self,process,variable=''):
         # This function updates a variable or a process which does exist at the moment
-        processes = self.process_control.keys()
+        processes = list(self.process_control.keys())
         processes.extend(['header','leader_datapoints'])
 
         if not process in processes:
@@ -401,4 +401,4 @@ class ResData(object):
                 warnings.warn('This variable does not exist: ' + str(variable))
                 return
 
-        return data
\ No newline at end of file
+        return data
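Two Python 3 changes drive most of the resdata.py edits: dict.keys() now returns a view object, which cannot be indexed or appended to, so it is wrapped in list() wherever the result is mutated or subscripted; and the basestring type is gone, so the isinstance() checks fall back to str, which is the unicode string type on Python 3. A short sketch with a throwaway dictionary:

    process_control = {'crop': '1', 'coarse_orbits': '0'}

    # keys() is a lazy view on Python 3; copy it before extending or indexing.
    processes = list(process_control.keys())
    processes.append('leader_datapoints')
    first = processes[0]

    # basestring no longer exists; str is sufficient for the checks made here.
    if isinstance(first, str):
        print(processes)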
diff --git a/doris_stack/main_code/single_master_stack.py b/doris_stack/main_code/single_master_stack.py
index 11f49bc..368515b 100644
--- a/doris_stack/main_code/single_master_stack.py
+++ b/doris_stack/main_code/single_master_stack.py
@@ -7,7 +7,7 @@ from copy import deepcopy
 from doris.doris_stack.main_code.resdata import ResData
 from doris.doris_stack.main_code.dorisparameters import DorisParameters
 import collections
-from jobs import Jobs
+from .jobs import Jobs
 from doris.doris_stack.functions.baselines import baselines
 
 
@@ -122,7 +122,7 @@ class SingleMaster(object):
         self.master_key = self.master_date[:4] + self.master_date[5:7] + self.master_date[8:10]
 
         if not master_date in self.stack.keys():
-            print 'Master date is not part of the datastack. If you do not need to initialize anymore this is not a problem.'
+            print('Master date is not part of the datastack. If you do not need to initialize anymore this is not a problem.')
 
     def baseline(self):
         # Create baseline plot of datastack. Usefull to select the right master
@@ -174,12 +174,12 @@ class SingleMaster(object):
     def create_full_swath(self):
         # Create folders with full swath for individual interferogram.
 
-        dates = self.stack.keys()
+        dates = list(self.stack.keys())
 
         # Change the res files.
         for date in dates:
             print(date)
-            bursts = self.stack[date].keys()
+            bursts = list(self.stack[date].keys())
 
             if 'slave' in self.full_swath[date].keys() and 'master' in self.full_swath[date].keys():
                 continue
@@ -261,7 +261,7 @@ class SingleMaster(object):
         job_list1 = []
         job_list2 = []
 
-        bursts = self.stack[dates[0]].keys()
+        bursts = list(self.stack[dates[0]].keys())
 
         for date in dates:
             for burst in bursts:
@@ -333,7 +333,7 @@ class SingleMaster(object):
 
         for date in self.coreg_dates:
 
-            bursts = self.stack[date].keys()
+            bursts = list(self.stack[date].keys())
             real_trans_p = []
             real_trans_l = []
             crop_shift_p = []
@@ -392,7 +392,7 @@ class SingleMaster(object):
         job_list2 = []
 
         # Deramp slaves
-        bursts = self.stack[self.coreg_dates[0]].keys()
+        bursts = list(self.stack[self.coreg_dates[0]].keys())
 
         for date in self.coreg_dates:
             for burst in bursts:
@@ -499,7 +499,7 @@ class SingleMaster(object):
         for date in self.coreg_dates:
             # We start by adding the windows of the first burst.
             no_offset = 0
-            bursts = self.stack[date].keys()
+            bursts = list(self.stack[date].keys())
             new_icc = copy.deepcopy(self.stack[date][bursts[0]]['ifgs'].processes['fine_coreg'])
 
             im_trans_p = self.full_swath[date]['ifgs'].processes['coarse_orbits']['Coarse_orbits_translation_pixels']
@@ -811,7 +811,7 @@ class SingleMaster(object):
 
         for date in self.coreg_dates:
 
-            bursts = self.stack[date].keys()
+            bursts = list(self.stack[date].keys())
 
             res_dem = deepcopy(self.stack[date][bursts[0]]['ifgs'].processes['dem_assist'])
             master_crop = deepcopy(self.full_swath[date]['master'].processes['crop'])
@@ -890,7 +890,7 @@ class SingleMaster(object):
             return
         self.read_res(dates=self.coreg_dates)
 
-        bursts = self.stack[self.coreg_dates[0]].keys()
+        bursts = list(self.stack[self.coreg_dates[0]].keys())
 
         jobList1 = []
 
@@ -938,8 +938,8 @@ class SingleMaster(object):
         # - add the master original and deramp step same as the slave file.
 
         date = self.master_date
-        date_1 = self.stack.keys()[0]
-        bursts = self.stack[date_1].keys()
+        date_1 = list(self.stack.keys())[0]
+        bursts = list(self.stack[date_1].keys())
         burst_res = dict()
         image_res = dict()
         self.read_res(dates=[self.master_date], bursts=bursts, burst_stack=burst_res, image_stack=image_res)
@@ -1004,8 +1004,8 @@ class SingleMaster(object):
             return
 
         date = self.master_date
-        date_1 = self.stack.keys()[0]
-        bursts = self.stack[date_1].keys()
+        date_1 = list(self.stack.keys())[0]
+        bursts = list(self.stack[date_1].keys())
         burst_res = dict()
         image_res = dict()
         self.read_res()  # Read the information from other steps first.
@@ -1186,7 +1186,7 @@ class SingleMaster(object):
                     pix_0 = self.full_swath[date]['master'].processes['readfiles']['First_pixel (w.r.t. output_image)']
                     pix_1 = self.full_swath[date]['master'].processes['readfiles']['Last_pixel (w.r.t. output_image)']
 
-                    burst = self.stack[date].keys()[0]
+                    burst = list(self.stack[date].keys())[0]
                     res = copy.deepcopy(self.stack[date][burst]['ifgs'].processes['interfero'])
 
                     res['First_line (w.r.t. original_master)'] = line_0
@@ -1230,7 +1230,7 @@ class SingleMaster(object):
             # First make a list of all min max coordinates of all bursts.
 
             x0=[]; x1=[]; y0=[]; y1=[]
-            bursts = self.stack[date].keys()
+            bursts = list(self.stack[date].keys())
             for burst in bursts:
                 y0.append(int(self.stack[date][burst][type].processes['readfiles']['First_line (w.r.t. output_image)']))
                 y1.append(int(self.stack[date][burst][type].processes['readfiles']['Last_line (w.r.t. output_image)']))
@@ -1238,7 +1238,7 @@ class SingleMaster(object):
                 x1.append(int(self.stack[date][burst][type].processes['readfiles']['Last_pixel (w.r.t. output_image)']))
 
             for b1 in range(len(bursts)):
-                print 'hello'
+                print('hello')
 
     def esd(self, esd_type='ps', max_baseline='200'):
 
@@ -1248,8 +1248,8 @@ class SingleMaster(object):
 
         jobList = []
         # First run all the ESD calculations in parallel
-        for date in [self.stack.keys()[0]]:
-            bursts = self.stack[date].keys()
+        for date in [list(self.stack.keys())[0]]:
+            bursts = list(self.stack[date].keys())
             sort_id = [int(dat[6]) * 100 + int(dat[14:]) for dat in bursts]
             bursts = [x for (y, x) in sorted(zip(sort_id, bursts))]
 
@@ -1298,7 +1298,7 @@ class SingleMaster(object):
     def network_esd(self, esd_type='ps', var_calc=False):
         # This function calculates the ESD values using a network approach
 
-        dates = (self.stack.keys())
+        dates = (list(self.stack.keys()))
         dates.append(self.master_date)
         dates = sorted(dates)
 
@@ -1328,18 +1328,18 @@ class SingleMaster(object):
 
         # Find the master date
         master_num = dates.index(self.master_date)
-        slave_nums = range(len(dates))
+        slave_nums = list(range(len(dates)))
         slave_nums.remove(master_num)
 
         # Create the A matrix
         A = np.zeros(shape=(len(m_s[0]), np.max([np.max(m_s[0]), np.max(m_s[1])]) + 1))
-        A[range(len(m_s[0])), m_s[0]] = 1
-        A[range(len(m_s[0])), m_s[1]] = -1
+        A[list(range(len(m_s[0]))), m_s[0]] = 1
+        A[list(range(len(m_s[0]))), m_s[1]] = -1
         A = np.hstack((A[:, :master_num], A[:, master_num + 1:]))
 
         # Create the weight matrix
         W = np.zeros((len(m_s[0]), len(m_s[0])))
-        id = range(len(m_s[0]))
+        id = list(range(len(m_s[0])))
 
         W[id, id] = 1 / weight
         W = np.linalg.inv(W)
@@ -1408,7 +1408,7 @@ class SingleMaster(object):
                 path = self.image_path(date)
                 os.chdir(path)
 
-                burst = self.stack[date].keys()[0]
+                burst = list(self.stack[date].keys())[0]
                 slave_res = copy.deepcopy(self.stack[date][burst]['slave'].processes['resample'])
 
                 # Read number of lines
@@ -1444,8 +1444,8 @@ class SingleMaster(object):
         # symbolic links.
 
         date = self.master_date
-        date_1 = self.stack.keys()[0]
-        bursts = self.stack[date_1].keys()
+        date_1 = list(self.stack.keys())[0]
+        bursts = list(self.stack[date_1].keys())
         burst_res = dict()
         image_res = dict()
 
@@ -1461,7 +1461,7 @@ class SingleMaster(object):
         os.chdir(path)
 
         if image_res[date]['slave'].process_control != '1':
-            burst = burst_res[date].keys()[0]
+            burst = list(burst_res[date].keys())[0]
             slave_res = copy.deepcopy(burst_res[date][burst]['slave'].processes['resample'])
 
             # Read number of lines
@@ -1557,7 +1557,7 @@ class SingleMaster(object):
                     pix_0 = self.full_swath[date]['master'].processes['readfiles']['First_pixel (w.r.t. output_image)']
                     pix_1 = self.full_swath[date]['master'].processes['readfiles']['Last_pixel (w.r.t. output_image)']
 
-                    burst = self.stack[date].keys()[0]
+                    burst = list(self.stack[date].keys())[0]
                     res_1 = copy.deepcopy(self.stack[date][burst]['ifgs'].processes['comp_refphase'])
                     res_2 = copy.deepcopy(self.stack[date][burst]['ifgs'].processes['subtr_refphase'])
 
@@ -1663,7 +1663,7 @@ class SingleMaster(object):
                     pix_0 = self.full_swath[date]['master'].processes['readfiles']['First_pixel (w.r.t. output_image)']
                     pix_1 = self.full_swath[date]['master'].processes['readfiles']['Last_pixel (w.r.t. output_image)']
 
-                    burst = self.stack[date].keys()[0]
+                    burst = list(self.stack[date].keys())[0]
                     res_1 = copy.deepcopy(self.stack[date][burst]['ifgs'].processes['comp_refdem'])
                     res_2 = copy.deepcopy(self.stack[date][burst]['ifgs'].processes['subtr_refdem'])
 
@@ -1748,7 +1748,7 @@ class SingleMaster(object):
                     pix_0 = self.full_swath[date]['master'].processes['readfiles']['First_pixel (w.r.t. output_image)']
                     pix_1 = self.full_swath[date]['master'].processes['readfiles']['Last_pixel (w.r.t. output_image)']
 
-                    burst = self.stack[date].keys()[0]
+                    burst = list(self.stack[date].keys())[0]
                     res = copy.deepcopy(self.stack[date][burst]['ifgs'].processes['coherence'])
 
                     res['First_line (w.r.t. original_master)'] = line_0
@@ -1813,7 +1813,7 @@ class SingleMaster(object):
                     pix_0 = self.full_swath[date]['master'].processes['readfiles']['First_pixel (w.r.t. output_image)']
                     pix_1 = self.full_swath[date]['master'].processes['readfiles']['Last_pixel (w.r.t. output_image)']
 
-                    burst = self.stack[date].keys()[0]
+                    burst = list(self.stack[date].keys())[0]
                     res = copy.deepcopy(self.stack[date][burst]['ifgs'].processes['filtphase'])
 
                     res['First_line (w.r.t. original_master)'] = line_0
@@ -1858,7 +1858,7 @@ class SingleMaster(object):
 
             # First create an phase input file for unwrapping
             pixels = self.full_swath[date]['ifgs'].processes['filtphase']['Number of pixels (multilooked)']
-            print pixels
+            print(pixels)
             pha = ' -w ' + pixels + ' -q phase -o float -M 1/1 -f cr4 -l1 ' \
                                     '-p1 -P' + pixels + ' cint_filt_ml.raw > unwrap_input.raw'
             os.system(self.cpxfiddle + pha)
@@ -1879,8 +1879,8 @@ class SingleMaster(object):
 
         # choose date closest to master as reference
         date = self.master_date
-        date_1 = self.stack.keys()[0]
-        bursts = self.stack[date_1].keys()
+        date_1 = list(self.stack.keys())[0]
+        bursts = list(self.stack[date_1].keys())
         burst_res = dict()
         image_res = dict()
 
@@ -2020,7 +2020,7 @@ class SingleMaster(object):
         # This function also accepts cpxint16 datatype
 
         if not dates:
-            dates = self.stack.keys()
+            dates = list(self.stack.keys())
         job_list1 = []
 
         for date in dates:
@@ -2125,7 +2125,7 @@ class SingleMaster(object):
         # Split full swath into different burst products. (to be used for DEM result splitting)
         
         if not dates:
-            dates = self.stack.keys()
+            dates = list(self.stack.keys())
         job_list1 = []
 
         for date in dates:
@@ -2172,7 +2172,7 @@ class SingleMaster(object):
         if stack_folder == False:
             stack_folder = self.stack_folder
         if not dates:
-            dates = stack.keys()
+            dates = list(stack.keys())
 
         paths = []
         for date in dates:
@@ -2200,7 +2200,7 @@ class SingleMaster(object):
         if stack_folder == False:
             stack_folder = self.stack_folder
         if not dates:
-            dates = stack.keys()
+            dates = list(stack.keys())
 
         paths = []
         for date in dates:
@@ -2230,7 +2230,7 @@ class SingleMaster(object):
         if stack_folder == False:
             stack_folder = self.stack_folder
         if not dates:
-            dates = stack.keys()
+            dates = list(stack.keys())
 
         paths = []
         for date in dates:
@@ -2293,14 +2293,14 @@ class SingleMaster(object):
         if not image_stack:
             image_stack = self.full_swath
         if dates == 'default':
-            dates = self.stack.keys()
+            dates = list(self.stack.keys())
         if not stack_folder:
             stack_folder = self.stack_folder
 
         for date in dates:
             for burst in burst_stack[date].keys():
 
-                files = burst_stack[date][burst].keys()
+                files = list(burst_stack[date][burst].keys())
                 if 'slave' in files:
                     slave_res = self.burst_path(date, burst, 'slave.res', stack_folder=stack_folder, full_path=True)
                     burst_stack[date][burst]['slave'].write(new_filename=slave_res)
@@ -2311,7 +2311,7 @@ class SingleMaster(object):
                     ifgs_res = self.burst_path(date,burst,'ifgs.res', stack_folder=stack_folder, full_path=True)
                     burst_stack[date][burst]['ifgs'].write(new_filename=ifgs_res)
 
-            files = image_stack[date].keys()
+            files = list(image_stack[date].keys())
             if 'slave' in files:
                 slave_res = self.image_path(date, 'slave.res', stack_folder=stack_folder)
                 image_stack[date]['slave'].write(new_filename=slave_res)
@@ -2329,15 +2329,15 @@ class SingleMaster(object):
         if image_stack == '':
             image_stack = self.full_swath
         if dates == 'default':
-            dates = self.stack.keys()
+            dates = list(self.stack.keys())
         if not stack_folder:
             stack_folder = self.stack_folder
         if not bursts:
             if dates[0] in burst_stack.keys():
-                bursts = burst_stack[dates[0]].keys()
+                bursts = list(burst_stack[dates[0]].keys())
             else:
-                date_1 = self.stack.keys()[0]
-                bursts = self.stack[date_1].keys()
+                date_1 = list(self.stack.keys())[0]
+                bursts = list(self.stack[date_1].keys())
         # TODO Maybe add search for folders and bursts if no specific date or burst is specified?
 
         for date in dates:
@@ -2375,7 +2375,7 @@ class SingleMaster(object):
     def del_res(self, type='ifgs', images=False, bursts=True, dates='default', stack_folder=''):
 
         if dates == 'default':
-            dates = self.stack.keys()
+            dates = list(self.stack.keys())
 
         for date in dates:
             for burst in self.stack[date].keys():
@@ -2397,7 +2397,7 @@ class SingleMaster(object):
         if not image_stack:
             image_stack = self.full_swath
         if dates == 'default':
-            dates = self.stack.keys()
+            dates = list(self.stack.keys())
 
         self.read_res(dates=dates) # Read data
 
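The same view-object issue recurs throughout single_master_stack.py: expressions such as self.stack.keys()[0] stop working on Python 3, hence list(self.stack.keys())[0], and range() now returns a lazy range object, hence list(range(...)) wherever the code later calls remove() or otherwise needs a real list. A quick sketch with an invented two-date stack:

    stack = {'2016-01-01': 'a', '2016-01-13': 'b'}

    first_date = list(stack.keys())[0]      # a keys() view cannot be subscripted
    slave_nums = list(range(len(stack)))    # a range object has no remove()
    slave_nums.remove(0)
    print(first_date, slave_nums)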
diff --git a/doris_stack/main_code/stack.py b/doris_stack/main_code/stack.py
index f7a2101..71bac02 100644
--- a/doris_stack/main_code/stack.py
+++ b/doris_stack/main_code/stack.py
@@ -12,7 +12,7 @@ import numpy as np
 from shapely.geometry import shape, mapping, box
 from shapely.ops import cascaded_union
 
-import image as image
+from . import image as image
 from doris.doris_stack.functions.load_shape_unzip import extract_kml_preview, shape_im_kml, load_shape
 from doris.doris_stack.main_code.dorisparameters import DorisParameters
 from doris.doris_stack.functions.burst_metadata import center_shape_from_res
@@ -103,7 +103,7 @@ class StackData(object):
             self.end_date = np.datetime64('now').astype('datetime64[s]')
         self.start_date = np.datetime64(start_date).astype('datetime64[s]')
 
-        if isinstance(polarisation, basestring):
+        if isinstance(polarisation, str):
             polarisation = [polarisation]
             for i in polarisation:
                 if not i in ['hh','vv','hv','vh']:
@@ -121,7 +121,7 @@ class StackData(object):
             if os.path.exists(precise_dir):
                 self.precise_orbits = precise_dir
             else:
-                print 'Precise orbit path does not exist'
+                print('Precise orbit path does not exist')
 
     def add_path(self,path):
         # This function adds the output path.
@@ -173,7 +173,7 @@ class StackData(object):
                 track_dir = os.path.join(track_dir, top_dirs[0])
             elif len(top_dirs) > 1:
                 for top_dir in top_dirs:
-                    user_input = raw_input("Do you want to use folder " + top_dir + " as resource folder? (yes/no)").lower()
+                    user_input = input("Do you want to use folder " + top_dir + " as resource folder? (yes/no)").lower()
                     if user_input in ['yes', 'y']:
                         track_dir = os.path.join(track_dir, top_dir)
 
@@ -491,7 +491,7 @@ class StackData(object):
         # coordinates variable
 
         if slaves is True:
-            dates = self.datastack.keys()
+            dates = list(self.datastack.keys())
         else:
             dates = [self.master_date]
 
@@ -507,7 +507,7 @@ class StackData(object):
             for swath in self.datastack[date].keys():
 
                 self.coordinates[date][swath] = OrderedDict()
-                self.coordinates[date][swath]['corners'] = np.zeros([len(self.datastack[date][swath].keys()), 4, 2],dtype='int')
+                self.coordinates[date][swath]['corners'] = np.zeros([len(list(self.datastack[date][swath].keys())), 4, 2],dtype='int')
 
                 b = 0
                 for burst in sorted(self.datastack[date][swath].keys(), key = lambda x: int(x[6:])):
@@ -584,7 +584,7 @@ class StackData(object):
                         read['Number_of_lines_output_image'] = str(max_line)
                         self.datastack[date][swath]['burst_' + str(burst+1)].processes['readfiles'] = read
                     else:
-                        print 'No resfile available, so information is not added to resfile'
+                        print('No resfile available, so information is not added to resfile')
 
     def write_stack(self,write_path='',no_data=False):
         # This function writes the full datastack to a given folder using the dates / swaths / bursts setup. This
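raw_input() was renamed to input() in Python 3, and the old Python 2 input(), which evaluated the typed text as an expression, is gone; the converted prompts therefore still return the raw string the user typed. A minimal sketch of a portable prompt, with the Python 2 fallback shown only as an assumption, not something this patch adds:

    try:                        # would only matter if Python 2 still had to be supported
        read_line = raw_input
    except NameError:           # Python 3: raw_input no longer exists
        read_line = input

    answer = read_line("Do you want to use this folder as resource folder? (yes/no) ").lower()
    print(answer)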
diff --git a/doris_stack/main_code/swath.py b/doris_stack/main_code/swath.py
index 6db92e3..5447a91 100644
--- a/doris_stack/main_code/swath.py
+++ b/doris_stack/main_code/swath.py
@@ -58,8 +58,8 @@ class SwathMeta(object):
             data = [os.path.join(path,'measurement',x) for x in data if x[12:14] in pol and x[6] == swath_no]
 
         # Check if the data is there and if the filenames coincide.
-        # print xml + str(len(xml))
-        # print data + str(len(data))
+        # print(xml + str(len(xml)))
+        # print(data + str(len(data)))
 
         if type(xml) is str:
             xml = [xml]
diff --git a/install/init_cfg.py b/install/init_cfg.py
index 29bf004..eb68d4f 100644
--- a/install/init_cfg.py
+++ b/install/init_cfg.py
@@ -13,7 +13,7 @@ def init_cfg():
 
     input = False
     while input == False:
-        user_input = raw_input("Enter the path to doris: ")
+        user_input = input("Enter the path to doris: ")
         if os.path.exists(user_input) and user_input.endswith('doris'):
             settings.find('.doris_path').text = user_input
             input = True
@@ -22,7 +22,7 @@ def init_cfg():
 
     input = False
     while input == False:
-        user_input = raw_input("Enter the path to cpxfiddle: ")
+        user_input = input("Enter the path to cpxfiddle: ")
         if os.path.exists(user_input) and user_input.endswith('cpxfiddle'):
             settings.find('.cpxfiddle_path').text = user_input
             input = True
@@ -31,7 +31,7 @@ def init_cfg():
 
     input = False
     while input == False:
-        user_input = raw_input("Enter the path to snaphu: ")
+        user_input = input("Enter the path to snaphu: ")
         if os.path.exists(user_input) and user_input.endswith('snaphu'):
             settings.find('.snaphu_path').text = user_input
             input = True
@@ -39,25 +39,25 @@ def init_cfg():
             print('The path is incorrect, use another path')
 
     # Now create the password file.
-    user_input = raw_input("Enter your username for scihub (https://scihub.copernicus.eu/dhus/#/self-registration)")
+    user_input = input("Enter your username for scihub (https://scihub.copernicus.eu/dhus/#/self-registration)")
     if len(user_input) > 0:
         settings.find('.scihub_username').text = user_input
     else:
         print('Username field is empty, you can change it later in the doris_config.xml file')
 
-    user_input = raw_input("Enter your password for scihub ")
+    user_input = input("Enter your password for scihub ")
     if len(user_input) > 0:
         settings.find('.scihub_password').text = user_input
     else:
         print('Password field is empty, you can change it later in the doris_config.xml file')
 
-    user_input = raw_input("Enter your username for srtm download (https://urs.earthdata.nasa.gov/users/new/)")
+    user_input = input("Enter your username for srtm download (https://urs.earthdata.nasa.gov/users/new/)")
     if len(user_input) > 0:
         settings.find('.usgs_username').text = user_input
     else:
         print('Username field is empty, you can change it later in the doris_config.xml file')
 
-    user_input = raw_input("Enter your password for srtm download ")
+    user_input = input("Enter your password for srtm download ")
     if len(user_input) > 0:
         settings.find('.usgs_password').text = user_input
     else:
diff --git a/prepare_stack/create_datastack_bash.py b/prepare_stack/create_datastack_bash.py
index 532f25d..3113004 100644
--- a/prepare_stack/create_datastack_bash.py
+++ b/prepare_stack/create_datastack_bash.py
@@ -41,7 +41,7 @@ class CreateBash(object):
         f.close()
 
         # make sure the file is executable
-        os.chmod(file_path, 0744)
+        os.chmod(file_path, 0o744)
 
         # Also create a download and dem creation bash script.
         file_path = os.path.join(stack_folder, 'create_dem.sh')
@@ -58,7 +58,7 @@ class CreateBash(object):
         f.close()
 
         # make sure the file is executable
-        os.chmod(file_path, 0744)
+        os.chmod(file_path, 0o744)
 
         file_path = os.path.join(stack_folder, 'download_sentinel.sh')
         f = open(file_path, 'w')
@@ -74,4 +74,4 @@ class CreateBash(object):
         f.close()
 
         # make sure the file is executable
-        os.chmod(file_path, 0744)
\ No newline at end of file
+        os.chmod(file_path, 0o744)
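Python 3 removed the bare 0744 octal literal syntax; the 0o744 spelling used above is also valid from Python 2.6 onwards and denotes the same permission bits, so the generated bash scripts keep mode rwxr--r--. A tiny sketch against a temporary file:

    import os, stat, tempfile

    fd, path = tempfile.mkstemp()
    os.close(fd)
    os.chmod(path, 0o744)   # same bits as the old 0744 literal
    print(oct(stat.S_IMODE(os.stat(path).st_mode)))
    os.remove(path)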
diff --git a/prepare_stack/create_dem.py b/prepare_stack/create_dem.py
index f8334d2..233e730 100644
--- a/prepare_stack/create_dem.py
+++ b/prepare_stack/create_dem.py
@@ -16,7 +16,7 @@ import numpy as np
 import gdal
 import gdalconst
 import osr
-from HTMLParser import HTMLParser
+from html.parser import HTMLParser
 import pickle
 import requests
 import os
@@ -436,7 +436,7 @@ class CreateDem:
             latlim = [min(lat), max(lat)]
             lonlim = [min(lon), max(lon)]
         else:
-            print 'format not recognized! Pleas creat either a .kml or .shp file.'
+            print('format not recognized! Please create either a .kml or .shp file.')
             return []
 
         return latlim, lonlim
@@ -580,9 +580,9 @@ class CreateDem:
 
             conn = requests.get(server + '/' + folder, auth=(username, password))
             if conn.status_code == 200:
-                print "status200 received ok"
+                print("status200 received ok")
             else:
-                print "an error occurred during connection"
+                print("an error occurred during connection")
 
             data = conn.text
             parser = parseHTMLDirectoryListing()
@@ -691,7 +691,7 @@ class CreateDem:
 # https://svn.openstreetmap.org/applications/utils/import/srtm2wayinfo/python/srtm.py
 class parseHTMLDirectoryListing(HTMLParser):
     def __init__(self):
-        # print "parseHTMLDirectoryListing.__init__"
+        # print("parseHTMLDirectoryListing.__init__")
         HTMLParser.__init__(self)
         self.title = "Undefined"
         self.isDirListing = False
@@ -702,7 +702,7 @@ class parseHTMLDirectoryListing(HTMLParser):
         self.currHref = ""
 
     def handle_starttag(self, tag, attrs):
-        # print "Encountered the beginning of a %s tag" % tag
+        # print("Encountered the beginning of a %s tag" % tag)
         if tag == "title":
             self.inTitle = True
         if tag == "a":
@@ -713,7 +713,7 @@ class parseHTMLDirectoryListing(HTMLParser):
                     self.currHref = attr[1]
 
     def handle_endtag(self, tag):
-        # print "Encountered the end of a %s tag" % tag
+        # print("Encountered the end of a %s tag" % tag)
         if tag == "title":
             self.inTitle = False
         if tag == "a":
@@ -727,9 +727,9 @@ class parseHTMLDirectoryListing(HTMLParser):
     def handle_data(self, data):
         if self.inTitle:
             self.title = data
-            print "title=%s" % data
+            print("title=%s" % data)
             if "Index of" in self.title:
-                # print "it is an index!!!!"
+                # print("it is an index!!!!")
                 self.isDirListing = True
         if self.inHyperLink:
             # We do not include parent directory in listing.
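On Python 3 the HTMLParser class lives in html.parser rather than in the Python 2 HTMLParser module; the subclass above keeps the same handle_starttag / handle_endtag / handle_data hooks, so only the import changes. A small self-contained sketch of the same subclassing pattern (LinkLister and the sample markup are invented for illustration):

    from html.parser import HTMLParser

    class LinkLister(HTMLParser):
        # Collect href attributes, roughly what parseHTMLDirectoryListing does.
        def __init__(self):
            HTMLParser.__init__(self)
            self.hrefs = []

        def handle_starttag(self, tag, attrs):
            if tag == "a":
                for name, value in attrs:
                    if name == "href":
                        self.hrefs.append(value)

    parser = LinkLister()
    parser.feed('<html><body><a href="S1A_OPER.EOF">orbit file</a></body></html>')
    print(parser.hrefs)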
diff --git a/prepare_stack/create_doris_input_xml.py b/prepare_stack/create_doris_input_xml.py
index e63462e..6754257 100644
--- a/prepare_stack/create_doris_input_xml.py
+++ b/prepare_stack/create_doris_input_xml.py
@@ -19,7 +19,7 @@ class CreateDorisInputXml(object):
 
         input = False
         while input == False:
-            user_input = raw_input("Enter the path to the archive data folder: ")
+            user_input = input("Enter the path to the archive data folder: ")
             if os.path.exists(user_input):
                 self.input_file_dict['sar_data_folder'] = user_input
                 input = True
@@ -28,7 +28,7 @@ class CreateDorisInputXml(object):
 
         input = False
         while input == False:
-            user_input = raw_input("Which polarisation do you want to use (vv,hh,vh,hv): ")
+            user_input = input("Which polarisation do you want to use (vv,hh,vh,hv): ")
             if user_input in ['vv', 'hh', 'vh', 'hv']:
                 self.input_file_dict['polarisation'] = user_input
                 input = True
@@ -37,7 +37,7 @@ class CreateDorisInputXml(object):
 
         input = False
         while input == False:
-            user_input = raw_input("Which track do you want to work with? (explore on https://scihub.copernicus.eu/dhus/) : ")
+            user_input = input("Which track do you want to work with? (explore on https://scihub.copernicus.eu/dhus/) : ")
             try:
                 input = str(int(user_input)).zfill(3)
                 self.input_file_dict['track'] = user_input
@@ -47,7 +47,7 @@ class CreateDorisInputXml(object):
 
         input = False
         while input == False:
-            user_input = raw_input("Is this track ascending or descending? (asc/dsc) : ")
+            user_input = input("Is this track ascending or descending? (asc/dsc) : ")
             if user_input in ['asc', 'dsc']:
                 self.input_file_dict['direction'] = user_input
                 input = True
@@ -56,7 +56,7 @@ class CreateDorisInputXml(object):
 
         input = False
         while input == False:
-            self.input_file_dict['datastack_folder'] = raw_input("Enter the path to the folder of new datastack: ")
+            self.input_file_dict['datastack_folder'] = input("Enter the path to the folder of new datastack: ")
             if os.path.exists(self.input_file_dict['datastack_folder']):
                 input = True
             else:
@@ -64,7 +64,7 @@ class CreateDorisInputXml(object):
 
         input = False
         while input == False:
-            self.input_file_dict['shape_file_path'] = raw_input("Enter full path to the shapefile: ")
+            self.input_file_dict['shape_file_path'] = input("Enter full path to the shapefile: ")
             if os.path.exists(self.input_file_dict['shape_file_path']) and self.input_file_dict['shape_file_path'].endswith('.shp'):
                 input = True
             else:
@@ -72,7 +72,7 @@ class CreateDorisInputXml(object):
 
         input = False
         while input == False:
-            user_input = raw_input("Enter the path to the folder of the orbit files: ")
+            user_input = input("Enter the path to the folder of the orbit files: ")
             if os.path.exists(user_input):
                 self.input_file_dict['orbits_folder'] = user_input
                 input = True
@@ -81,7 +81,7 @@ class CreateDorisInputXml(object):
 
         input = False
         while input == False:
-            user_input = raw_input("Do you want to generate the DEM file automaticly (Yes/No): ").lower()
+            user_input = input("Do you want to generate the DEM file automatically (Yes/No): ").lower()
             if user_input == 'yes' or user_input == 'no':
                 self.input_file_dict['generate_dem'] = user_input
                 input = True
@@ -90,7 +90,7 @@ class CreateDorisInputXml(object):
 
         input = False
         while input == False:
-            self.input_file_dict['dem_processing_folder'] = raw_input("Enter path to the dem folder: ")
+            self.input_file_dict['dem_processing_folder'] = input("Enter path to the dem folder: ")
             self.input_file_dict['dem_folder'] = os.path.join(self.input_file_dict['datastack_folder'], 'dem')
             if os.path.exists(self.input_file_dict['dem_processing_folder']):
                 input = True
@@ -99,7 +99,7 @@ class CreateDorisInputXml(object):
 
         input = False
         while input == False:
-            user_input = raw_input("Do you want to use parallel computing (Yes/No): ").lower()
+            user_input = input("Do you want to use parallel computing (Yes/No): ").lower()
             if user_input == 'yes' or user_input == 'no':
                 self.input_file_dict['parallel'] = user_input
                 input = True
@@ -107,12 +107,12 @@ class CreateDorisInputXml(object):
-                print('You should use either yes of no')
+                print('You should use either yes or no')
 
         if user_input == 'yes':
-            nodes = raw_input("How many cores do you want to use: ")
+            nodes = input("How many cores do you want to use: ")
             self.input_file_dict['cores'] = nodes
 
         input = False
         while input == False:
-            user_input = raw_input("What is the start date of your stack in yyyy-mm-dd (can be changed later): ").lower()
+            user_input = input("What is the start date of your stack in yyyy-mm-dd (can be changed later): ").lower()
             try:
                 date = datetime.strptime(user_input, '%Y-%m-%d')
                 self.input_file_dict['start_date'] = user_input
@@ -123,7 +123,7 @@ class CreateDorisInputXml(object):
 
         input = False
         while input == False:
-            user_input = raw_input("What is the end date of your stack in yyyy-mm-dd (can be changed later): ").lower()
+            user_input = input("What is the end date of your stack in yyyy-mm-dd (can be changed later): ").lower()
             try:
                 date = datetime.strptime(user_input, '%Y-%m-%d')
                 self.input_file_dict['end_date'] = user_input
@@ -134,7 +134,7 @@ class CreateDorisInputXml(object):
 
         input = False
         while input == False:
-            user_input = raw_input("What is the master date of your stack in yyyy-mm-dd (can be changed later): ").lower()
+            user_input = input("What is the master date of your stack in yyyy-mm-dd (can be changed later): ").lower()
             try:
                 date = datetime.strptime(user_input, '%Y-%m-%d')
                 self.input_file_dict['master_date'] = user_input
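One caveat with the raw_input to input conversion in install/init_cfg.py and in the prompts above: both files use a loop flag that is itself named input (input = False, while input == False:), and because that name is assigned in the same scope, the converted call input(...) resolves to the boolean flag and fails with TypeError: 'bool' object is not callable. The patch leaves those flag assignments untouched, so a likely follow-up is to rename the flag; a sketch with the invented name valid:

    # Hypothetical rework of one prompt loop; 'valid' is an invented name
    # chosen so the builtin input() is no longer shadowed.
    valid = False
    while not valid:
        user_input = input("Which polarisation do you want to use (vv,hh,vh,hv): ")
        if user_input in ['vv', 'hh', 'vh', 'hv']:
            valid = True
        else:
            print('Unrecognised polarisation, try again')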
diff --git a/prepare_stack/download_sentinel_data_orbits.py b/prepare_stack/download_sentinel_data_orbits.py
index 4d18010..a434ef5 100644
--- a/prepare_stack/download_sentinel_data_orbits.py
+++ b/prepare_stack/download_sentinel_data_orbits.py
@@ -1,8 +1,8 @@
 # This file contains a function to check which files for sentinel are available, which ones are downloaded and a quality
 # check for the files which are downloaded.
 
-import urllib
-import urllib2
+import urllib.request, urllib.parse, urllib.error
+
 import ssl
 import re
 import os, sys
@@ -56,19 +56,19 @@ def sentinel_available(start_day='', end_day='', sensor_mode='', product='', lev
 
     # Finally we do the query to get the search result.
     string = string[5:] + '&rows=1000'
-    url = 'https://scihub.copernicus.eu/dhus/search?q=' + urllib.quote_plus(string)
+    url = 'https://scihub.copernicus.eu/dhus/search?q=' + urllib.parse.quote_plus(string)
     print(url)
 
     print('Requesting available products: ' + url)
-    request = urllib2.Request(url)
+    request = urllib.request.Request(url)
-    base64string = base64.b64encode('%s:%s' % (user, password))
+    base64string = base64.b64encode(('%s:%s' % (user, password)).encode('ascii')).decode('ascii')
     request.add_header("Authorization", "Basic %s" % base64string)
 
     # connect to server. Hopefully this works at once
     try:
-        dat = urllib2.urlopen(request)
+        dat = urllib.request.urlopen(request)
     except:
-        print 'not possible to connect this time'
+        print('not possible to connect this time')
         return [], [], []
 
     html_dat = ''
@@ -115,7 +115,7 @@ def load_shape_info(shapefile):
             st = st + str(p[0]) + ' ' + str(p[1]) + ','
         st = st[:-1] + ')'
     else:
-        print 'format not recognized! Pleas creat either a .kml or .shp file.'
+        print('format not recognized! Please create either a .kml or .shp file.')
         return []
 
     return st
@@ -128,7 +128,7 @@ def sentinel_check_validity(products=[], destination_folder='', user='', passwor
     invalid_files = []
 
     if not products:
-        print 'Nothing to check'
+        print('Nothing to check')
         return
 
     for product in products:
@@ -183,7 +183,7 @@ def sentinel_download(products=[], xml_only=False,  destination_folder='', proje
     # Download the files which are found by the sentinel_available script.
 
     if not products:
-        print 'No files to download'
+        print('No files to download')
         return
 
     wget_base = 'wget --retry-connrefused --waitretry=1 --read-timeout=20 --timeout=15 --continue --tries=20 --no-check-certificate --user=' + user + ' --password=' + password + ' '
@@ -192,7 +192,7 @@ def sentinel_download(products=[], xml_only=False,  destination_folder='', proje
         date = str(product.findall('date')[1].text)
         date = datetime.datetime.strptime(date[:19], '%Y-%m-%dT%H:%M:%S')
 
-        url = str('"'+product.findall('link')[0].attrib['href'][:-6]+ urllib.quote_plus('$value') +'"')
+        url = str('"'+product.findall('link')[0].attrib['href'][:-6]+ urllib.parse.quote_plus('$value') +'"')
         name = str(product.find('title').text)
 
         track = str(product.find('int[@name="relativeorbitnumber"]').text)
@@ -246,8 +246,8 @@ def sentinel_download(products=[], xml_only=False,  destination_folder='', proje
         kml = "'map-overlay.kml'"
         dat = "'" + name + ".SAFE'"
 
-        preview_url = url[:-10] + '/Nodes(' + dat + ')/Nodes(' + prev + ')/Nodes(' + png + ')/' + urllib.quote_plus('$value') + '"'
-        kml_url = url[:-10] + '/Nodes(' + dat + ')/Nodes(' + prev + ')/Nodes(' + kml + ')/' + urllib.quote_plus('$value') + '"'
+        preview_url = url[:-10] + '/Nodes(' + dat + ')/Nodes(' + prev + ')/Nodes(' + png + ')/' + urllib.parse.quote_plus('$value') + '"'
+        kml_url = url[:-10] + '/Nodes(' + dat + ')/Nodes(' + prev + ')/Nodes(' + kml + ')/' + urllib.parse.quote_plus('$value') + '"'
 
         # Download data files and create symbolic link
         if xml_only == False: # So we also download the file
@@ -288,16 +288,16 @@ def sentinel_download(products=[], xml_only=False,  destination_folder='', proje
 def sentinel_quality_check(filename, uuid, user, password):
     # Check whether the zip files can be unpacked or not. This is part of the download procedure.
 
-    checksum_url = "https://scihub.copernicus.eu/dhus/odata/v1/Products('" + uuid + "')/Checksum/Value/" + urllib.quote_plus('$value')
-    request = urllib2.Request(checksum_url)
+    checksum_url = "https://scihub.copernicus.eu/dhus/odata/v1/Products('" + uuid + "')/Checksum/Value/" + urllib.parse.quote_plus('$value')
+    request = urllib.request.Request(checksum_url)
-    base64string = base64.b64encode('%s:%s' % (user, password))
+    base64string = base64.b64encode(('%s:%s' % (user, password)).encode('ascii')).decode('ascii')
     request.add_header("Authorization", "Basic %s" % base64string)
 
     # connect to server. Hopefully this works at once
     try:
-        dat = urllib2.urlopen(request)
+        dat = urllib.request.urlopen(request)
     except:
-        print 'not possible to connect this time'
+        print('not possible to connect this time')
         return False
 
     html_dat = ''
@@ -336,9 +336,9 @@ def download_orbits(start_date, end_date, pages=30, precise_folder='', restitute
             url = 'https://qc.sentinel1.eo.esa.int/aux_poeorb/?page=' + str(i + 1)
             gcontext = ssl.SSLContext(ssl.PROTOCOL_TLSv1)
             try:
-                page = urllib2.urlopen(url, context=gcontext)
+                page = urllib.request.urlopen(url, context=gcontext)
             except TypeError:
-                page = urllib2.urlopen(url)
+                page = urllib.request.urlopen(url)
 
             html = page.read().split('\n')
             orb_files = []
@@ -360,9 +360,9 @@ def download_orbits(start_date, end_date, pages=30, precise_folder='', restitute
                     url = 'https://qc.sentinel1.eo.esa.int/aux_poeorb/' + orb
                     if not os.path.exists(filename):
                         try:
-                            urllib.urlretrieve(url, filename, context=gcontext)
+                            urllib.request.urlretrieve(url, filename, context=gcontext)
                         except TypeError:
-                            urllib.urlretrieve(url, filename)
+                            urllib.request.urlretrieve(url, filename)
                         print(orb + ' downloaded')
                     else:
                         print(orb + ' already downloaded')
@@ -387,9 +387,9 @@ def download_orbits(start_date, end_date, pages=30, precise_folder='', restitute
                 url = 'https://qc.sentinel1.eo.esa.int/aux_resorb/?page=' + str(i + 1)
                 gcontext = ssl.SSLContext(ssl.PROTOCOL_TLSv1)
                 try:
-                    page = urllib2.urlopen(url, context=gcontext)
+                    page = urllib.request.urlopen(url, context=gcontext)
                 except TypeError:
-                    page = urllib2.urlopen(url)
+                    page = urllib.request.urlopen(url)
 
                 html = page.read().split('\n')
                 orb_files = []
@@ -408,9 +408,9 @@ def download_orbits(start_date, end_date, pages=30, precise_folder='', restitute
                         url = 'https://qc.sentinel1.eo.esa.int/aux_poeorb/' + orb
                         if not os.path.exists(filename):
                             try:
-                                urllib.urlretrieve(url, filename, context=gcontext)
+                                urllib.request.urlretrieve(url, filename, context=gcontext)
                             except TypeError:
-                                urllib.urlretrieve(url, filename)
+                                urllib.request.urlretrieve(url, filename)
                             print(orb + ' downloaded')
                         else:
                             print(orb + ' already downloaded')
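Python 3 split the old urllib / urllib2 pair into urllib.request, urllib.parse and urllib.error, which is what the import and call-site changes above reflect: quote_plus() comes from urllib.parse, while Request, urlopen() and urlretrieve() come from urllib.request. Two bytes-versus-text points in the surrounding code matter as well: base64.b64encode() takes and returns bytes on Python 3, which is why the authorisation header above encodes the credentials and decodes the result, and the response object returned by urlopen() yields bytes, so the remaining read().split('\n') calls still need a decode() on Python 3. A sketch with a placeholder query and credentials, the network call left commented out:

    import base64
    import urllib.parse
    import urllib.request

    user, password = 'username', 'password'        # placeholders
    url = ('https://scihub.copernicus.eu/dhus/search?q='
           + urllib.parse.quote_plus('platformname:Sentinel-1'))

    request = urllib.request.Request(url)
    token = base64.b64encode(('%s:%s' % (user, password)).encode('ascii')).decode('ascii')
    request.add_header('Authorization', 'Basic %s' % token)

    # with urllib.request.urlopen(request) as response:
    #     lines = response.read().decode('utf-8').split('\n')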
diff --git a/prepare_stack/prepare_datastack.py b/prepare_stack/prepare_datastack.py
index e414b80..d71a817 100644
--- a/prepare_stack/prepare_datastack.py
+++ b/prepare_stack/prepare_datastack.py
@@ -11,10 +11,10 @@
 # - the dem source folder where the intermediate DEM data is stored
 
 import os
-from create_dem import CreateDem
-from create_inputfiles import CreateInputFiles
-from create_doris_input_xml import CreateDorisInputXml
-from create_datastack_bash import CreateBash
+from .create_dem import CreateDem
+from .create_inputfiles import CreateInputFiles
+from .create_doris_input_xml import CreateDorisInputXml
+from .create_datastack_bash import CreateBash
 import xml.etree.ElementTree as ET
 
 class PrepareDatastack(object):
diff --git a/prepare_stack/prepare_datastack_main.py b/prepare_stack/prepare_datastack_main.py
index 8b6277e..b1df959 100644
--- a/prepare_stack/prepare_datastack_main.py
+++ b/prepare_stack/prepare_datastack_main.py
@@ -1,5 +1,5 @@
 import argparse
-from prepare_datastack import PrepareDatastack
+from .prepare_datastack import PrepareDatastack
 
 """Doris prepare datastack
 arguments:  --doris_input_file, -i

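A final note on the explicit relative import in prepare_datastack_main.py: this file is the argparse entry point and is normally executed directly, and a file run as a plain script has no parent package, so from .prepare_datastack import PrepareDatastack raises an ImportError there, whereas the old bare import kept working because the script's own directory is on sys.path. Running the file as a module avoids this; the invocation below assumes the scripts form an importable package named prepare_stack and uses an illustrative input file name:

    # Works: the leading dot is resolved against the prepare_stack package.
    #   python3 -m prepare_stack.prepare_datastack_main -i doris_input.xml
    #
    # Fails with the relative import, because the file has no parent package:
    #   python3 prepare_stack/prepare_datastack_main.py -i doris_input.xml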