##############################################################################
#
# Copyright (c) 2003-2018 by The University of Queensland
# http://www.uq.edu.au
#
# Primary Business: Queensland, Australia
# Licensed under the Apache License, version 2.0
# http://www.apache.org/licenses/LICENSE-2.0
#
# Development until 2012 by Earth Systems Science Computational Center (ESSCC)
# Development 2012-2013 by School of Earth Sciences
# Development from 2014 by Centre for Geoscience Computing (GeoComp)
#
##############################################################################
EnsureSConsVersion(0,98,1)
EnsurePythonVersion(2,5)
import atexit, sys, os, platform, re
from distutils import sysconfig
from dependencies import *
from site_init import *
print(sys.version)
# Version number to check for in options file. Increment when new features are
# added or existing options changed.
REQUIRED_OPTS_VERSION=203
# MS Windows support, many thanks to PH
IS_WINDOWS = (os.name == 'nt')
if IS_WINDOWS:
    IS_OSX = False
else:
    IS_OSX = (os.uname()[0] == 'Darwin')
########################## Determine options file ############################
# 1. command line
# 2. scons/<hostname>_options.py
# 3. name as part of a cluster
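# The search can also be bypassed explicitly on the command line, e.g.
# (the hostname placeholder below is illustrative):
#   scons options_file=scons/<myhost>_options.py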
options_file=ARGUMENTS.get('options_file', None)
if not options_file:
    ext_dir = os.path.join(os.getcwd(), 'scons')
    hostname = platform.node().split('.')[0]
    for name in hostname, effectiveName(hostname):
        mangledhostname = re.sub('[^0-9a-zA-Z]', '_', name)
        options_file = os.path.join(ext_dir, mangledhostname+'_options.py')
        if os.path.isfile(options_file): break

if not os.path.isfile(options_file):
    print("\nWARNING:\nOptions file %s" % options_file)
    print("not found! Default options will be used which are most likely suboptimal.")
    print("We recommend that you copy the most relevant options file in the scons/templates/")
    print("subdirectory and customize it to your needs.\n")
    options_file = None
############################### Build options ################################
default_prefix='/usr'
mpi_flavours=('no', 'none', 'MPT', 'MPICH', 'MPICH2', 'OPENMPI', 'INTELMPI')
netcdf_flavours = ('no', 'off', 'none', 'False', # Must be last of the false alternatives
'yes', 'on', 'True', '3', # Must be last of the version 3 alternatives
'4')
all_domains = ['dudley','finley','ripley','speckley']
# Note that scons construction vars serve the following purposes:
# CPPFLAGS -> to the preprocessor
# CCFLAGS -> flags for _both_ C and C++
# CXXFLAGS -> flags for c++ _only_
# CFLAGS -> flags for c only
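# For example, the cc_flags/cc_optim/cc_debug values declared below are appended
# to CCFLAGS, cxx_extra goes to CXXFLAGS and ld_extra to LINKFLAGS (see further down).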
vars = Variables(options_file, ARGUMENTS)
vars.AddVariables(
PathVariable('options_file', 'Path to options file', options_file, PathVariable.PathIsFile),
PathVariable('prefix', 'Installation prefix', Dir('#.').abspath, PathVariable.PathIsDirCreate),
PathVariable('build_dir', 'Top-level build directory', Dir('#/build').abspath, PathVariable.PathIsDirCreate),
BoolVariable('verbose', 'Output full compile/link lines', False),
# Compiler/Linker options
('cxx', 'Path to C++ compiler', 'default'),
('cc_flags', 'Base (C and C++) compiler flags', 'default'),
('cc_optim', 'Additional (C and C++) flags for a non-debug build', 'default'),
('cc_debug', 'Additional (C and C++) flags for a debug build', 'default'),
('cxx_extra', 'Extra C++ compiler flags', ''),
('ld_extra', 'Extra linker flags', ''),
BoolVariable('werror','Treat compiler warnings as errors', True),
BoolVariable('debug', 'Compile with debug flags', False),
BoolVariable('openmp', 'Compile parallel version using OpenMP', False),
('omp_flags', 'OpenMP compiler flags', 'default'),
('omp_ldflags', 'OpenMP linker flags', 'default'),
# Mandatory libraries
('boost_prefix', 'Prefix/Paths of boost installation', default_prefix),
('boost_libs', 'Boost libraries to link with', ['boost_python-mt']),
# Mandatory for tests
('cppunit_prefix', 'Prefix/Paths of CppUnit installation', default_prefix),
('cppunit_libs', 'CppUnit libraries to link with', ['cppunit']),
# Optional libraries and options
EnumVariable('mpi', 'Compile parallel version using MPI flavour', 'none', allowed_values=mpi_flavours),
('mpi_prefix', 'Prefix/Paths of MPI installation', default_prefix),
('mpi_libs', 'MPI shared libraries to link with', ['mpi']),
BoolVariable('use_gmsh', 'Enable gmsh, if available', True),
EnumVariable('netcdf', 'Enable netCDF file support', False, allowed_values=netcdf_flavours),
('netcdf_prefix', 'Prefix/Paths of netCDF installation', default_prefix),
('netcdf_libs', 'netCDF libraries to link with', 'DEFAULT'),
BoolVariable('parmetis', 'Enable ParMETIS (requires MPI)', False),
('parmetis_prefix', 'Prefix/Paths of ParMETIS installation', default_prefix),
('parmetis_libs', 'ParMETIS libraries to link with', ['parmetis', 'metis']),
BoolVariable('mkl', 'Enable the Math Kernel Library', False),
('mkl_prefix', 'Prefix/Paths to MKL installation', default_prefix),
('mkl_libs', 'MKL libraries to link with', ['mkl_solver','mkl_em64t','guide','pthread']),
BoolVariable('umfpack', 'Enable UMFPACK', False),
('umfpack_prefix', 'Prefix/Paths to UMFPACK installation', default_prefix),
('umfpack_libs', 'UMFPACK libraries to link with', ['umfpack']),
BoolVariable('mumps', 'Enable MUMPS', False),
('mumps_prefix', 'Prefix/Paths to MUMPS installation', default_prefix),
('mumps_libs', 'MUMPS libraries to link with', ['mumps_common','pord','dmumps','zmumps',
'mpiseq','lapack','metis','scotch','esmumps','gfortran']),
TristateVariable('lapack', 'Enable LAPACK', 'auto'),
('lapack_prefix', 'Prefix/Paths to LAPACK installation', default_prefix),
('lapack_libs', 'LAPACK libraries to link with', []),
BoolVariable('silo', 'Enable the Silo file format in weipa', False),
('silo_prefix', 'Prefix/Paths to Silo installation', default_prefix),
('silo_libs', 'Silo libraries to link with', ['siloh5', 'hdf5']),
BoolVariable('trilinos', 'Enable the Trilinos solvers', False),
('trilinos_prefix', 'Prefix/Paths to Trilinos installation', default_prefix),
('trilinos_libs', 'Trilinos libraries to link with', []),
BoolVariable('visit', 'Enable the VisIt simulation interface', False),
('visit_prefix', 'Prefix/Paths to VisIt installation', default_prefix),
('visit_libs', 'VisIt libraries to link with', ['simV2']),
ListVariable('domains', 'Which domains to build', 'all', all_domains),
BoolVariable('paso', 'Build Paso solver library', True),
BoolVariable('weipa', 'Build Weipa data export library', True),
('mathjax_path', 'Path to MathJax.js file', 'default'),
# Advanced settings
('launcher', 'Launcher command (e.g. mpirun)', 'default'),
('prelaunch', 'Command to execute before launcher (e.g. mpdboot)', 'default'),
('postlaunch', 'Command to execute after launcher (e.g. mpdexit)', 'default'),
#dudley_assemble_flags = -funroll-loops to actually do something
('dudley_assemble_flags', 'compiler flags for some dudley optimisations', ''),
# To enable passing function pointers through python
BoolVariable('iknowwhatimdoing', 'Allow non-standard C', False),
# An option for specifying the compiler tools
('tools_names', 'Compiler tools to use', ['default']),
('env_export', 'Environment variables to be passed to tools',[]),
TristateVariable('forcelazy', 'For testing use only - set the default value for autolazy', 'auto'),
TristateVariable('forcecollres', 'For testing use only - set the default value for force resolving collective ops', 'auto'),
BoolVariable('build_shared', '(deprecated option, ignored)', True),
('sys_libs', 'Extra libraries to link with', []),
('escript_opts_version', 'Version of options file (do not specify on command line)'),
('SVN_VERSION', 'Do not use from options file', -2),
('pythoncmd', 'which python to compile with', sys.executable),
('pythonlibname', 'Name of the python library to link. (This is found automatically for python2.X.)', ''),
('pythonlibpath', 'Path to the python library. (You should not need to set this unless your python has moved)',''),
  ('pythonincpath','Path to python include files. (You should not need to set this unless your python has moved)',''),
BoolVariable('longindices', 'use long indices (for very large matrices)', False),
BoolVariable('compressed_files','Enables reading from compressed binary files', True),
('compression_libs', 'Compression libraries to link with', ['boost_iostreams']),
BoolVariable('disable_boost_numpy', 'Do not build using boost_numpy, even if it is available', False),
BoolVariable('osx_dependency_fix', 'Fix dependencies for libraries to have absolute paths (OSX)', False),
BoolVariable('stdlocationisprefix', 'Set the prefix as escript root in the launcher', False),
BoolVariable('mpi_no_host', 'Do not specify --host in run-escript launcher (only OPENMPI)', False),
BoolVariable('insane', 'Instructs scons to not run a sanity check after compilation.', False)
)
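# Any of the variables above can also be overridden on the scons command line,
# e.g. (values are illustrative): scons openmp=1 mpi=OPENMPI boost_prefix=/usr/local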
##################### Create environment and help text #######################
# Intel's compiler uses regular expressions improperly and emits a warning
# about failing to find the compilers. This warning can be safely ignored.
# PATH is needed so the compiler, linker and tools are found if they are not
# in default locations.
env = Environment(tools = ['default'], options = vars,
ENV = {'PATH': os.environ['PATH']})
# set the vars for clang
def mkclang(env):
    env['CXX'] = 'clang++'

if env['tools_names'] != ['default']:
    zz = env['tools_names']
    if 'clang' in zz:
        zz.remove('clang')
        zz.insert(0, mkclang)
    env = Environment(tools = ['default'] + env['tools_names'], options = vars,
                      ENV = {'PATH': os.environ['PATH']})
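# Note: the environment is constructed twice - the first (default-tools) pass is only
# needed to read 'tools_names' from the options, the second pass loads those tools.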
# Convert env['netcdf'] into one of False, 3, 4
# Also choose default values for libraries
pos1=netcdf_flavours.index('False')
pos2=netcdf_flavours.index('3')
mypos=netcdf_flavours.index(env['netcdf'])
if 0 <= mypos <= pos1:
    env['netcdf'] = 0
elif pos1 < mypos <= pos2:
    env['netcdf'] = 3
    if env['netcdf_libs'] == 'DEFAULT':
        env['netcdf_libs'] = ['netcdf_c++', 'netcdf']
else: # netcdf4
    env['netcdf'] = 4
    if env['netcdf_libs'] == 'DEFAULT':
        env['netcdf_libs'] = ['netcdf_c++4']
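# e.g. netcdf='yes' or '3' selects the netCDF-3 C++ interface (netcdf_c++ + netcdf),
# netcdf='4' selects the netCDF-4 C++ interface (netcdf_c++4), and anything in the
# 'no' group disables netCDF support entirely.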
if options_file:
    opts_valid = False
    if 'escript_opts_version' in env.Dictionary() and \
       int(env['escript_opts_version']) >= REQUIRED_OPTS_VERSION:
        opts_valid = True
    if opts_valid:
        print("Using options in %s." % options_file)
    else:
        print("\nOptions file %s" % options_file)
        print("is outdated! Please update the file after reading scons/templates/README_FIRST")
        print("and setting escript_opts_version to %d.\n" % REQUIRED_OPTS_VERSION)
        Exit(1)
# Generate help text (scons -h)
Help(vars.GenerateHelpText(env))
# Check for superfluous options
if len(vars.UnknownVariables()) > 0:
    for k in vars.UnknownVariables():
        print("Unknown option '%s'" % k)
    Exit(1)

if 'dudley' in env['domains']:
    env['domains'].append('finley')
env['domains'] = sorted(set(env['domains']))
# create dictionary which will be populated with info for buildvars file
env['buildvars'] = {}
# create list which will be populated with warnings if there are any
env['warnings'] = []
#################### Make sure install directories exist #####################
env['BUILD_DIR'] = Dir(env['build_dir']).abspath
prefix = Dir(env['prefix']).abspath
env['buildvars']['prefix'] = prefix
env['incinstall'] = os.path.join(prefix, 'include')
env['bininstall'] = os.path.join(prefix, 'bin')
if IS_WINDOWS:
    env['libinstall'] = env['bininstall']
else:
    env['libinstall'] = os.path.join(prefix, 'lib')
env['pyinstall'] = os.path.join(prefix, 'esys')
if not os.path.isdir(env['bininstall']):
    os.makedirs(env['bininstall'])
if not os.path.isdir(env['libinstall']):
    os.makedirs(env['libinstall'])
if not os.path.isdir(env['pyinstall']):
    os.makedirs(env['pyinstall'])
env.Append(CPPPATH = [env['incinstall']])
env.Append(LIBPATH = [env['libinstall']])
################# Fill in compiler options if not set above ##################
if env['cxx'] != 'default':
    env['CXX'] = env['cxx']
# default compiler/linker options
cc_flags = '-std=c++11'
cc_optim = ''
cc_debug = ''
omp_flags = ''
omp_ldflags = ''
fatalwarning = '' # switch to turn warnings into errors
sysheaderopt = '' # how to indicate that a header is a system header
# env['CXX'] might be a full path
cc_name=os.path.basename(env['CXX'])
if cc_name == 'icpc':
    # Intel compiler
    # #1478: class "std::auto_ptr<...>" was declared deprecated
    # #1875: offsetof applied to non-POD types is nonstandard (in boost)
    # removed -std=c99 because icpc doesn't like it and we aren't using c anymore
    cc_flags = "-std=c++11 -fPIC -w2 -wd1875 -wd1478 -Wno-unknown-pragmas"
    cc_optim = "-Ofast -ftz -fno-alias -xCORE-AVX2 -ipo"
    #cc_optim = "-Ofast -ftz -fno-alias -inline-level=2 -ipo -xCORE-AVX2"
    #cc_optim = "-O2 -ftz -fno-alias -inline-level=2"
    #cc_optim = "-O0 -ftz -fno-alias"
    #cc_optim = "-O3 -ftz -fno-alias -inline-level=2 -ipo -xHost"
    cc_debug = "-g -O0 -DDOASSERT -DDOPROF -DBOUNDS_CHECK -DSLOWSHARECHECK"
    omp_flags = "-qopenmp"
    omp_ldflags = "-qopenmp" # removing -openmp-report (which is deprecated) because the replacement outputs to a file
    fatalwarning = "-Werror"
elif cc_name[:3] == 'g++':
    # GNU C++ on any system
    # note that -ffast-math is not used because it breaks isnan(),
    # see mantis #691
    cc_flags = "-std=c++11 -pedantic -Wall -fPIC -finline-functions"
    cc_flags += " -Wno-unknown-pragmas -Wno-sign-compare -Wno-system-headers -Wno-long-long -Wno-strict-aliasing"
    cc_flags += " -Wno-unused-function"
    cc_flags += " -Wno-stringop-truncation -Wno-deprecated-declarations --param=max-vartrack-size=100000000"
    cc_optim = "-O3"
    # max-vartrack-size: avoid vartrack limit being exceeded with escriptcpp.cpp
    cc_debug = "-g3 -O0 -DDOASSERT -DDOPROF -DBOUNDS_CHECK -DSLOWSHARECHECK --param=max-vartrack-size=100000000"
    # Removed because new netcdf doesn't seem to like it
    #cc_debug += ' -D_GLIBCXX_DEBUG '
    omp_flags = "-fopenmp"
    omp_ldflags = "-fopenmp"
    fatalwarning = "-Werror"
    sysheaderopt = "-isystem"
elif cc_name == 'cl':
    # Microsoft Visual C on Windows
    cc_flags = "/EHsc /MD /GR /wd4068 /D_USE_MATH_DEFINES /DDLL_NETCDF"
    cc_optim = "/O2 /Op /W3"
    cc_debug = "/Od /RTCcsu /ZI /DBOUNDS_CHECK"
    fatalwarning = "/WX"
elif cc_name == 'icl':
    # Intel C on Windows
    cc_flags = '/EHsc /GR /MD'
    cc_optim = '/fast /Oi /W3 /Qssp /Qinline-factor- /Qinline-min-size=0 /Qunroll'
    cc_debug = '/Od /RTCcsu /Zi /Y- /debug:all /Qtrapuv'
    omp_flags = '/Qvec-report0 /Qopenmp /Qopenmp-report0 /Qparallel'
    omp_ldflags = '/Qvec-report0 /Qopenmp /Qopenmp-report0 /Qparallel'
elif cc_name == 'clang++':
    # Clang++ on any system
    cc_flags = "-std=c++11 -Wall -fPIC -fdiagnostics-color=always -Wno-uninitialized "
    cc_flags += "-Wno-unused-private-field -Wno-unknown-pragmas "
    if env['trilinos'] is True:
        cc_flags += "-Wno-unused-variable -Wno-exceptions -Wno-deprecated-declarations"
    cc_optim = "-O3"
    cc_debug = "-ggdb3 -O0 -fdiagnostics-fixit-info -pedantic "
    cc_debug += "-DDOASSERT -DDOPROF -DBOUNDS_CHECK -DSLOWSHARECHECK "
    omp_flags = "-fopenmp"
    omp_ldflags = "-fopenmp"
    fatalwarning = "-Werror"
    sysheaderopt = "-isystem"
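# Any other compiler name falls through to the generic defaults set above
# (-std=c++11 as the only base flag, no OpenMP flags, no fatal-warning or
# system-header options).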
env['sysheaderopt']=sysheaderopt
# set defaults if not otherwise specified
if env['cc_flags'] == 'default': env['cc_flags'] = cc_flags
if env['cc_optim'] == 'default': env['cc_optim'] = cc_optim
if env['cc_debug'] == 'default': env['cc_debug'] = cc_debug
if env['omp_flags'] == 'default': env['omp_flags'] = omp_flags
if env['omp_ldflags'] == 'default': env['omp_ldflags'] = omp_ldflags
if env['cxx_extra'] != '': env.Append(CXXFLAGS = env['cxx_extra'])
if env['ld_extra'] != '': env.Append(LINKFLAGS = env['ld_extra'])
if env['longindices']:
    if env['paso']:
        env.Append(CPPDEFINES = ['ESYS_INDEXTYPE_LONG'])
    else:
        env['warnings'].append("The longindices feature requires paso!")
# set up the autolazy values
if env['forcelazy'] == 1:
    env.Append(CPPDEFINES=['FAUTOLAZYON'])
elif env['forcelazy'] == 0:
    env.Append(CPPDEFINES=['FAUTOLAZYOFF'])
# set up the collective resolve values
if env['forcecollres'] == 1:
    env.Append(CPPDEFINES=['FRESCOLLECTON'])
elif env['forcecollres'] == 0:
    env.Append(CPPDEFINES=['FRESCOLLECTOFF'])
# allow non-standard C if requested
if env['iknowwhatimdoing']:
    env.Append(CPPDEFINES=['IKNOWWHATIMDOING'])
# Disable OpenMP if no flags provided
if env['openmp'] and env['omp_flags'] == '':
    env['warnings'].append("OpenMP requested but no flags provided - disabling OpenMP!")
    env['openmp'] = False
if env['openmp']:
    env.Append(CCFLAGS = env['omp_flags'])
    if env['omp_ldflags'] != '': env.Append(LINKFLAGS = env['omp_ldflags'])
else:
    env['omp_flags'] = ''
    env['omp_ldflags'] = ''
env['buildvars']['openmp']=int(env['openmp'])
# add debug/non-debug compiler flags
env['buildvars']['debug']=int(env['debug'])
if env['debug']:
    env.Append(CCFLAGS = env['cc_debug'])
else:
    env.Append(CCFLAGS = env['cc_optim'])
# always add cc_flags
env.Append(CCFLAGS = env['cc_flags'])
# add system libraries
env.AppendUnique(LIBS = env['sys_libs'])
# determine svn revision
global_revision=ARGUMENTS.get('SVN_VERSION', None)
if global_revision:
    global_revision = re.sub(':.*', '', global_revision)
    global_revision = re.sub('[^0-9]', '', global_revision)
    if global_revision == '': global_revision = '-2'
else:
    # Get the global Subversion revision number for the getVersion() method
    try:
        global_revision = os.popen('svnversion -n .').read()
        global_revision = re.sub(':.*', '', global_revision)
        global_revision = re.sub('[^0-9]', '', global_revision)
        if global_revision == '': global_revision = '-2'
    except:
        global_revision = '-1'
env['svn_revision'] = global_revision
env['buildvars']['svn_revision'] = global_revision
env.Append(CPPDEFINES=['SVN_VERSION='+global_revision])

# If that failed, try to get the version number from the file svn_version
if global_revision == '-2' or global_revision == '-1':
    try:
        global_revision = str(os.popen('cat svn_version 2>/dev/null').read())
        if global_revision[-1] == '\n':
            temp = global_revision[:-1]
        else:
            temp = global_revision
        print("Using svn revision information from file. Got revision = %s" % temp)
    except:
        global_revision = '-2'
if global_revision == '-2' or global_revision == '-1':
    env['warnings'].append("Could not detect the svn revision number!")
env['IS_WINDOWS']=IS_WINDOWS
env['IS_OSX']=IS_OSX
###################### Copy required environment vars ########################
# Windows doesn't use LD_LIBRARY_PATH but PATH instead
if IS_WINDOWS:
    LD_LIBRARY_PATH_KEY = 'PATH'
    env['ENV']['LD_LIBRARY_PATH'] = ''
else:
    LD_LIBRARY_PATH_KEY = 'LD_LIBRARY_PATH'
env['LD_LIBRARY_PATH_KEY']=LD_LIBRARY_PATH_KEY
# the following env variables are exported for the unit tests
for key in 'OMP_NUM_THREADS', 'ESCRIPT_NUM_PROCS', 'ESCRIPT_NUM_NODES':
    try:
        env['ENV'][key] = os.environ[key]
    except KeyError:
        env['ENV'][key] = '1'

env_export = env['env_export']
env_export.extend(['ESCRIPT_NUM_THREADS','ESCRIPT_HOSTFILE','DISPLAY','XAUTHORITY','PATH','HOME','KMP_MONITOR_STACKSIZE','TMPDIR','TEMP','TMP','LD_PRELOAD'])

for key in set(env_export):
    try:
        env['ENV'][key] = os.environ[key]
    except KeyError:
        pass

for key in os.environ.keys():
    if key.startswith("SLURM_"):
        env['ENV'][key] = os.environ[key]

try:
    env.PrependENVPath(LD_LIBRARY_PATH_KEY, os.environ[LD_LIBRARY_PATH_KEY])
except KeyError:
    pass

if IS_OSX:
    try:
        env.PrependENVPath('DYLD_LIBRARY_PATH', os.environ['DYLD_LIBRARY_PATH'])
    except KeyError:
        pass

try:
    env['ENV']['PYTHONPATH'] = os.environ['PYTHONPATH']
except KeyError:
    pass
######################## Add some custom builders ############################
# Takes care of prefix and suffix for Python modules:
def build_python_module(env, target, source):
    sl_suffix = '.pyd' if IS_WINDOWS else '.so'
    return env.SharedLibrary(target, source, SHLIBPREFIX='', SHLIBSUFFIX=sl_suffix)
env.AddMethod(build_python_module, "PythonModule")

if env['pythoncmd'] == 'python':
    py_builder = Builder(action = build_py, suffix = '.pyc', src_suffix = '.py', single_source=True)
else:
    py_builder = Builder(action = env['pythoncmd']+" scripts/py_comp.py $SOURCE $TARGET", suffix = '.pyc', src_suffix = '.py', single_source=True)
env.Append(BUILDERS = {'PyCompile' : py_builder});
runUnitTest_builder = Builder(action = runUnitTest, suffix = '.passed', src_suffix=env['PROGSUFFIX'], single_source=True)
env.Append(BUILDERS = {'RunUnitTest' : runUnitTest_builder});
runPyUnitTest_builder = Builder(action = runPyUnitTest, suffix = '.passed', src_suffix='.py', single_source=True)
env.Append(BUILDERS = {'RunPyUnitTest' : runPyUnitTest_builder});
runPyExample_builder = Builder(action = runPyExample, suffix = '.passed', src_suffix='.py', single_source=True)
env.Append(BUILDERS = {'RunPyExample' : runPyExample_builder});
epstopdfbuilder = Builder(action = eps2pdf, suffix='.pdf', src_suffix='.eps', single_source=True)
env.Append(BUILDERS = {'EpsToPDF' : epstopdfbuilder});
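# Illustrative builder usage from a SConscript (file names are hypothetical):
#   pyc    = env.PyCompile('py_src/util.py')        # byte-compiles to util.pyc
#   passed = env.RunPyUnitTest('test/run_util.py')  # produces test/run_util.passed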
############################ Dependency checks ###############################
######## Compiler
env=checkCompiler(env)
######## Python headers & library (required)
env=checkPython(env)
######## boost & boost-python (required)
env=checkBoost(env)
######## numpy (required) and numpy headers (optional)
env=checkNumpy(env)
######## CppUnit (required for tests)
env=checkCppUnit(env)
######## optional python modules (sympy, pyproj)
env=checkOptionalModules(env)
######## optional dependencies (netCDF, MKL, UMFPACK, MUMPS, Lapack, Silo, ...)
env=checkOptionalLibraries(env)
######## PDFLaTeX (for documentation)
env=checkPDFLatex(env)
# set defaults for launchers if not otherwise specified
if env['prelaunch'] == 'default':
    if env['mpi'] == 'INTELMPI' and env['openmp']:
        env['prelaunch'] = "export I_MPI_PIN_DOMAIN=omp"
    elif env['mpi'] == 'OPENMPI':
        # transform comma-separated list to '-x a -x b -x c ...'
        env['prelaunch'] = "EE=$(echo -x %e|sed -e 's/,/ -x /g')"
    elif env['mpi'] == 'MPT':
        env['prelaunch'] = "export MPI_NUM_MEMORY_REGIONS=0"
    elif env['mpi'] == 'MPICH2':
        env['prelaunch'] = "mpdboot -n %n -r ssh -f %f"
    else:
        env['prelaunch'] = ""

if env['launcher'] == 'default':
    if env['mpi'] == 'INTELMPI':
        env['launcher'] = "mpirun -hostfile %f -n %N -ppn %p %b"
    elif env['mpi'] == 'OPENMPI':
        if env['mpi_no_host']:
            hostoptionstr = ''
        else:
            hostoptionstr = '--host %h'
        # default to OpenMPI version 1.10 or higher
        env['launcher'] = "mpirun ${AGENTOVERRIDE} --gmca mpi_warn_on_fork 0 ${EE} "+hostoptionstr+" --map-by node:pe=%t -bind-to core -np %N %b"
        if 'orte_version' in env:
            v = [int(s) for s in env['orte_version'].split('.') if s.isdigit()]
            major, minor = v[0], v[1]
            if major == 1 and minor < 10:
                env['launcher'] = "mpirun ${AGENTOVERRIDE} --gmca mpi_warn_on_fork 0 ${EE} "+hostoptionstr+" --cpus-per-rank %t -np %N %b"
    elif env['mpi'] == 'MPT':
        env['launcher'] = "mpirun %h -np %p %b"
    elif env['mpi'] == 'MPICH':
        env['launcher'] = "mpirun -machinefile %f -np %N %b"
    elif env['mpi'] == 'MPICH2':
        env['launcher'] = "mpiexec -genvlist %e -np %N %b"
    else:
        env['launcher'] = "%b"

if env['postlaunch'] == 'default':
    if env['mpi'] == 'MPICH2':
        env['postlaunch'] = "mpdallexit"
    else:
        env['postlaunch'] = ""
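# The %-placeholders in prelaunch/launcher/postlaunch (roughly: %b binary, %N total
# ranks, %p ranks per node, %t threads per rank, %f hostfile, %e exported variables,
# %h host list, %n nodes) are substituted later by the generated run-escript script
# (see write_launcher below); the exact substitution set is defined there, the
# meanings given here are indicative only.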
# dependency sanity checks
if len(env['domains']) == 0:
    env['warnings'].append("No domains have been built, escript will not be very useful!")
# keep some of our install paths first in the list for the unit tests
env.PrependENVPath(LD_LIBRARY_PATH_KEY, env['libinstall'])
env.PrependENVPath('PYTHONPATH', prefix)
env['ENV']['ESCRIPT_ROOT'] = prefix
if not env['verbose']:
    env['CXXCOMSTR'] = "Compiling $TARGET"
    env['SHCXXCOMSTR'] = "Compiling $TARGET"
    env['ARCOMSTR'] = "Linking $TARGET"
    env['LINKCOMSTR'] = "Linking $TARGET"
    env['SHLINKCOMSTR'] = "Linking $TARGET"
    env['PDFLATEXCOMSTR'] = "Building $TARGET from LaTeX input $SOURCES"
    env['BIBTEXCOMSTR'] = "Generating bibliography $TARGET"
    env['MAKEINDEXCOMSTR'] = "Generating index $TARGET"
    env['PDFLATEXCOMSTR'] = "Building $TARGET from LaTeX input $SOURCES"
#Progress(['Checking -\r', 'Checking \\\r', 'Checking |\r', 'Checking /\r'], interval=17)
########################### Configure the targets ############################
from grouptest import GroupTest
TestGroups=[]
# keep an environment without warnings-as-errors
dodgy_env=env.Clone()
# now add warnings-as-errors flags. This needs to be done after configuration
# because the scons test files have warnings in them
if ((fatalwarning != '') and (env['werror'])):
    env.Append(CCFLAGS = fatalwarning)
Export(
['env',
'dodgy_env',
'IS_WINDOWS',
'TestGroups'
]
)
target_init = env.Command(os.path.join(env['pyinstall'],'__init__.py'), None, Touch('$TARGET'))
env.Alias('target_init', [target_init])
# escript can't be turned off
build_all_list = ['build_escript']
install_all_list = ['target_init', 'install_escript']
if env['usempi']:
    build_all_list += ['build_pythonMPI', 'build_overlord']
    install_all_list += ['install_pythonMPI', 'install_overlord']
env['buildvars']['paso'] = int(env['paso'])
if env['paso']:
    env.Append(CPPDEFINES = ['ESYS_HAVE_PASO'])
    build_all_list += ['build_paso']
    install_all_list += ['install_paso']
env['buildvars']['trilinos'] = int(env['trilinos'])
if env['trilinos']:
    build_all_list += ['build_trilinoswrap']
    install_all_list += ['install_trilinoswrap']
env['buildvars']['domains'] = ','.join(env['domains'])
for domain in env['domains']:
    env.Append(CPPDEFINES = ['ESYS_HAVE_'+domain.upper()])
    build_all_list += ['build_%s' % domain]
    install_all_list += ['install_%s' % domain]
env['buildvars']['weipa'] = int(env['weipa'])
if env['weipa']:
    env.Append(CPPDEFINES = ['ESYS_HAVE_WEIPA'])
    build_all_list += ['build_weipa']
    install_all_list += ['install_weipa']
    if 'finley' in env['domains'] or 'dudley' in env['domains']:
        build_all_list += ['build_escriptreader']
        install_all_list += ['install_escriptreader']
variant='$BUILD_DIR/$PLATFORM/'
env.SConscript('escriptcore/SConscript', variant_dir=variant+'escriptcore', duplicate=0)
env.SConscript('escript/py_src/SConscript', variant_dir=variant+'escript', duplicate=0)
env.SConscript('pythonMPI/src/SConscript', variant_dir=variant+'pythonMPI', duplicate=0)
env.SConscript('tools/overlord/SConscript', variant_dir=variant+'tools/overlord', duplicate=0)
env.SConscript('paso/SConscript', variant_dir=variant+'paso', duplicate=0)
env.SConscript('trilinoswrap/SConscript', variant_dir=variant+'trilinoswrap', duplicate=0)
env.SConscript('cusplibrary/SConscript')
env.SConscript('dudley/SConscript', variant_dir=variant+'dudley', duplicate=0)
env.SConscript('finley/SConscript', variant_dir=variant+'finley', duplicate=0)
env.SConscript('ripley/SConscript', variant_dir=variant+'ripley', duplicate=0)
env.SConscript('speckley/SConscript', variant_dir=variant+'speckley', duplicate=0)
env.SConscript('weipa/SConscript', variant_dir=variant+'weipa', duplicate=0)
env.SConscript(dirs = ['downunder/py_src'], variant_dir=variant+'downunder', duplicate=0)
env.SConscript(dirs = ['modellib/py_src'], variant_dir=variant+'modellib', duplicate=0)
env.SConscript(dirs = ['pycad/py_src'], variant_dir=variant+'pycad', duplicate=0)
env.SConscript('tools/escriptconvert/SConscript', variant_dir=variant+'tools/escriptconvert', duplicate=0)
env.SConscript('doc/SConscript', variant_dir=variant+'doc', duplicate=0)
env.Alias('build', build_all_list)
install_all_list += ['install_downunder_py']
install_all_list += ['install_modellib_py']
install_all_list += ['install_pycad_py']
install_all_list += [env.Install(Dir('scripts',env['build_dir']), os.path.join('scripts', 'release_sanity.py'))]
if env['osx_dependency_fix']:
    print("Require dependency fix")
    install_all = env.Command('install', install_all_list, 'scripts/moveall.sh')
else:
    install_all = env.Alias('install', install_all_list)
sanity=env.Alias('sanity', env.Command('dummy','',os.path.join(env['prefix'], 'bin', 'run-escript')+' '+os.path.join(env['build_dir'],'scripts', 'release_sanity.py')))
env.Depends('dummy', install_all)
if env['usempi']:
    env.Depends('dummy', ['install_pythonMPI'])

# if all domains are built:
if env['domains'] == all_domains and env['insane'] == False:
    env.AlwaysBuild('sanity')
    env.Default('sanity')
else:
    env.Default('install')
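# i.e. a full build (all domains enabled, 'insane' left at False) runs the sanity
# check by default; otherwise plain 'install' is the default target.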
################## Targets to build and run the test suite ###################
if not env['cppunit']:
    test_msg = env.Command('.dummy.', None, '@echo "Cannot run C++ unit tests, CppUnit not found!";exit 1')
    env.Alias('run_tests', test_msg)
    env.Alias('build_tests', '')
env.Alias('run_tests', ['install'])
env.Alias('all_tests', ['install', 'run_tests', 'py_tests'])
env.Alias('build_full',['install','build_tests','build_py_tests'])
Requires('py_tests', 'install')
##################### Targets to build the documentation #####################
env.Alias('pdfdocs',['user_pdf', 'install_pdf', 'cookbook_pdf', 'inversion_pdf'])
env.Alias('basedocs', ['pdfdocs','examples_tarfile', 'examples_zipfile', 'api_doxygen'])
env.Alias('docs', ['basedocs', 'sphinxdoc'])
env.Alias('release_prep', ['docs', 'install'])
env.Alias('release_prep_old', ['basedocs', 'api_epydoc', 'install'])
# The test scripts are always generated, this target allows us to
# generate the testscripts without doing a full build
env.Alias('testscripts',[])
generateTestScripts(env, TestGroups)
######################## Populate the buildvars file #########################
write_buildvars(env)
# delete buildvars upon cleanup - target_init is default so use it
env.Clean('target_init', File('buildvars', env['libinstall']))
write_launcher(env)
# remove obsolete files
if not env['usempi']:
    Execute(Delete(File(['pythonMPI','pythonMPIredirect'], env['libinstall'])))
    Execute(Delete(File('escript-overlord', env['bininstall'])))
######################## Summarize our environment ###########################
def print_summary():
    d_list = []
    print("")
    print("*** Config Summary (see config.log and <prefix>/lib/buildvars for details) ***")
    print("Escript revision %s" % global_revision)
    print("  Install prefix: %s" % env['prefix'])
    print("          Python: %s (Version %s)" % (env['pythoncmd'], env['python_version']))
    print("           boost: %s (Version %s)" % (env['boost_prefix'], env['boost_version']))
    if env['have_boost_numpy'] is True:
        print("     boost numpy: YES")
    else:
        print("     boost numpy: NO")
    if env['trilinos']:
        print("        trilinos: %s (Version %s)" % (env['trilinos_prefix'], env['trilinos_version']))
    else:
        print("        trilinos: NO")
    if env['numpy_h']:
        print("           numpy: YES (with headers)")
    else:
        print("           numpy: YES (without headers)")
    if env['usempi']:
        if 'orte_version' in env:
            print("             MPI: %s (Version %s)" % (env['mpi'], env['orte_version']))
        else:
            print("             MPI: YES (flavour: %s)" % env['mpi'])
    else:
        d_list.append('mpi')
    if env['parmetis']:
        print("        ParMETIS: %s (Version %s)" % (env['parmetis_prefix'], env['parmetis_version']))
    else:
        d_list.append('parmetis')
    if env['uselapack']:
        print("          LAPACK: YES (flavour: %s)" % env['lapack'])
    else:
        d_list.append('lapack')
    if env['gmshpy']:
        gmshpy = " + python module"
    else:
        gmshpy = ""
    if env['gmsh'] == 'm':
        print("            gmsh: YES, MPI-ENABLED" + gmshpy)
    elif env['gmsh'] == 's':
        print("            gmsh: YES" + gmshpy)
    else:
        if env['gmshpy']:
            print("            gmsh: python module only")
        else:
            d_list.append('gmsh')
    if env['compressed_files']:
        print("            gzip: YES")
    else:
        d_list.append('gzip')

    solvers = []
    direct = []
    if env['paso']:
        solvers.append('paso')
        if env['mkl']:
            direct.append('mkl')
        if env['umfpack']:
            direct.append('umfpack')
        if env['mumps']:
            direct.append('mumps')
    else:
        d_list.append('paso')
    if env['trilinos']:
        solvers.append('trilinos')
        direct.append('trilinos')
    else:
        d_list.append('trilinos')
    print("  Solver library: %s" % (", ".join(solvers)))
    if len(direct) > 0:
        print("   Direct solver: YES (%s)" % (", ".join(direct)))
    else:
        print("   Direct solver: NONE")
    print("         domains: %s" % (", ".join(env['domains'])))
    if env['netcdf'] == 4:
        print("          netcdf: YES (4 + 3)")
    elif env['netcdf'] == 3:
        print("          netcdf: YES (3)")
    else:
        print("          netcdf: NO")

    e_list = []
    for i in ('weipa','debug','openmp','cppunit','gdal','mkl',
              'mumps','pyproj','scipy','silo','sympy','umfpack','visit'):
        if env[i]: e_list.append(i)
        else: d_list.append(i)
    d_list += set(all_domains).difference(env['domains'])
    for i in e_list:
        print("%16s: YES" % i)
    print("\n DISABLED features: %s" % (" ".join(sorted(d_list))))

    if ((fatalwarning != '') and (env['werror'])):
        print("  Treating warnings as errors")
    else:
        print("  NOT treating warnings as errors")
    print("")
    for w in env['warnings']:
        print("WARNING: %s" % w)

    if len(GetBuildFailures()):
        print("\nERROR: build stopped due to errors\n")
    else:
        print("\nSUCCESS: build complete\n")
atexit.register(print_summary)