/////////////////////////////////////////////////////////////
//                                                         //
// Copyright (c) 2003-2011 by The University of Queensland //
// Earth Systems Science Computational Centre (ESSCC)      //
// http://www.uq.edu.au/esscc                              //
//                                                         //
// Primary Business: Brisbane, Queensland, Australia       //
// Licensed under the Open Software License version 3.0    //
// http://www.opensource.org/licenses/osl-3.0.php          //
//                                                         //
/////////////////////////////////////////////////////////////
#include <config.h>
#include <mpi.h>
#include <Python.h>
#include <iostream>
#include "Foundation/PathUtil.h"
#include <stdexcept>
#include <boost/filesystem/exception.hpp>
#include <boost/filesystem/path.hpp>
using namespace boost;
//--project includes--
#include "Foundation/console.h"
#include "Parallel/SubLatticeControler.h"
int main( int argc, char **argv )
{
  esys::lsm::setPathEnv(argc, argv);

  int status = MPI_Init(&argc, &argv);
  if (status != MPI_SUCCESS) {
    std::cerr << argv[0] << ": MPI_Init failed, exiting." << std::endl;
    return status;
  }

  // get this process's rank
  int myrank;
  MPI_Comm_rank(MPI_COMM_WORLD, &myrank);

  if (myrank == 0) {
    // rank 0 --> master: start the embedded Python interpreter
    // std::cout << "master start\n";
    status = Py_Main(argc, argv);
  } else {
    // rank != 0 --> worker: start a sub-lattice controller
    // std::cout << "slave start\n";
    CSubLatticeControler SLC;
    SLC.initMPI();
    SLC.run();
  }

  MPI_Finalize();
  return status;
}
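
// Illustrative launch line (the binary and script names below are
// placeholders, not taken from this file):
//
//   mpirun -np 4 ./<this_binary> <simulation_script>.py
//
// With four ranks, rank 0 executes the script via Py_Main() while
// ranks 1-3 run as sub-lattice workers.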