// File: MpiPythonMain.cpp
// Package: esys-particle 2.3.5+dfsg2-1 (Debian "bullseye", area: main; links: PTS, VCS)
// Package size: 13,132 kB; sloc: cpp: 81,480; python: 5,872; makefile: 1,259; sh: 313; perl: 225
// File stat: -rw-r--r-- 2,227 bytes, 86 lines; duplicates (4)
/////////////////////////////////////////////////////////////
//                                                         //
// Copyright (c) 2003-2017 by The University of Queensland //
// Centre for Geoscience Computing                         //
// http://earth.uq.edu.au/centre-geoscience-computing      //
//                                                         //
// Primary Business: Brisbane, Queensland, Australia       //
// Licensed under the Open Software License version 3.0    //
// http://www.apache.org/licenses/LICENSE-2.0              //
//                                                         //
/////////////////////////////////////////////////////////////

#if HAVE_CONFIG_H
#include <config.h>
#endif

#include <mpi.h>
#include <Python.h>
#include <patchlevel.h>

#include <cstring>
#include <iostream>
#include <stdexcept>

#include <boost/filesystem/exception.hpp>
#include <boost/filesystem/path.hpp>

using namespace boost;


//--project includes--
#include "Foundation/console.h"
#include "Parallel/SubLatticeControler.h"


/// MPI-aware entry point: rank 0 runs the embedded Python interpreter on the
/// user-supplied script, every other rank runs the subordinate lattice engine.
///
/// @param argc argument count, forwarded to MPI_Init and Py_Main.
/// @param argv argument vector, forwarded to MPI_Init and Py_Main.
/// @return MPI_Init's error code on startup failure, otherwise the exit
///         status reported by Py_Main on rank 0 (MPI_SUCCESS on workers).
int main( int argc, char **argv )
{
  // Prepend the binary's directory to the search path environment so the
  // esys.lsm Python modules can be located relative to the executable.
  esys::lsm::setPathEnv(argc, argv);

  int status = MPI_Init(&argc, &argv);
  if (status != MPI_SUCCESS) {
    std::cerr << argv[0] << ": MPI_Init failed, exiting." << std::endl;
    return status;
  }

  int myrank;
  MPI_Comm_rank(MPI_COMM_WORLD, &myrank);

  if (myrank == 0) {  // rank 0 --> master: run the Python frontend
    Py_Initialize();

#if PY_VERSION_HEX >= 0x03000000
    // Python 3's Py_Main takes wchar_t**.  Widen each byte of argv; the
    // cast through unsigned char avoids sign-extending bytes >= 0x80 on
    // platforms with signed char.  (NOTE(review): this is still only a
    // byte-to-code-point mapping; Py_DecodeLocale would be needed for
    // locale-aware decoding of non-ASCII arguments.)
    wchar_t** wargv = new wchar_t*[argc + 1];
    for (int i = 0; i < argc; ++i)
    {
      const std::size_t len = std::strlen(argv[i]);
      wargv[i] = new wchar_t[len + 1];
      for (std::size_t j = 0; j < len; ++j)
      {
        wargv[i][j] =
          static_cast<wchar_t>(static_cast<unsigned char>(argv[i][j]));
      }
      wargv[i][len] = L'\0';
    }
    wargv[argc] = 0;
    status = Py_Main(argc, wargv);
    // Release the converted argument vector (leaked in earlier revisions).
    for (int i = 0; i < argc; ++i)
    {
      delete[] wargv[i];
    }
    delete[] wargv;
#else
    status = Py_Main(argc, argv);
#endif

    Py_Finalize();
  } else {  // rank != 0 --> worker: serve the master until shutdown
    CSubLatticeControler SLC;
    SLC.initMPI();
    SLC.run();
  }

  MPI_Finalize();

  return status;
}