/*
  Copyright 2023 Equinor.

  This file is part of the Open Porous Media project (OPM).

  OPM is free software: you can redistribute it and/or modify
  it under the terms of the GNU General Public License as published by
  the Free Software Foundation, either version 3 of the License, or
  (at your option) any later version.

  OPM is distributed in the hope that it will be useful,
  but WITHOUT ANY WARRANTY; without even the implied warranty of
  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
  GNU General Public License for more details.

  You should have received a copy of the GNU General Public License
  along with OPM. If not, see <http://www.gnu.org/licenses/>.
*/
#include <config.h>

#define BOOST_TEST_MODULE Parallel_PAvg_Dynamic_Source_Data
#define BOOST_TEST_NO_MAIN

#include <boost/test/unit_test.hpp>

#include <opm/simulators/wells/ParallelPAvgDynamicSourceData.hpp>

#include <opm/simulators/utils/ParallelCommunication.hpp>

#include <opm/input/eclipse/Schedule/Well/PAvgDynamicSourceData.hpp>

#include <dune/common/parallel/mpihelper.hh>

#include <cstddef>
#include <functional>
#include <iostream>
#include <numeric>
#include <string>
#include <string_view>
#include <vector>

namespace {

#if HAVE_MPI
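    // Exception type thrown from the custom MPI error handler below.
    // Carries the textual description and numeric code of the MPI error.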
    struct MPIError
    {
        MPIError(std::string_view errstr, const int ec)
            : errorstring { errstr }
            , errorcode   { ec }
        {}

        std::string errorstring;
        int errorcode;
    };
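
    // Custom MPI error handler which translates MPI errors into C++
    // exceptions, enabling "catch throw" style debugging in GDB instead
    // of letting the MPI runtime abort the process.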
    void MPI_err_handler(MPI_Comm*, int* err_code, ...)
    {
        std::vector<char> err_string_vec(MPI_MAX_ERROR_STRING);
        auto err_length = 0;

        MPI_Error_string(*err_code, err_string_vec.data(), &err_length);

        auto err_string = std::string_view {
            err_string_vec.data(),
            static_cast<std::string_view::size_type>(err_length)
        };

        std::cerr << "An MPI Error occurred:\n  -> " << err_string << '\n';

        throw MPIError { err_string, *err_code };
    }

#endif // HAVE_MPI
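
    // Boost.Test initialisation function.  No additional setup is needed
    // beyond what main() already does.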
    bool init_unit_test_func()
    {
        return true;
    }
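
    // Build the global list of source locations 0, 1, ..., numLoc - 1.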
    std::vector<std::size_t> sourceLocations(const std::size_t numLoc)
    {
        auto srcLoc = std::vector<std::size_t>(numLoc);

        std::iota(srcLoc.begin(), srcLoc.end(), std::size_t{0});

        return srcLoc;
    }
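
    // Map a global source location to a local cell index on the current
    // rank.  Locations are distributed round-robin: rank r owns location
    // i when i % size == r, and its owned locations are numbered
    // consecutively as i / size.  Returns -1 for locations owned by
    // other ranks.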
    class LocalCellIndex
    {
    public:
        explicit LocalCellIndex(const std::size_t rank,
                                const std::size_t size)
            : rank_ { rank }
            , size_ { size }
        {}

        int operator()(const std::size_t i) const
        {
            return ((i % this->size_) == this->rank_)
                ? static_cast<int>(i / this->size_)
                : -1;
        }

    private:
        std::size_t rank_{};
        std::size_t size_{};
    };
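
    // Assign synthetic, rank-dependent values to each locally owned
    // source term.  The formulas are arbitrary but deterministic, so
    // every rank can later recompute the expected value for any global
    // location when verifying the collected results.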
    class CalculateSourceTerm
    {
    public:
        using SrcTerm = Opm::PAvgDynamicSourceData<double>::SourceDataSpan<double>;

        explicit CalculateSourceTerm(const std::size_t rank)
            : rank_ { rank }
        {}

        void operator()(const int i, SrcTerm source_term) const
        {
            using Item = typename SrcTerm::Item;

            source_term
                .set(Item::Pressure      , this->rank_*314.15 - i)
                .set(Item::PoreVol       , this->rank_*172.9 + 10.0*i)
                .set(Item::MixtureDensity, this->rank_*852.96 + i);
        }

    private:
        std::size_t rank_{};
    };
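
    // Check that every source term matches the value its owning rank
    // computed in CalculateSourceTerm::operator().  Exact floating-point
    // comparison is safe here because the expected values are recomputed
    // with identical arithmetic.  Returns 1 if all 'num_src' terms are
    // correct and 0 otherwise, so the results can be summed across ranks.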
    std::size_t
    sourceTermsAreCorrect(const std::size_t comm_size,
                          const std::size_t num_src,
                          const Opm::PAvgDynamicSourceData<double>& source_data)
    {
        using Item = Opm::PAvgDynamicSourceData<double>::SourceDataSpan<const double>::Item;

        auto num_correct = 0*num_src;

        for (auto srcID = 0*num_src; srcID < num_src; ++srcID) {
            const auto rank = srcID % comm_size;
            const auto locI = srcID / comm_size;

            const auto src = source_data[srcID];

            const auto ok =
                (src[Item::Pressure]       == rank*314.15 - locI) &&
                (src[Item::PoreVol]        == rank*172.9 + 10*locI) &&
                (src[Item::MixtureDensity] == rank*852.96 + locI);

            num_correct += ok;
        }

        return num_correct == num_src;
    }

} // Anonymous namespace
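
// Distribute 50 source locations round-robin across the ranks, compute
// rank-local source terms, synchronise them to all ranks, and verify
// that every rank ends up with the full, correct set of source values.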
BOOST_AUTO_TEST_CASE(Eval_and_collect)
{
    auto comm = Opm::Parallel::Communication {
        Dune::MPIHelper::getCommunicator()
    };

    const auto comm_rank = static_cast<std::size_t>(comm.rank());
    const auto comm_size = static_cast<std::size_t>(comm.size());

    const auto num_src_loc = std::size_t{50};

    auto source_data = Opm::ParallelPAvgDynamicSourceData<double> {
        comm, sourceLocations(num_src_loc),
        LocalCellIndex { comm_rank, comm_size }
    };

    source_data.collectLocalSources(CalculateSourceTerm { comm_rank });
    source_data.synchroniseSources();

    const auto num_rank_correct = comm.sum
        (sourceTermsAreCorrect(comm_size, num_src_loc, source_data));

    if (comm_rank == 0) {
        BOOST_CHECK_EQUAL(num_rank_correct, comm_size);
    }
}
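
// Test driver: initialise MPI, install the throwing MPI error handler,
// and hand control to the Boost.Test runner.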
int main(int argc, char** argv)
{
    Dune::MPIHelper::instance(argc, argv);

#if HAVE_MPI
    // Register a throwing error handler to allow for debugging with
    //
    //     catch throw
    //
    // in GDB.
    MPI_Errhandler handler{};
    MPI_Comm_create_errhandler(MPI_err_handler, &handler);
    MPI_Comm_set_errhandler(MPI_COMM_WORLD, handler);
#endif // HAVE_MPI

    return boost::unit_test::unit_test_main(&init_unit_test_func, argc, argv);
}