19 #ifndef OPM_PARALLELWELLINFO_HEADER_INCLUDED
21 #define OPM_PARALLELWELLINFO_HEADER_INCLUDED
22 #include <dune/common/version.hh>
23 #include <dune/common/parallel/mpihelper.hh>
24 #include <dune/common/parallel/plocalindex.hh>
25 #include <dune/istl/owneroverlapcopy.hh>
27 #include <opm/simulators/utils/ParallelCommunication.hpp>
29 #include <opm/common/ErrorMacros.hpp>
54 using MPIComm = typename Dune::MPIHelper::MPICommunicator;
55 #if DUNE_VERSION_NEWER(DUNE_COMMON, 2, 7)
56 using Communication = Dune::Communication<MPIComm>;
57 #else
58 using Communication = Dune::CollectiveCommunication<MPIComm>;
59 #endif
60 using LocalIndex = Dune::ParallelLocalIndex<Attribute>;
61 using IndexSet = Dune::ParallelIndexSet<int,LocalIndex,50>;
63 using RI = Dune::RemoteIndices<IndexSet>;
96 const double* current,
105 const double* current,
115 template<class RAIterator>
116 void partialSumPerfValues(RAIterator begin, RAIterator end) const
118 if (this->comm_.size() < 2)
120 std::partial_sum(begin, end, begin);
130 using Value = typename std::iterator_traits<RAIterator>::value_type;
131 std::vector<int> sizes(comm_.size());
132 std::vector<int> displ(comm_.size() + 1, 0);
133 using GlobalIndex = typename IndexSet::IndexPair::GlobalIndex;
134 using Pair = std::pair<GlobalIndex,Value>;
135 std::vector<Pair> my_pairs;
136 my_pairs.reserve(current_indices_.size());
137 for (const auto& pair: current_indices_)
139 if (pair.local().attribute() == owner)
141 my_pairs.emplace_back(pair.global(), begin[pair.local()]);
144 int mySize = my_pairs.size();
145 comm_.allgather(&mySize, 1, sizes.data());
146 std::partial_sum(sizes.begin(), sizes.end(), displ.begin()+1);
147 std::vector<Pair> global_pairs(displ.back());
148 comm_.allgatherv(my_pairs.data(), my_pairs.size(), global_pairs.data(), sizes.data(), displ.data());
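149 // Sort the gathered (global ECL index, value) pairs so the following prefix sum runs in perforation order.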
150 std::sort(global_pairs.begin(), global_pairs.end(),
151           [](const Pair& p1, const Pair& p2){ return p1.first < p2.first; } );
152 std::vector<Value> sums(global_pairs.size());
153 std::transform(global_pairs.begin(), global_pairs.end(), sums.begin(),
154                [](const Pair& p) { return p.second; });
155 std::partial_sum(sums.begin(), sums.end(), sums.begin());
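156 // Look up each local perforation in the sorted global range to pick up its partial sum.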
157 auto global_pair = global_pairs.begin();
158 for (const auto& pair: current_indices_)
160 global_pair = std::lower_bound(global_pair, global_pairs.end(),
161                                pair.global(),
162                                [](const Pair& val1, const GlobalIndex& val2)
163                                { return val1.first < val2; });
164 assert(global_pair != global_pairs.end());
165 assert(global_pair->first == pair.global());
166 begin[pair.local()] = sums[global_pair - global_pairs.begin()];
169 OPM_THROW(std::logic_error, "In a sequential run the size of the communicator should be 1!");
177 int numLocalPerfs() const;
181 IndexSet current_indices_;
184 IndexSet above_indices_;
186 Dune::Interface interface_;
187 Dune::BufferedCommunicator communicator_;
189 std::size_t num_local_perfs_{};
201 using MPIComm = typename Dune::MPIHelper::MPICommunicator;
202 #if DUNE_VERSION_NEWER(DUNE_COMMON, 2, 7)
203 using Communication = Dune::Communication<MPIComm>;
204 #else
205 using Communication = Dune::CollectiveCommunication<MPIComm>;
206 #endif
207 using IndexSet = CommunicateAboveBelow::IndexSet;
208 using Attribute = CommunicateAboveBelow::Attribute;
209 using GlobalIndex = typename IndexSet::IndexPair::GlobalIndex;
214 int num_local_perfs);
221 std::vector<double> createGlobal(const std::vector<double>& local_perf_container,
222                                  std::size_t num_components) const;
228 void copyGlobalToLocal(const std::vector<double>& global, std::vector<double>& local,
229                        std::size_t num_components) const;
231 int numGlobalPerfs() const;
233 const IndexSet& local_indices_;
235 int num_global_perfs_;
237 std::vector<int> sizes_;
239 std::vector<int> displ_;
241 std::vector<int> map_received_;
245 std::vector<int> perf_ecl_index_;
254 using MPIComm = typename Dune::MPIHelper::MPICommunicator;
255 #if DUNE_VERSION_NEWER(DUNE_COMMON, 2, 7)
256 using Communication = Dune::Communication<MPIComm>;
257 #else
258 using Communication = Dune::CollectiveCommunication<MPIComm>;
259 #endif
261 static constexpr int INVALID_ECL_INDEX = -1;
274 Communication allComm);
276 const Communication& communication() const
292 if (rankWithFirstPerf_ >= 0) {
294 assert(rankWithFirstPerf_ < comm_->size());
299 comm_->broadcast(&res, 1, rankWithFirstPerf_);
313 const double* current,
314 std::size_t size) const;
320 const std::vector<double>& current) const;
328 const double* current,
329 std::size_t size) const;
335 const std::vector<double>& current) const;
347 const std::string& name() const
355 return hasLocalCells_;
371 template<typename It>
372 typename std::iterator_traits<It>::value_type sumPerfValues(It begin, It end) const
374 using V = typename std::iterator_traits<It>::value_type;
376 auto local = std::accumulate(begin, end, V());
377 return communication().sum(local);
387 template<class RAIterator>
388 void partialSumPerfValues(RAIterator begin, RAIterator end) const
390 commAboveBelow_->partialSumPerfValues(begin, end);
407 void operator()(Communication* comm);
418 int rankWithFirstPerf_;
422 std::unique_ptr<Communication, DestroyComm> comm_;
425 std::unique_ptr<CommunicateAboveBelow> commAboveBelow_;
427 std::unique_ptr<GlobalPerfContainerFactory> globalPerfCont_;
445 bool checkAllConnectionsFound();
447 std::vector<std::size_t> foundConnections_;
458 bool operator<(const std::pair<std::string, bool>& pair, const ParallelWellInfo& well);
460 bool operator<(const ParallelWellInfo& well, const std::pair<std::string, bool>& pair);
462 bool operator==(const std::pair<std::string, bool>& pair, const ParallelWellInfo& well);
464 bool operator==(const ParallelWellInfo& well, const std::pair<std::string, bool>& pair);
466 bool operator!=(const std::pair<std::string, bool>& pair, const ParallelWellInfo& well);
468 bool operator!=(const ParallelWellInfo& well, const std::pair<std::string, bool>& pair);
Class checking that all connections are on active cells.
Definition: ParallelWellInfo.hpp:434
void connectionFound(std::size_t index)
Indicate that the i-th completion was found.
Definition: ParallelWellInfo.cpp:516
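A minimal usage sketch for the checker documented above; its construction is not shown in this fragment, so `checker` is assumed to be an already set-up instance for one well, and `connections` and `cellIsActiveLocally` are hypothetical names:

    // Report every connection whose cell is active on this rank, then check the result.
    for (std::size_t i = 0; i < connections.size(); ++i) {
        if (cellIsActiveLocally(connections[i]))   // assumed helper predicate
            checker.connectionFound(i);
    }
    if (!checker.checkAllConnectionsFound()) {
        // at least one connection was not found; handle the error here
    }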
Class to facilitate getting values associated with the above/below perforation.
Definition: ParallelWellInfo.hpp:43
void beginReset()
Indicates that we will add the index information.
Definition: ParallelWellInfo.cpp:202
void clear()
Clear all the parallel information.
Definition: ParallelWellInfo.cpp:191
const IndexSet & getIndexSet() const
Get index set for the local perforations.
Definition: ParallelWellInfo.cpp:342
int endReset()
Indicates that the index information is complete.
Definition: ParallelWellInfo.cpp:214
std::vector< double > communicateAbove(double first_value, const double *current, std::size_t size)
Creates an array of values for the perforation above.
Definition: ParallelWellInfo.cpp:247
void partialSumPerfValues(RAIterator begin, RAIterator end) const
Do an in-place partial sum on the values attached to all perforations.
Definition: ParallelWellInfo.hpp:116
void pushBackEclIndex(int above, int current, bool owner=true)
Adds information about the original index of the perforations in the ECL Schedule.
Definition: ParallelWellInfo.cpp:302
std::vector< double > communicateBelow(double first_value, const double *current, std::size_t size)
Creates an array of values for the perforation below.
Definition: ParallelWellInfo.cpp:274
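To show how the CommunicateAboveBelow members above fit together, here is a minimal sketch; how the object is constructed is not visible in this fragment, so the sketch assumes it takes the well's Communication, and `wellComm`, `perfs`, and `bhp` are hypothetical names:

    CommunicateAboveBelow commAB(wellComm);     // assumption: constructed from the well's Communication
    commAB.beginReset();
    for (const auto& perf : perfs)              // perfs: hypothetical list of local perforations
        commAB.pushBackEclIndex(perf.eclIndexAbove, perf.eclIndex, perf.isOwner);
    commAB.endReset();                          // index information is now complete

    std::vector<double> pressure(commAB.numLocalPerfs(), 0.0);   // one value per local perforation
    // For each local perforation, the value attached to the perforation above it;
    // bhp is substituted for the topmost perforation, which has nothing above it.
    std::vector<double> above = commAB.communicateAbove(bhp, pressure.data(), pressure.size());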
A factory for creating a global data representation for distributed wells.
Definition: ParallelWellInfo.hpp:199
void copyGlobalToLocal(const std::vector< double > &global, std::vector< double > &local, std::size_t num_components) const
Copies the values of the global perforation to the local representation.
Definition: ParallelWellInfo.cpp:149
GlobalPerfContainerFactory(const IndexSet &local_indices, const Communication comm, int num_local_perfs)
Constructor.
Definition: ParallelWellInfo.cpp:49
std::vector< double > createGlobal(const std::vector< double > &local_perf_container, std::size_t num_components) const
Creates a container that holds values for all perforations.
Definition: ParallelWellInfo.cpp:100
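A sketch of the intended use of the factory, assuming `wellInfo` is a ParallelWellInfo for a distributed well, `numLocalPerfs` is the number of perforations stored locally, and values are laid out with `num_components` consecutive entries per perforation (the layout convention is an assumption here):

    const auto& factory = wellInfo.getGlobalPerfContainerFactory();
    const std::size_t num_components = 3;                    // e.g. one entry per phase (assumed)
    std::vector<double> localValues(numLocalPerfs * num_components, 0.0);
    // Container holding values for all perforations of the well, not just the local ones.
    std::vector<double> globalValues = factory.createGlobal(localValues, num_components);
    // ... work on the global representation ...
    // Copy the (possibly modified) global values back onto the local perforations.
    factory.copyGlobalToLocal(globalValues, localValues, num_components);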
Class encapsulating some information about parallel wells.
Definition: ParallelWellInfo.hpp:252
bool hasLocalCells() const
Whether local cells are perforated at some point in time.
Definition: ParallelWellInfo.hpp:353
const std::string & name() const
Name of the well.
Definition: ParallelWellInfo.hpp:347
void pushBackEclIndex(int above, int current)
Adds information about the ecl indices of the perforations.
Definition: ParallelWellInfo.cpp:390
ParallelWellInfo(const std::pair< std::string, bool > &well_info, Communication allComm)
Constructs object with communication between all ranks sharing a well.
void clear()
Free data of communication data structures.
Definition: ParallelWellInfo.cpp:410
ParallelWellInfo(const std::string &name={""}, bool hasLocalCells=true)
Constructs object using MPI_COMM_SELF.
Definition: ParallelWellInfo.cpp:352
void communicateFirstPerforation(bool hasFirst)
Collectively decide which rank has the first perforation.
Definition: ParallelWellInfo.cpp:379
const GlobalPerfContainerFactory & getGlobalPerfContainerFactory() const
Get a factory to create a global representation of perforation data.
Definition: ParallelWellInfo.cpp:447
std::vector< double > communicateBelowValues(double last_value, const double *current, std::size_t size) const
Creates an array of values for the perforation below.
Definition: ParallelWellInfo.cpp:431
std::vector< double > communicateAboveValues(double first_value, const double *current, std::size_t size) const
Creates an array of values for the perforation above.
Definition: ParallelWellInfo.cpp:416
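A sketch of the above/below exchange at the ParallelWellInfo level; `wellInfo`, `perfPressure`, and `bhp` are assumed names, and the vector overload corresponds to the declaration ending at line 320 of the listing above:

    // perfPressure holds one pressure per local perforation of this rank.
    // Values of the perforation directly above each one; bhp stands in for the
    // topmost perforation, which has no perforation above it.
    std::vector<double> pAbove = wellInfo.communicateAboveValues(bhp, perfPressure);
    // Analogously for the perforation below, here using the pointer/size overload;
    // the chosen last_value (0.0) is only a placeholder for the bottommost perforation.
    std::vector<double> pBelow = wellInfo.communicateBelowValues(0.0, perfPressure.data(), perfPressure.size());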
std::iterator_traits< It >::value_type sumPerfValues(It begin, It end) const
Sum all the values of the perforations.
Definition: ParallelWellInfo.hpp:372
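For example, summing per-perforation inflow rates over all ranks sharing the well gives the total well rate; `wellInfo`, `numLocalPerfs`, and `localRates` are assumed names:

    std::vector<double> localRates(numLocalPerfs, 0.0);   // one inflow rate per local perforation
    // Accumulates the local values and sums the result over the well's communicator.
    double totalWellRate = wellInfo.sumPerfValues(localRates.begin(), localRates.end());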
T broadcastFirstPerforationValue(const T &t) const
If the well does not have any open connections the member rankWithFirstPerf is not initialized,...
Definition: ParallelWellInfo.hpp:289
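A sketch of broadcasting a quantity that only the rank owning the first (topmost) perforation knows, e.g. the depth of its cell; `hasTopPerforation` and `topPerfCellDepth` are assumed names and the dummy value on the other ranks is arbitrary:

    double firstPerfDepth = hasTopPerforation ? topPerfCellDepth : 0.0;
    // Every rank sharing the well receives the value held by the rank with the first perforation.
    double depth0 = wellInfo.broadcastFirstPerforationValue(firstPerfDepth);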
void beginReset()
Indicate that we will reset the ecl index information.
Definition: ParallelWellInfo.cpp:395
void partialSumPerfValues(RAIterator begin, RAIterator end) const
Do an in-place partial sum on the values attached to all perforations.
Definition: ParallelWellInfo.hpp:388
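For instance, a prefix sum over per-perforation inflows yields, for each perforation, the accumulated flow of it and all perforations preceding it in ECL index order; `wellInfo`, `numLocalPerfs`, and `inflow` are assumed names:

    std::vector<double> inflow(numLocalPerfs, 0.0);   // one inflow value per local perforation
    // In place: afterwards inflow[i] is the partial sum up to and including perforation i,
    // computed consistently across all ranks sharing the well.
    wellInfo.partialSumPerfValues(inflow.begin(), inflow.end());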
void endReset()
Indicate completion of the reset of the ecl index information.
Definition: ParallelWellInfo.cpp:401
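Putting the setup members together, a sketch of how a ParallelWellInfo is typically initialized; `wellName`, `allComm`, `rankHasFirstPerforation`, and `localConnections` are assumed names:

    ParallelWellInfo wellInfo({wellName, /*hasLocalCells=*/true}, allComm);
    // Collectively determine which rank holds the first (topmost) perforation.
    wellInfo.communicateFirstPerforation(rankHasFirstPerforation);
    // Register the ECL indices of the perforations stored on this rank.
    wellInfo.beginReset();
    for (const auto& conn : localConnections)      // hypothetical list of local connections
        wellInfo.pushBackEclIndex(conn.eclIndexAbove, conn.eclIndex);
    wellInfo.endReset();                           // ECL index information is complete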