ThunderEgg  1.0.0
Domain.h
1 /***************************************************************************
2  * ThunderEgg, a library for solvers on adaptively refined block-structured
3  * Cartesian grids.
4  *
5  * Copyright (c) 2019-2021 Scott Aiton
6  *
7  * This program is free software: you can redistribute it and/or modify
8  * it under the terms of the GNU General Public License as published by
9  * the Free Software Foundation, either version 3 of the License, or
10  * (at your option) any later version.
11  *
12  * This program is distributed in the hope that it will be useful,
13  * but WITHOUT ANY WARRANTY; without even the implied warranty of
14  * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15  * GNU General Public License for more details.
16  *
17  * You should have received a copy of the GNU General Public License
18  * along with this program. If not, see <https://www.gnu.org/licenses/>.
19  ***************************************************************************/
20 
21 #ifndef THUNDEREGG_DOMAIN_H
22 #define THUNDEREGG_DOMAIN_H
23 
28 #include <ThunderEgg/PatchInfo.h>
29 #include <ThunderEgg/Timer.h>
30 #include <map>
31 #include <set>
32 #include <vector>
33 
34 namespace ThunderEgg {
49 template<int D>
50 class Domain
51 {
52 private:
56  Communicator comm;
60  int id = -1;
64  std::array<int, D> ns;
68  int num_ghost_cells;
72  int num_cells_in_patch;
76  int num_cells_in_patch_with_ghost;
81  std::vector<PatchInfo<D>> pinfos;
85  int global_num_patches = 1;
89  mutable std::shared_ptr<Timer> timer;
90 
94  void indexPatchesLocal()
95  {
96  // index patches
97  int curr_index = 0;
98  std::map<int, int> id_to_local_index;
99  for (auto& pinfo : pinfos) {
100  pinfo.local_index = curr_index;
101  id_to_local_index[pinfo.id] = pinfo.local_index;
102  curr_index++;
103  }
104 
105  // set local index in nbrinfo objects
106  for (auto& pinfo : pinfos) {
107  pinfo.setNeighborLocalIndexes(id_to_local_index);
108  }
109  }
113  void indexPatchesGlobal()
114  {
115  // get starting global index
116  int num_local_patches = (int)pinfos.size();
117  int curr_global_index;
118  MPI_Scan(&num_local_patches, &curr_global_index, 1, MPI_INT, MPI_SUM, comm.getMPIComm());
119  curr_global_index -= num_local_patches;
120 
121  // index the patches
122  std::map<int, int> id_to_global_index;
123  for (auto& pinfo : pinfos) {
124  pinfo.global_index = curr_global_index;
125  id_to_global_index[pinfo.id] = pinfo.global_index;
126  curr_global_index++;
127  }
128 
129  std::map<int, std::set<std::pair<int, int>>> ranks_to_ids_and_global_indexes_outgoing;
130  std::map<int, std::set<int>> ranks_to_ids_incoming;
131  for (auto& pinfo : pinfos) {
132  auto ranks = pinfo.getNbrRanks();
133  auto ids = pinfo.getNbrIds();
134  for (size_t idx = 0; idx < ranks.size(); idx++) {
135  int nbr_id = ids[idx];
136  int nbr_rank = ranks[idx];
137  if (nbr_rank != comm.getRank()) {
138  ranks_to_ids_and_global_indexes_outgoing[nbr_rank].insert(
139  std::make_pair(pinfo.id, pinfo.global_index));
140  ranks_to_ids_incoming[nbr_rank].insert(nbr_id);
141  }
142  }
143  }
144 
145  // prepare to receive data
146  std::vector<MPI_Request> recv_requests;
147 
148  // allocate incoming vectors and post recvs
149  std::map<int, std::vector<int>> rank_to_incoming_data;
150  for (const auto& pair : ranks_to_ids_incoming) {
151  int source_rank = pair.first;
152  std::vector<int>& incoming_data = rank_to_incoming_data[source_rank];
153  incoming_data.resize(pair.second.size());
154 
155  MPI_Request request;
156  MPI_Irecv(incoming_data.data(),
157  (int)incoming_data.size(),
158  MPI_INT,
159  source_rank,
160  0,
161  comm.getMPIComm(),
162  &request);
163  recv_requests.push_back(request);
164  }
165 
166  // prepare outgoing vector of data and send it
167  std::vector<MPI_Request> send_requests;
168 
169  // post sends
170  std::map<int, std::vector<int>> rank_to_outgoing_data;
171  for (const auto& pair : ranks_to_ids_and_global_indexes_outgoing) {
172  int dest_rank = pair.first;
173  // allocate and fill vector
174  std::vector<int>& data = rank_to_outgoing_data[dest_rank];
175  data.reserve(pair.second.size());
176  for (const auto& id_and_global_index : pair.second) {
177  data.push_back(id_and_global_index.second);
178  }
179  MPI_Request request;
180  MPI_Isend(data.data(), (int)data.size(), MPI_INT, dest_rank, 0, comm.getMPIComm(), &request);
181  send_requests.push_back(request);
182  }
183 
184  // add global indexes to map as recvs come in
185  for (size_t i = 0; i < recv_requests.size(); i++) {
186  MPI_Status status;
187  int request_index;
188  MPI_Waitany((int)recv_requests.size(), recv_requests.data(), &request_index, &status);
189 
190  int source_rank = status.MPI_SOURCE;
191  const std::set<int>& incoming_ids = ranks_to_ids_incoming[source_rank];
192  const std::vector<int>& data = rank_to_incoming_data[source_rank];
193 
194  auto curr_id = incoming_ids.cbegin();
195  auto curr_global_index_iter = data.cbegin();
196  while (curr_id != incoming_ids.cend()) {
197  id_to_global_index[*curr_id] = *curr_global_index_iter;
198  curr_id++;
199  curr_global_index_iter++;
200  }
201  }
202 
203  // wait for all the sends to finish
204  MPI_Waitall((int)send_requests.size(), send_requests.data(), MPI_STATUSES_IGNORE);
205 
206  // update global indexes in nbrinfo objects
207  for (auto& pinfo : pinfos) {
208  pinfo.setNeighborGlobalIndexes(id_to_global_index);
209  }
210  }
211 
212 public:
223  template<class InputIterator>
224  Domain(Communicator comm,
225  int id,
226  std::array<int, D> ns,
227  int num_ghost_cells,
228  InputIterator first_pinfo,
229  InputIterator last_pinfo)
230  : comm(comm)
231  , id(id)
232  , ns(ns)
233  , num_ghost_cells(num_ghost_cells)
234  , pinfos(first_pinfo, last_pinfo)
235  {
236  num_cells_in_patch = 1;
237  num_cells_in_patch_with_ghost = 1;
238  for (size_t i = 0; i < D; i++) {
239  num_cells_in_patch *= ns[i];
240  num_cells_in_patch_with_ghost *= (ns[i] + 2 * num_ghost_cells);
241  }
242 
243  int num_local_domains = (int)pinfos.size();
244  MPI_Allreduce(&num_local_domains, &global_num_patches, 1, MPI_INT, MPI_SUM, comm.getMPIComm());
245 
246  indexPatchesLocal();
247  indexPatchesGlobal();
248  }
254  const Communicator& getCommunicator() const { return comm; }
259  const std::vector<PatchInfo<D>>& getPatchInfoVector() const { return pinfos; }
264  const std::array<int, D>& getNs() const { return ns; }
268  int getNumGlobalPatches() const { return global_num_patches; }
272  int getNumLocalPatches() const { return (int)pinfos.size(); }
276  int getNumGlobalCells() const { return global_num_patches * num_cells_in_patch; }
280  int getNumLocalCells() const { return ((int)pinfos.size()) * num_cells_in_patch; }
284  int getNumLocalCellsWithGhost() const
285  {
286  return ((int)pinfos.size()) * num_cells_in_patch_with_ghost;
287  }
291  int getNumCellsInPatch() const { return num_cells_in_patch; }
295  int getNumGhostCells() const { return num_ghost_cells; }
301  double volume() const
302  {
303  double sum = 0;
304  for (auto& pinfo : pinfos) {
305  double patch_vol = 1;
306  for (size_t i = 0; i < D; i++) {
307  patch_vol *= pinfo.spacings[i] * pinfo.ns[i];
308  }
309  sum += patch_vol;
310  }
311  double retval;
312  MPI_Allreduce(&sum, &retval, 1, MPI_DOUBLE, MPI_SUM, comm.getMPIComm());
313  return retval;
314  }
320  void setTimer(std::shared_ptr<Timer> timer) const
321  {
322  this->timer = timer;
323  timer->addDomain(id, *this);
324  }
330  std::shared_ptr<Timer> getTimer() const { return timer; }
335  bool hasTimer() const { return timer != nullptr; }
341  int getId() const { return id; }
342 };
343 
344 template<int D>
345 void
346 to_json(tpl::nlohmann::json& j, const Domain<D>& domain)
347 {
348  for (const auto& pinfo : domain.getPatchInfoVector()) {
349  j.push_back(pinfo);
350  }
351 }
352 
353 extern template class Domain<2>;
354 extern template class Domain<3>;
355 extern template void
356 to_json<2>(tpl::nlohmann::json& j, const Domain<2>& domain);
357 extern template void
358 to_json<3>(tpl::nlohmann::json& j, const Domain<3>& domain);
359 } // namespace ThunderEgg
360 #endif
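The private indexPatchesGlobal() method above assigns every patch a contiguous global index by first computing an exclusive prefix sum of the per-rank patch counts with MPI_Scan, then trading those indexes with neighboring ranks through non-blocking sends and receives. The following is a minimal standalone sketch of just the prefix-sum step, using plain MPI with a hypothetical per-rank patch count; it is not part of ThunderEgg.

  #include <mpi.h>
  #include <cstdio>

  int main(int argc, char** argv)
  {
    MPI_Init(&argc, &argv);
    int rank;
    MPI_Comm_rank(MPI_COMM_WORLD, &rank);

    int num_local_patches = rank + 1; // hypothetical: rank 0 owns 1 patch, rank 1 owns 2, ...

    // Inclusive prefix sum over ranks, then subtract the local count to get
    // the exclusive prefix: the first global index owned by this rank.
    int start_global_index;
    MPI_Scan(&num_local_patches, &start_global_index, 1, MPI_INT, MPI_SUM, MPI_COMM_WORLD);
    start_global_index -= num_local_patches;

    // Patches on this rank then receive indexes start_global_index + 0, 1, 2, ...
    std::printf("rank %d: first global index %d\n", rank, start_global_index);

    MPI_Finalize();
    return 0;
  }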
ThunderEgg::Domain::hasTimer
bool hasTimer() const
Check if the Domain has a timer associated with it.
Definition: Domain.h:335
ThunderEgg::Domain::getTimer
std::shared_ptr< Timer > getTimer() const
Get the Timer object.
Definition: Domain.h:330
ThunderEgg::Communicator::getMPIComm
MPI_Comm getMPIComm() const
Get the raw MPI_Comm object.
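For example, the raw communicator can be passed to MPI collectives directly, analogous to what Domain::volume() does internally. A small sketch (the helper name is illustrative, not part of the library):

  #include <ThunderEgg/Domain.h>
  #include <mpi.h>

  // Largest number of patches owned by any single rank.
  int globalMaxLocalPatches(const ThunderEgg::Domain<2>& domain)
  {
    int local = domain.getNumLocalPatches();
    int global_max;
    MPI_Allreduce(&local, &global_max, 1, MPI_INT, MPI_MAX,
                  domain.getCommunicator().getMPIComm());
    return global_max;
  }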
ThunderEgg::Domain::getNs
const std::array< int, D > & getNs() const
Get the number of cells in each direction.
Definition: Domain.h:264
ThunderEgg::Domain::getNumLocalCells
int getNumLocalCells() const
Get the number of local cells.
Definition: Domain.h:280
ThunderEgg::Domain::getNumGlobalCells
int getNumGlobalCells() const
Get the number of global cells.
Definition: Domain.h:276
ThunderEgg::Domain
Uses a collection of PatchInfo objects to represent the domain of the problem.
Definition: Domain.h:50
ThunderEgg::Domain::getNumCellsInPatch
int getNumCellsInPatch() const
Get the number of cells in a patch.
Definition: Domain.h:291
ThunderEgg::Domain::getNumLocalPatches
int getNumLocalPatches() const
Get the number of local patches.
Definition: Domain.h:272
ThunderEgg::Domain::getCommunicator
const Communicator & getCommunicator() const
Get the Communicator object associated with this domain.
Definition: Domain.h:254
ThunderEgg::Domain::volume
double volume() const
Get the volume of the domain.
Definition: Domain.h:301
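Each patch contributes the product over all directions of its cell spacing times its cell count, and the per-rank sums are then combined with MPI_Allreduce. A small sketch of the per-patch arithmetic with hypothetical 2D numbers (not taken from the library):

  #include <array>

  // Mirrors the per-patch loop in Domain::volume(): an 8x8 patch with
  // spacing 0.125 in each direction covers a 1.0 x 1.0 square, so the
  // patch volume is 1.0.
  double examplePatchVolume()
  {
    std::array<int, 2>    ns       = {8, 8};
    std::array<double, 2> spacings = {0.125, 0.125};
    double patch_vol = 1;
    for (int i = 0; i < 2; i++) {
      patch_vol *= spacings[i] * ns[i];
    }
    return patch_vol; // 1.0
  }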
Timer.h
Timer class.
ThunderEgg::Communicator::getRank
int getRank() const
Get the rank of this processor.
ThunderEgg
The ThunderEgg namespace.
Definition: BiLinearGhostFiller.h:31
PatchInfo.h
PatchInfo class.
ThunderEgg::Domain::Domain
Domain(Communicator comm, int id, std::array< int, D > ns, int num_ghost_cells, InputIterator first_pinfo, InputIterator last_pinfo)
Construct a new Domain object.
Definition: Domain.h:224
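A minimal construction sketch for a single-patch 2D domain. The Communicator(MPI_Comm) constructor and the public PatchInfo fields set here are assumptions based on how they are used elsewhere in this header; a real mesh generator would also fill in neighbor information.

  #include <ThunderEgg/Domain.h>
  #include <mpi.h>
  #include <array>
  #include <vector>

  ThunderEgg::Domain<2> makeSinglePatchDomain()
  {
    // assumed: Communicator wraps an existing MPI communicator
    ThunderEgg::Communicator comm(MPI_COMM_WORLD);

    std::vector<ThunderEgg::PatchInfo<2>> pinfos(1);
    pinfos[0].id       = 0;
    pinfos[0].ns       = {16, 16};
    pinfos[0].spacings = {1.0 / 16, 1.0 / 16};

    std::array<int, 2> ns              = {16, 16};
    int                num_ghost_cells = 1;
    int                domain_id       = 0;

    // The constructor copies the PatchInfo range and assigns local and
    // global indexes to every patch.
    return ThunderEgg::Domain<2>(
      comm, domain_id, ns, num_ghost_cells, pinfos.begin(), pinfos.end());
  }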
ThunderEgg::Domain::getNumGhostCells
int getNumGhostCells() const
Get the number of ghost cells on each side of a patch.
Definition: Domain.h:295
ThunderEgg::Domain::getNumGlobalPatches
int getNumGlobalPatches() const
Get the number of global patches.
Definition: Domain.h:268
ThunderEgg::Communicator
Wrapper around MPI_Comm; provides proper copy operators. Classes that have a communicator are meant t...
Definition: Communicator.h:36
ThunderEgg::Domain::getPatchInfoVector
const std::vector< PatchInfo< D > > & getPatchInfoVector() const
Get the vector of PatchInfo objects, where the index in the vector corresponds to the patch's local index.
Definition: Domain.h:259
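Because the vector is ordered by local index, looping over it by position and looping over local indexes are equivalent. A short illustrative helper (not part of the library):

  #include <ThunderEgg/Domain.h>
  #include <cstdio>

  void printLocalPatches(const ThunderEgg::Domain<2>& domain)
  {
    const auto& pinfos = domain.getPatchInfoVector();
    for (int i = 0; i < domain.getNumLocalPatches(); i++) {
      // pinfos[i].local_index == i; global_index is unique across all ranks
      std::printf("patch id %d: local index %d, global index %d\n",
                  pinfos[i].id, pinfos[i].local_index, pinfos[i].global_index);
    }
  }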
ThunderEgg::Domain::setTimer
void setTimer(std::shared_ptr< Timer > timer) const
Set the Timer object.
Definition: Domain.h:320
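A short usage sketch with an already-constructed Timer; note that setTimer() also registers the domain with the timer through Timer::addDomain, and that it is const because the timer member is mutable.

  #include <ThunderEgg/Domain.h>
  #include <memory>

  void attachTimer(const ThunderEgg::Domain<2>& domain,
                   std::shared_ptr<ThunderEgg::Timer> timer)
  {
    domain.setTimer(timer);
    if (domain.hasTimer()) {
      // returns the same pointer that was just set
      std::shared_ptr<ThunderEgg::Timer> same = domain.getTimer();
    }
  }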
ThunderEgg::Domain::getId
int getId() const
Get the domain's id.
Definition: Domain.h:341
ThunderEgg::Domain::getNumLocalCellsWithGhost
int getNumLocalCellsWithGhost() const
Get the number of local cells (including ghost cells).
Definition: Domain.h:284
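The cell-count accessors are related by simple products, which the following sanity-check sketch spells out; the 8x8 patch size and single ghost layer mentioned in the comments are hypothetical values.

  #include <ThunderEgg/Domain.h>
  #include <array>
  #include <cassert>

  void checkCellCounts(const ThunderEgg::Domain<2>& domain)
  {
    int per_patch = domain.getNumCellsInPatch(); // e.g. 8 * 8 = 64 for ns = {8, 8}
    int g         = domain.getNumGhostCells();   // e.g. 1

    assert(domain.getNumLocalCells() == per_patch * domain.getNumLocalPatches());
    assert(domain.getNumGlobalCells() == per_patch * domain.getNumGlobalPatches());

    // With ghost cells each patch has (ns[i] + 2 * g) cells per direction,
    // e.g. (8 + 2) * (8 + 2) = 100 cells for ns = {8, 8} and g = 1.
    const std::array<int, 2>& ns = domain.getNs();
    int per_patch_with_ghost = (ns[0] + 2 * g) * (ns[1] + 2 * g);
    assert(domain.getNumLocalCellsWithGhost() ==
           per_patch_with_ghost * domain.getNumLocalPatches());
  }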