/*
 *    vmg - a versatile multigrid solver
 *    Copyright (C) 2012 Institute for Numerical Simulation, University of Bonn
 *
 *  vmg is free software: you can redistribute it and/or modify
 *  it under the terms of the GNU General Public License as published by
 *  the Free Software Foundation, either version 3 of the License, or
 *  (at your option) any later version.
 *
 *  vmg is distributed in the hope that it will be useful,
 *  but WITHOUT ANY WARRANTY; without even the implied warranty of
 *  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 *  GNU General Public License for more details.
 *
 *  You should have received a copy of the GNU General Public License
 *  along with this program.  If not, see <http://www.gnu.org/licenses/>.
 */

/**
 * @file   comm_mpi.cpp
 * @author Julian Iseringhausen <isering@ins.uni-bonn.de>
 * @date   Wed Jun 16 13:21:06 2010
 *
 * @brief  Class for MPI-based communication.
 *
 */

#ifdef HAVE_CONFIG_H
#include <libvmg_config.h>
#endif

#ifdef HAVE_MPI

#include <mpi.h>
#ifdef HAVE_MARMOT
#include <enhancempicalls.h>
#include <sourceinfompicalls.h>
#endif

#ifdef HAVE_BOOST_FILESYSTEM_PATH_HPP
#include <boost/filesystem.hpp>
namespace fs = boost::filesystem;
#endif

#ifdef HAVE_VTK
#include <vtkAbstractArray.h>
#include <vtkImageData.h>
#include <vtkPointData.h>
#include <vtkSmartPointer.h>
#include <vtkXMLImageDataWriter.h>
#endif

#include <cassert>
#include <cstdarg>
#include <cstdio>
#include <cstdlib>
#include <cstring>
#include <ctime>
#include <iomanip>
#include <sstream>

#include "base/helper.hpp"
#include "base/tuple.hpp"
#include "comm/comm_mpi.hpp"
#include "comm/mpi/datatypes_local.hpp"
#include "discretization/boundary_value_setter.hpp"
#include "grid/grid.hpp"
#include "grid/multigrid.hpp"
#include "grid/tempgrid.hpp"
#include "mg.hpp"
#include "base/timer.hpp"

static char print_buffer[512];

using namespace VMG;

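/*
 * The *All helpers below post one non-blocking send or receive per datatype
 * in the given list; completion is handled by the callers through
 * WaitAll()/TestAll().
 */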
void CommMPI::IsendAll(Grid& grid, std::vector<VMG::MPI::Datatype>& types, const MPI_Comm& comm, const int& tag_start)
{
  for (std::vector<VMG::MPI::Datatype>::const_iterator iter=types.begin(); iter!=types.end(); ++iter)
    iter->Isend(grid, tag_start, comm, Request());
}

void CommMPI::IrecvAll(Grid& grid, std::vector<VMG::MPI::Datatype>& types, const MPI_Comm& comm, const int& tag_start)
{
  for (std::vector<VMG::MPI::Datatype>::const_iterator iter=types.begin(); iter!=types.end(); ++iter)
    iter->Irecv(grid, tag_start, comm, Request());
}

void CommMPI::IsendAllBuffered(const Grid& grid, std::vector<VMG::MPI::Datatype>& types, const MPI_Comm& comm, const int& tag_start)
{
  for (std::vector<VMG::MPI::Datatype>::iterator iter=types.begin(); iter!=types.end(); ++iter)
    iter->IsendBuffered(grid, tag_start, comm, Request());
}

void CommMPI::IrecvAllBuffered(std::vector<VMG::MPI::Datatype>& types, const MPI_Comm& comm, const int& tag_start)
{
  for (std::vector<VMG::MPI::Datatype>::iterator iter=types.begin(); iter!=types.end(); ++iter)
    iter->IrecvBuffered(tag_start, comm, Request());
}

void CommMPI::ReplaceBufferAll(Grid& grid, const std::vector<VMG::MPI::Datatype>& types)
{
  for (std::vector<VMG::MPI::Datatype>::const_iterator iter=types.begin(); iter!=types.end(); ++iter)
    iter->GridReplace(grid);
}

void CommMPI::AddBufferAll(Grid& grid, const std::vector<VMG::MPI::Datatype>& types)
{
  for (std::vector<VMG::MPI::Datatype>::const_iterator iter=types.begin(); iter!=types.end(); ++iter)
    iter->GridSum(grid);
}

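/*
 * CommSubgrid/CommAddSubgrid transfer values between two grids that are
 * distributed differently over the processes (e.g. between multigrid
 * levels): received buffers either replace or are added to the values of
 * grid_new.
 */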
void CommMPI::CommSubgrid(Grid& grid_old, Grid& grid_new, const int& direction)
{
  MPI_Comm comm = settings.CommunicatorGlobal(grid_old);
  if (comm != MPI_COMM_NULL) {
    VMG::MPI::DatatypesGlobal& datatypes = settings.DatatypesGlobal(grid_old, grid_new, direction);
    IrecvAllBuffered(datatypes.Receive(), comm, 0411);
    IsendAllBuffered(grid_old, datatypes.Send(), comm, 0411);
    WaitAll();
    ReplaceBufferAll(grid_new, datatypes.Receive());
  }
}

void CommMPI::CommAddSubgrid(Grid& grid_old, Grid& grid_new, const int& direction)
{
  MPI_Comm comm = settings.CommunicatorGlobal(grid_old);
  if (comm != MPI_COMM_NULL) {
    VMG::MPI::DatatypesGlobal& datatypes = settings.DatatypesGlobal(grid_old, grid_new, direction);
    IrecvAllBuffered(datatypes.Receive(), comm, 1806);
    IsendAllBuffered(grid_old, datatypes.Send(), comm, 1806);
    WaitAll();
    AddBufferAll(grid_new, datatypes.Receive());
  }
}

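/*
 * Sums the boundary value contributions of all processes and returns the
 * values owned by the calling rank. Contributions are grouped by owning
 * rank so that a single MPI_Reduce_scatter distributes the summed values.
 */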
std::vector<BoundaryValue> CommMPI::CommBoundaryValues()
{
  MPI_Comm comm = settings.CommunicatorLocal((*MG::GetRhs())(MG::GetRhs()->MaxLevel()));
  if (comm != MPI_COMM_NULL) {

    const std::vector<BoundaryValue>& bvs = MG::GetBoundaryValueSetter()->BoundaryValues();

    std::vector<vmg_float> val_buffer; val_buffer.reserve(bvs.size());

    int comm_size, comm_rank;
    MPI_Comm_rank(comm, &comm_rank);
    MPI_Comm_size(comm, &comm_size);

    std::vector< std::vector<BoundaryValue> > bvs_distributed(comm_size);

    for (unsigned int i=0; i<bvs.size(); ++i)
      bvs_distributed[settings.BoundaryRanks()[i]].push_back(bvs[i]);

    std::vector<int> bvs_count(comm_size);
    for (int i=0; i<comm_size; ++i) {
      bvs_count[i] = bvs_distributed[i].size();
      for (unsigned int j=0; j<bvs_distributed[i].size(); ++j)
        val_buffer.push_back(bvs_distributed[i][j].Val());
    }

    MPI_Reduce_scatter(MPI_IN_PLACE, &val_buffer.front(), &bvs_count.front(), MPI_DOUBLE, MPI_SUM, comm);

    int begin = 0;
    for (int i=0; i<comm_rank; ++i)
      begin += bvs_distributed[i].size();

    for (unsigned int i=0; i<bvs_distributed[comm_rank].size(); ++i)
      bvs_distributed[comm_rank][i].Val() = val_buffer[begin+i];

    return bvs_distributed[comm_rank];
  }

  return std::vector<BoundaryValue>();
}

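/*
 * Fills the halo (ghost) layer of the local grid part with values received
 * from the neighboring processes.
 */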
void CommMPI::CommToGhosts(Grid& grid)
{
  MPI_Comm comm = settings.CommunicatorLocal(grid);
  if (comm != MPI_COMM_NULL) {
    VMG::MPI::DatatypesLocal& types = settings.DatatypesLocal(grid);
    IrecvAllBuffered(types.Halo(), comm, 2310);
    IsendAllBuffered(grid, types.NB(), comm, 2310);
    WaitAll();
    ReplaceBufferAll(grid, types.Halo());
  }
}

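/*
 * The Async variants split the halo exchange into a start and a finish phase
 * so that interior computation can overlap the communication. Typical usage
 * (sketch):
 *
 *   comm.CommToGhostsAsyncStart(grid);   // post Isend/Irecv
 *   // ... work on interior points ...
 *   comm.CommToGhostsAsyncFinish(grid);  // wait and unpack the halo
 */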
void CommMPI::CommToGhostsAsyncStart(Grid& grid)
{
  MPI_Comm comm = settings.CommunicatorLocal(grid);
  if (comm != MPI_COMM_NULL) {
    VMG::MPI::DatatypesLocal& types = settings.DatatypesLocal(grid);
    IrecvAllBuffered(types.Halo(), comm, 2412);
    IsendAllBuffered(grid, types.NB(), comm, 2412);
    TestAll();
  }
}

void CommMPI::CommToGhostsAsyncFinish(Grid& grid)
{
  WaitAll();
  ReplaceBufferAll(grid, settings.DatatypesLocal(grid).Halo());
}

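/*
 * Accumulates the contributions stored in the neighbors' halo layers back
 * onto the locally owned cells (the reverse operation of CommToGhosts).
 */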
void CommMPI::CommFromGhosts(Grid& grid)
{
  MPI_Comm comm = settings.CommunicatorLocal(grid);
  if (comm != MPI_COMM_NULL) {
    VMG::MPI::DatatypesLocal& types = settings.DatatypesLocal(grid);
    IrecvAllBuffered(types.NB(), comm, 1337);
    IsendAllBuffered(grid, types.Halo(), comm, 1337);
    WaitAll();
    AddBufferAll(grid, types.NB());
  }
}

void CommMPI::CommFromGhostsAsyncStart(Grid& grid)
{
  MPI_Comm comm = settings.CommunicatorLocal(grid);
  if (comm != MPI_COMM_NULL) {
    VMG::MPI::DatatypesLocal& types = settings.DatatypesLocal(grid);
    IrecvAllBuffered(types.NB(), comm, 0xc0ffee);
    IsendAllBuffered(grid, types.Halo(), comm, 0xc0ffee);
    TestAll();
  }
}

void CommMPI::CommFromGhostsAsyncFinish(Grid& grid)
{
  WaitAll();
  AddBufferAll(grid, settings.DatatypesLocal(grid).NB());
}

void CommMPI::Barrier()
{
  MPI_Barrier(comm_global);
}

vmg_float CommMPI::GlobalSum(vmg_float value)
{
  vmg_float result = 0;
  MPI_Allreduce(&value, &result, 1, MPI_DOUBLE, MPI_SUM, comm_global);
  return result;
}

vmg_float CommMPI::GlobalSumRoot(vmg_float value)
{
  vmg_float result = 0;
  MPI_Reduce(&value, &result, 1, MPI_DOUBLE, MPI_SUM, 0, comm_global);
  return result;
}

void CommMPI::GlobalSumArray(vmg_float* array, const vmg_int& size)
{
  MPI_Allreduce(MPI_IN_PLACE, array, size, MPI_DOUBLE, MPI_SUM, comm_global);
}

void CommMPI::GlobalBroadcast(vmg_float& value)
{
  MPI_Bcast(&value, 1, MPI_DOUBLE, 0, comm_global);
}

void CommMPI::GlobalGather(vmg_float& value, vmg_float* array)
{
  MPI_Gather(&value, 1, MPI_DOUBLE, array, 1, MPI_DOUBLE, 0, comm_global);
}

vmg_int CommMPI::GlobalSum(vmg_int value)
{
  vmg_int result = 0;
  MPI_Allreduce(&value, &result, 1, MPI_INT, MPI_SUM, comm_global);
  return result;
}

vmg_int CommMPI::GlobalSumRoot(vmg_int value)
{
  vmg_int result = 0;
  MPI_Reduce(&value, &result, 1, MPI_INT, MPI_SUM, 0, comm_global);
  return result;
}

void CommMPI::GlobalSumArray(vmg_int* array, const vmg_int& size)
{
  MPI_Allreduce(MPI_IN_PLACE, array, size, MPI_INT, MPI_SUM, comm_global);
}

vmg_float CommMPI::GlobalMax(vmg_float value)
{
  vmg_float result = 0.0;
  MPI_Allreduce(&value, &result, 1, MPI_DOUBLE, MPI_MAX, comm_global);
  return result;
}

vmg_float CommMPI::GlobalMaxRoot(vmg_float value)
{
  vmg_float result = 0.0;
  MPI_Reduce(&value, &result, 1, MPI_DOUBLE, MPI_MAX, 0, comm_global);
  return result;
}

void CommMPI::GlobalMaxArray(vmg_float* array, const vmg_int& size)
{
  MPI_Allreduce(MPI_IN_PLACE, array, size, MPI_DOUBLE, MPI_MAX, comm_global);
}

vmg_int CommMPI::GlobalMax(vmg_int value)
{
  vmg_int result = 0;
  MPI_Allreduce(&value, &result, 1, MPI_INT, MPI_MAX, comm_global);
  return result;
}

vmg_int CommMPI::GlobalMaxRoot(vmg_int value)
{
  vmg_int result = 0;
  MPI_Reduce(&value, &result, 1, MPI_INT, MPI_MAX, 0, comm_global);
  return result;
}

void CommMPI::GlobalMaxArray(vmg_int* array, const vmg_int& size)
{
  MPI_Allreduce(MPI_IN_PLACE, array, size, MPI_INT, MPI_MAX, comm_global);
}

void CommMPI::GlobalBroadcast(vmg_int& value)
{
  MPI_Bcast(&value, 1, MPI_INT, 0, comm_global);
}

void CommMPI::GlobalGather(vmg_int& value, vmg_int* array)
{
  MPI_Gather(&value, 1, MPI_INT, array, 1, MPI_INT, 0, comm_global);
}

void CommMPI::GlobalBroadcast(char* str)
{
  int size = std::strlen(str) + 1;
  MPI_Bcast(&size, 1, MPI_INT, 0, comm_global);
  MPI_Bcast(str, size, MPI_CHAR, 0, comm_global);
}

vmg_float CommMPI::LevelSum(const Grid& grid, vmg_float value)
{
  vmg_float result = 0.0;
  MPI_Comm comm = settings.CommunicatorLocal(grid);
  assert(comm != MPI_COMM_NULL);
  MPI_Allreduce(&value, &result, 1, MPI_DOUBLE, MPI_SUM, comm);
  return result;
}

vmg_float CommMPI::LevelSumRoot(const Grid& grid, vmg_float value)
{
  vmg_float result = 0.0;
  MPI_Comm comm = settings.CommunicatorLocal(grid);
  assert(comm != MPI_COMM_NULL);
  MPI_Reduce(&value, &result, 1, MPI_DOUBLE, MPI_SUM, 0, comm);
  return result;
}

void CommMPI::LevelSumArray(const Grid& grid, vmg_float* array, const vmg_int& size)
{
  MPI_Comm comm = settings.CommunicatorLocal(grid);
  assert(comm != MPI_COMM_NULL);
  MPI_Allreduce(MPI_IN_PLACE, array, size, MPI_DOUBLE, MPI_SUM, comm);
}

vmg_int CommMPI::LevelSum(const Grid& grid, vmg_int value)
{
  vmg_int result = 0;
  MPI_Comm comm = settings.CommunicatorLocal(grid);
  assert(comm != MPI_COMM_NULL);
  MPI_Allreduce(&value, &result, 1, MPI_INT, MPI_SUM, comm);
  return result;
}

vmg_int CommMPI::LevelSumRoot(const Grid& grid, vmg_int value)
{
  vmg_int result = 0;
  MPI_Comm comm = settings.CommunicatorLocal(grid);
  assert(comm != MPI_COMM_NULL);
  MPI_Reduce(&value, &result, 1, MPI_INT, MPI_SUM, 0, comm);
  return result;
}

void CommMPI::LevelSumArray(const Grid& grid, vmg_int* array, const vmg_int& size)
{
  MPI_Comm comm = settings.CommunicatorLocal(grid);
  assert(comm != MPI_COMM_NULL);
  MPI_Allreduce(MPI_IN_PLACE, array, size, MPI_INT, MPI_SUM, comm);
}

//TODO: Variable length buffer
void CommMPI::Print(const OutputLevel level, const char* format, ...)
{
  bool print = (level == Output);

#ifdef OUTPUT_INFO
  print |= (level == Info);
#endif
#ifdef OUTPUT_DEBUG
  print |= (level == Debug);
#endif
#ifdef OUTPUT_TIMING
  print |= (level == Timing);
#endif

  if (print) {
    va_list args;
    va_start(args, format);
    vsnprintf(print_buffer, sizeof(print_buffer), format, args);
    printf("vmg: Rank %d: %s\n", GlobalRank(), print_buffer);
    va_end(args);
  }
}

void CommMPI::PrintOnce(const OutputLevel level, const char* format, ...)
{
  bool print = (level == Output);

#ifdef OUTPUT_INFO
  print |= (level == Info);
#endif
#ifdef OUTPUT_DEBUG
  print |= (level == Debug);
#endif
#ifdef OUTPUT_TIMING
  print |= (level == Timing);
#endif

  if (GlobalRank() == 0 && print) {
    va_list args;
    va_start(args, format);
    vsnprintf(print_buffer, sizeof(print_buffer), format, args);
    printf("vmg: Rank %d: %s\n", GlobalRank(), print_buffer);
    va_end(args);
  }
}

void CommMPI::PrintXML(const std::string& filename, const std::string& xml_data)
{
  MPI_File file;
  std::stringstream path, xml_header;

  pugi::xml_document doc;
  pugi::xml_node node_data = doc.append_child("Global").append_child("NumProcesses").append_child(pugi::node_pcdata);
  node_data.set_value(Helper::ToString(GlobalProcs().Product()).c_str());
  doc.save(xml_header);

  path << OutputPath() << filename;

  char* filename_array = Helper::GetCharArray(path.str());
  char* xml_header_array = Helper::GetCharArray(xml_header.str());
  char* str_array = Helper::GetCharArray(xml_data);

  MPI_File_open(MPI_COMM_SELF, filename_array, MPI_MODE_WRONLY|MPI_MODE_CREATE, MPI_INFO_NULL, &file);
  MPI_File_set_size(file, 0);
  MPI_File_write(file, xml_header_array, xml_header.str().size(), MPI_CHAR, MPI_STATUS_IGNORE);
  MPI_File_write(file, str_array, xml_data.size(), MPI_CHAR, MPI_STATUS_IGNORE);
  MPI_File_close(&file);

  delete [] filename_array;
  delete [] xml_header_array;
  delete [] str_array;
}

void CommMPI::PrintXMLAll(const std::string& filename, const std::string& xml_data)
{
  MPI_File file;
  std::stringstream path;

  path << OutputPath() << filename;

  char* filename_array = Helper::GetCharArray(path.str());
  char* str_array = Helper::GetCharArray(xml_data);

  MPI_File_open(comm_global, filename_array, MPI_MODE_WRONLY|MPI_MODE_CREATE, MPI_INFO_NULL, &file);
  MPI_File_set_size(file, 0);

  if (GlobalRank() == 0) {
    std::stringstream xml_header;
    pugi::xml_document doc;
    pugi::xml_node node_data = doc.append_child("Global").append_child("NumProcesses").append_child(pugi::node_pcdata);
    node_data.set_value(Helper::ToString(GlobalProcs().Product()).c_str());
    doc.save(xml_header);

    char* xml_header_array = Helper::GetCharArray(xml_header.str());

    MPI_File_write_shared(file, xml_header_array, xml_header.str().size(), MPI_CHAR, MPI_STATUS_IGNORE);

    delete [] xml_header_array;
  }

  MPI_File_write_ordered(file, str_array, xml_data.size(), MPI_CHAR, MPI_STATUS_IGNORE);
  MPI_File_close(&file);

  delete [] filename_array;
  delete [] str_array;
}

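/*
 * Appends global and per-rank layout information of the given grid to the
 * file denoted by filename, using ordered collective writes so that the
 * output of all ranks appears in rank order.
 */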
void CommMPI::PrintGridInformation(const Grid& grid, char* filename, const std::string& name)
{
  std::stringstream buf;
  MPI_File file;
  int rank, size;
  int size_local, size_local_max;

  MPI_Comm comm = settings.CommunicatorGlobal(grid);
  MPI_Comm comm_local = settings.CommunicatorLocal(grid);

  if (comm_local != MPI_COMM_NULL)
    MPI_Comm_size(comm_local, &size_local);
  else
    size_local = 0;

  if (comm != MPI_COMM_NULL) {

    MPI_Reduce(&size_local, &size_local_max, 1, MPI_INT, MPI_MAX, 0, comm);

    MPI_File_open(comm, filename, MPI_MODE_WRONLY|MPI_MODE_CREATE|MPI_MODE_APPEND, MPI_INFO_NULL, &file);

    MPI_Comm_rank(comm, &rank);
    MPI_Comm_size(comm, &size);

    if (rank == 0) {

      buf << "###########################################################" << std::endl
          << "GLOBAL INFORMATION:" << std::endl
          << "  NAME:                 " << name << std::endl
          << "  LEVEL:                " << grid.Level() << std::endl
          << "  COMM_SIZE_GLOBAL:     " << size << std::endl
          << "  COMM_SIZE_LOCAL:      " << size_local_max << std::endl
          << "  GLOBAL:" << std::endl
          << "    GLOBAL_BEGIN:            " << grid.Global().GlobalBegin() << std::endl
          << "    GLOBAL_END:              " << grid.Global().GlobalEnd() << std::endl
          << "    GLOBAL_SIZE:             " << grid.Global().GlobalSize() << std::endl
          << "    GLOBAL_BEGIN_FINEST:     " << grid.Global().GlobalBeginFinest() << std::endl
          << "    GLOBAL_END_FINEST:       " << grid.Global().GlobalEndFinest() << std::endl
          << "    GLOBAL_SIZE_FINEST:      " << grid.Global().GlobalSizeFinest() << std::endl
          << "  EXTENT:" << std::endl
          << "    BEGIN:              " << grid.Extent().Begin() << std::endl
          << "    END:                " << grid.Extent().End() << std::endl
          << "    SIZE:               " << grid.Extent().Size() << std::endl
          << "    MESH_WIDTH:         " << grid.Extent().MeshWidth() << std::endl
          << std::endl
          << "LOCAL INFORMATION:" << std::endl;
    }

    buf << "RANK " << rank << ":" << std::endl
        << "  GLOBAL:" << std::endl
        << "    LOCAL_BEGIN:          " << grid.Global().LocalBegin() << std::endl
        << "    LOCAL_END:            " << grid.Global().LocalEnd() << std::endl
        << "    LOCAL_SIZE:           " << grid.Global().LocalSize() << std::endl
        << "    BOUNDARY_TYPE:        " << grid.Global().BoundaryType() << std::endl
        << "  LOCAL:" << std::endl
        << "    BEGIN:                " << grid.Local().Begin() << std::endl
        << "    END:                  " << grid.Local().End() << std::endl
        << "    SIZE:                 " << grid.Local().Size() << std::endl
        << "    SIZE_TOTAL:           " << grid.Local().SizeTotal() << std::endl
        << "    HALO_BEGIN_1:         " << grid.Local().HaloBegin1() << std::endl
        << "    HALO_END_1:           " << grid.Local().HaloEnd1() << std::endl
        << "    HALO_SIZE_1:          " << grid.Local().HaloSize1() << std::endl
        << "    HALO_BEGIN_2:         " << grid.Local().HaloBegin2() << std::endl
        << "    HALO_END_2:           " << grid.Local().HaloEnd2() << std::endl
        << "    HALO_SIZE_2:          " << grid.Local().HaloSize2() << std::endl
        << "    BOUNDARY_BEGIN_1:     " << grid.Local().BoundaryBegin1() << std::endl
        << "    BOUNDARY_END_1:       " << grid.Local().BoundaryEnd1() << std::endl
        << "    BOUNDARY_SIZE_1:      " << grid.Local().BoundarySize1() << std::endl
        << "    BOUNDARY_BEGIN_2:     " << grid.Local().BoundaryBegin2() << std::endl
        << "    BOUNDARY_END_2:       " << grid.Local().BoundaryEnd2() << std::endl
        << "    BOUNDARY_SIZE_2:      " << grid.Local().BoundarySize2() << std::endl;

    if (rank == size-1)
      buf << "###########################################################" << std::endl;

    char* char_buf = Helper::GetCharArray(buf.str());
    MPI_File_write_ordered(file, char_buf, buf.str().size(), MPI_CHAR, MPI_STATUS_IGNORE);
    delete [] char_buf;

    MPI_File_close(&file);

  }
}

void CommMPI::PrintDatatypes(char* filename)
{
  std::stringstream buf;
  MPI_File file;

  if (comm_global != MPI_COMM_NULL) {

    MPI_File_open(comm_global, filename, MPI_MODE_WRONLY|MPI_MODE_CREATE|MPI_MODE_APPEND, MPI_INFO_NULL, &file);

    buf << settings << std::endl;

    char* char_buf = Helper::GetCharArray(buf.str());
    MPI_File_write_ordered(file, char_buf, buf.str().size(), MPI_CHAR, MPI_STATUS_IGNORE);
    delete [] char_buf;

    MPI_File_close(&file);

  }
}

void CommMPI::PrintAllSettings()
{
  std::stringstream buf;
  MPI_File file;

  const Multigrid& mg = *MG::GetSol();

  buf << OutputPath() << "settings.txt";
  char *filename = Helper::GetCharArray(buf.str());

  MPI_File_open(comm_global, filename, MPI_MODE_WRONLY|MPI_MODE_CREATE, MPI_INFO_NULL, &file);
  MPI_File_set_size(file, 0);
  MPI_File_close(&file);

  for (int i=mg.MinLevel(); i<=mg.MaxLevel(); ++i)
    PrintGridInformation(mg(i), filename, "MULTIGRID");

  for (int i=mg.MinLevel()+1; i<=mg.MaxLevel(); ++i)
    PrintGridInformation(settings.CoarserGrid(mg(i)), filename, "COARSER_GRID");

  for (int i=mg.MinLevel(); i<mg.MaxLevel(); ++i)
    PrintGridInformation(settings.FinerGrid(mg(i)), filename, "FINER_GRID");

  PrintDatatypes(filename);

  delete [] filename;

}

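/*
 * Writes the current grid values to disk for debugging/visualization. If
 * VTK is available, each rank writes a VTK image file (.vti); otherwise the
 * output is assembled manually via CreateOutputFiles().
 */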
void CommMPI::PrintGrid(Grid& grid, const char* information)
{
  int output_count = OutputCount();

#ifdef HAVE_VTK

  if (settings.CommunicatorLocal(grid) != MPI_COMM_NULL) {

    vtkSmartPointer<vtkImageData> image = vtkSmartPointer<vtkImageData>::New();
    image->SetExtent(grid.Global().LocalBegin().X(), grid.Global().LocalEnd().X()-1,
                     grid.Global().LocalBegin().Y(), grid.Global().LocalEnd().Y()-1,
                     grid.Global().LocalBegin().Z(), grid.Global().LocalEnd().Z()-1);

    image->SetSpacing(grid.Extent().MeshWidth().vec());
    image->SetOrigin(grid.Extent().Begin().vec());
    image->SetScalarTypeToDouble();
    image->SetNumberOfScalarComponents(1);
    image->AllocateScalars();
    image->GetPointData()->GetScalars()->SetName(information);

    Index i;
    Index begin, end, offset;

    for (int j=0; j<3; ++j) {
      if (grid.Local().BoundarySize1()[j] > 0) {
        begin[j] = grid.Local().BoundaryBegin1()[j];
        offset[j] = grid.Global().LocalBegin()[j];
      } else {
        begin[j] = grid.Local().Begin()[j];
        offset[j] = grid.Global().LocalBegin()[j] - grid.Local().Begin()[j];
      }
      end[j] = (grid.Local().BoundarySize2()[j] > 0 ? grid.Local().BoundaryEnd2()[j] : grid.Local().End()[j]);
    }

    for (i.X()=begin.X(); i.X()<end.X(); ++i.X())
      for (i.Y()=begin.Y(); i.Y()<end.Y(); ++i.Y())
        for (i.Z()=begin.Z(); i.Z()<end.Z(); ++i.Z())
          image->SetScalarComponentFromDouble(i.X() + offset.X(),
                                              i.Y() + offset.Y(),
                                              i.Z() + offset.Z(),
                                              0, grid.GetVal(i));

    image->Update();

    int rank, size;
    MPI_Comm_rank(comm_global, &rank);
    MPI_Comm_size(comm_global, &size);

    std::stringstream filename;
    filename << OutputPath() << std::setw(4) << std::setfill('0') << output_count << "_" << rank << ".vti";

    vtkSmartPointer<vtkXMLImageDataWriter> writer = vtkSmartPointer<vtkXMLImageDataWriter>::New();
    writer->SetFileName(filename.str().c_str());
    writer->SetCompressorTypeToNone();
    writer->SetDataModeToAscii();
    writer->SetInput(image);
    writer->Update();
    writer->Write();

  }

//FIXME
#else /* HAVE_VTK */
  Index i;
  std::stringstream buf;

  Index begin, end;
  Index begin_local, end_local, begin_global, end_global;

  CommToGhosts(grid);

  for (int i=0; i<3; ++i) {
    end[i] = grid.Local().End()[i] + (grid.Global().LocalEnd()[i] == grid.Global().GlobalSize()[i] ? 0 : grid.Local().HaloSize1()[i]);
    end_local[i] = grid.Global().LocalEnd()[i] - (grid.Global().LocalEnd()[i] == grid.Global().GlobalSize()[i] ? 1 : 0);
  }

  begin = grid.Local().Begin();
  begin_local = grid.Global().LocalBegin();
  begin_global = 0;
  end_global = grid.Global().GlobalSize()-1;

  for (i.Z()=begin.Z(); i.Z()<end.Z(); ++i.Z())
    for (i.Y()=begin.Y(); i.Y()<end.Y(); ++i.Y())
      for (i.X()=begin.X(); i.X()<end.X(); ++i.X())
        buf << std::scientific << grid.GetVal(i) << " ";

  CreateOutputFiles(grid, buf, information,
      begin_global, end_global,
      begin_local, end_local,
      output_count);
#endif /* HAVE_VTK */
}

void CommMPI::PrintDefect(Grid& sol, Grid& rhs, const char* information)
{
  TempGrid *temp = MG::GetTempGrid();
  temp->SetProperties(sol);
  temp->ImportFromResidual(sol, rhs);
  PrintGrid(*temp, information);
}

void CommMPI::CreateOutputFiles(const Grid& grid, const std::stringstream& serial_data, const char* information,
    const Index& begin_global, const Index& end_global,
    const Index& begin_local, const Index& end_local,
    const int& output_count)
{
  MPI_Comm comm = settings.CommunicatorGlobal(grid);

  if (comm != MPI_COMM_NULL) {

    std::string conv_information = Helper::ReplaceWhitespaces(information, "_");

    CreateParallelOutputFile(grid, comm, output_count, conv_information.c_str(),
        begin_global, end_global, begin_local, end_local);

    MPI_File *file = CreateSerialOutputFile(grid, comm, output_count, conv_information.c_str(),
        begin_global, end_global, begin_local, end_local);

    char *char_buf = Helper::GetCharArray(serial_data.str());
    MPI_File_write(*file, char_buf, serial_data.str().size(), MPI_CHAR, MPI_STATUS_IGNORE);
    delete [] char_buf;

    FinalizeSerialOutputFile(*file);

    free(file);

  }
}

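/*
 * Writes the .pvti master file that stitches the per-rank .vti pieces
 * together: rank 0 writes the header and footer, and every rank contributes
 * its own piece extent via an ordered collective write.
 */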
void CommMPI::CreateParallelOutputFile(const Grid& grid, MPI_Comm& comm,
    const int& output_count, const char* information,
    const Index& begin_global, const Index& end_global,
    const Index& begin_local, const Index& end_local)
{
  int rank;
  MPI_File file;
  std::stringstream buf;
  std::stringstream parallel_filename, serial_filename;

  MPI_Comm_rank(comm_global, &rank);

  parallel_filename << OutputPath() << std::setfill('0') << std::setw(4) << output_count << std::setw(0) << ".pvti";
  serial_filename << std::setfill('0') << std::setw(4) << output_count << "_" << rank << std::setw(0) << ".vti";
  // sprintf(parallel_filename, "%s%04d.pvti", OutputPath().c_str(), output_count);
  // sprintf(serial_filename, "%04d_%d.vti", output_count, rank);

  MPI_File_open(comm_global, parallel_filename.str().c_str(), MPI_MODE_WRONLY|MPI_MODE_CREATE|MPI_MODE_SEQUENTIAL, MPI_INFO_NULL, &file);
  MPI_File_set_size(file, 0);

  if (rank == 0) {

    buf << "<?xml version=\"1.0\"?>" << std::endl
        << "<VTKFile type=\"PImageData\" version=\"0.1\" byte_order=\"LittleEndian\">" << std::endl
        << "  <PImageData WholeExtent=\"";

    for (int i=0; i<3; ++i)
      buf << begin_global[i] << " " << end_global[i] << " ";

    buf << "\"" << std::endl
        << "              GhostLevel=\"0\" Origin=\"0 0 0\" Spacing=\"";

    for (int i=0; i<3; ++i)
      buf << grid.Extent().MeshWidth()[i] << " ";

    buf << "\">" << std::endl
        << "    <PPointData Scalars=\"" << information << "\">" << std::endl
        << "      <PDataArray type=\"Float32\" Name=\"" << information << "\"/>" << std::endl
        << "    </PPointData>" << std::endl;

    char* char_buf = Helper::GetCharArray(buf.str());

    MPI_File_write_shared(file, char_buf, buf.str().size(), MPI_CHAR, MPI_STATUS_IGNORE);

    delete [] char_buf;
  }

  buf.str("");

  if ((end_local-begin_local).Product() > 0) {
    buf << "    <Piece Extent=\"";

    for (int i=0; i<3; ++i)
      buf << begin_local[i] << " " << end_local[i] << " ";

    buf << "\" Source=\"" << serial_filename.str() << "\"/>" << std::endl;
  }

  char* char_buf = Helper::GetCharArray(buf.str());

  MPI_File_write_ordered(file, char_buf, buf.str().size(), MPI_CHAR, MPI_STATUS_IGNORE);

  delete [] char_buf;

  if (rank == 0) {

    buf.str("");

    buf << "  </PImageData>" << std::endl
        << "</VTKFile>" << std::endl;

    char* char_buf = Helper::GetCharArray(buf.str());

    MPI_File_write_shared(file, char_buf, buf.str().size(), MPI_CHAR, MPI_STATUS_IGNORE);

    delete [] char_buf;
  }

  MPI_File_close(&file);
}

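/*
 * Opens the per-rank .vti piece file and writes the XML header up to the
 * scalar data section; the caller appends the data and finishes the file
 * with FinalizeSerialOutputFile().
 */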
MPI_File* CommMPI::CreateSerialOutputFile(const Grid& grid, MPI_Comm& comm,
    const int& output_count, const char* information,
    const Index& begin_global, const Index& end_global,
    const Index& begin_local, const Index& end_local)
{
  char serial_filename[513];
  int rank;
  MPI_File *file = (MPI_File *)malloc(sizeof(MPI_File));
  std::stringstream buf;

  MPI_Comm_rank(comm_global, &rank);

  snprintf(serial_filename, sizeof(serial_filename), "%s%04d_%d.vti", OutputPath().c_str(), output_count, rank);

  MPI_File_open(MPI_COMM_SELF, serial_filename, MPI_MODE_WRONLY|MPI_MODE_CREATE, MPI_INFO_NULL, file);

  buf << "<?xml version=\"1.0\"?>" << std::endl
      << "<VTKFile type=\"ImageData\" version=\"0.1\" byte_order=\"LittleEndian\">" << std::endl
      << "  <ImageData WholeExtent=\"";

  for (int i=0; i<3; ++i)
    buf << begin_global[i] << " " << end_global[i] << " ";

  buf << "\"" << std::endl
      << "             Origin=\"0 0 0\" Spacing=\"";

  for (int i=0; i<3; ++i)
    buf << grid.Extent().MeshWidth()[i] << " ";

  buf << "\">" << std::endl
      << "    <Piece Extent=\"";

  for (int i=0; i<3; ++i)
    buf << begin_local[i] << " " << end_local[i] << " ";

  buf << "\">" << std::endl
      << "      <PointData Scalars=\"" << information << "\">" << std::endl
      << "        <DataArray type=\"Float32\" Name=\"" << information << "\" format=\"ascii\">" << std::endl
      << "          ";

  char* char_buf = Helper::GetCharArray(buf.str());
  MPI_File_write(*file, char_buf, buf.str().size(), MPI_CHAR, MPI_STATUS_IGNORE);
  delete [] char_buf;

  return file;
}

void CommMPI::FinalizeSerialOutputFile(MPI_File& file)
{
  std::stringstream buf;

  buf << std::endl
      << "        </DataArray>" << std::endl
      << "      </PointData>" << std::endl
      << "    </Piece>" << std::endl
      << "  </ImageData>" << std::endl
      << "</VTKFile>" << std::endl;

  char* char_buf = Helper::GetCharArray(buf.str());
  MPI_File_write(file, char_buf, buf.str().size(), MPI_CHAR, MPI_STATUS_IGNORE);
  delete [] char_buf;

  MPI_File_close(&file);
}

int CommMPI::GlobalRank() const
{
  int rank;
  MPI_Comm_rank(comm_global, &rank);
  return rank;
}

int CommMPI::GlobalSize() const
{
  int size;
  MPI_Comm_size(comm_global, &size);
  return size;
}

Index CommMPI::GlobalPos() const
{
  Index dims, periods, coords;
  MPI_Cart_get(comm_global, 3, dims.vec(), periods.vec(), coords.vec());
  return coords;
}

Index CommMPI::GlobalProcs() const
{
  Index dims, periods, coords;
  MPI_Cart_get(comm_global, 3, dims.vec(), periods.vec(), coords.vec());
  return dims;
}

int CommMPI::Rank(const Grid& grid) const
{
  int rank;
  MPI_Comm comm = settings.CommunicatorLocal(grid);
  assert(comm != MPI_COMM_NULL);
  MPI_Comm_rank(comm, &rank);
  return rank;
}

int CommMPI::Size(const Grid& grid) const
{
  int size;
  MPI_Comm comm = settings.CommunicatorLocal(grid);
  assert(comm != MPI_COMM_NULL);
  MPI_Comm_size(comm, &size);
  return size;
}

Index CommMPI::Pos(const Grid& grid) const
{
  Index dims, periods, coords;
  MPI_Comm comm = settings.CommunicatorLocal(grid);
  assert(comm != MPI_COMM_NULL);
  MPI_Cart_get(comm, 3, dims.vec(), periods.vec(), coords.vec());
  return coords;
}

Index CommMPI::Procs(const Grid& grid) const
{
  Index dims, periods, coords;
  MPI_Comm comm = settings.CommunicatorLocal(grid);
  assert(comm != MPI_COMM_NULL);
  MPI_Cart_get(comm, 3, dims.vec(), periods.vec(), coords.vec());
  return dims;
}

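/*
 * Sets up the global Cartesian communicator: an already Cartesian
 * communicator is used as-is, otherwise a 3D process grid is derived from
 * the communicator size (powers of two are split manually, the general case
 * uses MPI_Dims_create) with periodicity matching the boundary conditions.
 */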
void CommMPI::InitCommMPI(const MPI_Comm& comm)
{
  int status, size, rank;
  int dims[3] = {0, 0, 0};
  int periods[3];

  for (int i=0; i<3; ++i)
    periods[i] = (BoundaryConditions()[i] == Periodic ? 1 : 0);

  MPI_Comm_size(comm, &size);
  MPI_Comm_rank(comm, &rank);

  MPI_Topo_test(comm, &status);

  if (status == MPI_CART) {

    comm_global = comm;

  } else {

    const int log2 = Helper::log_2(size);

    if (Helper::intpow(2, log2) == size) {
      for (int i=0; i<3; ++i)
        dims[i] = Helper::intpow(2, log2 / 3 + (log2%3 > i ? 1 : 0));
    } else {
      MPI_Dims_create(size, 3, dims);
    }

#ifdef OUTPUT_DEBUG
    if (rank == 0)
      std::printf("Process grid: %d %d %d\n", dims[0], dims[1], dims[2]);
#endif

    MPI_Cart_create(comm, 3, dims, periods, 1, &comm_global);

  }

  MPI_Info_create(&info);
  char key[] = "no_locks";
  char val[] = "true";
  MPI_Info_set(info, key, val);

}

CommMPI::~CommMPI()
{
  MPI_Comm_free(&comm_global);
#ifdef VMG_ONE_SIDED
  if (win_created)
    MPI_Win_free(&win);
#endif
  MPI_Info_free(&info);
}

Grid& CommMPI::GetCoarserGrid(Multigrid& multigrid)
{
  return settings.CoarserGrid(multigrid(multigrid.Level()));
}

Grid& CommMPI::GetFinerGrid(Multigrid& multigrid)
{
  return settings.FinerGrid(multigrid(multigrid.Level()));
}

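/*
 * Creates a timestamped output directory (rank 0 only) and broadcasts the
 * resulting path to all other ranks. Falls back to the current working
 * directory if Boost.Filesystem is not available.
 */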
std::string CommMPI::CreateOutputDirectory()
{
#ifdef HAVE_BOOST_FILESYSTEM_PATH_HPP
  std::string path, unique_path;
  std::stringstream unique_suffix;
  int suffix_counter = 0;
  char buffer[129];
  time_t rawtime;
  struct tm *timeinfo;
  int path_size;
  char* path_array;

  if (GlobalRank() == 0) {

    time(&rawtime);
    timeinfo = localtime(&rawtime);
    strftime(buffer, 128, "./output/%Y_%m_%d_%H_%M_%S/", timeinfo);
    path = buffer;

    if (!fs::exists("output"))
      fs::create_directory("output");

    unique_path = path;

    while (fs::exists(unique_path.c_str())) {

      unique_suffix.str("");
      unique_suffix << "_" << suffix_counter++ << "/";

      unique_path = path;
      unique_path.replace(unique_path.size()-1, 1, unique_suffix.str());

    }

    fs::create_directory(unique_path.c_str());

    path_size = unique_path.size() + 1;
    path_array = Helper::GetCharArray(unique_path);

    MPI_Bcast(&path_size, 1, MPI_INT, 0, comm_global);

  } else {

    MPI_Bcast(&path_size, 1, MPI_INT, 0, comm_global);
    path_array = new char[path_size];

  }

  MPI_Bcast(path_array, path_size, MPI_CHAR, 0, comm_global);

  unique_path = path_array;

  delete [] path_array;

  return unique_path;

#else

  return "./";

#endif
}


#endif /* HAVE_MPI */