communicator_mpi_linear.hh

/**
* @file communicator_mpi_linear.hh
*
* @author Guillaume Anciaux <guillaume.anciaux@epfl.ch>
*
* @date Wed Nov 06 20:35:11 2013
*
* @brief Implementation of the LM communicator using MPI, where the models are distributed over the processors
*
* @section LICENSE
*
* Copyright INRIA and CEA
*
* LibMultiScale is a C++ parallel framework for multiscale coupling
* methods dedicated to material simulations. This framework provides
* an API which makes it possible to program coupled simulations and
* to integrate already existing codes.
*
* This project was initiated as a collaboration between INRIA Futurs Bordeaux,
* within the ScAlApplix team, and CEA/DPTA Ile de France.
* The project is now continued at the Ecole Polytechnique Fédérale de Lausanne
* within the LSMS/ENAC laboratory.
*
* This software is governed by the CeCILL-C license under French law and
* abiding by the rules of distribution of free software. You can use,
* modify and/ or redistribute the software under the terms of the CeCILL-C
* license as circulated by CEA, CNRS and INRIA at the following URL
* "http://www.cecill.info".
*
* As a counterpart to the access to the source code and rights to copy,
* modify and redistribute granted by the license, users are provided only
* with a limited warranty and the software's author, the holder of the
* economic rights, and the successive licensors have only limited
* liability.
*
* In this respect, the user's attention is drawn to the risks associated
* with loading, using, modifying and/or developing or reproducing the
* software by the user in light of its specific status of free software,
* that may mean that it is complicated to manipulate, and that also
* therefore means that it is reserved for developers and experienced
* professionals having in-depth computer knowledge. Users are therefore
* encouraged to load and test the software's suitability as regards their
* requirements in conditions enabling the security of their systems and/or
* data to be ensured and, more generally, to use and operate it in the
* same conditions as regards security.
*
* The fact that you are presently reading this means that you have had
* knowledge of the CeCILL-C license and that you accept its terms.
*
*/
#ifndef __LIBMULTISCALE_COMMUNICATOR_MPI_LINEAR_HH__
#define __LIBMULTISCALE_COMMUNICATOR_MPI_LINEAR_HH__
/* -------------------------------------------------------------------------- */
#define TAILLE_GROUPS 50 // maximum number of process groups ("taille" is French for "size")
/* -------------------------------------------------------------------------- */
#include "communicator.hh"
/* -------------------------------------------------------------------------- */
__BEGIN_LIBMULTISCALE__
typedef struct mpi_geom_ {
  UInt type;      // geometry type identifier
  UInt Dim;       // spatial dimension
  Real center[3]; // center of the geometry
  // for a ball, the only information is the min and max radius
  Real rmin;
  Real rmax;
  // for a cube, the only information is its dimensions
  Real xmin;
  Real xmax;
  Real ymin;
  Real ymax;
  Real zmin;
  Real zmax;
} mpi_geom;
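/* A minimal sketch (not part of the original header) of how the matching
 * MPI datatype held in LinearMPI::geom_type below could be built. The
 * mapping of UInt to MPI_UNSIGNED and Real to MPI_DOUBLE is an assumption,
 * as is the grouping into three blocks following the struct layout above:
 *
 *   #include <cstddef> // offsetof
 *   MPI_Datatype geom_type;
 *   int          blocks[3] = {2, 3, 8}; // {type, Dim}, center[3], 8 bounds
 *   MPI_Aint     displs[3] = {offsetof(mpi_geom, type),
 *                             offsetof(mpi_geom, center),
 *                             offsetof(mpi_geom, rmin)};
 *   MPI_Datatype types[3]  = {MPI_UNSIGNED, MPI_DOUBLE, MPI_DOUBLE};
 *   MPI_Type_create_struct(3, blocks, displs, types, &geom_type);
 *   MPI_Type_commit(&geom_type);
 */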
/* -------------------------------------------------------------------------- */
class LinearMPI : public Communicator {
public:
  /* ------------------------------------------------------------------------ */
  /* Constructors/Destructors */
  /* ------------------------------------------------------------------------ */
  LinearMPI();
  ~LinearMPI();
  /* ------------------------------------------------------------------------ */
  /* Accessors */
  /* ------------------------------------------------------------------------ */
  inline UInt getNBprocsOnGroup(CommGroup group);
  inline UInt getNBGroups();
  inline bool amIinGroup(CommGroup i);
  inline bool isInGroup(UInt i, CommGroup group);
  inline MPI_Comm getMpiGroup(CommGroup i);
  inline UInt realRank(UInt i, CommGroup group);
  inline UInt groupRank(UInt i, CommGroup group);
  /* ------------------------------------------------------------------------ */
  /* Methods */
  /* ------------------------------------------------------------------------ */
  void printself(std::ostream & stream);
  /* ------------------------------------------------------------------------ */
  /* Communication Methods */
  /* ------------------------------------------------------------------------ */
  inline void sendLocalGeometriesToGroup(Geometry & geom, CommGroup group);
  inline void receiveLocalGeometriesFromGroup(Geometry ** geom, CommGroup group);
  inline void sendCommunicationTable(std::vector<UInt> & com_with, CommGroup destgroup);
  inline void receiveCommunicationTable(std::vector<UInt> & com_with, CommGroup fromgroup);
  inline void sendReals(CommBuffer<Real> & d, UInt nb, UInt dest, CommGroup group,
                        const std::string & buf);
  inline void receiveReals(CommBuffer<Real> & d, UInt nb, UInt from, CommGroup group,
                           const std::string & buf);
  inline void sendUInts(CommBuffer<UInt> & i, UInt nb, UInt dest, CommGroup group,
                        const std::string & buf);
  inline void receiveUInts(CommBuffer<UInt> & i, UInt nb, UInt from, CommGroup group,
                           const std::string & buf);
  inline void reduceUInt(CommBuffer<UInt> & contrib, UInt nb, CommGroup group,
                         const std::string & comment, Operator op);
  inline void reduceReal(CommBuffer<Real> & contrib, UInt nb, CommGroup group,
                         const std::string & comment, Operator op);
  inline void allReduceUInt(CommBuffer<UInt> & contrib, UInt nb, CommGroup group,
                            const std::string & comment, Operator op);
  inline void allReduceReal(CommBuffer<Real> & contrib, UInt nb, CommGroup group,
                            const std::string & comment, Operator op);
  inline void waitForPendingComs();
  inline UInt addGroup(UInt nb_procs);
  inline void synchronize(CommGroup group_index);
  /* ------------------------------------------------------------------------ */
  /* Class Members */
  /* ------------------------------------------------------------------------ */
protected:
  /* variables used to handle the MPI communications */
  MPI_Comm groups[TAILLE_GROUPS];    // one MPI communicator per group
  UInt free_procs;                   // number of processors not yet assigned to a group
  int n_procs;                       // total number of processors
  UInt nb_groups;                    // number of groups created so far
  UInt taille_groups[TAILLE_GROUPS]; // number of processors in each group
  UInt goffsets[TAILLE_GROUPS];      // global rank of the first processor of each group
  /* types for the transport of objects */
  MPI_Datatype geom_type;            // MPI datatype used to transport mpi_geom objects
  std::vector<MPI_Request> requests; // pending non-blocking communication requests
  std::vector<int> sequence_number;  // sequence numbers used to tag messages
};
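/* Usage sketch (illustrative only, not part of the original header). It
 * assumes that the UInt returned by addGroup can be used as a CommGroup
 * index, and that `my_rank` and `n` are defined by the caller:
 *
 *   LinearMPI comm;
 *   CommGroup g = comm.addGroup(4);           // reserve 4 processors
 *   if (comm.amIinGroup(g)) {
 *     CommBuffer<Real> buf;                   // filled by the caller
 *     if (comm.groupRank(my_rank, g) == 0)
 *       comm.sendReals(buf, n, 1, g, "example send");
 *     else if (comm.groupRank(my_rank, g) == 1)
 *       comm.receiveReals(buf, n, 0, g, "example receive");
 *     comm.waitForPendingComs();
 *     comm.synchronize(g);
 *   }
 */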
__END_LIBMULTISCALE__
#endif /* __LIBMULTISCALE_COMMUNICATOR_MPI_LINEAR_HH__ */
