
/**
* @file reference_manager.hh
*
* @author Guillaume Anciaux <guillaume.anciaux@epfl.ch>
*
* @date Mon Sep 08 23:40:22 2014
*
* @brief Manager of references and of their coherency across migrations
*
* @section LICENSE
*
* Copyright INRIA and CEA
*
* The LibMultiScale is a C++ parallel framework for the multiscale
* coupling methods dedicated to material simulations. This framework
* provides an API which makes it possible to program coupled simulations
* and integration of already existing codes.
*
* This Project was initiated in a collaboration between INRIA Futurs Bordeaux
* within ScAlApplix team and CEA/DPTA Ile de France.
* The project is now continued at the Ecole Polytechnique Fédérale de Lausanne
* within the LSMS/ENAC laboratory.
*
* This software is governed by the CeCILL-C license under French law and
* abiding by the rules of distribution of free software. You can use,
* modify and/ or redistribute the software under the terms of the CeCILL-C
* license as circulated by CEA, CNRS and INRIA at the following URL
* "http://www.cecill.info".
*
* As a counterpart to the access to the source code and rights to copy,
* modify and redistribute granted by the license, users are provided only
* with a limited warranty and the software's author, the holder of the
* economic rights, and the successive licensors have only limited
* liability.
*
* In this respect, the user's attention is drawn to the risks associated
* with loading, using, modifying and/or developing or reproducing the
* software by the user in light of its specific status of free software,
* that may mean that it is complicated to manipulate, and that also
* therefore means that it is reserved for developers and experienced
* professionals having in-depth computer knowledge. Users are therefore
* encouraged to load and test the software's suitability as regards their
* requirements in conditions enabling the security of their systems and/or
* data to be ensured and, more generally, to use and operate it in the
* same conditions as regards security.
*
* The fact that you are presently reading this means that you have had
* knowledge of the CeCILL-C license and that you accept its terms.
*
*/
#ifndef __LIBMULTISCALE_REFERENCE_MANAGER_HH__
#define __LIBMULTISCALE_REFERENCE_MANAGER_HH__
/* -------------------------------------------------------------------------- */
#include <map>
#include "reference_manager_interface.hh"
#include "ref_subset.hh"
#include "ref_set.hh"
#include "ref_point_data.hh"
#include <mpi.h>
/* -------------------------------------------------------------------------- */
__BEGIN_LIBMULTISCALE__
template <typename Ref>
class ReferenceManager : public ReferenceManagerInterface {
public:
/* ------------------------------------------------------------------------ */
/* Typedefs */
/* ------------------------------------------------------------------------ */
typedef typename Ref::Domain::ContainerPoints GlobalContainer;
typedef std::map<UInt,PackBuffer> BufferMap;
typedef std::map<Ref,UInt,typename Ref::RefComparator> MapRefToUInt;
typedef std::map<Ref,Ref,typename Ref::RefComparator> MapRefToRef;
typedef std::map<UInt,std::vector<Ref> > MapUIntToRefList;
/* ------------------------------------------------------------------------ */
/* Constructors/Destructors */
/* ------------------------------------------------------------------------ */
virtual ~ReferenceManager(){
for (UInt i = 0 ; i < subsets.size(); ++i)
if (subsets[i])
delete subsets[i];
};
ReferenceManager(typename Ref::Domain::ContainerPoints & global_container):
global_set(global_container,"global-container"),
have_changed(false),
worldCom(MPI_COMM_NULL),
rank(-1){
};
/* ------------------------------------------------------------------------ */
/* Methods */
/* ------------------------------------------------------------------------ */
//! start the updating process of all attached structures
void updateRefSubSets();
//! set the MPI communicator
void setMPIComm(MPI_Comm comm){
worldCom = comm;
if (worldCom != MPI_COMM_NULL)
MPI_Comm_rank(worldCom,&rank);
}
//! request to manage a subset
void addSubSet(const std::string & name, ContainerArray<Ref> & sub);
//! request to remove a subset
// void removeSubSet(const std::string & name);
//! request attaching a given vector v to a given container c
void attachVector(std::vector<Real> & v,ContainerArray<Ref> & c,UInt nb_components=1);
//! request attaching a given vector v to the global container
void attachVector(std::vector<Real> & v,GlobalContainer & cont, UInt nb_components=1);
//! request detaching a given vector v from a given container c
void detachVector(std::vector<Real> & v,ContainerArray<Ref> & c);
//! request detaching a given vector v from the global container
void detachVector(std::vector<Real> & v, GlobalContainer & cont);
//! request attaching a generic AttachedObject to a given container c
void attachObject(AttachedObject & obj,ContainerArray<Ref> & c);
//! request detaching a generic AttachedObject from a given container c
void detachObject(AttachedObject & obj,ContainerArray<Ref> & c);
//! print a summary of the managed containers: used only for debugging
void printBilan();
protected:
//! generic communication routine to send/recv a set of buffers
void exchangeBuffers(BufferMap & toSend,BufferMap & toRecv);
//! translate the references for moved atoms
void translateMovingReferences();
//! pack masks into a BufferMap
void packMasks(MapRefToUInt & masks);
//! unpack masks from a BufferMap
void unpackMasks(MapRefToUInt & masks);
//! clear the buffers and/or create one entry per processor to communicate with
void clearPackBuffers();
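// NOTE (inferred from the names above, not documented in the source): these
// routines are presumably chained during a migration update: clearPackBuffers()
// resets the per-processor buffers, packMasks()/exchangeBuffers()/unpackMasks()
// circulate the reference masks, and translateMovingReferences() finally
// rebinds moved atoms to their new owner references.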
/* ------------------------------------------------------------------------ */
/* Class Members */
/* ------------------------------------------------------------------------ */
public:
protected:
//! global set
RefSet<GlobalContainer> global_set;
//! subset array
std::vector<RefSubset<ContainerArray<Ref> > *> subsets;
//! mapping between a sent atom reference and its new owner processor
MapRefToUInt sent;
//! inverse mapping: sent atoms sorted by receiving processor
MapUIntToRefList sent_byproc;
//! new atoms generated by migration (received)
MapUIntToRefList newatoms;
//! communication buffers to be sent
BufferMap buffers_tosend;
//! communication buffers to be received
BufferMap buffers_torecv;
//! mapping between a moved atom's old and new references
MapRefToRef moved;
//! flag indicating whether the attached references need updating
bool have_changed;
//! for debugging purposes
void printPackBuffersStatus();
private:
MPI_Comm worldCom;
int rank;
};
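/* -------------------------------------------------------------------------- */
/* Usage sketch (illustrative only): one possible way a client could keep a
 * subset and an attached per-reference vector coherent across migrations.
 * The MyDomain/MyRef type names and the domain object are placeholders, not
 * declared in this header.
 *
 * @code
 * MyDomain::ContainerPoints & atoms = domain.getContainerPoints();
 * ReferenceManager<MyRef> manager(atoms);
 * manager.setMPIComm(MPI_COMM_WORLD);
 *
 * ContainerArray<MyRef> coupling_zone;             // references of interest
 * manager.addSubSet("coupling-zone", coupling_zone);
 *
 * std::vector<Real> weights;                       // one value per reference
 * manager.attachVector(weights, coupling_zone, 1);
 *
 * // ... the underlying code migrates atoms between processors ...
 * manager.updateRefSubSets();                      // subset and weights follow
 * @endcode
 */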
/* -------------------------------------------------------------------------- */
template <UInt Dim>
class ReferenceManager<RefPointData<Dim> > : public ReferenceManagerInterface {
public:
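// NOTE: this specialization for point references deliberately exposes no
// manager interface; the declarations below are kept commented out from the
// primary template, presumably pending a dedicated implementation.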
// /* ------------------------------------------------------------------------ */
// /* Typedefs */
// /* ------------------------------------------------------------------------ */
// typedef typename Ref::Domain::ContainerPoints GlobalContainer;
// typedef std::map<UInt,PackBuffer> BufferMap;
// typedef std::map<Ref,UInt,typename Ref::RefComparator> MapRefToUInt;
// typedef std::map<Ref,Ref,typename Ref::RefComparator> MapRefToRef;
// typedef std::map<UInt,std::vector<Ref> > MapUIntToRefList;
// /* ------------------------------------------------------------------------ */
// /* Constructors/Destructors */
// /* ------------------------------------------------------------------------ */
// virtual ~ReferenceManager(){
// for (UInt i = 0 ; i < subsets.size(); ++i)
// if (subsets[i])
// delete subsets[i];
// };
// ReferenceManager(typename Ref::Domain::ContainerPoints & global_container):
// global_set(global_container,"global-container"),
// have_changed(false){
// };
// /* ------------------------------------------------------------------------ */
// /* Methods */
// /* ------------------------------------------------------------------------ */
// //! function that start the updating process of all attached structures
// void updateRefSubSets();
// //! set the mpi communicator
// void setMPIComm(MPI_Comm comm){
// worldCom = comm;
// if (worldCom != MPI_COMM_NULL)
// MPI_Comm_rank(worldCom,&rank);
// }
// //! request to manage a subset
// void addSubSet(const std::string & name, ContainerArray<Ref> & sub);
// //! request attaching a given vector v with a given container c
// void attachVector(std::vector<Real> & v,ContainerArray<Ref> & c,UInt nb_components=1);
// //! request attaching a given vector v with the global container
// void attachVector(std::vector<Real> & v,GlobalContainer & cont, UInt nb_components=1);
// //! request detaching a given vector v with a given container c
// void detachVector(std::vector<Real> & v,ContainerArray<Ref> & c);
// //! request detaching a given vector v with the global container
// void detachVector(std::vector<Real> & v, GlobalContainer & cont);
// //! request attaching a generic AttachedObject with a given container c
// void attachObject(AttachedObject & obj,ContainerArray<Ref> & c);
// //! request detaching a generic AttachedObject with a given container c
// void detachObject(AttachedObject & obj,ContainerArray<Ref> & c);
// //! print bilan for concerned container : used only to debug
// void printBilan();
// protected:
// //!generic communication routine to send/recv a bunch of buffers
// void exchangeBuffers(BufferMap & toSend,BufferMap & toRecv);
// //! translate the references for moved atoms
// void translateMovingReferences();
// //! pack masks to a BufferMap
// void packMasks(MapRefToUInt & masks);
// //! unpack masks to a BufferMap
// void unpackMasks(MapRefToUInt & masks);
// //! clear the buffers and/or create one entry per proc I should com with
// void clearPackBuffers();
// /* ------------------------------------------------------------------------ */
// /* Class Members */
// /* ------------------------------------------------------------------------ */
// public:
// protected:
// //! global set
// RefSet<GlobalContainer> global_set;
// //!subset array
// std::vector<RefSubset<ContainerArray<Ref> > *> subsets;
// //! mapping between sent atom ref and new proc owner
// MapRefToUInt sent;
// //! inverse mapping : sent atoms sorted by receiving processors
// MapUIntToRefList sent_byproc;
// //! new atoms generated by migration (received)
// MapUIntToRefList newatoms;
// //! communication buffers to be sent
// BufferMap buffers_tosend;
// //! communication buffers to be received
// BufferMap buffers_torecv;
// //! mapping between moved atoms old ref and new ref
// MapRefToRef moved;
// //! flag to notify if updating of attached references needed
// bool have_changed;
// private:
// MPI_Comm worldCom;
// int rank;
};
/* -------------------------------------------------------------------------- */
__END_LIBMULTISCALE__
#endif /* __LIBMULTISCALE_REFERENCE_MANAGER_HH__ */
