#define GADGETVERSION "2.0" /*!< code version string */

#define TIMEBASE (1<<28) /*!< The simulated timespan is mapped onto the integer interval [0,TIMEBASE],
                          *   where TIMEBASE needs to be a power of 2. Note that (1<<28) corresponds to 2^28.
                          */

#define MAXTOPNODES 200000 /*!< Maximum number of nodes in the top-level tree used for domain decomposition */

typedef long long peanokey; /*!< defines the variable type used for Peano-Hilbert keys */

#define BITS_PER_DIMENSION 18 /*!< Bits per dimension available for Peano-Hilbert order.
                                   Note: If peanokey is defined as type int, the allowed maximum is 10.
                                   If 64-bit integers are used, the maximum is 21 */

#define PEANOCELLS (((peanokey)1)<<(3*BITS_PER_DIMENSION)) /*!< The number of different Peano-Hilbert cells */

#define RNDTABLE 3000 /*!< gives the length of a table with random numbers, refreshed at every timestep.
                           This is used to allow application of random numbers to a specific particle
                           in a way that is independent of the number of processors used. */

#define MAX_REAL_NUMBER 1e37  /*!< largest magnitude treated as a valid real number */
#define MIN_REAL_NUMBER 1e-37 /*!< smallest magnitude treated as a non-zero real number */

#define MAXLEN_FILENAME 100 /*!< Maximum number of characters for filenames (including the full path) */

#ifdef ISOTHERM_EQS
#define GAMMA (1.0)   /*!< index for isothermal gas */
#else
#define GAMMA (5.0/3) /*!< adiabatic index of simulated gas */
#endif
#define GAMMA_MINUS1 (GAMMA-1)

#define HYDROGEN_MASSFRAC 0.76 /*!< mass fraction of hydrogen, relevant only for radiative cooling */
/* Some physical constants in cgs units */
#define GRAVITY 6.672e-8 /*!< Gravitational constant (in cgs units) */
#define SOLAR_MASS 1.989e33 /*!< solar mass in g */
#define SOLAR_LUM 3.826e33 /*!< solar luminosity in erg/s */
#define RAD_CONST 7.565e-15 /*!< radiation density constant in erg cm^-3 K^-4 */
#define AVOGADRO 6.0222e23 /*!< Avogadro's number in mol^-1 */
#define BOLTZMANN 1.3806e-16 /*!< Boltzmann constant in erg/K */
#define GAS_CONST 8.31425e7 /*!< ideal gas constant in erg K^-1 mol^-1 */
#define C 2.9979e10 /*!< speed of light in cm/s */
#define PLANCK 6.6262e-27 /*!< Planck constant in erg s */
#define CM_PER_MPC 3.085678e24 /*!< centimeters per megaparsec */
#define PROTONMASS 1.6726e-24 /*!< proton mass in g */
#define ELECTRONMASS 9.10953e-28 /*!< electron mass in g */
#define THOMPSON 6.65245e-25 /*!< Thomson scattering cross-section in cm^2 (historical spelling kept for compatibility) */
#define ELECTRONCHARGE 4.8032e-10 /*!< electron charge in esu */
#define HUBBLE 3.2407789e-18 /*!< Hubble constant in h/sec, i.e. 100 km/s/Mpc in cgs */
#define YEAR_IN_SECOND 31536000.0 /*!< year in sec (365 days; NB: SEC_PER_YEAR below uses a slightly different value) */
#define FEH_SOLAR 0.00181 /*!< solar iron abundance value -- TODO confirm exact definition; used only if cooling with metal is on and chimie is off */
#define KPC_IN_CM 3.085678e+21 /*!< centimeters per kiloparsec */
#define PI 3.1415926535897931 /*!< pi */
#define TWOPI 6.2831853071795862 /*!< 2*pi */
/* Some conversion factors */
#define SEC_PER_MEGAYEAR 3.155e13 /*!< seconds per megayear */
#define SEC_PER_YEAR 3.155e7 /*!< seconds per year (NB: differs slightly from YEAR_IN_SECOND above) */
#ifndef ASMTH
#define ASMTH 1.25 /*!< ASMTH gives the scale of the short-range/long-range force split in units of FFT-mesh cells */
#endif
#ifndef RCUT
#define RCUT 4.5 /*!< RCUT gives the maximum distance (in units of the scale used for the force split) out to
which short-range forces are evaluated in the short-range tree walk. */
#endif
#define MAX_NGB 20000 /*!< defines maximum length of neighbour list */
#define MAXLEN_OUTPUTLIST 10000 /*!< maximum number of entries in list of snapshot output times */
#define DRIFT_TABLE_LENGTH 1000 /*!< length of the lookup table used to hold the drift and kick factors */
#ifdef COSMICTIME
#define COSMICTIME_TABLE_LENGTH 1000 /*!< length of the lookup table used for the cosmic time computation */
#endif
#define MAXITER 1000 /*!< maximum number of steps for SPH neighbour iteration */
#ifdef DOUBLEPRECISION /*!< If defined, the variable type FLOAT is set to "double", otherwise to "float" */
#define FLOAT double
#else
#define FLOAT float
#endif
#ifndef TWODIMS
#define NUMDIMS 3 /*!< For 3D-normalized kernel */
#define KERNEL_COEFF_1 2.546479089470 /*!< Coefficients for SPH spline kernel and its derivative (2.546479... = 8/pi) */
#define KERNEL_COEFF_2 15.278874536822
#define KERNEL_COEFF_3 45.836623610466
#define KERNEL_COEFF_4 30.557749073644
#define KERNEL_COEFF_5 5.092958178941
#define KERNEL_COEFF_6 (-15.278874536822)
#define NORM_COEFF 4.188790204786 /*!< Coefficient for kernel normalization. Note: 4.0/3 * PI = 4.188790204786 */
#else
#define NUMDIMS 2 /*!< For 2D-normalized kernel; 2D coefficients are the 3D ones rescaled by 5/7 */
#define KERNEL_COEFF_1 (5.0/7*2.546479089470) /*!< Coefficients for SPH spline kernel and its derivative */
#define KERNEL_COEFF_2 (5.0/7*15.278874536822)
#define KERNEL_COEFF_3 (5.0/7*45.836623610466)
#define KERNEL_COEFF_4 (5.0/7*30.557749073644)
#define KERNEL_COEFF_5 (5.0/7*5.092958178941)
#define KERNEL_COEFF_6 (5.0/7*(-15.278874536822))
#define NORM_COEFF M_PI /*!< Coefficient for kernel normalization. NOTE(review): M_PI comes from <math.h> and is not strict ISO C -- confirm math.h is included wherever this branch is compiled */
#endif
#ifdef MULTIPHASE
#define GAS_SPH 0
#define GAS_STICKY 1
#define GAS_DARK 2
#endif
#if defined(SFR) || defined(STELLAR_PROP)
#define ST 1
#endif
#ifdef CHIMIE
#define NELEMENTS 10
#define MAXNELEMENTS 64
#endif
#ifdef COOLING
#define COOLING_NMETALICITIES 9
#define COOLING_NTEMPERATURES 171
#endif
#ifdef COMPUTE_VELOCITY_DISPERSION
#define VELOCITY_DISPERSION_SIZE 3
#endif
#ifdef CHIMIE
externintFE;
externintMETALS;
#endif
externintSetMinTimeStepForActives;
externintThisTask;/*!< the rank of the local processor */
externintNTask;/*!< number of processors */
externintPTask;/*!< smallest integer such that NTask <= 2^PTask */
externintNumPart;/*!< number of particles on the LOCAL processor */
externintN_gas;/*!< number of gas particles on the LOCAL processor */
#if defined(SFR) || defined(STELLAR_PROP)
externintN_stars;/*!< number of stars particle on the LOCAL processor */
#endif
#ifdef MULTIPHASE
externintN_sph;
externintN_sticky;
externintN_stickyflaged;
externintN_dark;
externintNumColPotLocal;/*!< local number of potentially collisional particles */
externintNumColPot;/*!< total number of potentially collisional particles */
externintNumColLocal;/*!< local number of collisions */
externintNumCol;/*!< total number of collisions */
externintNumNoColLocal;
externintNumNoCol;
#endif
#ifdef GAS_ACCRETION
externintNumPart_acc;
externintN_gas_acc;
#ifdef STELLAR_PROP
externintN_stars_acc;
#endif
#endif
externlonglongNtype[6];/*!< total number of particles of each type */
externintNtypeLocal[6];/*!< local number of particles of each type */
externintNumForceUpdate;/*!< number of active particles on local processor in current timestep */
externintNumSphUpdate;/*!< number of active SPH particles on local processor in current timestep */
#ifdef CHIMIE
externintNumStUpdate;
#endif
#ifdef TESSEL
externintNumPTUpdate;
#endif
externdoubleCPUThisRun;/*!< Sums the CPU time for the process (current submission only) */
#ifdef SPLIT_DOMAIN_USING_TIME
externdoubleCPU_Gravity;
#endif
externintRestartFlag;/*!< taken from command line used to start code. 0 is normal start-up from
initial conditions, 1 is resuming a run from a set of restart files, while 2
marks a restart from a snapshot file. */
externchar*Exportflag;/*!< Buffer used for flagging whether a particle needs to be exported to another process */
externint*Ngblist;/*!< Buffer to hold indices of neighbours retrieved by the neighbour search routines */
externintTreeReconstructFlag;/*!< Signals that a new tree needs to be constructed */
#ifdef SFR
externintRearrangeParticlesFlag;/*!< Signals that particles must be rearanged */
#endif
externintFlag_FullStep;/*!< This flag signals that the current step involves all particles */
externgsl_rng*random_generator;/*!< the employed random number generator of the GSL library */
externdoubleRndTable[RNDTABLE];/*!< Hold a table with random numbers, refreshed every timestep */
#ifdef SFR
externdoubleStarFormationRndTable[RNDTABLE];/*!< Hold a table with random numbers, refreshed every timestep */
#endif
#ifdef FEEDBACK_WIND
externdoubleFeedbackWindRndTable[RNDTABLE];/*!< Hold a table with random numbers, refreshed every timestep */
#endif
#ifdef CHIMIE
externdoubleChimieRndTable[RNDTABLE];/*!< Hold a table with random numbers, refreshed every timestep */
#endif
#ifdef CHIMIE_KINETIC_FEEDBACK
externdoubleChimieKineticFeedbackRndTable[RNDTABLE];/*!< Hold a table with random numbers, refreshed every timestep */
#endif
#ifdef GAS_ACCRETION
externdoublegasAccretionRndTable[RNDTABLE];/*!< Hold a table with random numbers, refreshed every timestep */
#endif
#ifdef AB_TURB
/* Ornstein-Uhlenbeck variables */
extern double StOUVar;
extern double *StOUPhases;
extern gsl_rng *StRng;

/* forcing field in Fourier space */
extern double *StAmpl;
extern double *StAka;  /* phases (real part) */
extern double *StAkb;  /* phases (imag part) */
extern double *StMode;
extern int StNModes;

/* integertime StTPrev; (yr : ask ?) */
extern int StTPrev;
extern double StSolWeightNorm;
#endif

#ifdef PY_INTERFACE
extern int NumPartQ;
extern int N_gasQ;
extern long long NtypeQ[6];     /*!< total number of particles of each type */
extern int NtypeLocalQ[6];      /*!< local number of particles of each type */

extern double DomainCornerQ[3]; /*!< gives the lower left corner of simulation volume */
extern double DomainCenterQ[3]; /*!< gives the center of simulation volume */
extern double DomainLenQ;       /*!< gives the (maximum) side-length of simulation volume */
extern double DomainFacQ;       /*!< factor used for converting particle coordinates to a Peano-Hilbert mesh covering the simulation volume */
extern int DomainMyStartQ;      /*!< first domain mesh cell that resides on the local processor */
extern int DomainMyLastQ;       /*!< last domain mesh cell that resides on the local processor */
extern int *DomainStartListQ;   /*!< a table that lists the first domain mesh cell for all processors */
extern int *DomainEndListQ;     /*!< a table that lists the last domain mesh cell for all processors */
extern double *DomainWorkQ;     /*!< a table that gives the total "work" due to the particles stored by each processor */
extern int *DomainCountQ;       /*!< a table that gives the total number of particles held by each processor */
extern int *DomainCountSphQ;    /*!< a table that gives the total number of SPH particles held by each processor */
extern int *DomainTaskQ;        /*!< this table gives for each leaf of the top-level tree the processor it was assigned to */
extern peanokey *DomainKeyBufQ; /*!< this points to a buffer used during the exchange of particle data */
extern int NTopnodesQ;          /*!< total number of nodes in top-level tree */
extern int NTopleavesQ;         /*!< number of leaves in top-level tree. Each leaf can be assigned to a different processor */
extern void *CommBufferQ;       /*!< points to communication buffer, which is used in the domain decomposition, the
                                     parallel tree-force computation, the SPH routines, etc. */
#endif
externdoubleDomainCorner[3];/*!< gives the lower left corner of simulation volume */
externdoubleDomainCenter[3];/*!< gives the center of simulation volume */
externdoubleDomainLen;/*!< gives the (maximum) side-length of simulation volume */
externdoubleDomainFac;/*!< factor used for converting particle coordinates to a Peano-Hilbert mesh covering the simulation volume */
externintDomainMyStart;/*!< first domain mesh cell that resides on the local processor */
externintDomainMyLast;/*!< last domain mesh cell that resides on the local processor */
externint*DomainStartList;/*!< a table that lists the first domain mesh cell for all processors */
externint*DomainEndList;/*!< a table that lists the last domain mesh cell for all processors */
externdouble*DomainWork;/*!< a table that gives the total "work" due to the particles stored by each processor */
externint*DomainCount;/*!< a table that gives the total number of particles held by each processor */
externint*DomainCountSph;/*!< a table that gives the total number of SPH particles held by each processor */
externint*DomainTask;/*!< this table gives for each leaf of the top-level tree the processor it was assigned to */
externint*DomainNodeIndex;/*!< this table gives for each leaf of the top-level tree the corresponding node of the gravitational tree */
externFLOAT*DomainTreeNodeLen;/*!< this table gives for each leaf of the top-level tree the side-length of the corresponding node of the gravitational tree */
externFLOAT*DomainHmax;/*!< this table gives for each leaf of the top-level tree the maximum SPH smoothing length among the particles of the corresponding node of the gravitational tree */
externstructDomainNODE
{
FLOATs[3];/*!< center-of-mass coordinates */
FLOATvs[3];/*!< center-of-mass velocities */
FLOATmass;/*!< mass of node */
#ifdef STELLAR_FLUX
FLOATstarlum;/*!< star luminosity of node */
#endif
#ifdef UNEQUALSOFTENINGS
#ifndef ADAPTIVE_GRAVSOFT_FORGAS
intbitflags;/*!< this bit-field encodes the particle type with the largest softening among the particles of the nodes, and whether there are particles with different softening in the node */
#else
FLOATmaxsoft;/*!< hold the maximum gravitational softening of particles in the
node if the ADAPTIVE_GRAVSOFT_FORGAS option is selected */
#endif
#endif
}
*DomainMoment;/*!< this table stores for each node of the top-level tree corresponding node data from the gravitational tree */
externpeanokey*DomainKeyBuf;/*!< this points to a buffer used during the exchange of particle data */
externpeanokey*Key;/*!< a table used for storing Peano-Hilbert keys for particles */
externpeanokey*KeySorted;/*!< holds a sorted table of Peano-Hilbert keys for all particles, used to construct top-level tree */
externintNTopnodes;/*!< total number of nodes in top-level tree */
externintNTopleaves;/*!< number of leaves in top-level tree. Each leaf can be assigned to a different processor */
externstructtopnode_data
{
intDaughter;/*!< index of first daughter cell (out of 8) of top-level node */
intPstart;/*!< for the present top-level node, this gives the index of the first node in the concatenated list of topnodes collected from all processors */
intBlocks;/*!< for the present top-level node, this gives the number of corresponding nodes in the concatenated list of topnodes collected from all processors */
intLeaf;/*!< if the node is a leaf, this gives its number when all leaves are traversed in Peano-Hilbert order */
peanokeySize;/*!< number of Peano-Hilbert mesh-cells represented by top-level node */
peanokeyStartKey;/*!< first Peano-Hilbert key in top-level node */
longlongCount;/*!< counts the number of particles in this top-level node */
}
#ifdef PY_INTERFACE
*TopNodesQ,
#endif
*TopNodes;/*!< points to the root node of the top-level tree */
extern double TimeOfLastTreeConstruction; /*!< holds what it says, only used in connection with FORCETEST */

/* variables for input/output, usually only used on process 0 */

extern char ParameterFile[MAXLEN_FILENAME]; /*!< file name of parameterfile used for starting the simulation */

extern FILE *FdInfo;   /*!< file handle for info.txt log-file. */
extern FILE *FdLog;    /*!< file handle for log.txt log-file. */
extern FILE *FdEnergy; /*!< file handle for energy.txt log-file. */
#ifdef SYSTEMSTATISTICS
extern FILE *FdSystem;
#endif
extern FILE *FdTimings; /*!< file handle for timings.txt log-file. */
extern FILE *FdCPU;     /*!< file handle for cpu.txt log-file. */
#ifdef FORCETEST
extern FILE *FdForceTest; /*!< file handle for forcetest.txt log-file. */
#endif
#ifdef SFR
extern FILE *FdSfr; /*!< file handle for sfr.txt log-file. */
#endif
#ifdef CHIMIE
extern FILE *FdChimie; /*!< file handle for chimie log-file. */
#ifdef CHIMIE_STATS
extern FILE *FdChimieStatsSNs; /*!< file handle for chimie stats-file. */
extern FILE *FdChimieStatsGas; /*!< file handle for chimie stats-file. */
#endif
#endif
#ifdef MULTIPHASE
extern FILE *FdPhase;  /*!< file handle for phase.txt log-file. */
extern FILE *FdSticky; /*!< file handle for sticky.txt log-file. */
#endif
#ifdef AGN_ACCRETION
extern FILE *FdAccretion; /*!< file handle for accretion.txt log-file. */
#endif
#ifdef BONDI_ACCRETION
extern FILE *FdBondi; /*!< file handle for bondi.txt log-file. */
#endif
#ifdef BUBBLES
extern FILE *FdBubble; /*!< file handle for bubble.txt log-file. */
#endif
#ifdef GAS_ACCRETION
extern FILE *FdGasAccretion; /*!< file handle for gas_accretion.txt log-file. */
#endif

extern double DriftTable[DRIFT_TABLE_LENGTH];     /*!< table for the cosmological drift factors */
extern double GravKickTable[DRIFT_TABLE_LENGTH];  /*!< table for the cosmological kick factor for gravitational forces */
extern double HydroKickTable[DRIFT_TABLE_LENGTH]; /*!< table for the cosmological kick factor for hydrodynamical forces */

#ifdef COSMICTIME
extern double CosmicTimeTable[COSMICTIME_TABLE_LENGTH];        /*!< table for the computation of cosmic time */
extern double FullCosmicTimeTable[COSMICTIME_TABLE_LENGTH];    /*!< table for the computation of cosmic time */
extern double FullCosmicTimeTableInv[COSMICTIME_TABLE_LENGTH]; /*!< table for the computation of cosmic time */
#endif

extern void *CommBuffer; /*!< points to communication buffer, which is used in the domain decomposition, the
                              parallel tree-force computation, the SPH routines, etc. */
/*! This structure contains data which is the SAME for all tasks (mostly code parameters read from the
* parameter file). Holding this data in a structure is convenient for writing/reading the restart file, and
* it allows the introduction of new global variables in a simple way. The only thing to do is to introduce
* them into this structure.
*/
externstructglobal_data_all_processes
{
longlongTotNumPart;/*!< total particle numbers (global value) */
longlongTotN_gas;/*!< total gas particle number (global value) */
#ifdef GAS_ACCRETION
longlongTotNumPart_acc;
longlongTotN_gas_acc;
#endif
#ifdef PY_INTERFACE
longlongTotNumPartQ;/*!< total particle numbers (global value) */
longlongTotN_gasQ;/*!< total gas particle number (global value) */
intMaxPartQ;/*!< This gives the maxmimum number of particles that can be stored on one processor. */
intMaxPartSphQ;/*!< This gives the maxmimum number of SPH particles that can be stored on one processor. */
intBunchSizeSph;
intBunchSizeDensitySph;
doubleForceSofteningQ;
#endif
#if defined(SFR) || defined(STELLAR_PROP)
longlongTotN_stars;/*!< total stars particle number (global value) */
#endif
#ifdef MULTIPHASE
longlongTotN_sph;/*!< total sph particle number (global value) */
longlongTotN_sticky;/*!< total sticky particle number (global value) */
longlongTotN_stickyflaged;/*!< total sticky flaged particle number (global value) */
longlongTotN_stickyactive;/*!< total sticky active particle number (global value) */
longlongTotN_dark;/*!< total dark particle number (global value) */
#endif
intMaxPart;/*!< This gives the maxmimum number of particles that can be stored on one processor. */
intMaxPartSph;/*!< This gives the maxmimum number of SPH particles that can be stored on one processor. */
#ifdef TESSEL
intMaxgPart;
#endif
#ifdef STELLAR_PROP
intMaxPartStars;/*!< This gives the maxmimum number of Star particles that can be stored on one processor. */
#endif
doubleBoxSize;/*!< Boxsize in case periodic boundary conditions are used */
intICFormat;/*!< selects different versions of IC file-format */
intSnapFormat;/*!< selects different versions of snapshot file-formats */
intNumFilesPerSnapshot;/*!< number of files in multi-file snapshot dumps */
intNumFilesWrittenInParallel;/*!< maximum number of files that may be written simultaneously when
writing/reading restart-files, or when writing snapshot files */
intBufferSize;/*!< size of communication buffer in MB */
intBunchSizeForce;/*!< number of particles fitting into the buffer in the parallel tree-force algorithm */
intBunchSizeDensity;/*!< number of particles fitting into the communication buffer in the density computation */
intBunchSizeHydro;/*!< number of particles fitting into the communication buffer in the SPH hydrodynamical force computation */
intBunchSizeDomain;/*!< number of particles fitting into the communication buffer in the domain decomposition */
#ifdef MULTIPHASE
intBunchSizeSticky;/*!< number of particles fitting into the communication buffer in the Chimie computation */
#endif
#ifdef CHIMIE
intBunchSizeChimie;/*!< number of particles fitting into the communication buffer in the Chimie computation */
intBunchSizeStarsDensity;/*!< number of particles fitting into the communication buffer in the star density computation */
#endif
#ifdef SYNCHRONIZE_NGB_TIMESTEP
intBunchSizeSynchronizeNgBTimestep;
#endif
#ifdef DISSIPATION_FORCES
intBunchSizeDissipationForces;
#endif
#ifdef FOF
intBunchSizeFOF;
#endif
#ifdef TESSEL
intBunchSizeGhost;
#endif
doublePartAllocFactor;/*!< in order to maintain work-load balance, the particle load will usually
NOT be balanced. Each processor allocates memory for PartAllocFactor times
the average number of particles to allow for that */
doubleTreeAllocFactor;/*!< Each processor allocates a number of nodes which is TreeAllocFactor times
the maximum(!) number of particles. Note: A typical local tree for N
particles needs usually about ~0.65*N nodes. */
#ifdef SFR
doubleStarsAllocFactor;/*!< Estimated fraction of gas particles that will form stars during the simulation
This allow to reduce the memory stored for stellar particles */
#endif
/* some SPH parameters */
doubleDesNumNgb;/*!< Desired number of SPH neighbours */
doubleMaxNumNgbDeviation;/*!< Maximum allowed deviation neighbour number */
doubleArtBulkViscConst;/*!< Sets the parameter \f$\alpha\f$ of the artificial viscosity */
#ifdef ART_CONDUCTIVITY
doubleArtCondConst;/*!< Sets the parameter \f$\alpha\f$ of the artificial conductivity */
doubleArtCondThreshold;
#endif
doubleInitGasTemp;/*!< may be used to set the temperature in the IC's */
doubleMinGasTemp;/*!< may be used to set a floor for the gas temperature */
doubleMinEgySpec;/*!< the minimum allowed temperature expressed as energy per unit mass */
longlongTotNumOfForces;/*!< counts total number of force computations */
longlongNumForcesSinceLastDomainDecomp;/*!< count particle updates since last domain decomposition */
/* system of units */
doubleG;/*!< Gravity-constant in internal units */
doubleUnitTime_in_s;/*!< factor to convert internal time unit to seconds/h */
doubleUnitMass_in_g;/*!< factor to convert internal mass unit to grams/h */
doubleUnitVelocity_in_cm_per_s;/*!< factor to convert intqernal velocity unit to cm/sec */
doubleUnitLength_in_cm;/*!< factor to convert internal length unit to cm/h */
doubleUnitPressure_in_cgs;/*!< factor to convert internal pressure unit to cgs units (little 'h' still around!) */
doubleUnitDensity_in_cgs;/*!< factor to convert internal length unit to g/cm^3*h^2 */
doubleUnitCoolingRate_in_cgs;/*!< factor to convert internal cooling rate to cgs units */
doubleUnitEnergy_in_cgs;/*!< factor to convert internal energy to cgs units */
doubleUnitTime_in_Megayears;/*!< factor to convert internal time to megayears/h */
doubleGravityConstantInternal;/*!< If set to zero in the parameterfile, the internal value of the
gravitational constant is set to the Newtonian value based on the system of
units specified. Otherwise the value provided is taken as internal gravity constant G. */
/* Cosmological parameters */
doubleHubble;/*!< Hubble-constant in internal units */
doubleOmega0;/*!< matter density in units of the critical density (at z=0)*/
doubleOmegaLambda;/*!< vaccum energy density relative to crictical density (at z=0) */
doubleOmegaBaryon;/*!< baryon density in units of the critical density (at z=0)*/
doubleHubbleParam;/*!< little `h', i.e. Hubble constant in units of 100 km/s/Mpc. Only needed to get absolute physical values for cooling physics */
/* Code options */
intComovingIntegrationOn;/*!< flags that comoving integration is enabled */
intPeriodicBoundariesOn;/*!< flags that periodic boundaries are enabled */
intResubmitOn;/*!< flags that automatic resubmission of job to queue system is enabled */
intTypeOfOpeningCriterion;/*!< determines tree cell-opening criterion: 0 for Barnes-Hut, 1 for relative criterion */
intTypeOfTimestepCriterion;/*!< gives type of timestep criterion (only 0 supported right now - unlike gadget-1.1) */
intOutputListOn;/*!< flags that output times are listed in a specified file */
/* Parameters determining output frequency */
intSnapshotFileCount;/*!< number of snapshot that is written next */
doubleTimeBetSnapshot;/*!< simulation time interval between snapshot files */
doubleTimeOfFirstSnapshot;/*!< simulation time of first snapshot files */
doubleCpuTimeBetRestartFile;/*!< cpu-time between regularly generated restart files */
doubleTimeLastRestartFile;/*!< cpu-time when last restart-file was written */
doubleTimeBetStatistics;/*!< simulation time interval between computations of energy statistics */
doubleTimeLastStatistics;/*!< simulation time when the energy statistics was computed the last time */
intNumCurrentTiStep;/*!< counts the number of system steps taken up to this point */
/* Current time of the simulation, global step, and end of simulation */
doubleTime;/*!< current time of the simulation */
doubleTimeBegin;/*!< time of initial conditions of the simulation */
doubleTimeStep;/*!< difference between current times of previous and current timestep */
doubleTimeMax;/*!< marks the point of time until the simulation is to be evolved */
/* variables for organizing discrete timeline */
doubleTimebase_interval;/*!< factor to convert from floating point time interval to integer timeline */
intTi_Current;/*!< current time on integer timeline */
intTi_nextoutput;/*!< next output time on integer timeline */
#ifdef FLEXSTEPS
intPresentMinStep;/*!< If FLEXSTEPS is used, particle timesteps are chosen as multiples of the present minimum timestep. */
intPresentMaxStep;/*!< If FLEXSTEPS is used, this is the maximum timestep in timeline units, rounded down to the next power 2 division */
#endif
#ifdef PMGRID
intPM_Ti_endstep;/*!< begin of present long-range timestep */
intPM_Ti_begstep;/*!< end of present long-range timestep */
#endif
/* Placement of PM grids */
#ifdef PMGRID
doubleAsmth[2];/*!< Gives the scale of the long-range/short-range split (in mesh-cells), both for the coarse and the high-res mesh */
doubleRcut[2];/*!< Gives the maximum radius for which the short-range force is evaluated with the tree (in mesh-cells), both for the coarse and the high-res mesh */
doubleCorner[2][3];/*!< lower left corner of coarse and high-res PM-mesh */
doubleUpperCorner[2][3];/*!< upper right corner of coarse and high-res PM-mesh */
doubleXmintot[2][3];/*!< minimum particle coordinates both for coarse and high-res PM-mesh */
doubleXmaxtot[2][3];/*!< maximum particle coordinates both for coarse and high-res PM-mesh */
doubleTotalMeshSize[2];/*!< total extension of coarse and high-res PM-mesh */
#endif
/* Variables that keep track of cumulative CPU consumption */
doubleTimeLimitCPU;/*!< CPU time limit as defined in parameterfile */
doubleCPU_TreeConstruction;/*!< time spent for constructing the gravitational tree */
doubleCPU_TreeWalk;/*!< actual time spent for pure tree-walks */
doubleCPU_Gravity;/*!< cumulative time used for gravity computation (tree-algorithm only) */
doubleCPU_Potential;/*!< time used for computing gravitational potentials */
doubleCPU_Domain;/*!< cumulative time spent for domain decomposition */
doubleCPU_Snapshot;/*!< time used for writing snapshot files */
doubleCPU_Total;/*!< cumulative time spent for domain decomposition */
doubleCPU_CommSum;/*!< accumulated time used for communication, and for collecting partial results, in tree-gravity */
doubleCPU_Imbalance;/*!< cumulative time lost accross all processors as work-load imbalance in gravitational tree */
doubleCPU_HydCompWalk;/*!< time used for actual SPH computations, including neighbour search */
doubleCPU_HydCommSumm;/*!< cumulative time used for communication in SPH, and for collecting partial results */
doubleCPU_HydImbalance;/*!< cumulative time lost due to work-load imbalance in SPH */
doubleCPU_Hydro;/*!< cumulative time spent for SPH related computations */
#ifdef SFR
doubleCPU_StarFormation;/*!< cumulative time spent for star formation computations */
#endif
#ifdef CHIMIE
doubleCPU_Chimie;/*!< cumulative time spent for chimie computations */
doubleCPU_ChimieDensCompWalk;
doubleCPU_ChimieDensCommSumm;
doubleCPU_ChimieDensImbalance;
doubleCPU_ChimieDensEnsureNgb;
doubleCPU_ChimieCompWalk;
doubleCPU_ChimieCommSumm;
doubleCPU_ChimieImbalance;
#endif
#ifdef MULTIPHASE
doubleCPU_Sticky;/*!< cumulative time spent for sticky computations */
#endif
doubleCPU_EnsureNgb;/*!< time needed to iterate on correct neighbour numbers */
doubleCPU_Predict;/*!< cumulative time to drift the system forward in time, including dynamic tree updates */
doubleCPU_TimeLine;/*!< time used for determining new timesteps, and for organizing the timestepping, including kicks of active particles */
doubleCPU_PM;/*!< time used for long-range gravitational force */
doubleCPU_Peano;/*!< time required to establish Peano-Hilbert order */
#ifdef DETAILED_CPU_DOMAIN
doubleCPU_Domain_findExtend;
doubleCPU_Domain_determineTopTree;
doubleCPU_Domain_sumCost;
doubleCPU_Domain_findSplit;
doubleCPU_Domain_shiftSplit;
doubleCPU_Domain_countToGo;
doubleCPU_Domain_exchange;
#endif
#ifdef DETAILED_CPU_GRAVITY
doubleCPU_Gravity_TreeWalk1;
doubleCPU_Gravity_TreeWalk2;
doubleCPU_Gravity_CommSum1;
doubleCPU_Gravity_CommSum2;
doubleCPU_Gravity_Imbalance1;
doubleCPU_Gravity_Imbalance2;
#endif
#ifdef COOLING
doubleCPU_Cooling;
#endif
#ifdef DETAILED_CPU
doubleCPU_Leapfrog;
doubleCPU_Physics;
doubleCPU_Residual;
doubleCPU_Accel;
doubleCPU_Begrun;
#endif
/* tree code opening criterion */
doubleErrTolTheta;/*!< BH tree opening angle */
doubleErrTolForceAcc;/*!< parameter for relative opening criterion in tree walk */
/* adjusts accuracy of time-integration */
doubleErrTolIntAccuracy;/*!< accuracy tolerance parameter \f$ \eta \f$ for timestep criterion. The
timestep is \f$ \Delta t = \sqrt{\frac{2 \eta eps}{a}} \f$ */
doubleMinSizeTimestep;/*!< minimum allowed timestep. Normally, the simulation terminates if the
timestep determined by the timestep criteria falls below this limit. */
doubleMaxSizeTimestep;/*!< maximum allowed timestep */
doubleMaxRMSDisplacementFac;/*!< this determines a global timestep criterion for cosmological simulations
in comoving coordinates. To this end, the code computes the rms velocity
of all particles, and limits the timestep such that the rms displacement
is a fraction of the mean particle separation (determined from the
particle mass and the cosmological parameters). This parameter specifies
this fraction. */
doubleCourantFac;/*!< SPH-Courant factor */
/* frequency of tree reconstruction/domain decomposition */
doubleTreeDomainUpdateFrequency;/*!< controls frequency of domain decompositions */
/* Gravitational and hydrodynamical softening lengths (given in terms of an `equivalent' Plummer softening length).
* Five groups of particles are supported 0="gas", 1="halo", 2="disk", 3="bulge", 4="stars", 5="bndry"
*/
doubleMinGasHsmlFractional;/*!< minimum allowed SPH smoothing length in units of SPH gravitational softening length */
*GravDataIn,/*!< holds particle data to be exported to other processors */
*GravDataGet,/*!< holds particle data imported from other processors */
*GravDataResult,/*!< holds the partial results computed for imported particles. Note: We use GravDataResult = GravDataGet, such that the result replaces the imported data */
*GravDataOut;/*!< holds partial results received from other processors. This will overwrite the GravDataIn array */
/*! Bookkeeping entry used to sort exported gravity particles by destination task
 *  and map the returned results back to the originating particle. */
extern struct gravdata_index
{
  int Task;      /*!< destination task of the exported particle */
  int Index;     /*!< index of the particle in the local particle array */
  int SortIndex; /*!< position after sorting by task -- TODO confirm exact use */
}
 *GravDataIndexTable; /*!< the particles to be exported are grouped by task-number. This table allows the results to be disentangled again and to be assigned to the correct particle */
externstructdensdata_in
{
FLOATPos[3];
FLOATVel[3];
FLOATHsml;
#ifdef MULTIPHASE
intPhase;
#endif
intIndex;
intTask;
#ifdef ART_CONDUCTIVITY
FLOATEnergyIntPred;
#endif
}
*DensDataIn,/*!< holds particle data for SPH density computation to be exported to other processors */
*DensDataGet;/*!< holds imported particle data for SPH density computation */
externstructdensdata_out
{
FLOATRho;
FLOATDiv,Rot[3];
FLOATDhsmlDensity;
FLOATNgb;
#ifdef ART_CONDUCTIVITY
FLOATGradEnergyInt[3];
#endif
#ifdef DENSITY_INDEPENDENT_SPH
FLOATEgyRho;
FLOATDhsmlEgyDensity;
#endif
#ifdef CHIMIE_SMOOTH_METALS
FLOATRhoMetal[NELEMENTS];
#endif
#ifdef METAL_DIFFUSION
FLOATRmsSpeed;
#endif
}
*DensDataResult,/*!< stores the locally computed SPH density results for imported particles */
*DensDataPartialResult;/*!< imported partial SPH density results from other processors */