From a3e6581902e0c3e23927b813f09505181ce45933 Mon Sep 17 00:00:00 2001 From: Matthieu Schaller Date: Sat, 13 Mar 2021 22:58:47 +0100 Subject: [PATCH 01/15] Move the writing of header and other general properties to rank 0 only in the SWIFT i/o plugin --- src/plugins/output_swift.cc | 210 ++++++++++++++++++------------------ 1 file changed, 105 insertions(+), 105 deletions(-) diff --git a/src/plugins/output_swift.cc b/src/plugins/output_swift.cc index 636a936..aa70ed1 100644 --- a/src/plugins/output_swift.cc +++ b/src/plugins/output_swift.cc @@ -38,14 +38,15 @@ class swift_output_plugin : public output_plugin { protected: - int num_files_, num_simultaneous_writers_; + int num_ranks_, this_rank_; + int num_files_; real_t lunit_, vunit_, munit_; real_t boxsize_, hubble_param_, astart_, zstart_; bool blongids_, bdobaryons_; - std::string this_fname_; - std::array npart_, npartTotal_, npartTotalHighWord_; + std::array npart_; + std::array npartTotal_, npartTotalHighWord_; std::array mass_; double time_; @@ -54,11 +55,12 @@ public: explicit swift_output_plugin(config_file &cf, std::unique_ptr &pcc) : output_plugin(cf, pcc, "SWIFT") { + + // SWIFT uses a single file as IC */ num_files_ = 1; -#ifdef USE_MPI - // use as many output files as we have MPI tasks - MPI_Comm_size(MPI_COMM_WORLD, &num_files_); -#endif + this_rank_ = 0; + num_files_ = 1; + real_t astart = 1.0 / (1.0 + cf_.get_value("setup", "zstart")); const double rhoc = 27.7519737; // in h^2 1e10 M_sol / Mpc^3 @@ -83,32 +85,28 @@ public: } time_ = astart; - - this_fname_ = fname_; + #ifdef USE_MPI - int thisrank = 0; - MPI_Comm_rank(MPI_COMM_WORLD, &thisrank); - if (num_files_ > 1) - this_fname_ += "." + std::to_string(thisrank); + MPI_Comm_rank(MPI_COMM_WORLD, &this_rank_); + MPI_Comm_size(MPI_COMM_WORLD, &num_ranks_); #endif - // Now that we modified the filename for MPI support, propagate the change to the super-class - // as it does operations on the file behind our back. 
- fname_ = this_fname_; + // Only ranks 0 writes the header + if (this_rank_ != 0) return; // delete output file if it exists - unlink(this_fname_.c_str()); + unlink(fname_.c_str()); // create output HDF5 file - HDFCreateFile(this_fname_); + HDFCreateFile(fname_); // Write UNITS header using the physical constants assumed internally by SWIFT - HDFCreateGroup(this_fname_, "Units"); - HDFWriteGroupAttribute(this_fname_, "Units", "Unit mass in cgs (U_M)", 1.98841e43); // 10^10 Msun in grams - HDFWriteGroupAttribute(this_fname_, "Units", "Unit length in cgs (U_L)", 3.08567758149e24); // 1 Mpc in cm - HDFWriteGroupAttribute(this_fname_, "Units", "Unit time in cgs (U_t)", 3.08567758149e19); // so that unit vel is 1 km/s - HDFWriteGroupAttribute(this_fname_, "Units", "Unit current in cgs (U_I)", 1.0); // 1 Ampere - HDFWriteGroupAttribute(this_fname_, "Units", "Unit temperature in cgs (U_T)", 1.0); // 1 Kelvin + HDFCreateGroup(fname_, "Units"); + HDFWriteGroupAttribute(fname_, "Units", "Unit mass in cgs (U_M)", 1.98841e43); // 10^10 Msun in grams + HDFWriteGroupAttribute(fname_, "Units", "Unit length in cgs (U_L)", 3.08567758149e24); // 1 Mpc in cm + HDFWriteGroupAttribute(fname_, "Units", "Unit time in cgs (U_t)", 3.08567758149e19); // so that unit vel is 1 km/s + HDFWriteGroupAttribute(fname_, "Units", "Unit current in cgs (U_I)", 1.0); // 1 Ampere + HDFWriteGroupAttribute(fname_, "Units", "Unit temperature in cgs (U_T)", 1.0); // 1 Kelvin // Write MUSIC configuration header int order = cf_.get_value("setup", "LPTorder"); @@ -122,32 +120,32 @@ public: int do_baryonsVrel = cf_.get_value("setup", "DoBaryonVrel"); int L = cf_.get_value("setup", "GridRes"); - HDFCreateGroup(this_fname_, "ICs_parameters"); - HDFWriteGroupAttribute(this_fname_, "ICs_parameters", "Code", std::string("MUSIC2 - monofonIC")); - HDFWriteGroupAttribute(this_fname_, "ICs_parameters", "Git Revision", std::string(GIT_REV)); - HDFWriteGroupAttribute(this_fname_, "ICs_parameters", "Git Tag", std::string(GIT_TAG)); - HDFWriteGroupAttribute(this_fname_, "ICs_parameters", "Git Branch", std::string(GIT_BRANCH)); - HDFWriteGroupAttribute(this_fname_, "ICs_parameters", "Precision", std::string(CMAKE_PRECISION_STR)); - HDFWriteGroupAttribute(this_fname_, "ICs_parameters", "Convolutions", std::string(CMAKE_CONVOLVER_STR)); - HDFWriteGroupAttribute(this_fname_, "ICs_parameters", "PLT", std::string(CMAKE_PLT_STR)); - HDFWriteGroupAttribute(this_fname_, "ICs_parameters", "LPT Order", order); - HDFWriteGroupAttribute(this_fname_, "ICs_parameters", "Particle Load", load); - HDFWriteGroupAttribute(this_fname_, "ICs_parameters", "Transfer Function", tf); - HDFWriteGroupAttribute(this_fname_, "ICs_parameters", "Cosmology Parameter Set", cosmo_set); - HDFWriteGroupAttribute(this_fname_, "ICs_parameters", "Random Generator", rng); - HDFWriteGroupAttribute(this_fname_, "ICs_parameters", "Mode Fixing", do_fixing); - HDFWriteGroupAttribute(this_fname_, "ICs_parameters", "Mode inversion", do_invert); - HDFWriteGroupAttribute(this_fname_, "ICs_parameters", "Baryons", do_baryons); - HDFWriteGroupAttribute(this_fname_, "ICs_parameters", "Baryons Relative Velocity", do_baryonsVrel); - HDFWriteGroupAttribute(this_fname_, "ICs_parameters", "Grid Resolution", L); + HDFCreateGroup(fname_, "ICs_parameters"); + HDFWriteGroupAttribute(fname_, "ICs_parameters", "Code", std::string("MUSIC2 - monofonIC")); + HDFWriteGroupAttribute(fname_, "ICs_parameters", "Git Revision", std::string(GIT_REV)); + HDFWriteGroupAttribute(fname_, "ICs_parameters", "Git Tag", 
std::string(GIT_TAG)); + HDFWriteGroupAttribute(fname_, "ICs_parameters", "Git Branch", std::string(GIT_BRANCH)); + HDFWriteGroupAttribute(fname_, "ICs_parameters", "Precision", std::string(CMAKE_PRECISION_STR)); + HDFWriteGroupAttribute(fname_, "ICs_parameters", "Convolutions", std::string(CMAKE_CONVOLVER_STR)); + HDFWriteGroupAttribute(fname_, "ICs_parameters", "PLT", std::string(CMAKE_PLT_STR)); + HDFWriteGroupAttribute(fname_, "ICs_parameters", "LPT Order", order); + HDFWriteGroupAttribute(fname_, "ICs_parameters", "Particle Load", load); + HDFWriteGroupAttribute(fname_, "ICs_parameters", "Transfer Function", tf); + HDFWriteGroupAttribute(fname_, "ICs_parameters", "Cosmology Parameter Set", cosmo_set); + HDFWriteGroupAttribute(fname_, "ICs_parameters", "Random Generator", rng); + HDFWriteGroupAttribute(fname_, "ICs_parameters", "Mode Fixing", do_fixing); + HDFWriteGroupAttribute(fname_, "ICs_parameters", "Mode inversion", do_invert); + HDFWriteGroupAttribute(fname_, "ICs_parameters", "Baryons", do_baryons); + HDFWriteGroupAttribute(fname_, "ICs_parameters", "Baryons Relative Velocity", do_baryonsVrel); + HDFWriteGroupAttribute(fname_, "ICs_parameters", "Grid Resolution", L); if (tf == "CLASS") { double ztarget = cf_.get_value("cosmology", "ztarget"); - HDFWriteGroupAttribute(this_fname_, "ICs_parameters", "Target Redshift", ztarget); + HDFWriteGroupAttribute(fname_, "ICs_parameters", "Target Redshift", ztarget); } if (rng == "PANPHASIA") { std::string desc = cf_.get_value("random", "descriptor"); - HDFWriteGroupAttribute(this_fname_, "ICs_parameters", "Descriptor", desc); + HDFWriteGroupAttribute(fname_, "ICs_parameters", "Descriptor", desc); } } @@ -155,56 +153,27 @@ public: ~swift_output_plugin() { if (!std::uncaught_exception()) - { - HDFCreateGroup(this_fname_, "Header"); - HDFWriteGroupAttribute(fname_, "Header", "Dimension", 3); - HDFWriteGroupAttribute(fname_, "Header", "BoxSize", boxsize_ / hubble_param_); // in Mpc, not Mpc/h - - HDFWriteGroupAttribute(this_fname_, "Header", "NumPart_Total", from_7array(npartTotal_)); - HDFWriteGroupAttribute(this_fname_, "Header", "NumPart_Total_HighWord", from_7array(npartTotalHighWord_)); - HDFWriteGroupAttribute(this_fname_, "Header", "NumPart_ThisFile", from_7array(npart_)); - HDFWriteGroupAttribute(this_fname_, "Header", "MassTable", from_7array(mass_)); - - HDFWriteGroupAttribute(this_fname_, "Header", "Time", from_value(time_)); - HDFWriteGroupAttribute(this_fname_, "Header", "Redshift", from_value(zstart_)); - HDFWriteGroupAttribute(this_fname_, "Header", "Flag_Entropy_ICs", from_value(0)); + { + if (this_rank_ == 0) { + + // Write Standard Gadget / SWIFT hdf5 header + HDFCreateGroup(fname_, "Header"); + HDFWriteGroupAttribute(fname_, "Header", "Dimension", 3); + HDFWriteGroupAttribute(fname_, "Header", "BoxSize", boxsize_ / hubble_param_); // in Mpc, not Mpc/h + + HDFWriteGroupAttribute(fname_, "Header", "NumPart_Total", from_7array(npartTotal_)); + HDFWriteGroupAttribute(fname_, "Header", "NumPart_Total_HighWord", from_7array(npartTotalHighWord_)); + HDFWriteGroupAttribute(fname_, "Header", "NumPart_ThisFile", from_7array(npart_)); + HDFWriteGroupAttribute(fname_, "Header", "MassTable", from_7array(mass_)); + + HDFWriteGroupAttribute(fname_, "Header", "Time", from_value(time_)); + HDFWriteGroupAttribute(fname_, "Header", "Redshift", from_value(zstart_)); + HDFWriteGroupAttribute(fname_, "Header", "Flag_Entropy_ICs", from_value(0)); + + HDFWriteGroupAttribute(fname_, "Header", "NumFilesPerSnapshot", from_value(num_files_)); - 
HDFWriteGroupAttribute(this_fname_, "Header", "NumFilesPerSnapshot", from_value(num_files_)); - - // write GAS internal energy and smoothing length if baryons are enabled - if( bdobaryons_ ) - { - const double gamma = cf_.get_value_safe("cosmology", "gamma", 5.0 / 3.0); - const double YHe = pcc_->cosmo_param_["YHe"]; - //const double Omega0 = cf_.get_value("cosmology", "Omega_m"); - const double omegab = pcc_->cosmo_param_["Omega_b"]; - const double Tcmb0 = pcc_->cosmo_param_["Tcmb"]; - - - // compute gas internal energy - const double npol = (fabs(1.0 - gamma) > 1e-7) ? 1.0 / (gamma - 1.) : 1.0; - const double unitv = 1e5; - const double adec = 1.0 / (160. * std::pow(omegab * hubble_param_ * hubble_param_ / 0.022, 2.0 / 5.0)); - const double Tini = astart_ < adec ? Tcmb0 / astart_ : Tcmb0 / astart_ / astart_ * adec; - const double mu = (Tini > 1.e4) ? 4.0 / (8. - 5. * YHe) : 4.0 / (1. + 3. * (1. - YHe)); - const double ceint = 1.3806e-16 / 1.6726e-24 * Tini * npol / mu / unitv / unitv; - - music::ilog.Print("Swift : Calculated initial gas temperature: %.2f K/mu", Tini / mu); - music::ilog.Print("Swift : set initial internal energy to %.2e km^2/s^2", ceint); - - std::vector data( npart_[0], ceint ); - HDFWriteDataset(this_fname_, "PartType0/InternalEnergy", data); - - const double h = boxsize_ / hubble_param_ / cf_.get_value("setup","GridRes"); - - music::ilog.Print("Swift : set initial smoothing length to mean inter-part separation: %.2f Mpc", h); - - data.assign( npart_[0], h); - HDFWriteDataset(this_fname_, "PartType0/SmoothingLength", data); - + music::ilog << "Wrote SWIFT IC file(s) to " << fname_ << std::endl; } - - music::ilog << "Wrote SWIFT IC file(s) to " << this_fname_ << std::endl; } } @@ -247,46 +216,77 @@ public: void write_particle_data(const particle::container &pc, const cosmo_species &s, double Omega_species) { int sid = get_species_idx(s); - assert(sid != -1); - npart_[sid] = (pc.get_local_num_particles()); + npart_[sid] = (pc.get_global_num_particles()); npartTotal_[sid] = (uint32_t)(pc.get_global_num_particles()); npartTotalHighWord_[sid] = (uint32_t)((pc.get_global_num_particles()) >> 32); - + if( pc.bhas_individual_masses_ ) mass_[sid] = 0.0; else mass_[sid] = Omega_species * munit_ / pc.get_global_num_particles(); - - HDFCreateGroup(this_fname_, std::string("PartType") + std::to_string(sid)); + + HDFCreateGroup(fname_, std::string("PartType") + std::to_string(sid)); //... write positions and velocities..... 
if (this->has_64bit_reals()) { - HDFWriteDatasetVector(this_fname_, std::string("PartType") + std::to_string(sid) + std::string("/Coordinates"), pc.positions64_); - HDFWriteDatasetVector(this_fname_, std::string("PartType") + std::to_string(sid) + std::string("/Velocities"), pc.velocities64_); + HDFWriteDatasetVector(fname_, std::string("PartType") + std::to_string(sid) + std::string("/Coordinates"), pc.positions64_); + HDFWriteDatasetVector(fname_, std::string("PartType") + std::to_string(sid) + std::string("/Velocities"), pc.velocities64_); } else { - HDFWriteDatasetVector(this_fname_, std::string("PartType") + std::to_string(sid) + std::string("/Coordinates"), pc.positions32_); - HDFWriteDatasetVector(this_fname_, std::string("PartType") + std::to_string(sid) + std::string("/Velocities"), pc.velocities32_); + HDFWriteDatasetVector(fname_, std::string("PartType") + std::to_string(sid) + std::string("/Coordinates"), pc.positions32_); + HDFWriteDatasetVector(fname_, std::string("PartType") + std::to_string(sid) + std::string("/Velocities"), pc.velocities32_); } //... write ids..... if (this->has_64bit_ids()) - HDFWriteDataset(this_fname_, std::string("PartType") + std::to_string(sid) + std::string("/ParticleIDs"), pc.ids64_); + HDFWriteDataset(fname_, std::string("PartType") + std::to_string(sid) + std::string("/ParticleIDs"), pc.ids64_); else - HDFWriteDataset(this_fname_, std::string("PartType") + std::to_string(sid) + std::string("/ParticleIDs"), pc.ids32_); + HDFWriteDataset(fname_, std::string("PartType") + std::to_string(sid) + std::string("/ParticleIDs"), pc.ids32_); //... write masses..... if( pc.bhas_individual_masses_ ){ if (this->has_64bit_reals()){ - HDFWriteDataset(this_fname_, std::string("PartType") + std::to_string(sid) + std::string("/Masses"), pc.mass64_); + HDFWriteDataset(fname_, std::string("PartType") + std::to_string(sid) + std::string("/Masses"), pc.mass64_); }else{ - HDFWriteDataset(this_fname_, std::string("PartType") + std::to_string(sid) + std::string("/Masses"), pc.mass32_); + HDFWriteDataset(fname_, std::string("PartType") + std::to_string(sid) + std::string("/Masses"), pc.mass32_); } } + + // write GAS internal energy and smoothing length if baryons are enabled + if( bdobaryons_ && s == cosmo_species::baryon) { + + const double gamma = cf_.get_value_safe("cosmology", "gamma", 5.0 / 3.0); + const double YHe = pcc_->cosmo_param_["YHe"]; + //const double Omega0 = cf_.get_value("cosmology", "Omega_m"); + const double omegab = pcc_->cosmo_param_["Omega_b"]; + const double Tcmb0 = pcc_->cosmo_param_["Tcmb"]; + + // compute gas internal energy + const double npol = (fabs(1.0 - gamma) > 1e-7) ? 1.0 / (gamma - 1.) : 1.0; + const double unitv = 1e5; + const double adec = 1.0 / (160. * std::pow(omegab * hubble_param_ * hubble_param_ / 0.022, 2.0 / 5.0)); + const double Tini = astart_ < adec ? Tcmb0 / astart_ : Tcmb0 / astart_ / astart_ * adec; + const double mu = (Tini > 1.e4) ? 4.0 / (8. - 5. * YHe) : 4.0 / (1. + 3. * (1. 
- YHe)); + const double ceint = 1.3806e-16 / 1.6726e-24 * Tini * npol / mu / unitv / unitv; + + music::ilog.Print("Swift : Calculated initial gas temperature: %.2f K/mu", Tini / mu); + music::ilog.Print("Swift : set initial internal energy to %.2e km^2/s^2", ceint); + + std::vector data( npart_[0], ceint ); + HDFWriteDataset(fname_, std::string("PartType") + std::to_string(sid) + std::string("/InternalEnergy"), data); + + const double h = boxsize_ / hubble_param_ / cf_.get_value("setup","GridRes"); + + music::ilog.Print("Swift : set initial smoothing length to mean inter-part separation: %.2f Mpc", h); + + data.assign( npart_[0], h); + HDFWriteDataset(fname_, std::string("PartType") + std::to_string(sid) + std::string("/SmoothingLength"), data); + + } } }; From 7a463aad2eaf8d4756b32846f9e974d29f9965c2 Mon Sep 17 00:00:00 2001 From: Matthieu Schaller Date: Sat, 13 Mar 2021 23:33:26 +0100 Subject: [PATCH 02/15] Move the calculation of the temperatures to the constructor of the SWIFT i/o plugin --- src/plugins/output_swift.cc | 54 +++++++++++++++++++------------------ 1 file changed, 28 insertions(+), 26 deletions(-) diff --git a/src/plugins/output_swift.cc b/src/plugins/output_swift.cc index aa70ed1..ab32d6a 100644 --- a/src/plugins/output_swift.cc +++ b/src/plugins/output_swift.cc @@ -49,6 +49,7 @@ protected: std::array npartTotal_, npartTotalHighWord_; std::array mass_; double time_; + double ceint_, h_; public: //! constructor @@ -147,6 +148,29 @@ public: std::string desc = cf_.get_value("random", "descriptor"); HDFWriteGroupAttribute(fname_, "ICs_parameters", "Descriptor", desc); } + + + if (bdobaryons_) { + + const double gamma = cf_.get_value_safe("cosmology", "gamma", 5.0 / 3.0); + const double YHe = pcc_->cosmo_param_["YHe"]; + const double omegab = pcc_->cosmo_param_["Omega_b"]; + const double Tcmb0 = pcc_->cosmo_param_["Tcmb"]; + + // compute gas internal energy + const double npol = (fabs(1.0 - gamma) > 1e-7) ? 1.0 / (gamma - 1.) : 1.0; + const double unitv = 1e5; + const double adec = 1.0 / (160. * std::pow(omegab * hubble_param_ * hubble_param_ / 0.022, 2.0 / 5.0)); + const double Tini = astart_ < adec ? Tcmb0 / astart_ : Tcmb0 / astart_ / astart_ * adec; + const double mu = (Tini > 1.e4) ? 4.0 / (8. - 5. * YHe) : 4.0 / (1. + 3. * (1. - YHe)); + ceint_ = 1.3806e-16 / 1.6726e-24 * Tini * npol / mu / unitv / unitv; + + music::ilog.Print("Swift : Calculated initial gas temperature: %.2f K/mu", Tini / mu); + music::ilog.Print("Swift : set initial internal energy to %.2e km^2/s^2", ceint_); + + h_ = boxsize_ / hubble_param_ / cf_.get_value("setup","GridRes"); + music::ilog.Print("Swift : set initial smoothing length to mean inter-part separation: %.2f Mpc", h_); + } } // use destructor to write header post factum @@ -226,7 +250,7 @@ public: mass_[sid] = 0.0; else mass_[sid] = Omega_species * munit_ / pc.get_global_num_particles(); - + HDFCreateGroup(fname_, std::string("PartType") + std::to_string(sid)); //... write positions and velocities..... @@ -258,34 +282,12 @@ public: // write GAS internal energy and smoothing length if baryons are enabled if( bdobaryons_ && s == cosmo_species::baryon) { - - const double gamma = cf_.get_value_safe("cosmology", "gamma", 5.0 / 3.0); - const double YHe = pcc_->cosmo_param_["YHe"]; - //const double Omega0 = cf_.get_value("cosmology", "Omega_m"); - const double omegab = pcc_->cosmo_param_["Omega_b"]; - const double Tcmb0 = pcc_->cosmo_param_["Tcmb"]; - - // compute gas internal energy - const double npol = (fabs(1.0 - gamma) > 1e-7) ? 
1.0 / (gamma - 1.) : 1.0; - const double unitv = 1e5; - const double adec = 1.0 / (160. * std::pow(omegab * hubble_param_ * hubble_param_ / 0.022, 2.0 / 5.0)); - const double Tini = astart_ < adec ? Tcmb0 / astart_ : Tcmb0 / astart_ / astart_ * adec; - const double mu = (Tini > 1.e4) ? 4.0 / (8. - 5. * YHe) : 4.0 / (1. + 3. * (1. - YHe)); - const double ceint = 1.3806e-16 / 1.6726e-24 * Tini * npol / mu / unitv / unitv; - - music::ilog.Print("Swift : Calculated initial gas temperature: %.2f K/mu", Tini / mu); - music::ilog.Print("Swift : set initial internal energy to %.2e km^2/s^2", ceint); - - std::vector data( npart_[0], ceint ); + + std::vector data( npart_[0], ceint_ ); HDFWriteDataset(fname_, std::string("PartType") + std::to_string(sid) + std::string("/InternalEnergy"), data); - const double h = boxsize_ / hubble_param_ / cf_.get_value("setup","GridRes"); - - music::ilog.Print("Swift : set initial smoothing length to mean inter-part separation: %.2f Mpc", h); - - data.assign( npart_[0], h); + data.assign( npart_[0], h_); HDFWriteDataset(fname_, std::string("PartType") + std::to_string(sid) + std::string("/SmoothingLength"), data); - } } }; From c1ebf38aa13dae82726012d21147610b577a6dab Mon Sep 17 00:00:00 2001 From: Matthieu Schaller Date: Sun, 14 Mar 2021 12:26:38 +0100 Subject: [PATCH 03/15] Make rank 0 create the arrays in the hdf5 file for the SWIFT i/o plugin. --- include/HDF_IO.hh | 54 +++++++++++++++++++++++++++++++++++++ src/plugins/output_swift.cc | 42 +++++++++++++++++++++++++++-- 2 files changed, 94 insertions(+), 2 deletions(-) diff --git a/include/HDF_IO.hh b/include/HDF_IO.hh index 1b15b34..a8a50b6 100755 --- a/include/HDF_IO.hh +++ b/include/HDF_IO.hh @@ -886,6 +886,60 @@ inline void HDFWriteDatasetVector( const std::string Filename, const std::string H5Fclose( HDF_FileID ); } +template< typename T > +inline void HDFCreateEmptyDataset( const std::string Filename, const std::string ObjName, const size_t num_particles) +{ + + hid_t + HDF_FileID, + HDF_DatasetID, + HDF_DataspaceID, + HDF_Type; + + hsize_t HDF_Dims; + + HDF_FileID = H5Fopen( Filename.c_str(), H5F_ACC_RDWR, H5P_DEFAULT ); + + HDF_Type = GetDataType(); + + HDF_Dims = (hsize_t) (num_particles); + HDF_DataspaceID = H5Screate_simple(1, &HDF_Dims, NULL); + HDF_DatasetID = H5Dcreate( HDF_FileID, ObjName.c_str(), HDF_Type, + HDF_DataspaceID, H5P_DEFAULT ); + H5Dclose( HDF_DatasetID ); + H5Sclose( HDF_DataspaceID ); + + H5Fclose( HDF_FileID ); +} + +template< typename T > +inline void HDFCreateEmptyDatasetVector( const std::string Filename, const std::string ObjName, const size_t num_particles) +{ + + hid_t + HDF_FileID, + HDF_DatasetID, + HDF_DataspaceID, + HDF_Type; + + hsize_t HDF_Dims[2]; + + HDF_FileID = H5Fopen( Filename.c_str(), H5F_ACC_RDWR, H5P_DEFAULT ); + + HDF_Type = GetDataType(); + + HDF_Dims[0] = (hsize_t) (num_particles); + HDF_Dims[1] = (hsize_t) 3; + + HDF_DataspaceID = H5Screate_simple(2, HDF_Dims, NULL); + HDF_DatasetID = H5Dcreate( HDF_FileID, ObjName.c_str(), HDF_Type, + HDF_DataspaceID, H5P_DEFAULT ); + H5Dclose( HDF_DatasetID ); + H5Sclose( HDF_DataspaceID ); + H5Fclose( HDF_FileID ); + +} + inline void HDFCreateGroup( const std::string Filename, const std::string GroupName ) { hid_t HDF_FileID, HDF_GroupID; diff --git a/src/plugins/output_swift.cc b/src/plugins/output_swift.cc index ab32d6a..06e85c8 100644 --- a/src/plugins/output_swift.cc +++ b/src/plugins/output_swift.cc @@ -60,7 +60,7 @@ public: // SWIFT uses a single file as IC */ num_files_ = 1; this_rank_ = 0; - num_files_ = 1; + 
num_ranks_ = 1; real_t astart = 1.0 / (1.0 + cf_.get_value("setup", "zstart")); const double rhoc = 27.7519737; // in h^2 1e10 M_sol / Mpc^3 @@ -251,7 +251,45 @@ public: else mass_[sid] = Omega_species * munit_ / pc.get_global_num_particles(); - HDFCreateGroup(fname_, std::string("PartType") + std::to_string(sid)); + const size_t global_num_particles = pc.get_global_num_particles(); + + // start by creating the full empty datasets in the file + if (this_rank_ == 0) { + + HDFCreateGroup(fname_, std::string("PartType") + std::to_string(sid)); + + if (this->has_64bit_reals()) + { + HDFCreateEmptyDatasetVector(fname_, std::string("PartType") + std::to_string(sid) + std::string("/Coordinates"), global_num_particles); + HDFCreateEmptyDatasetVector(fname_, std::string("PartType") + std::to_string(sid) + std::string("/Velocities"), global_num_particles); + } + else + { + HDFCreateEmptyDatasetVector(fname_, std::string("PartType") + std::to_string(sid) + std::string("/Coordinates"), global_num_particles); + HDFCreateEmptyDatasetVector(fname_, std::string("PartType") + std::to_string(sid) + std::string("/Velocities"), global_num_particles); + } + + if (this->has_64bit_ids()) + HDFCreateEmptyDataset(fname_, std::string("PartType") + std::to_string(sid) + std::string("/ParticleIDs"), global_num_particles); + else + HDFCreateEmptyDataset(fname_, std::string("PartType") + std::to_string(sid) + std::string("/ParticleIDs"), global_num_particles); + + if( pc.bhas_individual_masses_ ){ + if (this->has_64bit_reals()) + HDFCreateEmptyDataset(fname_, std::string("PartType") + std::to_string(sid) + std::string("/Masses"), global_num_particles); + else + HDFCreateEmptyDataset(fname_, std::string("PartType") + std::to_string(sid) + std::string("/Masses"), global_num_particles); + } + + if( bdobaryons_ && s == cosmo_species::baryon) { + + // note: despite this being a constant array we still need to handle it in a distributed way + HDFCreateEmptyDataset(fname_, std::string("PartType") + std::to_string(sid) + std::string("/InternalEnergy"), global_num_particles); + HDFCreateEmptyDataset(fname_, std::string("PartType") + std::to_string(sid) + std::string("/SmoothinLength"), global_num_particles); + } + } + + // Now each node writes its own chunk in a round-robin fashion, appending at the end of the currently existing data //... write positions and velocities..... 
if (this->has_64bit_reals()) From 5207492b8e58e5ff5864a2fcf2c7598c2faba62b Mon Sep 17 00:00:00 2001 From: Matthieu Schaller Date: Sun, 14 Mar 2021 18:34:34 +0100 Subject: [PATCH 04/15] In the SWIFT i/o plugin, get each rank to write its data in a round-robin fashion using an offset in the previously-created array --- include/HDF_IO.hh | 90 +++++++++++++++++++++++++++++++++++++ src/plugins/output_swift.cc | 81 +++++++++++++++++++-------------- 2 files changed, 138 insertions(+), 33 deletions(-) diff --git a/include/HDF_IO.hh b/include/HDF_IO.hh index a8a50b6..7597e10 100755 --- a/include/HDF_IO.hh +++ b/include/HDF_IO.hh @@ -940,6 +940,96 @@ inline void HDFCreateEmptyDatasetVector( const std::string Filename, const std:: } +template< typename T > +inline void HDFWriteDatasetChunk( const std::string Filename, const std::string ObjName, const std::vector &Data, const size_t offset ) +{ + + hid_t + HDF_FileID, + HDF_DatasetID, + HDF_MemDataspaceID, + HDF_FileDataspaceID, + HDF_Type; + + hsize_t HDF_Dims, + HDF_Shape, + HDF_Offset; + + HDF_FileID = H5Fopen( Filename.c_str(), H5F_ACC_RDWR, H5P_DEFAULT ); + + HDF_Type = GetDataType(); + + HDF_Dims = (hsize_t)Data.size(); + HDF_MemDataspaceID = H5Screate_simple(1, &HDF_Dims, NULL); + + HDF_Shape = (hsize_t)Data.size(); + HDF_Offset = (hsize_t)offset; + + HDF_DatasetID = H5Dopen( HDF_FileID, ObjName.c_str() ); + + HDF_FileDataspaceID = H5Dget_space(HDF_DatasetID); + + H5Sselect_hyperslab(HDF_FileDataspaceID, H5S_SELECT_SET, &HDF_Offset, NULL, &HDF_Shape, NULL); + + H5Dwrite( HDF_DatasetID, HDF_Type, HDF_MemDataspaceID, HDF_FileDataspaceID, H5P_DEFAULT, &Data[0] ); + + H5Dclose( HDF_DatasetID ); + H5Sclose( HDF_MemDataspaceID ); + H5Sclose( HDF_FileDataspaceID ); + H5Fclose( HDF_FileID ); +} + +template< typename T > +inline void HDFWriteDatasetVectorChunk( const std::string Filename, const std::string ObjName, const std::vector &Data, const size_t offset ) +{ + + hid_t + HDF_FileID, + HDF_DatasetID, + HDF_MemDataspaceID, + HDF_FileDataspaceID, + HDF_Type; + + hsize_t HDF_Dims[2], + HDF_Shape[2], + HDF_Offset[2]; + + HDF_FileID = H5Fopen( Filename.c_str(), H5F_ACC_RDWR, H5P_DEFAULT ); + + HDF_Type = GetDataType(); + + HDF_Dims[0] = (hsize_t)(Data.size()/3); + HDF_Dims[1] = 3; + + HDF_MemDataspaceID = H5Screate_simple(2, HDF_Dims, NULL); + + if( Data.size() % 3 != 0 ){ + std::cerr << " - Warning: Trying to write vector data in HDFWriteDatasetVector\n" + << " but array length not divisible by 3!\n\n"; + + } + + HDF_Shape[0] = (hsize_t)(Data.size()/3); + HDF_Shape[1] = (hsize_t)3; + HDF_Offset[0] = (hsize_t)offset; + HDF_Offset[1] = (hsize_t)0; + + HDF_DatasetID = H5Dopen( HDF_FileID, ObjName.c_str() ); + + HDF_FileDataspaceID = H5Dget_space(HDF_DatasetID); + + H5Sselect_hyperslab(HDF_FileDataspaceID, H5S_SELECT_SET, HDF_Offset, NULL, HDF_Shape, NULL); + + H5Dwrite( HDF_DatasetID, HDF_Type, HDF_MemDataspaceID, HDF_FileDataspaceID, H5P_DEFAULT, &Data[0] ); + + H5Dclose( HDF_DatasetID ); + H5Sclose( HDF_MemDataspaceID ); + H5Sclose( HDF_FileDataspaceID ); + H5Fclose( HDF_FileID ); +} + + + inline void HDFCreateGroup( const std::string Filename, const std::string GroupName ) { hid_t HDF_FileID, HDF_GroupID; diff --git a/src/plugins/output_swift.cc b/src/plugins/output_swift.cc index 06e85c8..ca00414 100644 --- a/src/plugins/output_swift.cc +++ b/src/plugins/output_swift.cc @@ -285,48 +285,63 @@ public: // note: despite this being a constant array we still need to handle it in a distributed way HDFCreateEmptyDataset(fname_, std::string("PartType") + 
std::to_string(sid) + std::string("/InternalEnergy"), global_num_particles); - HDFCreateEmptyDataset(fname_, std::string("PartType") + std::to_string(sid) + std::string("/SmoothinLength"), global_num_particles); + HDFCreateEmptyDataset(fname_, std::string("PartType") + std::to_string(sid) + std::string("/SmoothingLength"), global_num_particles); } } - // Now each node writes its own chunk in a round-robin fashion, appending at the end of the currently existing data + // compute each rank's offset in the global array + const size_t n_local = pc.get_local_num_particles(); + size_t offset = 0; + MPI_Exscan(&n_local, &offset, 1, MPI_UNSIGNED_LONG_LONG, MPI_SUM, MPI_COMM_WORLD); - //... write positions and velocities..... - if (this->has_64bit_reals()) - { - HDFWriteDatasetVector(fname_, std::string("PartType") + std::to_string(sid) + std::string("/Coordinates"), pc.positions64_); - HDFWriteDatasetVector(fname_, std::string("PartType") + std::to_string(sid) + std::string("/Velocities"), pc.velocities64_); - } - else - { - HDFWriteDatasetVector(fname_, std::string("PartType") + std::to_string(sid) + std::string("/Coordinates"), pc.positions32_); - HDFWriteDatasetVector(fname_, std::string("PartType") + std::to_string(sid) + std::string("/Velocities"), pc.velocities32_); - } + // now each node writes its own chunk in a round-robin fashion, appending at the end of the currently existing data + for (int rank = 0; rank < num_ranks_; ++rank) { - //... write ids..... - if (this->has_64bit_ids()) - HDFWriteDataset(fname_, std::string("PartType") + std::to_string(sid) + std::string("/ParticleIDs"), pc.ids64_); - else - HDFWriteDataset(fname_, std::string("PartType") + std::to_string(sid) + std::string("/ParticleIDs"), pc.ids32_); + MPI_Barrier(MPI_COMM_WORLD); - //... write masses..... - if( pc.bhas_individual_masses_ ){ - if (this->has_64bit_reals()){ - HDFWriteDataset(fname_, std::string("PartType") + std::to_string(sid) + std::string("/Masses"), pc.mass64_); - }else{ - HDFWriteDataset(fname_, std::string("PartType") + std::to_string(sid) + std::string("/Masses"), pc.mass32_); + if (rank == this_rank_) { + + //... write positions and velocities..... + if (this->has_64bit_reals()) + { + HDFWriteDatasetVectorChunk(fname_, std::string("PartType") + std::to_string(sid) + std::string("/Coordinates"), pc.positions64_, offset); + HDFWriteDatasetVectorChunk(fname_, std::string("PartType") + std::to_string(sid) + std::string("/Velocities"), pc.velocities64_, offset); + } + else + { + HDFWriteDatasetVectorChunk(fname_, std::string("PartType") + std::to_string(sid) + std::string("/Coordinates"), pc.positions32_, offset); + HDFWriteDatasetVectorChunk(fname_, std::string("PartType") + std::to_string(sid) + std::string("/Velocities"), pc.velocities32_, offset); + } + + //... write ids..... + if (this->has_64bit_ids()) + HDFWriteDatasetChunk(fname_, std::string("PartType") + std::to_string(sid) + std::string("/ParticleIDs"), pc.ids64_, offset); + else + HDFWriteDatasetChunk(fname_, std::string("PartType") + std::to_string(sid) + std::string("/ParticleIDs"), pc.ids32_, offset); + + //... write masses..... 
+ if( pc.bhas_individual_masses_ ){ + if (this->has_64bit_reals()){ + HDFWriteDatasetChunk(fname_, std::string("PartType") + std::to_string(sid) + std::string("/Masses"), pc.mass64_, offset); + }else{ + HDFWriteDatasetChunk(fname_, std::string("PartType") + std::to_string(sid) + std::string("/Masses"), pc.mass32_, offset); + } + } + + // write GAS internal energy and smoothing length if baryons are enabled + if(bdobaryons_ && s == cosmo_species::baryon) { + + std::vector data( pc.get_local_num_particles(), ceint_ ); + HDFWriteDatasetChunk(fname_, std::string("PartType") + std::to_string(sid) + std::string("/InternalEnergy"), data, offset); + + data.assign( pc.get_local_num_particles(), h_); + HDFWriteDatasetChunk(fname_, std::string("PartType") + std::to_string(sid) + std::string("/SmoothingLength"), data, offset); + } } } - // write GAS internal energy and smoothing length if baryons are enabled - if( bdobaryons_ && s == cosmo_species::baryon) { - - std::vector data( npart_[0], ceint_ ); - HDFWriteDataset(fname_, std::string("PartType") + std::to_string(sid) + std::string("/InternalEnergy"), data); - - data.assign( npart_[0], h_); - HDFWriteDataset(fname_, std::string("PartType") + std::to_string(sid) + std::string("/SmoothingLength"), data); - } + // end with a barrier to make sure everyone is done before the destructor does its job + MPI_Barrier(MPI_COMM_WORLD); } }; From 098dafa1ef4bd227aafab7028972ab9ac0b03227 Mon Sep 17 00:00:00 2001 From: Matthieu Schaller Date: Sun, 14 Mar 2021 18:50:46 +0100 Subject: [PATCH 05/15] Fix constant used in the SWIFT i/o plugin to match exactly the value of the physical constants used internally by SWIFT --- src/plugins/output_swift.cc | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/src/plugins/output_swift.cc b/src/plugins/output_swift.cc index ca00414..cac166c 100644 --- a/src/plugins/output_swift.cc +++ b/src/plugins/output_swift.cc @@ -63,7 +63,7 @@ public: num_ranks_ = 1; real_t astart = 1.0 / (1.0 + cf_.get_value("setup", "zstart")); - const double rhoc = 27.7519737; // in h^2 1e10 M_sol / Mpc^3 + const double rhoc = 27.7536609198; // in h^2 1e10 M_sol / Mpc^3 assuming SWIFT's internal constants hubble_param_ = pcc->cosmo_param_["h"]; zstart_ = cf_.get_value("setup","zstart"); @@ -297,6 +297,7 @@ public: // now each node writes its own chunk in a round-robin fashion, appending at the end of the currently existing data for (int rank = 0; rank < num_ranks_; ++rank) { + // wait until the initialisation or the previous rank in the loop is done MPI_Barrier(MPI_COMM_WORLD); if (rank == this_rank_) { From 36bf7b07606b61568602c74198ec20b4a3c8b0e0 Mon Sep 17 00:00:00 2001 From: Matthieu Schaller Date: Sun, 28 Mar 2021 23:14:29 +0200 Subject: [PATCH 06/15] Make sure all ranks compute h and u, not just rank 0 --- src/plugins/output_swift.cc | 45 ++++++++++++++++++------------------- 1 file changed, 22 insertions(+), 23 deletions(-) diff --git a/src/plugins/output_swift.cc b/src/plugins/output_swift.cc index cac166c..64d9505 100644 --- a/src/plugins/output_swift.cc +++ b/src/plugins/output_swift.cc @@ -92,6 +92,28 @@ public: MPI_Comm_size(MPI_COMM_WORLD, &num_ranks_); #endif + if (bdobaryons_) { + + const double gamma = cf_.get_value_safe("cosmology", "gamma", 5.0 / 3.0); + const double YHe = pcc_->cosmo_param_["YHe"]; + const double omegab = pcc_->cosmo_param_["Omega_b"]; + const double Tcmb0 = pcc_->cosmo_param_["Tcmb"]; + + // compute gas internal energy + const double npol = (fabs(1.0 - gamma) > 1e-7) ? 1.0 / (gamma - 1.) 
: 1.0; + const double unitv = 1e5; + const double adec = 1.0 / (160. * std::pow(omegab * hubble_param_ * hubble_param_ / 0.022, 2.0 / 5.0)); + const double Tini = astart_ < adec ? Tcmb0 / astart_ : Tcmb0 / astart_ / astart_ * adec; + const double mu = (Tini > 1.e4) ? 4.0 / (8. - 5. * YHe) : 4.0 / (1. + 3. * (1. - YHe)); + ceint_ = 1.3806e-16 / 1.6726e-24 * Tini * npol / mu / unitv / unitv; + + music::ilog.Print("Swift : Calculated initial gas temperature: %.2f K/mu", Tini / mu); + music::ilog.Print("Swift : set initial internal energy to %.2e km^2/s^2", ceint_); + + h_ = boxsize_ / hubble_param_ / cf_.get_value("setup","GridRes"); + music::ilog.Print("Swift : set initial smoothing length to mean inter-part separation: %.2f Mpc", h_); + } + // Only ranks 0 writes the header if (this_rank_ != 0) return; @@ -148,29 +170,6 @@ public: std::string desc = cf_.get_value("random", "descriptor"); HDFWriteGroupAttribute(fname_, "ICs_parameters", "Descriptor", desc); } - - - if (bdobaryons_) { - - const double gamma = cf_.get_value_safe("cosmology", "gamma", 5.0 / 3.0); - const double YHe = pcc_->cosmo_param_["YHe"]; - const double omegab = pcc_->cosmo_param_["Omega_b"]; - const double Tcmb0 = pcc_->cosmo_param_["Tcmb"]; - - // compute gas internal energy - const double npol = (fabs(1.0 - gamma) > 1e-7) ? 1.0 / (gamma - 1.) : 1.0; - const double unitv = 1e5; - const double adec = 1.0 / (160. * std::pow(omegab * hubble_param_ * hubble_param_ / 0.022, 2.0 / 5.0)); - const double Tini = astart_ < adec ? Tcmb0 / astart_ : Tcmb0 / astart_ / astart_ * adec; - const double mu = (Tini > 1.e4) ? 4.0 / (8. - 5. * YHe) : 4.0 / (1. + 3. * (1. - YHe)); - ceint_ = 1.3806e-16 / 1.6726e-24 * Tini * npol / mu / unitv / unitv; - - music::ilog.Print("Swift : Calculated initial gas temperature: %.2f K/mu", Tini / mu); - music::ilog.Print("Swift : set initial internal energy to %.2e km^2/s^2", ceint_); - - h_ = boxsize_ / hubble_param_ / cf_.get_value("setup","GridRes"); - music::ilog.Print("Swift : set initial smoothing length to mean inter-part separation: %.2f Mpc", h_); - } } // use destructor to write header post factum From c1f942907a32e6261b509b202041afd356196ae7 Mon Sep 17 00:00:00 2001 From: Matthieu Schaller Date: Sun, 28 Mar 2021 23:19:51 +0200 Subject: [PATCH 07/15] Fix some whitespace problems --- src/plugins/output_swift.cc | 34 +++++++++++++++++----------------- 1 file changed, 17 insertions(+), 17 deletions(-) diff --git a/src/plugins/output_swift.cc b/src/plugins/output_swift.cc index 64d9505..ee8b970 100644 --- a/src/plugins/output_swift.cc +++ b/src/plugins/output_swift.cc @@ -72,7 +72,7 @@ public: lunit_ = boxsize_ / hubble_param_; // final units will be in Mpc (without h) vunit_ = boxsize_; // final units will be in km/s - munit_ = rhoc * std::pow(boxsize_, 3) / hubble_param_; // final units will be in 1e10 M_sol + munit_ = rhoc * std::pow(boxsize_, 3) / hubble_param_; // final units will be in 1e10 M_sol blongids_ = cf_.get_value_safe("output", "UseLongids", false); bdobaryons_ = cf_.get_value("setup","DoBaryons"); @@ -113,7 +113,7 @@ public: h_ = boxsize_ / hubble_param_ / cf_.get_value("setup","GridRes"); music::ilog.Print("Swift : set initial smoothing length to mean inter-part separation: %.2f Mpc", h_); } - + // Only ranks 0 writes the header if (this_rank_ != 0) return; @@ -175,24 +175,24 @@ public: // use destructor to write header post factum ~swift_output_plugin() { - if (!std::uncaught_exception()) - { + if (!std::uncaught_exception()) + { if (this_rank_ == 0) { - + // Write Standard Gadget 
/ SWIFT hdf5 header HDFCreateGroup(fname_, "Header"); HDFWriteGroupAttribute(fname_, "Header", "Dimension", 3); HDFWriteGroupAttribute(fname_, "Header", "BoxSize", boxsize_ / hubble_param_); // in Mpc, not Mpc/h - + HDFWriteGroupAttribute(fname_, "Header", "NumPart_Total", from_7array(npartTotal_)); HDFWriteGroupAttribute(fname_, "Header", "NumPart_Total_HighWord", from_7array(npartTotalHighWord_)); HDFWriteGroupAttribute(fname_, "Header", "NumPart_ThisFile", from_7array(npart_)); HDFWriteGroupAttribute(fname_, "Header", "MassTable", from_7array(mass_)); - + HDFWriteGroupAttribute(fname_, "Header", "Time", from_value(time_)); HDFWriteGroupAttribute(fname_, "Header", "Redshift", from_value(zstart_)); HDFWriteGroupAttribute(fname_, "Header", "Flag_Entropy_ICs", from_value(0)); - + HDFWriteGroupAttribute(fname_, "Header", "NumFilesPerSnapshot", from_value(num_files_)); music::ilog << "Wrote SWIFT IC file(s) to " << fname_ << std::endl; @@ -244,14 +244,14 @@ public: npart_[sid] = (pc.get_global_num_particles()); npartTotal_[sid] = (uint32_t)(pc.get_global_num_particles()); npartTotalHighWord_[sid] = (uint32_t)((pc.get_global_num_particles()) >> 32); - + if( pc.bhas_individual_masses_ ) mass_[sid] = 0.0; else mass_[sid] = Omega_species * munit_ / pc.get_global_num_particles(); const size_t global_num_particles = pc.get_global_num_particles(); - + // start by creating the full empty datasets in the file if (this_rank_ == 0) { @@ -276,7 +276,7 @@ public: if( pc.bhas_individual_masses_ ){ if (this->has_64bit_reals()) HDFCreateEmptyDataset(fname_, std::string("PartType") + std::to_string(sid) + std::string("/Masses"), global_num_particles); - else + else HDFCreateEmptyDataset(fname_, std::string("PartType") + std::to_string(sid) + std::string("/Masses"), global_num_particles); } @@ -290,7 +290,7 @@ public: // compute each rank's offset in the global array const size_t n_local = pc.get_local_num_particles(); - size_t offset = 0; + size_t offset = 0; MPI_Exscan(&n_local, &offset, 1, MPI_UNSIGNED_LONG_LONG, MPI_SUM, MPI_COMM_WORLD); // now each node writes its own chunk in a round-robin fashion, appending at the end of the currently existing data @@ -312,13 +312,13 @@ public: HDFWriteDatasetVectorChunk(fname_, std::string("PartType") + std::to_string(sid) + std::string("/Coordinates"), pc.positions32_, offset); HDFWriteDatasetVectorChunk(fname_, std::string("PartType") + std::to_string(sid) + std::string("/Velocities"), pc.velocities32_, offset); } - + //... write ids..... if (this->has_64bit_ids()) HDFWriteDatasetChunk(fname_, std::string("PartType") + std::to_string(sid) + std::string("/ParticleIDs"), pc.ids64_, offset); else HDFWriteDatasetChunk(fname_, std::string("PartType") + std::to_string(sid) + std::string("/ParticleIDs"), pc.ids32_, offset); - + //... write masses..... 
if( pc.bhas_individual_masses_ ){ if (this->has_64bit_reals()){ @@ -327,13 +327,13 @@ public: HDFWriteDatasetChunk(fname_, std::string("PartType") + std::to_string(sid) + std::string("/Masses"), pc.mass32_, offset); } } - + // write GAS internal energy and smoothing length if baryons are enabled if(bdobaryons_ && s == cosmo_species::baryon) { - + std::vector data( pc.get_local_num_particles(), ceint_ ); HDFWriteDatasetChunk(fname_, std::string("PartType") + std::to_string(sid) + std::string("/InternalEnergy"), data, offset); - + data.assign( pc.get_local_num_particles(), h_); HDFWriteDatasetChunk(fname_, std::string("PartType") + std::to_string(sid) + std::string("/SmoothingLength"), data, offset); } From 17f51ed8f46acda5aa52f64b82222adce212b093 Mon Sep 17 00:00:00 2001 From: Matthieu Schaller Date: Sun, 28 Mar 2021 23:23:34 +0200 Subject: [PATCH 08/15] Protect the MPI calls with correct ifdef's --- src/plugins/output_swift.cc | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/src/plugins/output_swift.cc b/src/plugins/output_swift.cc index ee8b970..cb70c44 100644 --- a/src/plugins/output_swift.cc +++ b/src/plugins/output_swift.cc @@ -291,13 +291,17 @@ public: // compute each rank's offset in the global array const size_t n_local = pc.get_local_num_particles(); size_t offset = 0; +#ifdef USE_MPI MPI_Exscan(&n_local, &offset, 1, MPI_UNSIGNED_LONG_LONG, MPI_SUM, MPI_COMM_WORLD); +#endif // now each node writes its own chunk in a round-robin fashion, appending at the end of the currently existing data for (int rank = 0; rank < num_ranks_; ++rank) { +#ifdef USE_MPI // wait until the initialisation or the previous rank in the loop is done MPI_Barrier(MPI_COMM_WORLD); +#endif if (rank == this_rank_) { @@ -340,8 +344,10 @@ public: } } +#ifdef USE_MPI // end with a barrier to make sure everyone is done before the destructor does its job MPI_Barrier(MPI_COMM_WORLD); +#endif } }; From d7273c7ae5abe34fb5ca50a143a7e58383f6d75f Mon Sep 17 00:00:00 2001 From: Matthieu Schaller Date: Sun, 28 Mar 2021 23:40:33 +0200 Subject: [PATCH 09/15] Write logging messages to report the writing progress --- src/plugins/output_swift.cc | 8 +++++++- 1 file changed, 7 insertions(+), 1 deletion(-) diff --git a/src/plugins/output_swift.cc b/src/plugins/output_swift.cc index cb70c44..0d4d15b 100644 --- a/src/plugins/output_swift.cc +++ b/src/plugins/output_swift.cc @@ -195,7 +195,7 @@ public: HDFWriteGroupAttribute(fname_, "Header", "NumFilesPerSnapshot", from_value(num_files_)); - music::ilog << "Wrote SWIFT IC file(s) to " << fname_ << std::endl; + music::ilog << "Done writing SWIFT IC file to " << fname_ << std::endl; } } } @@ -286,6 +286,8 @@ public: HDFCreateEmptyDataset(fname_, std::string("PartType") + std::to_string(sid) + std::string("/InternalEnergy"), global_num_particles); HDFCreateEmptyDataset(fname_, std::string("PartType") + std::to_string(sid) + std::string("/SmoothingLength"), global_num_particles); } + + music::ilog << "Created empty arrays for PartType" << std::to_string(sid) << " into file " << fname_ << "." << std::endl; } // compute each rank's offset in the global array @@ -342,6 +344,10 @@ public: HDFWriteDatasetChunk(fname_, std::string("PartType") + std::to_string(sid) + std::string("/SmoothingLength"), data, offset); } } + + if (this_rank_ == 0) { + music::ilog << "Rank " << rank << " wrote its PartType" << std::to_string(sid) << " data to the IC file." 
<< std::endl; + } } #ifdef USE_MPI From 6ed5afe4733a787b87b597552929b7377818ab65 Mon Sep 17 00:00:00 2001 From: Matthieu Schaller Date: Sun, 28 Mar 2021 23:48:12 +0200 Subject: [PATCH 10/15] Update README file and example configuration about the SWIFT output plugin. --- README.md | 2 +- example.conf | 3 ++- 2 files changed, 3 insertions(+), 2 deletions(-) diff --git a/README.md b/README.md index b3c4173..5b29149 100644 --- a/README.md +++ b/README.md @@ -17,7 +17,7 @@ Currently supported features (the list is growing, so check back): - Multiple Einstein-Boltzmann modules: direct interface with [CLASS](https://lesgourg.github.io/class_public/class.html), file input from CAMB, and fitting formulae (Eisenstein&Hu). -- Multiple output modules for RAMSES, Arepo, Gadget-2/3, and HACC (courtesy M.Buehlmann) via plugins (Swift and Nyx are next). +- Multiple output modules for RAMSES, Arepo, Gadget-2/3, SWIFT, and HACC (courtesy M.Buehlmann) via plugins (Nyx is next). - Multiple random number modules (MUSIC1,NGenIC,Panphasia,...) (A new MUSIC2 module is in development) diff --git a/example.conf b/example.conf index 92f438e..b97c50b 100644 --- a/example.conf +++ b/example.conf @@ -144,7 +144,8 @@ NumThreads = 8 # format = genericio # filename = ics_hacc -##> SWIFT compatible HDF5 format +##> SWIFT compatible HDF5 format. Format broadly similar to gadget_hdf5 but in a single +## file even when using MPI. No h-factors and no sqrt(a)-factor for the velocities. # format = SWIFT # filename = ics_swift.hdf5 From 515e2b3063dcdc8b897cc65b236649a3faef37dc Mon Sep 17 00:00:00 2001 From: Matthieu Schaller Date: Mon, 29 Mar 2021 14:05:07 +0200 Subject: [PATCH 11/15] Add more information about what SWIFT parameters not to set in the SWIFT yaml file when using monofonIC outputs --- src/plugins/output_swift.cc | 2 ++ 1 file changed, 2 insertions(+) diff --git a/src/plugins/output_swift.cc b/src/plugins/output_swift.cc index 0d4d15b..122ec41 100644 --- a/src/plugins/output_swift.cc +++ b/src/plugins/output_swift.cc @@ -196,6 +196,8 @@ public: HDFWriteGroupAttribute(fname_, "Header", "NumFilesPerSnapshot", from_value(num_files_)); music::ilog << "Done writing SWIFT IC file to " << fname_ << std::endl; + music::ilog << "Note that the IC file does not contain any h-factors nor any extra sqrt(a)-factors for the velocities" << std::endl; + music::ilog << "The SWIFT parameters 'InitialConditions:cleanup_h_factors' and 'InitialConditions:cleanup_velocity_factors' should hence *NOT* be set!" 
<< std::endl; } } } From ba3a82aea134741f1b04aaf303667dfddae20311 Mon Sep 17 00:00:00 2001 From: Matthieu Schaller Date: Sat, 29 May 2021 23:21:31 +0200 Subject: [PATCH 12/15] Always write out a mass array in the SWIFT plugin even if the masses are constant --- src/plugins/output_swift.cc | 36 ++++++++++++++++++++++-------------- 1 file changed, 22 insertions(+), 14 deletions(-) diff --git a/src/plugins/output_swift.cc b/src/plugins/output_swift.cc index 122ec41..9f4baa8 100644 --- a/src/plugins/output_swift.cc +++ b/src/plugins/output_swift.cc @@ -247,10 +247,11 @@ public: npartTotal_[sid] = (uint32_t)(pc.get_global_num_particles()); npartTotalHighWord_[sid] = (uint32_t)((pc.get_global_num_particles()) >> 32); + std::array particle_masses; if( pc.bhas_individual_masses_ ) - mass_[sid] = 0.0; + particle_masses[sid] = 0.0; else - mass_[sid] = Omega_species * munit_ / pc.get_global_num_particles(); + particle_masses[sid] = Omega_species * munit_ / pc.get_global_num_particles(); const size_t global_num_particles = pc.get_global_num_particles(); @@ -275,12 +276,10 @@ public: else HDFCreateEmptyDataset(fname_, std::string("PartType") + std::to_string(sid) + std::string("/ParticleIDs"), global_num_particles); - if( pc.bhas_individual_masses_ ){ - if (this->has_64bit_reals()) - HDFCreateEmptyDataset(fname_, std::string("PartType") + std::to_string(sid) + std::string("/Masses"), global_num_particles); - else - HDFCreateEmptyDataset(fname_, std::string("PartType") + std::to_string(sid) + std::string("/Masses"), global_num_particles); - } + if (this->has_64bit_reals()) + HDFCreateEmptyDataset(fname_, std::string("PartType") + std::to_string(sid) + std::string("/Masses"), global_num_particles); + else + HDFCreateEmptyDataset(fname_, std::string("PartType") + std::to_string(sid) + std::string("/Masses"), global_num_particles); if( bdobaryons_ && s == cosmo_species::baryon) { @@ -328,13 +327,22 @@ public: HDFWriteDatasetChunk(fname_, std::string("PartType") + std::to_string(sid) + std::string("/ParticleIDs"), pc.ids32_, offset); //... write masses..... 
- if( pc.bhas_individual_masses_ ){ - if (this->has_64bit_reals()){ - HDFWriteDatasetChunk(fname_, std::string("PartType") + std::to_string(sid) + std::string("/Masses"), pc.mass64_, offset); - }else{ - HDFWriteDatasetChunk(fname_, std::string("PartType") + std::to_string(sid) + std::string("/Masses"), pc.mass32_, offset); + if( pc.bhas_individual_masses_ ) + { + if (this->has_64bit_reals()) + HDFWriteDatasetChunk(fname_, std::string("PartType") + std::to_string(sid) + std::string("/Masses"), pc.mass64_, offset); + else + HDFWriteDatasetChunk(fname_, std::string("PartType") + std::to_string(sid) + std::string("/Masses"), pc.mass32_, offset); + } + else + { + std::vector data( pc.get_local_num_particles(), particle_masses[sid]); + + if (this->has_64bit_reals()) + HDFWriteDatasetChunk(fname_, std::string("PartType") + std::to_string(sid) + std::string("/Masses"), data, offset); + else + HDFWriteDatasetChunk(fname_, std::string("PartType") + std::to_string(sid) + std::string("/Masses"), data, offset); } - } // write GAS internal energy and smoothing length if baryons are enabled if(bdobaryons_ && s == cosmo_species::baryon) { From cc1709304a59f22586322d032dec9df413d103e3 Mon Sep 17 00:00:00 2001 From: Matthieu Schaller Date: Sun, 30 May 2021 00:20:40 +0200 Subject: [PATCH 13/15] Add md5sum filter to the outputs of the SWIFT plugin --- include/HDF_IO.hh | 42 ++++++++++++++++++++++++++++++------- src/plugins/output_swift.cc | 20 +++++++++--------- 2 files changed, 45 insertions(+), 17 deletions(-) diff --git a/include/HDF_IO.hh b/include/HDF_IO.hh index 7597e10..16946d8 100755 --- a/include/HDF_IO.hh +++ b/include/HDF_IO.hh @@ -887,17 +887,30 @@ inline void HDFWriteDatasetVector( const std::string Filename, const std::string } template< typename T > -inline void HDFCreateEmptyDataset( const std::string Filename, const std::string ObjName, const size_t num_particles) +inline void HDFCreateEmptyDataset( const std::string Filename, const std::string ObjName, const size_t num_particles, const bool filter = false) { hid_t HDF_FileID, HDF_DatasetID, HDF_DataspaceID, - HDF_Type; + HDF_Type, + HDF_Prop; hsize_t HDF_Dims; + HDF_Prop = H5Pcreate(H5P_DATASET_CREATE); + + if (filter) + { + // 1MB chunking + hsize_t HDF_Dims[1] = {1024 * 1024 / sizeof(T)}; + H5Pset_chunk(HDF_Prop, 1, HDF_Dims); + + // md5 checksum + H5Pset_fletcher32(HDF_Prop); + } + HDF_FileID = H5Fopen( Filename.c_str(), H5F_ACC_RDWR, H5P_DEFAULT ); HDF_Type = GetDataType(); @@ -905,25 +918,39 @@ inline void HDFCreateEmptyDataset( const std::string Filename, const std::string HDF_Dims = (hsize_t) (num_particles); HDF_DataspaceID = H5Screate_simple(1, &HDF_Dims, NULL); HDF_DatasetID = H5Dcreate( HDF_FileID, ObjName.c_str(), HDF_Type, - HDF_DataspaceID, H5P_DEFAULT ); + HDF_DataspaceID, HDF_Prop ); H5Dclose( HDF_DatasetID ); H5Sclose( HDF_DataspaceID ); + H5Pclose( HDF_Prop ); H5Fclose( HDF_FileID ); } template< typename T > -inline void HDFCreateEmptyDatasetVector( const std::string Filename, const std::string ObjName, const size_t num_particles) +inline void HDFCreateEmptyDatasetVector( const std::string Filename, const std::string ObjName, const size_t num_particles, const bool filter = false) { hid_t HDF_FileID, HDF_DatasetID, HDF_DataspaceID, - HDF_Type; + HDF_Type, + HDF_Prop; hsize_t HDF_Dims[2]; + HDF_Prop = H5Pcreate(H5P_DATASET_CREATE); + + if (filter) + { + // 1MB chunking + hsize_t HDF_Dims[2] = {1024 * 1024 / sizeof(T), 3}; + H5Pset_chunk(HDF_Prop, 2, HDF_Dims); + + // md5 checksum + H5Pset_fletcher32(HDF_Prop); + } + 
HDF_FileID = H5Fopen( Filename.c_str(), H5F_ACC_RDWR, H5P_DEFAULT ); HDF_Type = GetDataType(); @@ -933,11 +960,12 @@ inline void HDFCreateEmptyDatasetVector( const std::string Filename, const std:: HDF_DataspaceID = H5Screate_simple(2, HDF_Dims, NULL); HDF_DatasetID = H5Dcreate( HDF_FileID, ObjName.c_str(), HDF_Type, - HDF_DataspaceID, H5P_DEFAULT ); + HDF_DataspaceID, HDF_Prop ); H5Dclose( HDF_DatasetID ); H5Sclose( HDF_DataspaceID ); + H5Pclose( HDF_Prop ); + H5Fclose( HDF_FileID ); - } template< typename T > diff --git a/src/plugins/output_swift.cc b/src/plugins/output_swift.cc index 9f4baa8..a3b61a2 100644 --- a/src/plugins/output_swift.cc +++ b/src/plugins/output_swift.cc @@ -262,30 +262,30 @@ public: if (this->has_64bit_reals()) { - HDFCreateEmptyDatasetVector(fname_, std::string("PartType") + std::to_string(sid) + std::string("/Coordinates"), global_num_particles); - HDFCreateEmptyDatasetVector(fname_, std::string("PartType") + std::to_string(sid) + std::string("/Velocities"), global_num_particles); + HDFCreateEmptyDatasetVector(fname_, std::string("PartType") + std::to_string(sid) + std::string("/Coordinates"), global_num_particles, true); + HDFCreateEmptyDatasetVector(fname_, std::string("PartType") + std::to_string(sid) + std::string("/Velocities"), global_num_particles, true); } else { - HDFCreateEmptyDatasetVector(fname_, std::string("PartType") + std::to_string(sid) + std::string("/Coordinates"), global_num_particles); - HDFCreateEmptyDatasetVector(fname_, std::string("PartType") + std::to_string(sid) + std::string("/Velocities"), global_num_particles); + HDFCreateEmptyDatasetVector(fname_, std::string("PartType") + std::to_string(sid) + std::string("/Coordinates"), global_num_particles, true); + HDFCreateEmptyDatasetVector(fname_, std::string("PartType") + std::to_string(sid) + std::string("/Velocities"), global_num_particles, true); } if (this->has_64bit_ids()) - HDFCreateEmptyDataset(fname_, std::string("PartType") + std::to_string(sid) + std::string("/ParticleIDs"), global_num_particles); + HDFCreateEmptyDataset(fname_, std::string("PartType") + std::to_string(sid) + std::string("/ParticleIDs"), global_num_particles, true); else - HDFCreateEmptyDataset(fname_, std::string("PartType") + std::to_string(sid) + std::string("/ParticleIDs"), global_num_particles); + HDFCreateEmptyDataset(fname_, std::string("PartType") + std::to_string(sid) + std::string("/ParticleIDs"), global_num_particles, true); if (this->has_64bit_reals()) - HDFCreateEmptyDataset(fname_, std::string("PartType") + std::to_string(sid) + std::string("/Masses"), global_num_particles); + HDFCreateEmptyDataset(fname_, std::string("PartType") + std::to_string(sid) + std::string("/Masses"), global_num_particles, true); else - HDFCreateEmptyDataset(fname_, std::string("PartType") + std::to_string(sid) + std::string("/Masses"), global_num_particles); + HDFCreateEmptyDataset(fname_, std::string("PartType") + std::to_string(sid) + std::string("/Masses"), global_num_particles, true); if( bdobaryons_ && s == cosmo_species::baryon) { // note: despite this being a constant array we still need to handle it in a distributed way - HDFCreateEmptyDataset(fname_, std::string("PartType") + std::to_string(sid) + std::string("/InternalEnergy"), global_num_particles); - HDFCreateEmptyDataset(fname_, std::string("PartType") + std::to_string(sid) + std::string("/SmoothingLength"), global_num_particles); + HDFCreateEmptyDataset(fname_, std::string("PartType") + std::to_string(sid) + std::string("/InternalEnergy"), global_num_particles, 
true); + HDFCreateEmptyDataset(fname_, std::string("PartType") + std::to_string(sid) + std::string("/SmoothingLength"), global_num_particles, true); } music::ilog << "Created empty arrays for PartType" << std::to_string(sid) << " into file " << fname_ << "." << std::endl; From 0b6f7e25017d5ae404704c58f455538ee37168e1 Mon Sep 17 00:00:00 2001 From: Matthieu Schaller Date: Sun, 30 May 2021 00:20:52 +0200 Subject: [PATCH 14/15] Add md5sum filter to the outputs of the SWIFT plugin --- include/HDF_IO.hh | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/include/HDF_IO.hh b/include/HDF_IO.hh index 16946d8..4537571 100755 --- a/include/HDF_IO.hh +++ b/include/HDF_IO.hh @@ -943,8 +943,8 @@ inline void HDFCreateEmptyDatasetVector( const std::string Filename, const std:: if (filter) { - // 1MB chunking - hsize_t HDF_Dims[2] = {1024 * 1024 / sizeof(T), 3}; + // ~1MB chunking + hsize_t HDF_Dims[2] = {256 * 1024 / sizeof(T), 3}; H5Pset_chunk(HDF_Prop, 2, HDF_Dims); // md5 checksum From 427c7f2e101064a048b52a94e47b04dc6b4c4cd3 Mon Sep 17 00:00:00 2001 From: Matthieu Schaller Date: Sun, 30 May 2021 00:27:35 +0200 Subject: [PATCH 15/15] Small improvement to the example config file --- example.conf | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/example.conf b/example.conf index b97c50b..f14f7e0 100644 --- a/example.conf +++ b/example.conf @@ -145,7 +145,7 @@ NumThreads = 8 # filename = ics_hacc ##> SWIFT compatible HDF5 format. Format broadly similar to gadget_hdf5 but in a single -## file even when using MPI. No h-factors and no sqrt(a)-factor for the velocities. +##> file even when using MPI. No h-factors and no sqrt(a)-factor for the velocities. # format = SWIFT # filename = ics_swift.hdf5
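
Editor's note — a minimal, illustrative sketch (not itself one of the patches) of how the helpers introduced above are meant to be combined by an output plugin: rank 0 pre-creates the full-length, chunked and checksum-filtered dataset, every rank derives its offset into the global array from an MPI_Exscan over the local particle counts, and the ranks then write their hyperslabs one at a time in a round-robin barrier loop. The function name write_shared_dataset, the dataset path "PartType1/Example" and the float element type are placeholders; USE_MPI, <mpi.h> and HDF_IO.hh are assumed to be available.

    #include <mpi.h>
    #include <string>
    #include <vector>
    #include "HDF_IO.hh"

    void write_shared_dataset(const std::string &fname,
                              const std::vector<float> &local_values,  // this rank's chunk of the data
                              size_t global_num_particles)
    {
        int this_rank = 0, num_ranks = 1;
        MPI_Comm_rank(MPI_COMM_WORLD, &this_rank);
        MPI_Comm_size(MPI_COMM_WORLD, &num_ranks);

        // Rank 0 creates the full-size dataset up front (chunked + checksum filter, i.e. filter = true).
        if (this_rank == 0)
            HDFCreateEmptyDataset<float>(fname, "PartType1/Example", global_num_particles, true);

        // Each rank's offset into the global array is the sum of the counts on the lower ranks.
        unsigned long long n_local = local_values.size(), offset = 0;
        MPI_Exscan(&n_local, &offset, 1, MPI_UNSIGNED_LONG_LONG, MPI_SUM, MPI_COMM_WORLD);
        if (this_rank == 0)
            offset = 0;  // rank 0 writes from the start of the array

        // Round-robin: one rank at a time opens the file and writes its hyperslab.
        for (int rank = 0; rank < num_ranks; ++rank)
        {
            MPI_Barrier(MPI_COMM_WORLD);  // wait for dataset creation / the previous writer
            if (rank == this_rank)
                HDFWriteDatasetChunk<float>(fname, "PartType1/Example", local_values, offset);
        }
        MPI_Barrier(MPI_COMM_WORLD);  // everyone done before the header is written in the destructor
    }

The same pattern is applied per particle species and per field (Coordinates, Velocities, ParticleIDs, Masses, and, for baryons, InternalEnergy and SmoothingLength), which is why the plugin keeps a single shared file name and only gates the header and group creation on rank 0.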