Mirror of https://github.com/glatterf42/sims_python_files.git, synced 2024-09-10 05:43:46 +02:00

Visualisation of swift update and mass profile for zoom sims added.

This commit is contained in: parent 764c1358d1, commit c92465a95a
3 changed files with 137 additions and 25 deletions

@@ -8,19 +8,21 @@ Created on Tue Mar 8 11:16:06 2022
import h5py
from pathlib import Path
import numpy as np

# directory = Path(r"/home/ben/sims/swiftsim/examples/agora/")
directory = Path(r"/home/ben/sims/swiftsim/examples/agora/")
# directory = Path(r"/home/ben/Pictures/Gadget4/vsc4_256/")
directory = Path(r'/home/ben/sims/music-panphasia/test/')
# directory = Path(r'/home/ben/sims/music-panphasia/test/')


file = h5py.File(directory / "agora_128.hdf5", "r")
# file = h5py.File(directory / "output_0001.hdf5", "r")
# ics = h5py.File(directory / "agora_128.hdf5", "r")
file = h5py.File(directory / "output_0001.hdf5", "r")
# file_2 = h5py.File(directory / 'output_0001.hdf5', 'r')

# file = h5py.File(directory / "cosmo_ics_gadget_256_pbh_vsc.hdf5", "r")

for key in file.keys():
    print(key)  # for finding all header entries, which are:
# for key in file.keys():
#     print(key)  # for finding all header entries, which are:


Header = file["Header"]

@@ -29,14 +31,64 @@ PartType1 = file["PartType1"]
Units = file["Units"]
# PartType2 = file['PartType2']

print(list(Header.attrs.items()))
# print(list(Header.attrs.items()))
# print(list(ics['Header'].attrs.items()))
print(Header.attrs['Scale-factor'])


print(PartType1.keys())
# print(PartType2.keys())
print(PartType1['Masses'][0:10])
# print(PartType1.keys())
# # print(PartType2.keys())
# print(PartType1['Masses'][0:10])

print(PartType1['Velocities'][0:10])
# print(PartType1['Velocities'][0:10])

# ic_ids = ics['PartType1']['ParticleIDs'][:]
# ic_velocities = ics['PartType1']['Velocities'][:]
# ic_absolute_vel = np.sqrt(np.sum(ic_velocities ** 2, axis=1))
# ic_data = np.vstack([ic_ids, ic_absolute_vel]).T
# ic_positions = ics['PartType1']['Coordinates'][:]
# ic_distances = np.sqrt(np.sum(ic_positions ** 2, axis=1))
# ic_data = np.vstack([ic_ids, ic_distances]).T

# snap_ids = PartType1['ParticleIDs'][:]
# snap_velocities = PartType1['Velocities'][:]
# snap_absolute_vel = np.sqrt(np.sum(snap_velocities ** 2, axis=1))
# snap_data = np.vstack([snap_ids, snap_absolute_vel]).T
# snap_data_sorted = snap_data[snap_data[:, 0].argsort()]
# snap_positions = PartType1['Coordinates'][:]
# snap_distances = np.sqrt(np.sum(snap_positions ** 2, axis=1))
# snap_data = np.vstack([snap_ids, snap_distances]).T
# snap_data_sorted = snap_data[snap_data[:, 0].argsort()]

# snap_2_ids = file_2['PartType1']['ParticleIDs'][:]
# snap_2_velocities = file_2['PartType1']['Velocities'][:]
# snap_2_absolute_vel = np.sqrt(np.sum(snap_2_velocities ** 2, axis=1))
# snap_2_data = np.vstack([snap_2_ids, snap_2_absolute_vel]).T
# snap_2_data_sorted = snap_2_data[snap_2_data[:, 0].argsort()]
# snap_2_positions = file_2['PartType1']['Coordinates'][:]
# snap_2_distances = np.sqrt(np.sum(snap_2_positions ** 2, axis=1))
# snap_2_data = np.vstack([snap_2_ids, snap_2_distances]).T
# snap_2_data_sorted = snap_2_data[snap_2_data[:, 0].argsort()]

# velocity_difference = np.abs(ic_data[:, 1] - snap_data_sorted[:, 1])
# mean_velocity_difference = np.mean(velocity_difference)

# velocity_difference_2 = np.abs(snap_data_sorted[:, 1] - snap_2_data_sorted[:, 1])
# mean_2_velocity_difference = np.mean(velocity_difference_2)

# print(mean_velocity_difference, mean_2_velocity_difference)
# print(np.min(velocity_difference), np.max(velocity_difference))
# print(np.min(velocity_difference_2), np.max(velocity_difference_2))

# distance_difference = np.abs(ic_data[:, 1] - snap_data_sorted[:, 1])
# mean_distance_difference = np.mean(distance_difference)

# distance_difference_2 = np.abs(snap_data_sorted[:, 1] - snap_2_data_sorted[:, 1])
# mean_2_distance_difference = np.mean(distance_difference_2)

# print(mean_distance_difference, mean_2_distance_difference)
# print(np.min(ic_distances), np.max(ic_distances))
# print(np.min(snap_distances), np.max(snap_distances))
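
The commented-out experiment above repeats one pattern four times: compute per-particle norms, pair them with IDs, sort by ID, and difference two outputs. A minimal sketch of that pattern as a helper (hypothetical name id_sorted_norm; it assumes both files contain the same set of particle IDs, and it sorts both sides rather than trusting the ICs to already be in ID order):

def id_sorted_norm(parttype, dataset='Velocities'):
    # Per-particle vector norm, reordered into ascending-particle-ID order.
    ids = parttype['ParticleIDs'][:]
    norms = np.sqrt(np.sum(parttype[dataset][:] ** 2, axis=1))
    return norms[np.argsort(ids)]

# difference = np.abs(id_sorted_norm(ics['PartType1']) - id_sorted_norm(file['PartType1']))
# print(np.mean(difference), np.min(difference), np.max(difference))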


@@ -9,6 +9,10 @@ Created on Tue Mar 8 11:16:06 2022
import h5py
from pathlib import Path
import numpy as np
import matplotlib.pyplot as plt


def V(r):
    return 4 * np.pi * r**3 / 3


directory = Path(r"/home/ben/sims/swiftsim/examples/zoom_tests/")

@@ -19,6 +23,8 @@ file = h5py.File(directory / f'output_000{snap_number}.hdf5', 'r')

PartType1 = file["PartType1"]
highres_groupids = PartType1['FOFGroupIDs'][:]
highres_coordinates = PartType1['Coordinates'][:]
highres_masses = PartType1['Masses'][:]

total_highres_particles = len(highres_groupids)
unique_groups, particle_count = np.unique(highres_groupids, return_counts=True)
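
For reference: with return_counts=True, np.unique returns the sorted distinct FOF group IDs together with how many high-res particles carry each ID, so unique_groups and particle_count stay index-aligned. A quick illustration (not part of the script):

demo_ids = np.array([1, 1, 3, 2, 1, 3])
print(np.unique(demo_ids, return_counts=True))
# -> (array([1, 2, 3]), array([3, 1, 2]))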

@@ -35,8 +41,7 @@ groups = fof_file['Groups']
# for key in groups.keys():
#     print(key)


# centres = groups['Centres'][:]
centres = groups['Centres'][:]
groupids = groups['GroupIDs'][:]
masses = groups['Masses'][:]
number_of_members = groups['Sizes'][:]

@@ -48,10 +53,61 @@ for i in range(len(groupids)-1):
    if np.isin(groupids[i], unique_groups):
        highres_members = particle_count[separate_unique_counter]
        contamination = (1 - highres_members / number_of_members[i])
        print(f'Group: {groupids[i]:{table_width}} | Mass: {masses[i]:{table_width + 1}.{table_width}} | Highres Members: {highres_members:{table_width}} | Total Members: {number_of_members[i]:{table_width}} | Contamination: {contamination * 100:.{table_width}}%\n')
        print(f'Group: {groupids[i]:{table_width}} | Mass: {masses[i]:{table_width + 1}.{table_width}} | Highres Members: {highres_members:{table_width + 1}} | Total Members: {number_of_members[i]:{table_width}} | Contamination: {contamination * 100:.{table_width}}%\n')
        separate_unique_counter += 1
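
The running separate_unique_counter assumes the FOF catalogue lists group IDs in the same ascending order as unique_groups. A vectorised sketch that drops that assumption (valid because np.unique returns sorted IDs, so np.searchsorted can locate each one in particle_count):

mask = np.isin(groupids, unique_groups)
highres_counts = particle_count[np.searchsorted(unique_groups, groupids[mask])]
contamination_fractions = 1 - highres_counts / number_of_members[mask]
# covers every matching group, including the last one the loop above skips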


main_group_origin = centres[0]
main_group_mass = masses[0]
distances = []
# masses = []

for j in range(len(highres_groupids)):
    if highres_groupids[j] == np.min(unique_groups):
        distance = np.linalg.norm(main_group_origin - highres_coordinates[j])
        # mass = highres_masses[j]
        distances.append(distance)
        # masses.append(mass)

group_radius = np.max(np.array(distances))
normalised_distances = np.array(distances) / group_radius

num_bins = 30
log_radial_bins = np.geomspace(0.01, 2, num_bins)

particle_mass = highres_masses[0]  # Careful: this assumes that all highres particles have the same mass and that it's exclusively them in the main group,
                                   # so keep an eye on its contamination and adjust accordingly!

counts_in_radial_bins = []
for k in range(len(log_radial_bins) - 1):
    count = 0
    for member_distance in normalised_distances:
        if log_radial_bins[k] <= member_distance < log_radial_bins[k + 1]:
            count += 1
    volume = V(log_radial_bins[k + 1] * group_radius) - V(log_radial_bins[k] * group_radius)
    count /= volume
    counts_in_radial_bins.append(count)
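
The nested loop scales with particles times bins; np.histogram produces the same shell counts in one call. A sketch with the same bin edges (one caveat: np.histogram's final bin includes its right edge, whereas the loop above uses half-open bins throughout):

shell_counts, _ = np.histogram(normalised_distances, bins=log_radial_bins)
shell_volumes = V(log_radial_bins[1:] * group_radius) - V(log_radial_bins[:-1] * group_radius)  # V is vectorised
number_density_in_radial_bins = shell_counts / shell_volumes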

plot_log_radial_bins = log_radial_bins[0:len(log_radial_bins) - 1]  # there is one fewer bin than there are bin borders
masses_in_radial_bins = np.array(counts_in_radial_bins) * particle_mass

Nres = 128
Lbox = 100
softening = Lbox / Nres / 30
plt.axvline(4 * softening / group_radius, linestyle='--', color='grey')

plt.title('Density profile Auriga 6')
plt.xlabel(r'R / $\mathrm{R}_\mathrm{group}$')
plt.ylabel(r'ρ [$10^{10}\ \mathrm{M}_\odot\ /\ \mathrm{Mpc}^3$]')

plt.loglog(plot_log_radial_bins, masses_in_radial_bins)
plt.show()


# for i in range(10):
#     print(f'Group: {groupids[i]} | Mass: {masses[i]} | Size: {sizes[i]} | Centre: {centres[i]}\n')


@@ -41,7 +41,7 @@ def find_coordinates_to_move(minimum, maximum, ratio, x_offset, y_offset, z_offs
    return coordinates_to_move


directory = Path(r"/home/ben/sims/swiftsim/examples/zoom_tests/")
directory = Path(r"/home/ben/sims/swiftsim/examples/zoom_tests/auriga6_halo7_8_10/")

# file = h5py.File(directory / "auriga6_halo7_8_9.hdf5", "r")

@@ -54,26 +54,29 @@ for filename in sorted(directory.glob("output_*.hdf5")):
    highres_names = file["PartType1"]["ParticleIDs"][:]
    highres_velocities = file["PartType1"]["Velocities"][:]
    highres_masses = file['PartType1']['Masses'][:]
    highres_group_ids = file['PartType1']['FOFGroupIDs'][:]
    highres_absolute_velo = np.sqrt(np.sum(highres_velocities ** 2, axis=1))

    lowres_coordinates = file["PartType2"]["Coordinates"][:]  # for cdm particles
    lowres_names = file["PartType2"]["ParticleIDs"][:]
    lowres_velocities = file["PartType2"]["Velocities"][:]
    lowres_masses = file['PartType2']['Masses'][:]
    lowres_group_ids = file['PartType2']['FOFGroupIDs'][:]
    lowres_absolute_velo = np.sqrt(np.sum(lowres_velocities ** 2, axis=1))

    original_coordinates = np.concatenate((highres_coordinates, lowres_coordinates))
    # if "auriga" in str(filename):
    #     original_coordinates /= 1000
    print(original_coordinates.mean())
    print(original_coordinates.min())
    print(original_coordinates.max())
    # print(original_coordinates.mean())
    # print(original_coordinates.min())
    # print(original_coordinates.max())
    # print(file['Header'])
    # print(list(file['Units'].attrs.items()))
    # exit()
    names = np.concatenate((highres_names, lowres_names))
    velocities = np.concatenate((highres_velocities, lowres_velocities))
    masses = np.concatenate((highres_masses, lowres_masses))
    group_ids = np.concatenate((highres_group_ids, lowres_group_ids))
    absolute_velo = np.concatenate((highres_absolute_velo, lowres_absolute_velo))
    # for bla in [original_coordinates, names, velocities, masses, absolute_velo]:
    #     print(bla.shape)

@@ -86,13 +89,14 @@ for filename in sorted(directory.glob("output_*.hdf5")):
        velocities[::, 1],
        velocities[::, 2],
        masses,
        group_ids,
        absolute_velo,
    ]).T
    print(original_data.shape)
    assert (original_coordinates == original_data[::, 0:3]).all()

    boundaries = Header.attrs['BoxSize']  # BoxLength for e5 boxes depends on Nres, 2.36438 for 256, 4.72876 for 512.
    print(boundaries)
    print(boundaries, len(highres_names))
    if not boundaries.shape:
        boundaries = np.array([boundaries] * 3)
    offsets = [-1, 0, 1]
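
With boundaries now guaranteed to be a 3-vector, the offsets list presumably feeds a triple loop (not shown in this hunk) that builds the 27 periodic images of the box; itertools.product yields the same shift vectors in one expression. A sketch:

import itertools
image_shifts = [np.array(shift) * boundaries for shift in itertools.product(offsets, repeat=3)]
# 27 shift vectors, including (0, 0, 0) for the untranslated box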

@@ -191,13 +195,13 @@ for filename in sorted(directory.glob("output_*.hdf5")):

    # print(closest_neighbours.shape)

    print(densities)
    print(densities.shape)
    # print(densities)
    # print(densities.shape)
    all_data = np.column_stack([list(range(densities.shape[0])), transformed_data, densities, alt_densities])
    print(all_data.shape)
    print(original_data.shape[0])
    # print(all_data.shape)
    # print(original_data.shape[0])
    export_data = all_data[:original_data.shape[0]]
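
A note on this slice (my reading, since the neighbour-search code sits outside this hunk): the periodic ghost copies appear to be appended after the first original_data.shape[0] rows of transformed_data, so the slice keeps each real particle exactly once while the ghosts still contribute to the density estimate.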
    print(export_data.shape)
    # print(export_data.shape)

    # all_data = np.append(coordinates, velocities, axis=1)
    # all_data = np.column_stack((all_data, absolute_velo, names, densities))

@@ -209,5 +213,5 @@ for filename in sorted(directory.glob("output_*.hdf5")):
        export_data,
        delimiter=",",
        fmt="%.3f",
        header="num,x,y,z,name,vx,vy,vz,masse,v,density,density_alt")
        header="num,x,y,z,name,vx,vy,vz,masse,groupid,v,density,density_alt")
    file.close()