1
0
Fork 0
mirror of https://github.com/Findus23/halo_comparison.git synced 2024-09-19 16:03:50 +02:00

minor fixes and notes

This commit is contained in:
Lukas Winkler 2022-06-03 10:33:16 +02:00
parent cf52fc2a23
commit 3ba0ace4e3
Signed by: lukas
GPG key ID: 54DE4D798D244853
6 changed files with 23 additions and 14 deletions

7
cic.py
View file

@@ -78,11 +78,14 @@ if __name__ == '__main__':
ax: Axes = fig.gca() ax: Axes = fig.gca()
print("start cic") print("start cic")
rho, extent = cic_from_radius(df_ref.X.to_numpy(), df_ref.Y.to_numpy(), 2000, 48.85, 56.985, 2, periodic=False) # rho, extent = cic_from_radius(df_ref.X.to_numpy(), df_ref.Y.to_numpy(), 2000, 48.85, 56.985, 2, periodic=False)
# rho, extent = cic_range(df_ref.X.to_numpy(), df_ref.Y.to_numpy(), 1000, 30, 70, 30, 70, periodic=False) rho, extent = cic_from_radius(df_ref.X.to_numpy(), df_ref.Y.to_numpy(), 500, 56, 49.5, 5, periodic=False)
# rho, extent = cic_range(df_ref.X.to_numpy(), df_ref.Y.to_numpy(), 1000, 0, 100, 0, 100, periodic=False)
print("finished cic") print("finished cic")
data = 1.1 + rho data = 1.1 + rho
i = ax.imshow(data.T, norm=LogNorm(), extent=extent, origin="lower") i = ax.imshow(data.T, norm=LogNorm(), extent=extent, origin="lower")
ax.set_title(str(input_file.relative_to(input_file.parent.parent)))
fig.colorbar(i) fig.colorbar(i)
plt.show() plt.show()

View file

@@ -56,7 +56,7 @@ def compare_halo_resolutions(
df_ref, ref_meta = read_file(reference_dir / "output_0004.hdf5") df_ref, ref_meta = read_file(reference_dir / "output_0004.hdf5")
if velo_halos: if velo_halos:
df_ref_halo, ref_halo_lookup, ref_unbound = read_velo_halo_particles(reference_dir, recursivly=False) df_ref_halo, ref_halo_lookup, ref_unbound = read_velo_halo_particles(reference_dir, recursivly=False)
# TODO: clarify if unbound particles should be ignored # TODO: clarify if unbound particles should be ignored -> yes
for k, v in ref_halo_lookup.items(): for k, v in ref_halo_lookup.items():
v.update(ref_unbound[k]) v.update(ref_unbound[k])
else: else:

View file

@@ -9,9 +9,11 @@ from read_vr_files import read_velo_halos
fig: Figure = plt.figure() fig: Figure = plt.figure()
ax: Axes = fig.gca() ax: Axes = fig.gca()
num_bins = 100 num_bins = 30
sim_volume = 100 ** 3 sim_volume = 100 ** 3
# notes: shannon + db2
linestyles = ["solid", "dashed", "dotted"] linestyles = ["solid", "dashed", "dotted"]
colors = ["C1", "C2", "C3", "C4"] colors = ["C1", "C2", "C3", "C4"]
@@ -21,7 +23,7 @@ for i, waveform in enumerate(["DB2", "DB4", "DB8", "shannon"]):
dir = base_dir / f"{waveform}_{resolution}_100" dir = base_dir / f"{waveform}_{resolution}_100"
halos = read_velo_halos(dir) halos = read_velo_halos(dir)
# halos = halos[halos["Mvir"] > 2] # there seem to be multiple halos with a mass of 1.88196993 halos = halos[halos["Mvir"] > 2] # there seem to be multiple halos with a mass of 1.88196993
# halos.to_csv("weird_halos.csv") # halos.to_csv("weird_halos.csv")

View file

@@ -20,11 +20,13 @@ def read_file(file: Path) -> Tuple[pd.DataFrame, ParticlesMeta]:
meta_cache_file = file.with_suffix(".cache_meta.pickle") meta_cache_file = file.with_suffix(".cache_meta.pickle")
if not (cache_file.exists() and meta_cache_file.exists()): if not (cache_file.exists() and meta_cache_file.exists()):
reference_file = h5py.File(file) reference_file = h5py.File(file)
has_fof= "FOFGroupIDs" in reference_file["PartType1"]
masses = reference_file["PartType1"]["Masses"] masses = reference_file["PartType1"]["Masses"]
if not np.all(masses == masses[0]): if not np.all(masses == masses[0]):
raise ValueError("only equal mass particles are supported for now") raise ValueError("only equal mass particles are supported for now")
df = pd.DataFrame(reference_file["PartType1"]["Coordinates"], columns=["X", "Y", "Z"]) df = pd.DataFrame(reference_file["PartType1"]["Coordinates"], columns=["X", "Y", "Z"])
if has_fof:
df2 = pd.DataFrame(reference_file["PartType1"]["FOFGroupIDs"], columns=["FOFGroupIDs"]).astype("category") df2 = pd.DataFrame(reference_file["PartType1"]["FOFGroupIDs"], columns=["FOFGroupIDs"]).astype("category")
df = df.merge(df2, "outer", left_index=True, right_index=True) df = df.merge(df2, "outer", left_index=True, right_index=True)
del df2 del df2
@@ -33,6 +35,7 @@ def read_file(file: Path) -> Tuple[pd.DataFrame, ParticlesMeta]:
df = df.merge(df3, "outer", left_index=True, right_index=True) df = df.merge(df3, "outer", left_index=True, right_index=True)
del df3 del df3
df.set_index("ParticleIDs", inplace=True) df.set_index("ParticleIDs", inplace=True)
if has_fof:
print("sorting") print("sorting")
df.sort_values("FOFGroupIDs",inplace=True) df.sort_values("FOFGroupIDs",inplace=True)
meta = ParticlesMeta( meta = ParticlesMeta(

View file

@@ -16,9 +16,10 @@ with pd.option_context('display.max_rows', None):
fig: Figure = plt.figure() fig: Figure = plt.figure()
ax: Axes = fig.gca() ax: Axes = fig.gca()
# hist2d, log?
x_col = "ref_Mvir" x_col = "ref_Mass_200mean"
y_col = "comp_Mvir" y_col = "comp_Mass_200mean"
# ax.scatter(df["ref_sizes"], df["comp_sizes"], s=1, alpha=.3) # ax.scatter(df["ref_sizes"], df["comp_sizes"], s=1, alpha=.3)
ax.scatter(df[x_col], df[y_col], s=1, alpha=.3) ax.scatter(df[x_col], df[y_col], s=1, alpha=.3)
@@ -31,7 +32,7 @@ ax.set_ylabel(y_col)
min_x = min([min(df[x_col]), min(df[y_col])]) min_x = min([min(df[x_col]), min(df[y_col])])
max_x = max([max(df[x_col]), max(df[y_col])]) max_x = max([max(df[x_col]), max(df[y_col])])
ax.plot([min_x, max_x], [min_x, max_x], linewidth=1, color="C2") ax.loglog([min_x, max_x], [min_x, max_x], linewidth=1, color="C2")
fig2: Figure = plt.figure() fig2: Figure = plt.figure()
ax2: Axes = fig2.gca() ax2: Axes = fig2.gca()