
Merge branch 'main' of github.com:Findus23/halo_comparison

glatterf42 committed 2022-07-04 12:01:44 +02:00
commit a48832b6bb
6 changed files with 54 additions and 44 deletions

View file

@@ -1,15 +1,23 @@
 from compare_halos import compare_halo_resolutions
+something_failed = False
 for wf in ["DB2", "DB4", "DB8", "shannon"]:
     for res in [128, 256, 512]:
         for source_wf in [wf, "shannon"]:
-            compare_halo_resolutions(
-                ref_waveform=source_wf,
-                reference_resolution=128,
-                comp_waveform=wf,
-                comparison_resolution=res,
-                plot=False,
-                plot3d=False,
-                velo_halos=True,
-                single=False
-            )
+            try:
+                compare_halo_resolutions(
+                    ref_waveform=source_wf,
+                    reference_resolution=128,
+                    comp_waveform=wf,
+                    comparison_resolution=res,
+                    plot=False,
+                    plot3d=False,
+                    velo_halos=True,
+                    single=False
+                )
+            except:
+                something_failed = True
+                continue
+if something_failed:
+    print("\n\nAt least one comparison failed")

View file

@@ -85,14 +85,12 @@ def compare_halo_resolutions(
         if ref_halo["cNFW"] < 0:
             print("NEGATIVE")
             print(ref_halo["cNFW"])
             raise ValueError()
-        if len(halo_particle_ids) < 50:
-            # TODO: decide on a lower size limit (and also apply it to comparison halo?)
-            print(f"halo is too small ({len(halo_particle_ids)}")
-            print("skipping")
-            continue
+        if ref_halo_mass < 2:
+            print("skip weird, small mass halo")
+            continue
         print("LEN", len(halo_particle_ids), ref_halo.Mass_tot)
         offset_x, offset_y = ref_halo.X, ref_halo.Y
         # cumulative_mass_profile(particles_in_ref_halo, ref_halo, ref_meta, plot=plot)
@@ -132,6 +130,9 @@ def compare_halo_resolutions(
         if len(nearby_halos) < 10:
             print(f"only {len(nearby_halos)} halos, expanding to 50xRvir")
             nearby_halos = set(df_comp_halo.loc[halo_distances < ref_halo.Rvir * 50].index.to_list())
+        if len(nearby_halos) < 10:
+            print(f"only {len(nearby_halos)} halos, expanding to 150xRvir")
+            nearby_halos = set(df_comp_halo.loc[halo_distances < ref_halo.Rvir * 150].index.to_list())
         if not nearby_halos:
             raise Exception("no halos are nearby") # TODO
@@ -204,9 +205,6 @@ def compare_halo_resolutions(
             # particles_in_comp_halo: DataFrame = df_comp.loc[df_comp["FOFGroupIDs"] == halo]
             particle_ids_in_comp_halo = comp_halo_lookup[halo_id]
             mass_factor_limit = 5
-            if comp_halo_masses[halo_id] < 2:
-                print("skip weird, small mass halo")
-                continue
             if not (1 / mass_factor_limit < (comp_halo_masses[halo_id] / ref_halo_mass) < mass_factor_limit):
                 # print("mass not similar, skipping")
@@ -216,7 +214,9 @@ def compare_halo_resolutions(
             halo_size = len(particle_ids_in_comp_halo)
             # df = particles_in_comp_halo.join(halo_particles, how="inner", rsuffix="ref")
             shared_particles = particle_ids_in_comp_halo.intersection(halo_particle_ids)
-            shared_size = len(shared_particles)
+            union_particles=particle_ids_in_comp_halo.union(halo_particle_ids)
+            shared_size = len(shared_particles)/len(union_particles)
             # print(shared_size)
             if not shared_size:
                 continue
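With this change (together with the later hunk that stops dividing `best_halo_match` by the reference particle count), the match score becomes the intersection-over-union (Jaccard index) of the two particle-ID sets rather than a raw shared-particle count. A minimal sketch of that metric; the helper name is illustrative and not from the repository:

def particle_overlap(ref_ids: set, comp_ids: set) -> float:
    """Jaccard index of two particle-ID sets: |A ∩ B| / |A ∪ B|."""
    if not ref_ids or not comp_ids:
        return 0.0
    return len(ref_ids & comp_ids) / len(ref_ids | comp_ids)

# A halo sharing 300 of its 400 particles with a 500-particle counterpart:
# particle_overlap(set(range(400)), set(range(100, 600)))  -> 300 / 600 = 0.5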
@@ -225,6 +225,7 @@ def compare_halo_resolutions(
             df = df_comp.loc[list(shared_particles)]
             if plot:
                 color = f"C{i + 1}"
+                comp_halo: pd.Series = df_comp_halo.loc[halo_id]
                 ax.scatter(apply_offset_to_list(df["X"], offset_x), apply_offset_to_list(df["Y"], offset_y), s=1,
                            alpha=.3, c=color)
@@ -253,7 +254,7 @@ def compare_halo_resolutions(
             np.array([ref_halo.X, ref_halo.Y, ref_halo.Z]) - np.array([comp_halo.X, comp_halo.Y, comp_halo.Z])
         ) / ref_halo.Rvir
         halo_data["distance"] = distance
-        halo_data["match"] = best_halo_match / len(halo_particle_ids)
+        halo_data["match"] = best_halo_match
         compared_halos.append(halo_data)
         # exit()
         if plot:
@@ -301,7 +302,7 @@ if __name__ == '__main__':
         reference_resolution=128,
         comparison_resolution=256,
         plot=False,
-        plot3d=False,
+        plot3d=True,
         plot_cic=False,
         velo_halos=True,
         single=False,

View file

@ -88,8 +88,8 @@ def read_velo_halos(directory: Path,veloname="vroutput"):
Returns a dataframe containing all scalar properties of the halos
(https://velociraptor-stf.readthedocs.io/en/latest/output.html),
"""
group_catalog = h5py.File(directory / f"{veloname}.catalog_groups")
group_properties = h5py.File(directory / f"{veloname}.properties")
group_catalog = h5py.File(directory / f"{veloname}.catalog_groups.0")
group_properties = h5py.File(directory / f"{veloname}.properties.0")
scalar_properties = {}
for k, v in group_properties.items():
if not isinstance(v, Dataset):
@@ -125,11 +125,11 @@ def read_velo_halo_particles(
     and two dictionaries mapping the halo IDs to sets of particle IDs
     """
     df = read_velo_halos(directory)
-    particle_catalog = h5py.File(directory / "vroutput.catalog_particles")
+    particle_catalog = h5py.File(directory / "vroutput.catalog_particles.0")
    particle_ids = np.asarray(particle_catalog["Particle_IDs"])
     particle_catalog_unbound = h5py.File(
-        directory / "vroutput.catalog_particles.unbound"
+        directory / "vroutput.catalog_particles.unbound.0"
     )
     particle_ids_unbound = particle_catalog_unbound["Particle_IDs"][:]
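These reader changes expect the chunked VELOCIraptor output files that carry a ".0" suffix. As a sketch (not the repository's code; the helper name is illustrative), a small wrapper that opens either naming variant, so catalogues written with and without file splitting both load:

from pathlib import Path

import h5py


def open_velo_file(directory: Path, name: str) -> h5py.File:
    """Open a VELOCIraptor output file, preferring the ".0" chunk of a split run."""
    split_file = directory / f"{name}.0"
    return h5py.File(split_file if split_file.exists() else directory / name, "r")


# e.g. open_velo_file(directory, "vroutput.catalog_particles")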

View file

@@ -12,19 +12,24 @@ file = Path(argv[1])
df = pd.read_csv(file)
with pd.option_context('display.max_rows', None):
    print(df[["ref_npart", "comp_npart", "ref_cNFW", "comp_cNFW"]])
# df = df.iloc
fig: Figure = plt.figure()
ax: Axes = fig.gca()
# hist2d, log?
x_col = "ref_cNFW"
y_col = "comp_cNFW"
matches:pd.DataFrame=df["match"]
# print(matches)
# exit()
print(matches.median())
print(matches.std())
exit()
# x_col = "ref_Mvir"
# y_col = "comp_Mvir"
# x_col = "ref_cNFW"
# y_col = "comp_cNFW"
#
x_col = "ref_Mvir"
y_col = "comp_Mvir"
min_x = min([min(df[x_col]), min(df[y_col])])
max_x = max([max(df[x_col]), max(df[y_col])])

View file

@@ -95,7 +95,6 @@ def cross_run(waveforms: list, resolutions: list, Lbox: int, time: str):
 if __name__ == '__main__':
     input("are you sure you want to run this? This might need a large amount of memory")
     Lbox = 100
-    k0 = 2 * 3.14159265358979323846264338327950 / Lbox
     resolutions = [128, 256, 512]
     waveforms = ["DB2", "DB4", "DB8", "shannon"]
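The deleted line spells out π to over 30 digits; the quantity itself is just the fundamental mode of the periodic box, k0 = 2π/Lbox, which the spectra plots below use to place one dashed marker line per resolution at k0 · N. An equivalent sketch (not the repository's code) using math.pi:

import math

Lbox = 100  # box size, as in the script above
k0 = 2 * math.pi / Lbox  # fundamental mode of the periodic box
marker_positions = {res: k0 * res for res in [128, 256, 512]}  # k of the dashed per-resolution lines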

View file

@@ -29,7 +29,8 @@ columns = [
 ]
 # linestyles = ["solid", "dashed", "dotted"]
-colors = ["C1", "C2", "C3", "C4"]
+colors=[f"C{i}" for i in range(10)]
+# colors = ["C1", "C2", "C3", "C4"]
 def spectra_data(
@@ -96,10 +97,10 @@ def create_plot(mode):
             verticalalignment="top",
             transform=ax.transAxes,
         )
-        for res in [128]:
+        for j, res in enumerate(resolutions[:-1] if mode == "cross" else resolutions):
             ax.axvline(
                 k0 * res,
-                color="gray",
+                color=colors[j],
                 linestyle="dashed",
                 label=f"{res}",
             )
@@ -123,15 +124,11 @@ def create_plot(mode):
             comp_p1 = comp_data["P1"]
             end_p1 /= comp_p1
-            ax_ics.loglog(ics_k, ics_p1, color=colors[j])
-            ax_end.loglog(end_k, end_p1, color=colors[j])
+            ax_ics.semilogx(ics_k, ics_p1, color=colors[j])
+            ax_end.semilogx(end_k, end_p1, color=colors[j])
             for ax in [ax_ics, ax_end]:
-                ax.axvline(
-                    k0 * resolution,
-                    color=colors[j],
-                    linestyle="dashed",
-                    label=f"{resolution}",
-                )
                 ax.set_ylim(0.9, 1.10)
     # fig.suptitle(f"Power Spectra {time}") #Not needed for paper
     # fig.tight_layout()
@@ -144,15 +141,15 @@ def create_plot(mode):
             ics_k = ics_data["k [Mpc]"]
             ics_pcross = ics_data["Pcross"]
-            ax_ics.semilogx(ics_k, ics_pcross, color=colors[j], label=f'{res1} vs {res2}')
+            ax_ics.semilogx(ics_k, ics_pcross, color=colors[j+3], label=f'{res1} vs {res2}')
             end_data = spectra_data(waveform, res1, res2, Lbox, 'end')
             end_k = end_data["k [Mpc]"]
             end_pcross = end_data["Pcross"]
-            ax_end.semilogx(end_k, end_pcross, color=colors[j], label=f'{res1} vs {res2}')
+            ax_end.semilogx(end_k, end_pcross, color=colors[j+3], label=f'{res1} vs {res2}')
-        ax_end.set_xlim(right=k0 * 256)
+        ax_end.set_xlim(right=k0 * resolutions[-1])
         ax_end.set_ylim(0.8, 1.02)
         if bottom_row:
             ax_end.legend()
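The plotting changes above replace the single gray guide line at k0 · 128 with one dashed line per resolution, coloured from Matplotlib's default cycle, and move the cross-spectra curves to `colors[j+3]` (C3 and up), presumably so they do not collide with those guide lines. A self-contained sketch of the guide-line pattern, with assumed variable names rather than the repository's create_plot internals:

import math

import matplotlib.pyplot as plt

Lbox = 100
k0 = 2 * math.pi / Lbox
resolutions = [128, 256, 512]
colors = [f"C{i}" for i in range(10)]  # default Matplotlib colour cycle

fig, ax = plt.subplots()
for j, res in enumerate(resolutions):
    # one dashed guide line per resolution, also serving as the legend entry for that resolution
    ax.axvline(k0 * res, color=colors[j], linestyle="dashed", label=f"{res}")
ax.set_xscale("log")
ax.legend()
plt.show()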