1
0
Fork 0
mirror of https://github.com/Findus23/halo_comparison.git synced 2024-09-19 16:03:50 +02:00

some minor changes

This commit is contained in:
Lukas Winkler 2022-06-01 11:31:42 +02:00
parent e1c77488e1
commit 4210b02941
Signed by: lukas
GPG key ID: 54DE4D798D244853
6 changed files with 87 additions and 20 deletions

2
cic.py
View file

@ -53,8 +53,6 @@ def cic_range(
print((60 - ymin) / yrange)
extent = (xmin, xmax, ymin, ymax)
rho = cic_deposit(Xs, Ys, ngrid, *args, **kwargs)
print(rho)
exit()
return rho, extent

View file

@ -13,7 +13,7 @@ from pyvista import Plotter
from cic import cic_deposit
from paths import base_dir
from read_vr_files import read_velo_halos
from read_vr_files import read_velo_halo_particles
from readfiles import read_file, read_halo_file
from remap_particle_IDs import IDScaler
from threed import plotdf3d
@ -56,7 +56,7 @@ def compare_halo_resolutions(
print("reading reference file")
df_ref, ref_meta = read_file(reference_dir)
if velo_halos:
df_ref_halo, ref_halo_lookup, ref_unbound = read_velo_halos(reference_dir, recursivly=False)
df_ref_halo, ref_halo_lookup, ref_unbound = read_velo_halo_particles(reference_dir, recursivly=False)
# TODO: clarify if unbound particles should be ignored
for k, v in ref_halo_lookup.items():
v.update(ref_unbound[k])
@ -66,7 +66,7 @@ def compare_halo_resolutions(
print("reading comparison file")
df_comp, comp_meta = read_file(comparison_dir)
if velo_halos:
df_comp_halo, comp_halo_lookup, comp_unbound = read_velo_halos(comparison_dir, recursivly=False)
df_comp_halo, comp_halo_lookup, comp_unbound = read_velo_halo_particles(comparison_dir, recursivly=False)
for k, v in comp_halo_lookup.items():
v.update(comp_unbound[k])

20
estimate_vmax.py Normal file
View file

@ -0,0 +1,20 @@
import numpy as np

from paths import base_dir
from read_vr_files import read_velo_halos

# Estimate the mean and scatter of Vmax for halos whose particle count lies
# in a fixed window (strictly between 40 and 60) for one simulation run.
waveform = "shannon"
resolution = 128

# "directory" instead of "dir" to avoid shadowing the builtin
directory = base_dir / f"{waveform}_{resolution}_100"
halos = read_velo_halos(directory)

# collect Vmax of every halo inside the particle-count window
vmax = np.array([
    halo["Vmax"]
    for _, halo in halos.iterrows()
    if 40 < halo["npart"] < 60
])

print(vmax.mean())
print(vmax.std())

View file

@ -5,8 +5,28 @@ import numpy as np
from matplotlib import pyplot as plt
from matplotlib.colors import LogNorm
from matplotlib.figure import Figure
from matplotlib.patches import Circle
from pyvista import Axes
from cic import Extent
from paths import base_dir, vis_datafile
from read_vr_files import read_velo_halos
def increase_extent_1d(xmin: float, xmax: float, factor: float):
    """
    Scale the interval [xmin, xmax] by *factor* around its center.

    Returns (new_min, new_max) of the widened (factor > 1) or shrunk
    (factor < 1) interval.
    """
    xrange = xmax - xmin
    xcenter = (xmax + xmin) / 2
    # Bug fix: the original computed xrange/xcenter and then executed a bare
    # "return", silently returning None. Return the scaled bounds instead.
    half = xrange * factor / 2
    return xcenter - half, xcenter + half
def increase_extent(extent: "Extent", factor: float) -> "Extent":
    """
    Scale a 2D extent (xmin, xmax, ymin, ymax) by *factor* around its center.

    Each axis is widened (or shrunk) independently about its own midpoint.
    The visible original unpacked the extent and fell through, returning
    None despite the -> Extent annotation; this returns the scaled extent.
    """
    xmin, xmax, ymin, ymax = extent
    xcenter = (xmax + xmin) / 2
    ycenter = (ymax + ymin) / 2
    # half-widths of the scaled extent
    xhalf = (xmax - xmin) * factor / 2
    yhalf = (ymax - ymin) * factor / 2
    return xcenter - xhalf, xcenter + xhalf, ycenter - yhalf, ycenter + yhalf
def in_extent(extent: Extent, X, Y, factor=2) -> bool:
    """Check whether the point (X, Y) lies strictly inside *extent*.

    *extent* is (xmin, xmax, ymin, ymax); *factor* is currently unused but
    kept so existing call sites are unaffected.
    """
    x_low, x_high, y_low, y_high = extent
    inside_x = x_low < X < x_high
    inside_y = y_low < Y < y_high
    return inside_x and inside_y
def main():
rows = ["shannon", "DB8", "DB4", "DB2"]
@ -14,21 +34,41 @@ def main():
columns = [128, 256, 512]
fig: Figure = plt.figure(figsize=(9, 9))
axes: List[List[Axes]] = fig.subplots(len(rows), len(columns), sharex=True, sharey=True)
with h5py.File("vis.cache.hdf5") as vis_out:
with h5py.File(vis_datafile) as vis_out:
vmin, vmax = vis_out["vmin_vmax"]
print(vmin, vmax)
for i, waveform in enumerate(rows):
for j, resolution in enumerate(columns):
dir = base_dir / f"{waveform}_{resolution}_100"
halos = read_velo_halos(dir)
ax = axes[i][j]
rho = np.asarray(vis_out[f"{waveform}_{resolution}_rho"])
extent = list(vis_out[f"{waveform}_{resolution}_extent"])
mass = vis_out[f"{waveform}_{resolution}_mass"]
extent = tuple(vis_out[f"{waveform}_{resolution}_extent"])
mass = vis_out[f"{waveform}_{resolution}_mass"][()] # get scalar value from Dataset
main_halo_id = vis_out[f"{waveform}_{resolution}_halo_id"][()]
vmin_scaled = (vmin + offset) * mass
vmax_scaled = (vmax + offset) * mass
rho = (rho + offset) * mass
img = ax.imshow(rho.T, norm=LogNorm(vmin=vmin_scaled, vmax=vmax_scaled), extent=extent,origin="lower")
img = ax.imshow(rho.T, norm=LogNorm(vmin=vmin_scaled, vmax=vmax_scaled), extent=extent,
origin="lower")
for halo_id, halo in halos.iterrows():
if halo["Vmax"] > 135:
if in_extent(extent, halo.X, halo.Y):
color = "red" if halo_id == main_halo_id else "white"
if halo_id == main_halo_id:
print(halo_id == main_halo_id, halo_id, main_halo_id, halo["Rvir"])
print("plotting main halo")
circle = Circle(
(halo.X, halo.Y),
halo["Rvir"], zorder=10,
linewidth=1, edgecolor=color, fill=None, alpha=.2
)
ax.add_artist(circle)
print(img)
# break
# break
pad = 5
# based on https://stackoverflow.com/a/25814386/4398037
for ax, col in zip(axes[0], columns):

View file

@ -6,8 +6,8 @@ import pandas as pd
from matplotlib import pyplot as plt
from cic import cic_from_radius
from paths import base_dir
from read_vr_files import read_velo_halos
from paths import base_dir, vis_datafile
from read_vr_files import read_velo_halo_particles
from readfiles import read_file
show_unbound = False
@ -19,7 +19,7 @@ Coords = Tuple[float, float, float, float] # radius, X, Y, Z
def load_halo_data(waveform: str, resolution: int, halo_id: int, coords: Coords):
dir = base_dir / f"{waveform}_{resolution}_100"
df, meta = read_file(dir)
df_halo, halo_lookup, unbound = read_velo_halos(dir, recursivly=False, skip_unbound=not show_unbound)
df_halo, halo_lookup, unbound = read_velo_halo_particles(dir, recursivly=False, skip_unbound=not show_unbound)
if show_unbound:
for k, v in halo_lookup.items():
v.update(unbound[k])
@ -79,7 +79,9 @@ def main():
coords = None
vmin = np.Inf
vmax = -np.Inf
with h5py.File("vis.cache.hdf5", "w") as vis_out:
if vis_datafile.exists():
input("confirm to overwrite file")
with h5py.File(vis_datafile, "w") as vis_out:
for waveform in ["shannon", "DB2", "DB4", "DB8"]:
for resolution in [128, 256, 512]:
if first_halo:
@ -89,7 +91,6 @@ def main():
first_halo = False
else:
halo_id = map_halo_id(initial_halo_id, ref_waveform, ref_resolution, waveform, resolution)
halo, halo_particles, meta, image_coords = load_halo_data(waveform, resolution, halo_id, coords)
if not coords:
coords = image_coords
@ -107,6 +108,7 @@ def main():
vis_out.create_dataset(f"{waveform}_{resolution}_rho", data=rho, compression='gzip', compression_opts=5)
vis_out.create_dataset(f"{waveform}_{resolution}_extent", data=extent)
vis_out.create_dataset(f"{waveform}_{resolution}_mass", data=meta.particle_mass)
vis_out.create_dataset(f"{waveform}_{resolution}_halo_id", data=halo_id)
imsave(rho, f"out_halo{initial_halo_id}_{waveform}_{resolution}_{halo_id}.png")
vis_out.create_dataset("vmin_vmax", data=[vmin, vmax])

View file

@ -83,14 +83,10 @@ def cached_particles_in_halo(file: Path, *args, **kwargs) -> HaloParticleMapping
return halo_particle_ids
def read_velo_halos(
directory: Path, recursivly=False, skip_unbound=False
) -> Tuple[DataFrame, HaloParticleMapping, HaloParticleMapping]:
def read_velo_halos(directory: Path):
"""
This reads the output files of VELOCIraptor
Returns a dataframe containing all scalar properties of the halos
(https://velociraptor-stf.readthedocs.io/en/latest/output.html),
and two dictionaries mapping the halo IDs to sets of particle IDs
"""
group_catalog = h5py.File(directory / "vroutput.catalog_groups")
group_properties = h5py.File(directory / "vroutput.properties")
@ -117,7 +113,18 @@ def read_velo_halos(
}
df = pd.DataFrame({**data, **scalar_properties}) # create dataframe from two merged dicts
df.index += 1 # Halo IDs start at 1
return df
def read_velo_halo_particles(
directory: Path, recursivly=False, skip_unbound=False
) -> Tuple[DataFrame, HaloParticleMapping, HaloParticleMapping]:
"""
This reads the output files of VELOCIraptor
and returns the halo data from read_velo_halos
and two dictionaries mapping the halo IDs to sets of particle IDs
"""
df = read_velo_halos(directory)
particle_catalog = h5py.File(directory / "vroutput.catalog_particles")
particle_ids = np.asarray(particle_catalog["Particle_IDs"])
@ -145,7 +152,7 @@ def main():
Nres = 512
directory = base_dir / f"{waveform}_{Nres}_100"
df_halo, halo_particle_ids, halo_particle_unbound_ids = read_velo_halos(directory)
df_halo, halo_particle_ids, halo_particle_unbound_ids = read_velo_halo_particles(directory)
particles, meta = read_file(directory)
HALO = 1000
while True: