Mirror of https://github.com/Findus23/halo_comparison.git
Synced 2024-09-13 09:03:49 +02:00

Commit 453cae05ce: auriga comparison improvements
Parent commit: db4fec38cd

4 changed files with 79 additions and 45 deletions
.gitignore (vendored), 1 addition:

@@ -7,3 +7,4 @@ __pycache__
 *.txt
 *.hdf5
 mass_function.py
+*.pickle

The auriga comparison script:

@@ -1,4 +1,6 @@
+import pickle
 from dataclasses import dataclass
+from enum import Enum
 from pathlib import Path
 from typing import List
 
@@ -9,25 +11,27 @@ from matplotlib.colors import LogNorm
 from matplotlib.figure import Figure
 
 from cic import cic_from_radius
-from cumulative_mass_profiles import cumulative_mass_profile
-from paths import auriga_dir
+from halo_mass_profile import halo_mass_profile
+from paths import auriga_dir, richings_dir
 from readfiles import read_file, read_halo_file
 from utils import read_swift_config
 
 
-# softening_length = 0.026041666666666668
+class Mode(Enum):
+    richings = 1
+    auriga6 = 2
+
+
+mode = Mode.richings
 
 
 def dir_name_to_parameter(dir_name: str):
-    return map(int, dir_name.lstrip("auriga6_halo").split("_"))
+    return map(int, dir_name.lstrip("auriga6_halo").lstrip("richings21_").split("_"))
 
 
 def levelmax_to_softening_length(levelmax: int) -> float:
     # TODO: replace with real calculation
-    if levelmax == 9:
-        return 0.00651041667
-    if levelmax == 10:
-        return 0.00325520833
-    raise ValueError(f"unknown levelmax {levelmax}")
+    box_size = 100
+    return box_size / 30 / 2 ** levelmax
 
 
 fig1: Figure = plt.figure(figsize=(9, 6))
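
Note on the rewritten levelmax_to_softening_length: the closed form box_size / 30 / 2 ** levelmax reproduces exactly the two constants that were hard-coded before (levelmax 9 and 10). A quick standalone check of that claim, with the 100 Mpc box size assumed in the hunk above:

    import math

    def levelmax_to_softening_length(levelmax: int) -> float:
        # closed form from the hunk above, with its assumed box size
        box_size = 100
        return box_size / 30 / 2 ** levelmax

    # matches the previously hard-coded values
    assert math.isclose(levelmax_to_softening_length(9), 0.00651041667, rel_tol=1e-8)
    assert math.isclose(levelmax_to_softening_length(10), 0.00325520833, rel_tol=1e-8)

One caveat on the widened dir_name_to_parameter: str.lstrip strips a leading set of characters, not a literal prefix, so the chained lstrip("auriga6_halo").lstrip("richings21_") only works while no leading digit of levelmin falls into either character set (a levelmin of 6, say, would be eaten along with the "auriga6_halo" prefix). str.removeprefix (Python 3.9+) is the prefix-safe alternative.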
@@ -40,6 +44,10 @@ for ax in [ax1, ax2]:
 ax1.set_ylabel(r'M [$10^{10} \mathrm{M}_\odot$]')
 ax2.set_ylabel("density [$\\frac{10^{10} \\mathrm{M}_\\odot}{Mpc^3}$]")
 
 part_numbers = []
+
+reference_file = Path("auriga_reference.pickle")
+
+
 @dataclass
 class Result:
@@ -50,7 +58,8 @@ class Result:
 images = []
 vmin = np.Inf
 vmax = -np.Inf
-dirs = [d for d in auriga_dir.glob("*") if d.is_dir() and "bak" not in d.name]
+root_dir = auriga_dir if mode == Mode.auriga6 else richings_dir
+dirs = [d for d in root_dir.glob("*") if d.is_dir() and "bak" not in d.name]
 for i, dir in enumerate(sorted(dirs)):
     is_by_adrian = "arj" in dir.name
     print(dir.name)
@@ -58,7 +67,7 @@ for i, dir in enumerate(sorted(dirs)):
     if not is_by_adrian:
         levelmin, levelmin_TF, levelmax = dir_name_to_parameter(dir.name)
         print(levelmin, levelmin_TF, levelmax)
-        # if levelmin_TF != 8:
+        # if levelmax != 12:
         #     continue
     Xc_adrian = 56.50153741810241
     Yc_adrian = 49.40761085700951
@@ -68,6 +77,8 @@ for i, dir in enumerate(sorted(dirs)):
     Zc = 51.68749302578122
 
     input_file = dir / "output_0007.hdf5"
+    if mode == Mode.richings:
+        input_file = dir / "output_0004.hdf5"
     if is_by_adrian:
         input_file = dir / "output_0000.hdf5"
         softening_length = None
@@ -75,8 +86,10 @@ for i, dir in enumerate(sorted(dirs)):
         swift_conf = read_swift_config(dir)
         softening_length = swift_conf["Gravity"]["comoving_DM_softening"]
         assert softening_length == swift_conf["Gravity"]["max_physical_DM_softening"]
-        print(levelmax_to_softening_length(levelmax))
-        assert softening_length == levelmax_to_softening_length(levelmax)
+        ideal_softening_length = levelmax_to_softening_length(levelmax)
+        # if not np.isclose(softening_length, levelmax_to_softening_length(levelmax)):
+        #     raise ValueError(f"softening length for levelmax {levelmax} should be {ideal_softening_length} "
+        #                      f"but is {softening_length}")
     print(input_file)
     df, particles_meta = read_file(input_file)
     df_halos = read_halo_file(input_file.with_name("fof_" + input_file.name))
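
The exact-equality assert against levelmax_to_softening_length is dropped here in favour of a (for now commented-out) np.isclose check, which is the safer comparison when one value is parsed from the SWIFT YAML config and the other is computed. A minimal sketch of that check, reusing the names from this hunk:

    import numpy as np

    def check_softening(softening_length: float, levelmax: int) -> None:
        # tolerant float comparison instead of exact ==
        ideal_softening_length = 100 / 30 / 2 ** levelmax  # levelmax_to_softening_length
        if not np.isclose(softening_length, ideal_softening_length):
            raise ValueError(
                f"softening length for levelmax {levelmax} "
                f"should be {ideal_softening_length} but is {softening_length}"
            )

    check_softening(0.00651041667, 9)  # passes: the levelmax-9 value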
@@ -91,14 +104,34 @@ for i, dir in enumerate(sorted(dirs)):
         halo_id += 1
 
     halo = df_halos.loc[halo_id]
     part_numbers.append(len(df) * particles_meta.particle_mass)
     # halo = halos.loc[1]
-    log_radial_bins, bin_masses, bin_densities, group_radius = cumulative_mass_profile(
-        df, halo, particles_meta, plot=False
+    log_radial_bins, bin_masses, bin_densities, center = halo_mass_profile(
+        df, halo, particles_meta, plot=False, num_bins=100,
+        vmin=0.002, vmax=6.5
     )
+    if is_by_adrian:
+        with reference_file.open("wb") as f:
+            pickle.dump([log_radial_bins, bin_masses, bin_densities], f)
     ax1.loglog(log_radial_bins[:-1], bin_masses, label=str(dir.name), c=f"C{i}")
 
     ax2.loglog(log_radial_bins[:-1], bin_densities, label=str(dir.name), c=f"C{i}")
+
+    if reference_file.exists() and not is_by_adrian and mode == Mode.auriga6:
+        with reference_file.open("rb") as f:
+            data: List[np.ndarray] = pickle.load(f)
+        ref_log_radial_bins, ref_bin_masses, ref_bin_densities = data
+        mass_deviation: np.ndarray = np.abs(bin_masses - ref_bin_masses)
+        density_deviation: np.ndarray = np.abs(bin_densities - ref_bin_densities)
+        ax1.loglog(log_radial_bins[:-1], mass_deviation, c=f"C{i}",
+                   linestyle="dotted")
+
+        ax2.loglog(log_radial_bins[:-1], density_deviation, c=f"C{i}",
+                   linestyle="dotted")
+        accuracy = mass_deviation / ref_bin_masses
+        print(accuracy)
+        print("mean accuracy", accuracy.mean())
+
     if softening_length:
         for ax in [ax1, ax2]:
             ax.axvline(4 * softening_length, color=f"C{i}", linestyle="dotted")
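
The run by Adrian (directories containing "arj") now doubles as the reference: its profile is pickled to auriga_reference.pickle once, and every later auriga6 run is compared against it bin by bin, with absolute deviations drawn as dotted lines. A self-contained sketch of that round-trip and of the relative-accuracy metric printed above (synthetic arrays stand in for real profiles):

    import pickle
    from pathlib import Path

    import numpy as np

    reference_file = Path("auriga_reference.pickle")

    # stand-ins for a real profile: 100 bin edges, 99 per-bin values
    log_radial_bins = np.geomspace(0.002, 6.5, 100)
    bin_masses = np.cumsum(np.ones(99))
    bin_densities = np.ones(99)

    # written once for the reference run ...
    with reference_file.open("wb") as f:
        pickle.dump([log_radial_bins, bin_masses, bin_densities], f)

    # ... and read back when any other run is processed
    with reference_file.open("rb") as f:
        ref_log_radial_bins, ref_bin_masses, ref_bin_densities = pickle.load(f)

    mass_deviation = np.abs(bin_masses - ref_bin_masses)
    accuracy = mass_deviation / ref_bin_masses  # relative deviation per bin
    print("mean accuracy", accuracy.mean())  # 0.0 for identical profiles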
@@ -107,17 +140,17 @@ for i, dir in enumerate(sorted(dirs)):
     print()
     print(Yc - Yc_adrian)
     # shift: (-6, 0, -12)
-    if not is_by_adrian:
-        xshift = Xc - Xc_adrian
-        yshift = Yc - Yc_adrian
-        zshift = Zc - Zc_adrian
-        print("shift", xshift, yshift, zshift)
+    # if not is_by_adrian:
+    #     xshift = Xc - Xc_adrian
+    #     yshift = Yc - Yc_adrian
+    #     zshift = Zc - Zc_adrian
+    #     print("shift", xshift, yshift, zshift)
 
-    X -= 1.9312
-    Y -= 1.7375
-    Z -= 1.8978
+    X -= center[0]
+    Y -= center[1]
+    Z -= center[2]
 
-    rho, extent = cic_from_radius(X, Z, 500, Xc_adrian, Yc_adrian, 5, periodic=False)
+    rho, extent = cic_from_radius(X, Z, 500, 0, 0, 5, periodic=False)
 
     vmin = min(vmin, rho.min())
     vmax = max(vmax, rho.max())
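
The hand-measured shifts (X -= 1.9312 and friends) give way to the centre returned by halo_mass_profile: each run is moved so its halo sits at the origin, which is why cic_from_radius is now called around (0, 0) rather than around Adrian's centre coordinates. A minimal illustration of the recentering step (synthetic coordinates; the cic_from_radius call is quoted from the hunk above, not executed here):

    import numpy as np

    rng = np.random.default_rng(0)
    X, Y, Z = rng.uniform(40, 60, (3, 1000))  # fake particle coordinates
    center = np.array([56.5, 49.4, 51.7])     # as returned by halo_mass_profile

    # shift the halo to the origin so all runs share one frame
    X -= center[0]
    Y -= center[1]
    Z -= center[2]

    # rho, extent = cic_from_radius(X, Z, 500, 0, 0, 5, periodic=False)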
@@ -134,8 +167,10 @@ for i, dir in enumerate(sorted(dirs)):
 ax1.legend()
 ax2.legend()
 
-fig3: Figure = plt.figure(figsize=(9, 6))
-axes: List[Axes] = fig3.subplots(2, 3, sharex=True, sharey=True).flatten()
+# fig3: Figure = plt.figure(figsize=(9, 9))
+# axes: List[Axes] = fig3.subplots(3, 3, sharex=True, sharey=True).flatten()
+fig3: Figure = plt.figure(figsize=(6, 3))
+axes: List[Axes] = fig3.subplots(1, 2, sharex=True, sharey=True).flatten()
 
 for result, ax in zip(images, axes):
     data = 1.1 + result.rho
@@ -155,3 +190,4 @@ fig2.savefig(Path(f"~/tmp/auriga2_{8}.pdf").expanduser())
 fig3.savefig(Path("~/tmp/auriga3.pdf").expanduser())
 
 plt.show()
+print(part_numbers)

The centre-finding helper:

@@ -1,28 +1,29 @@
 import matplotlib.pyplot as plt
 import numpy as np
 import pandas as pd
 
+from utils import print_progress
 
 
 def find_center(df: pd.DataFrame, center: np.ndarray, initial_radius=1):
     # plt.figure()
     all_particles = df[["X", "Y", "Z"]].to_numpy()
     radius = initial_radius
     center_history = []
+    i = 0
     while True:
         center_history.append(center)
         distances = np.linalg.norm(all_particles - center, axis=1)
         in_radius_particles = all_particles[distances < radius]
         num_particles = in_radius_particles.shape[0]
-        print("num_particles", num_particles)
+        print_progress(i, "?", f"n={num_particles}, r={radius}, c={center}")
         if num_particles < 10:
             break
         center_of_mass = in_radius_particles.mean(axis=0)
         new_center = (center_of_mass + center) / 2
-        print("new center", new_center)
         shift = np.linalg.norm(center - new_center)
         radius = max(2 * shift, radius * 0.9)
-        print("radius", radius)
         center = new_center
+        i += 1
     center_history = np.array(center_history)
     # print(center_history)
     # plt.scatter(center_history[::, 0], center_history[::, 1], c=range(len(center_history[::, 1])))
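
find_center shrinks a sphere around a running centre of mass: average the particles inside the sphere, move halfway towards that average, shrink the radius, and stop once fewer than 10 particles remain inside. A synthetic sanity check of the idea (this inlines a minimal version of the loop, since the hunk above does not show the function's return value; it is not the repo's code path):

    import numpy as np

    def find_center(all_particles: np.ndarray, center: np.ndarray, initial_radius=1.0):
        radius = initial_radius
        while True:
            distances = np.linalg.norm(all_particles - center, axis=1)
            in_radius_particles = all_particles[distances < radius]
            if in_radius_particles.shape[0] < 10:
                return center
            # move halfway towards the local centre of mass, then shrink the sphere
            new_center = (in_radius_particles.mean(axis=0) + center) / 2
            shift = np.linalg.norm(center - new_center)
            radius = max(2 * shift, radius * 0.9)
            center = new_center

    rng = np.random.default_rng(42)
    true_center = np.array([56.5, 49.4, 51.7])
    cloud = true_center + 0.05 * rng.standard_normal((10_000, 3))
    found = find_center(cloud, true_center + np.array([0.3, -0.2, 0.1]))
    print(found)  # converges close to true_center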

The mass profile module (cumulative_mass_profile becomes halo_mass_profile):

@@ -12,22 +12,18 @@ from readfiles import ParticlesMeta, read_file, read_halo_file
 
 
 def V(r):
-    return 4 * np.pi * r ** 3 / 3
+    return 4 / 3 * np.pi * r ** 3
 
 
-def cumulative_mass_profile(particles: pd.DataFrame, halo: pd.Series,
-                            particles_meta: ParticlesMeta, plot=False, num_bins=30):
-    print(type(particles))
+def halo_mass_profile(particles: pd.DataFrame, halo: pd.Series,
+                      particles_meta: ParticlesMeta, vmin: float, vmax: float, plot=False, num_bins=30):
     center = np.array([halo.X, halo.Y, halo.Z])
+    center = find_center(particles, center)
     positions = particles[["X", "Y", "Z"]].to_numpy()
-    print(positions)
-    print(positions.shape)
     distances = np.linalg.norm(positions - center, axis=1)
     group_radius = distances.max()
-    # normalized_distances = distances / group_radius
 
-    log_radial_bins = np.geomspace(0.01, 2, num_bins)
+    log_radial_bins = np.geomspace(vmin, vmax, num_bins)
 
     bin_masses = []
     bin_densities = []
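
Two conventions change together in this hunk: the radial bins now run from vmin to vmax in absolute distance units (previously 0.01 to 2 in units of the group radius), and shell volumes in the next hunk are correspondingly V(bin_end) - V(bin_start) without the group_radius rescaling; the V(r) rewrite itself is algebraically identical to the old 4 * np.pi * r ** 3 / 3. A small worked example of one shell's density under the new convention (made-up numbers, not repo data):

    import numpy as np

    def V(r):
        # volume of a sphere of radius r
        return 4 / 3 * np.pi * r ** 3

    log_radial_bins = np.geomspace(0.002, 6.5, 100)  # the vmin/vmax used above
    bin_start, bin_end = log_radial_bins[0], log_radial_bins[1]

    particle_mass = 1e-4   # assumed, in 1e10 Msun as in the plot labels
    count = 25             # particles found in this shell (made up)
    mass = count * particle_mass
    density = mass / (V(bin_end) - V(bin_start))  # shells in absolute units now
    print(density)

This change is also why the R / R$_\mathrm{group}$ x-axis label is commented out in the plotting hunk below.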
@@ -37,13 +33,13 @@ def cumulative_mass_profile(particles: pd.DataFrame, halo: pd.Series,
         in_bin = np.where((bin_start < distances) & (distances < bin_end))[0]
         count = in_bin.shape[0]
         mass = count * particles_meta.particle_mass
-        volume = V(bin_end * group_radius) - V(bin_start * group_radius)
+        volume = V(bin_end) - V(bin_start)
         bin_masses.append(mass)
         density = mass / volume
         bin_densities.append(density)
-    print(bin_masses)
-    print(bin_densities)
 
     bin_masses = np.array(bin_masses)
     bin_densities = np.array(bin_densities)
     bin_masses = np.cumsum(bin_masses)
 
     if plot:
@@ -54,13 +50,13 @@ def cumulative_mass_profile(particles: pd.DataFrame, halo: pd.Series,
         ax.loglog(log_radial_bins[:-1], bin_masses, label="counts")
         ax2.loglog(log_radial_bins[:-1], bin_densities, label="densities", c="C1")
-        ax.set_xlabel(r'R / R$_\mathrm{group}$')
+        # ax.set_xlabel(r'R / R$_\mathrm{group}$')
         ax.set_ylabel(r'M [$10^{10} \mathrm{M}_\odot$]')
         ax2.set_ylabel("density [$\\frac{10^{10} \\mathrm{M}_\\odot}{Mpc^3}$]")
         plt.legend()
         plt.show()
 
-    return log_radial_bins, bin_masses, bin_densities, group_radius
+    return log_radial_bins, bin_masses, bin_densities, center
 
 
 if __name__ == '__main__':
@@ -78,4 +74,4 @@ if __name__ == '__main__':
 
     halo = df_halos.loc[halo_id]
 
-    cumulative_mass_profile(particles_in_halo, halo, particles_meta, plot=True)
+    halo_mass_profile(particles_in_halo, halo, particles_meta, plot=True)
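
One loose end: halo_mass_profile now takes vmin and vmax as required positional parameters, but this __main__ call still omits them, so it would raise a TypeError as written. A call matching the updated signature, reusing the bounds the comparison script passes above:

    # assuming particles_in_halo, halo and particles_meta are loaded as above
    halo_mass_profile(
        particles_in_halo, halo, particles_meta,
        vmin=0.002, vmax=6.5,  # same radial bounds as in the comparison script
        plot=True,
    )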