Mirror of https://github.com/Findus23/halo_comparison.git

Merge branch 'main' of github.com:Findus23/halo_comparison

Commit 98a1d925d1 by Lukas Winkler, 2022-10-27 16:34:18 +02:00
Signed by: lukas (GPG key ID: 54DE4D798D244853)
9 changed files with 365 additions and 162 deletions


@@ -3,26 +3,34 @@ from dataclasses import dataclass
from enum import Enum
from pathlib import Path
from pprint import pprint
from subprocess import run
from sys import argv
from typing import List, Tuple
import h5py
import numpy as np
import pandas as pd
import pynbody
from matplotlib import pyplot as plt
from matplotlib.axes import Axes
from matplotlib.colors import LogNorm
from matplotlib.figure import Figure
from matplotlib.image import AxesImage
from numpy import log10
from pynbody.array import SimArray
from pynbody.snapshot import FamilySubSnap
from pynbody.snapshot.ramses import RamsesSnap
from cache import HDFCache
from cic import cic_from_radius, cic_range
from find_center import find_center
from halo_mass_profile import halo_mass_profile
from halo_mass_profile import halo_mass_profile, property_profile
from nfw import fit_nfw
from paths import auriga_dir, richings_dir
from ramses import load_ramses_data
from ramses import load_ramses_data, get_slice_argument, load_slice_data
from readfiles import read_file, read_halo_file, ParticlesMeta
from slices import create_2d_slice
from utils import read_swift_config, print_wall_time, figsize_from_page_fraction
from slices import create_2d_slice, filter_3d
from utils import read_swift_config, figsize_from_page_fraction
class Mode(Enum):
@@ -30,10 +38,23 @@ class Mode(Enum):
auriga6 = 2
class Plot(Enum):
auriga_plots = "auriga"
richings_bary = "richings_bary"
mode = Mode.richings
try:
plottype = Plot(argv[1])
except KeyError:
plottype = None
cache = HDFCache(Path("auriga_cache.hdf5"))
if plottype == Plot.auriga_plots:
mode = Mode.auriga6
def dir_name_to_parameter(dir_name: str):
return map(
@@ -56,7 +77,16 @@ def main():
ax1: Axes = fig1.gca()
fig2: Figure = plt.figure(figsize=figsize_from_page_fraction())
ax2: Axes = fig2.gca()
fig4, axs_baryon = plt.subplots(nrows=3, ncols=5, sharex="all", sharey="all", figsize=(10, 4))
axs_baryon: List[List[Axes]]
fig4, axs_baryon = plt.subplots(
nrows=2, ncols=4,
sharex="all", sharey="all",
figsize=figsize_from_page_fraction(columns=2, height_to_width=0.5)
)
fig5: Figure = plt.figure(figsize=figsize_from_page_fraction())
ax5: Axes = fig5.gca()
fig6: Figure = plt.figure(figsize=figsize_from_page_fraction())
ax6: Axes = fig6.gca()
baryon_plot_counter = 0
for ax in [ax1, ax2]:
ax.set_xlabel(r"R [Mpc]")
@@ -92,12 +122,18 @@ def main():
if not is_by_adrian:
levelmin, levelmin_TF, levelmax = dir_name_to_parameter(dir.name)
print(levelmin, levelmin_TF, levelmax)
if not has_baryons:
continue
if is_ramses:
continue
if plottype == Plot.auriga_plots:
if (levelmin, levelmin_TF, levelmax) == (7, 9, 9):
continue
elif plottype == Plot.richings_bary:
if not has_baryons:
continue
if levelmax != 11:
continue
# if not is_ramses:
# continue
input_file = dir / "output_0009.hdf5"
input_file = dir / "output_0007.hdf5"
if mode == Mode.richings:
input_file = dir / "output_0004.hdf5"
if is_by_adrian or is_ramses:
@@ -106,7 +142,7 @@ def main():
else:
try:
swift_conf = read_swift_config(dir)
print_wall_time(dir)
# print_wall_time(dir)
except FileNotFoundError:
continue
gravity_conf = swift_conf["Gravity"]
@@ -132,7 +168,7 @@ def main():
softening_length = None
elif "ramses" in dir.name:
h = 0.6777
hr_coordinates, particles_meta, center = load_ramses_data(dir / "output_00007")
hr_coordinates, particles_meta, center = load_ramses_data(dir / "output_00009")
df = pd.DataFrame(hr_coordinates, columns=["X", "Y", "Z"])
softening_length = None
else:
@@ -154,7 +190,8 @@ def main():
center = np.array([halo.X, halo.Y, halo.Z])
center = find_center(df, center)
log_radial_bins, bin_masses, bin_densities, center = halo_mass_profile(
df, center, particles_meta, plot=False, num_bins=100, vmin=0.002, vmax=6.5
df[["X", "Y", "Z"]].to_numpy(), center, particles_meta, plot=False,
num_bins=100, rmin=0.002, rmax=6.5
)
i_min_border = np.argmax(
0.01 < log_radial_bins
@@ -177,9 +214,9 @@ def main():
with reference_file.open("wb") as f:
pickle.dump([log_radial_bins, bin_masses, bin_densities], f)
if is_by_adrian:
label = "reference"
label = "Reference"
else:
label = f"{levelmin}, {levelmin_TF}, {levelmax}"
label = f"({levelmin}, {levelmin_TF}, {levelmax})"
ax1.loglog(log_radial_bins[:-1], bin_masses, label=label, c=f"C{i}")
ax2.loglog(log_radial_bins[:-1], bin_densities, label=label, c=f"C{i}")
@@ -204,8 +241,7 @@ def main():
ax.axvline(4 * softening_length, color=f"C{i}", linestyle="dotted")
# for ax in [ax1, ax2]:
# ax.axvline(vr_halo.Rvir, color=f"C{i}", linestyle="dashed")
X, Y, Z = df.X.to_numpy(), df.Y.to_numpy(), df.Z.to_numpy()
coords = df[["X", "Y", "Z"]].to_numpy()
# shift: (-6, 0, -12)
# if not is_by_adrian:
@@ -214,71 +250,135 @@ def main():
# zshift = Zc - Zc_adrian
# print("shift", xshift, yshift, zshift)
X -= center[0]
Y -= center[1]
Z -= center[2]
coords_centered = coords - center
rho, extent = cic_from_radius(X, Z, 4000, 0, 0, 5, periodic=False)
rho, extent = cic_from_radius(coords_centered[::, 0], coords_centered[::, 2], 500, 0, 0, 1.5, periodic=False)
vmin = min(vmin, rho.min())
vmax = max(vmax, rho.max())
images.append(
Result(
rho=rho,
title=str(dir.name),
levels=(levelmin, levelmin_TF, levelmax) if levelmin else None,
)
res = Result(
rho=rho,
title=f"levelmin={levelmin}, levelmin_TF={levelmin_TF}, levelmax={levelmax}" if not is_by_adrian else "Reference",
levels=(levelmin, levelmin_TF, levelmax) if not is_by_adrian else (100, 100, 100),
)
images.append(res)
i += 1
if has_baryons:
interpolation_method = "nearest" # "linear"
extent = [46, 52, 54, 60] # xrange[0], xrange[-1], yrange[0], yrange[-1]
extent = [42, 62, 50, 70]
for ii, property in enumerate(["cic", "Densities", "Entropies", "InternalEnergies", "Temperatures"]):
key = f"grid_{property}_{interpolation_method}"
cached_grid = cache.get(key, str(input_file))
bary_file = dir / "output_00009" if is_ramses else input_file
if is_ramses:
s: RamsesSnap = pynbody.load(str(bary_file))
gas_data: FamilySubSnap = s.gas
temperature_array: SimArray = gas_data["temp"]
p_array: SimArray = gas_data["p"].in_units("1e10 Msol Mpc^-3 km^2 s^-2")
rho_array: SimArray = gas_data["rho"].in_units("1e10 Msol Mpc^-3")
coord_array: SimArray = gas_data["pos"].in_units("Mpc")
mass_array = np.asarray(gas_data["mass"].in_units("1e10 Msol"))
bary_coords = np.asarray(coord_array)
bary_properties = {
"Temperatures": np.asarray(temperature_array.in_units("K")),
"Pressures": np.asarray(p_array),
"Densities": np.asarray(rho_array),
"Entropies": np.asarray(log10(p_array / rho_array ** (5 / 3))),
}
else:
with h5py.File(input_file) as f:
pt0 = f["PartType0"]
bary_coords = pt0["Coordinates"][:]
mass_array = pt0["Masses"][:]
bary_properties = {
"InternalEnergies": pt0["InternalEnergies"][:],
"Densities": pt0["Densities"][:],
"Pressures": pt0["Pressures"][:],
# "Entropies": log10(pt0["Densities"][:] / pt0["Densities"][:] ** (5 / 3)),
"Entropies": pt0["Entropies"][:]
}
bary_properties["Temperatures"] = bary_properties["InternalEnergies"]
radius = 1.9
resolution = 1000
# xrange[0], xrange[-1], yrange[0], yrange[-1]
extent = [center[0] - radius, center[0] + radius,
center[1] - radius, center[1] + radius]
# extent = [42, 62, 50, 70]
ramses_done = False
for ii, property in enumerate(["cic", "Densities", "Entropies", "Temperatures"]):
print("property:", property)
key = f"grid_{resolution}_{property}_{interpolation_method}_{radius}"
cached_grid = cache.get(key, str(bary_file))
if cached_grid is not None:
grid = cached_grid
else:
print("grid not yet cached, calculating now")
if property == "cic":
grid, _ = cic_range(X + center[0], Y + center[1], 1000, *extent, periodic=False)
grid = grid.T
coords_in_box = filter_3d(coords, extent, zlimit=(center[2] - .1, center[2] + .1))
rho, _ = cic_range(coords_in_box[::, 0], coords_in_box[::, 1], resolution, *extent,
periodic=False)
grid = 1.1 + rho.T
else:
grid = create_2d_slice(input_file, center, property=property,
extent=extent, method=interpolation_method)
cache.set(key, grid, str(input_file), compressed=True)
ax_baryon: Axes = axs_baryon[baryon_plot_counter, ii]
img = ax_baryon.imshow(
if not is_ramses:
grid = create_2d_slice(center, coords=bary_coords,
resolution=resolution,
property_name=property,
property_data=bary_properties[property],
extent=extent, method=interpolation_method)
else:
frac_center = center / 100
frac_extent = np.array(extent) / 100
print(frac_extent)
print(frac_center)
args, imager_dir = get_slice_argument(
frac_extent, frac_center,
bary_file,interpolation_method,
depth=.001
)
print(" ".join(args))
if not ramses_done:
run(args, cwd=imager_dir)
ramses_done = True
property_map = {
"Densities": "rhomap",
"Entropies": "Smap",
"Temperatures": "Tmap"
}
fname = imager_dir / f"snapshot_{property_map[property]}_zproj_zobs-0p00.bin"
grid = load_slice_data(fname).T
cache.set(key, grid, str(bary_file), compressed=True)
ax_baryon = axs_baryon[baryon_plot_counter][ii]
img: AxesImage = ax_baryon.imshow(
grid,
norm=LogNorm(),
interpolation="none",
origin="lower",
extent=extent,
)
ax_baryon.set_title(property)
if baryon_plot_counter == 0:
ax_baryon.set_title(property)
# ax_baryon.set_xlabel("X")
# ax_baryon.set_ylabel("Y")
ax_baryon.set_aspect("equal")
# exit()
baryon_plot_counter += 1
continue
# plot_cic(
# rho, extent,
# title=str(dir.name)
# )
ax1.legend()
ax2.legend()
fig1.tight_layout()
fig2.tight_layout()
r, prof = property_profile(bary_coords, center, mass_array, bary_properties, num_bins=100, rmin=0.002,
rmax=6.5)
integrator_name = "Ramses" if is_ramses else "Swift"
label = f"{integrator_name} {levelmin}, {levelmin_TF}, {levelmax}"
ax5.set_title("Densities")
ax6.set_title("Pressures")
ax5.loglog(r[1:], prof["Densities"], label=label)
ax6.loglog(r[1:], prof["Pressures"], label=label)
# fig3: Figure = plt.figure(figsize=(9, 9))
# axes: List[Axes] = fig3.subplots(3, 3, sharex=True, sharey=True).flatten()
fig3: Figure = plt.figure(
figsize=figsize_from_page_fraction(columns=2, height_to_width=1)
# just a bit more than 2/3 so that the two rows don't overlap
figsize=figsize_from_page_fraction(columns=2, height_to_width=33 / 48)
)
axes: List[Axes] = fig3.subplots(3, 3, sharex=True, sharey=True).flatten()
axes: List[Axes] = fig3.subplots(2, 3, sharex="all", sharey="all").flatten()
images.sort(key=lambda r: r.levels, reverse=True)
for result, ax in zip(images, axes):
data = 1.1 + result.rho
@@ -289,21 +389,31 @@ def main():
norm=LogNorm(vmin=vmin_scaled, vmax=vmax_scaled),
extent=extent,
origin="lower",
cmap="Greys",
interpolation="none"
)
ax.text(
0.5,
0.95,
result.title,
horizontalalignment="center",
verticalalignment="top",
transform=ax.transAxes,
)
ax.set_title(result.title)
fig3.tight_layout()
fig3.subplots_adjust(right=0.825)
cbar_ax = fig3.add_axes([0.85, 0.05, 0.05, 0.9])
fig3.colorbar(img, cax=cbar_ax)
for ax in [ax1, ax2, ax5, ax6]:
ax.legend()
for fig in [fig1, fig2, fig3, fig4, fig5, fig6]:
fig.tight_layout()
fig.subplots_adjust(wspace=0, hspace=0)
axs_baryon[0][0].set_ylabel("Swift")
axs_baryon[1][0].set_ylabel("Ramses")
fig1.savefig(Path(f"~/tmp/{plottype.value}1.pdf").expanduser())
fig2.savefig(Path(f"~/tmp/{plottype.value}2.pdf").expanduser())
fig3.savefig(Path(f"~/tmp/{plottype.value}3.pdf").expanduser())
fig1.savefig(Path(f"~/tmp/auriga1.pdf").expanduser())
fig2.savefig(Path(f"~/tmp/auriga2.pdf").expanduser())
fig3.savefig(Path("~/tmp/auriga3.pdf").expanduser())
fig4.tight_layout()
fig4.savefig(Path("~/tmp/slice.png").expanduser(), dpi=300)
fig4.savefig(Path(f"~/tmp/{plottype.value}4.pdf").expanduser())
pprint(centers)
plt.show()
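
The baryon-slice grids above are expensive to build, so the script stores them in the HDF5 cache, keyed by resolution, property, interpolation method and radius and scoped to the input snapshot. A minimal sketch of that get-or-compute pattern, using the HDFCache class imported above; the key string, snapshot path and grid computation are placeholders:

from pathlib import Path

import numpy as np

from cache import HDFCache

cache = HDFCache(Path("auriga_cache.hdf5"))
key = "grid_1000_Densities_nearest_1.9"    # placeholder key, mirroring the f-string above
snapshot = "/path/to/output_0004.hdf5"     # cache entries are scoped per input file

grid = cache.get(key, snapshot)
if grid is None:
    # placeholder for the expensive CIC / slice computation
    grid = np.random.random((1000, 1000))
    cache.set(key, grid, snapshot, compressed=True)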


@@ -28,6 +28,7 @@ class Counterset:
bad_match: int = 0
negative_cnfw: int = 0
too_small_halo: int = 0
not_a_field_halo: int = 0
checking_50: int = 0
checking_150: int = 0
num_matches: int = 0
@@ -45,16 +46,16 @@ def apply_offset(value, offset):
def compare_halo_resolutions(
ref_waveform: str,
comp_waveform: str,
reference_resolution: int,
comparison_resolution: int,
plot=False,
plot3d=False,
plot_cic=False,
single=False,
velo_halos=False,
force=False,
ref_waveform: str,
comp_waveform: str,
reference_resolution: int,
comparison_resolution: int,
plot=False,
plot3d=False,
plot_cic=False,
single=False,
velo_halos=False,
force=False,
):
reference_dir = base_dir / f"{ref_waveform}_{reference_resolution}_100"
comparison_dir = base_dir / f"{comp_waveform}_{comparison_resolution}_100/"
@@ -120,6 +121,10 @@ def compare_halo_resolutions(
print("skipping")
counters.too_small_halo += 1
continue
if ref_halo.Structuretype != 10:
print("not a field halo")
counters.not_a_field_halo += 1
continue
print("LEN", len(halo_particle_ids), ref_halo.Mass_tot)
offset_x, offset_y = ref_halo.X, ref_halo.Y
# cumulative_mass_profile(particles_in_ref_halo, ref_halo, ref_meta, plot=plot)
@@ -243,9 +248,9 @@ def compare_halo_resolutions(
mass_factor_limit = 5
if not (
1 / mass_factor_limit
< (comp_halo_masses[halo_id] / ref_halo_mass)
< mass_factor_limit
1 / mass_factor_limit
< (comp_halo_masses[halo_id] / ref_halo_mass)
< mass_factor_limit
):
# print("mass not similar, skipping")
num_skipped_for_mass += 1
@@ -258,9 +263,9 @@ def compare_halo_resolutions(
# similarity = len(shared_particles) / len(union_particles)
similarity = len(shared_particles) / (
len(halo_particle_ids)
+ len(particle_ids_in_comp_halo)
- len(shared_particles)
len(halo_particle_ids)
+ len(particle_ids_in_comp_halo)
- len(shared_particles)
)
# assert similarity_orig == similarity
# print(shared_size)
@@ -295,6 +300,10 @@ def compare_halo_resolutions(
if plot3d:
plotdf3d(pl, df, color="#fed9a6") # light orange
if similarity > best_halo_match:
comp_halo_in_comparison: pd.Series = df_comp_halo.loc[halo_id]
if comp_halo_in_comparison.Structuretype != ref_halo.Structuretype:
print("different Structuretype")
continue
best_halo_match = similarity
best_halo = halo_id
print(f"skipped {num_skipped_for_mass} halos due to mass ratio")
@@ -312,11 +321,11 @@ def compare_halo_resolutions(
[ref_halo.add_prefix("ref_"), comp_halo.add_prefix("comp_")]
)
distance = (
linalg.norm(
np.array([ref_halo.X, ref_halo.Y, ref_halo.Z])
- np.array([comp_halo.X, comp_halo.Y, comp_halo.Z])
)
/ ref_halo.Rvir
linalg.norm(
np.array([ref_halo.X, ref_halo.Y, ref_halo.Z])
- np.array([comp_halo.X, comp_halo.Y, comp_halo.Z])
)
/ ref_halo.Rvir
)
halo_data["distance"] = distance
halo_data["match"] = best_halo_match
@@ -353,7 +362,7 @@ def precalculate_halo_membership(df_comp, df_comp_halo):
print_progress(i, len(df_comp_halo), halo["Sizes"])
size = int(halo["Sizes"])
halo_id = int(i)
halo_particles = df_comp.iloc[pointer : pointer + size]
halo_particles = df_comp.iloc[pointer: pointer + size]
# check_id = halo_particles["FOFGroupIDs"].to_numpy()
# assert (check_id == i).all()
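
The new checks above restrict matching to field halos (VELOCIraptor Structuretype 10) and require the matched halo to share the reference halo's Structuretype. As a hedged illustration, the same field-halo cut applied directly to a halo catalogue DataFrame; df_halo is assumed to carry the Structuretype column used above:

import pandas as pd

def keep_field_halos(df_halo: pd.DataFrame) -> pd.DataFrame:
    # Structuretype 10 marks field halos; anything else is a substructure
    # and is now skipped and counted via the new not_a_field_halo counter.
    return df_halo[df_halo.Structuretype == 10]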

halo_mass_profile.py

@@ -1,14 +1,15 @@
import sys
from pathlib import Path
from typing import Dict
import matplotlib.pyplot as plt
import numpy as np
import pandas as pd
from matplotlib.axes import Axes
from matplotlib.figure import Figure
from find_center import find_center
from readfiles import ParticlesMeta, read_file, read_halo_file
from temperatures import calculate_T
from utils import print_progress
def V(r):
@@ -16,19 +17,17 @@ def V(r):
def halo_mass_profile(
particles: pd.DataFrame,
center: np.ndarray,
particles_meta: ParticlesMeta,
vmin: float,
vmax: float,
plot=False,
num_bins=30,
positions: np.ndarray,
center: np.ndarray,
particles_meta: ParticlesMeta,
rmin: float,
rmax: float,
plot=False,
num_bins=30,
):
positions = particles[["X", "Y", "Z"]].to_numpy()
distances = np.linalg.norm(positions - center, axis=1)
group_radius = distances.max()
log_radial_bins = np.geomspace(vmin, vmax, num_bins)
log_radial_bins = np.geomspace(rmin, rmax, num_bins)
bin_masses = []
bin_densities = []
@@ -64,6 +63,35 @@ def halo_mass_profile(
return log_radial_bins, bin_masses, bin_densities, center
def property_profile(positions: np.ndarray, center: np.ndarray, masses: np.ndarray, properties: Dict[str, np.ndarray],
rmin: float, rmax: float, num_bins: int):
distances = np.linalg.norm(positions - center, axis=1)
log_radial_bins = np.geomspace(rmin, rmax, num_bins)
means = {}
for key in properties.keys():
means[key] = []
for k in range(num_bins - 1):
bin_start = log_radial_bins[k]
bin_end = log_radial_bins[k + 1]
print_progress(k, num_bins - 2, bin_end)
in_bin = np.where((bin_start < distances) & (distances < bin_end))[0]
masses_in_ring = masses[in_bin]
for property, property_values in properties.items():
if property == "InternalEnergies":
continue
prop_in_ring = property_values[in_bin]
if property == "Temperatures":
prop_in_ring = np.array([calculate_T(u) for u in prop_in_ring])
# mean_in_ring_unweighted = np.mean(prop_in_ring)
mean_in_ring = (prop_in_ring * masses_in_ring).sum() / masses_in_ring.sum()
# print(mean_in_ring_unweighted, mean_in_ring)
means[property].append(mean_in_ring)
return log_radial_bins, means
if __name__ == "__main__":
input_file = Path(sys.argv[1])
df, particles_meta = read_file(input_file)
@@ -79,4 +107,4 @@ if __name__ == "__main__":
halo = df_halos.loc[halo_id]
halo_mass_profile(particles_in_halo, halo, particles_meta, plot=True)
halo_mass_profile(particles_in_halo[["X", "Y", "Z"]].to_numpy(), halo, particles_meta, plot=True)
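
With this change halo_mass_profile takes a plain (N, 3) coordinate array and explicit rmin/rmax radial limits instead of a DataFrame and vmin/vmax, and the new property_profile returns mass-weighted radial means per property. A usage sketch following the call sites elsewhere in this commit; all arrays and numeric values below are placeholders, and with synthetic data the innermost bins may stay empty and yield NaN means:

import numpy as np

from halo_mass_profile import halo_mass_profile, property_profile
from readfiles import ParticlesMeta

positions = np.random.normal(loc=5.0, scale=1.5, size=(10_000, 3))  # placeholder coordinates [Mpc]
center = np.array([5.0, 5.0, 5.0])                                   # placeholder halo centre
meta = ParticlesMeta(particle_mass=1e-4)                             # placeholder particle mass

bins, bin_masses, bin_densities, center = halo_mass_profile(
    positions, center, meta, plot=False, num_bins=100, rmin=0.002, rmax=6.5
)

masses = np.ones(10_000)                               # placeholder gas masses
properties = {"Densities": np.random.random(10_000)}   # placeholder property arrays
r, profiles = property_profile(
    positions, center, masses, properties, num_bins=100, rmin=0.002, rmax=6.5
)
# profiles["Densities"] holds one mass-weighted mean per radial bin (len(r) - 1 values)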


@@ -129,7 +129,10 @@ def main():
# break
# break
ylabels = [item for item in waveforms for _ in range(2)]
if is_box:
ylabels = waveforms
else:
ylabels = [item for item in waveforms for _ in range(2)]
rowcolumn_labels(axes, ylabels, isrow=True)
rowcolumn_labels(axes, resolutions, isrow=False)

paths.py

@@ -6,3 +6,5 @@ auriga_dir = Path("/path/to/auriga6/")
richings_dir = Path("path/to/richings21_ics/").expanduser()
spectra_dir = Path("/path/to/git/spectra/build/")
has_1024_simulations = False
ramses_imager = Path("~/cosmoca/RamsesImager/ramses_imager").expanduser()
pbh_dir = Path("~/cosmos_data/PBH/").expanduser()


@@ -1,23 +1,21 @@
import numpy as np
import pandas as pd
from matplotlib import pyplot as plt
from matplotlib.axes import Axes
from matplotlib.figure import Figure
from cic import cic_from_radius, plot_cic
from find_center import find_center
from halo_mass_profile import halo_mass_profile
from paths import pbh_dir
from readfiles import read_g4_file, ParticlesMeta
from utils import figsize_from_page_fraction
def cic_comparison(pbh_high_coord, ref_high_coord,center):
def cic_comparison(pbh_high_coord, ref_high_coord, center):
rhos = []
i = 0
for coord in [ref_high_coord, pbh_high_coord]:
rho, extent = cic_from_radius(
coord[::, 0], coord[::, 2], 3000, center[0], center[2], .2, periodic=False
coord[::, 0], coord[::, 1], 3000, center[0], center[1], 2, periodic=False
)
rhos.append(rho)
@@ -32,20 +30,20 @@ def cic_comparison(pbh_high_coord, ref_high_coord,center):
def main():
ref_data = read_g4_file(
pbh_dir / "CDM-L50-N128" / "snapshot_039.hdf5",
pbh_dir / "cdm" / "snapshot_005.hdf5",
zoom_type="cdm")
pbh_data = read_g4_file(
pbh_dir / "DM-L50-N128" / "snapshot_039.hdf5",
pbh_dir / "10000sigma" / "snapshot_005.hdf5",
zoom_type="pbh")
center = [30, 32, 30]
cic_comparison(ref_data[0], pbh_data[0], center)
fig1: Figure = plt.figure(figsize=figsize_from_page_fraction())
ax1: Axes = fig1.gca()
fig2: Figure = plt.figure(figsize=figsize_from_page_fraction())
ax2: Axes = fig2.gca()
center = [32.423177, 37.255220, 36.026005]
centered = False
for data in [ref_data, pbh_data]:
highres_coords, lowres_coords, highres_mass, lowres_mass = data
df = pd.DataFrame(highres_coords, columns=["X", "Y", "Z"])
particles_meta = ParticlesMeta(particle_mass=highres_mass)
# center = np.median(highres_coords, axis=0)
print(center)
@@ -53,13 +51,13 @@ def main():
# center = find_center(df, center, initial_radius=0.01)
centered = True
log_radial_bins, bin_masses, bin_densities, center = halo_mass_profile(
df, center, particles_meta, plot=False, num_bins=100, vmin=0.002, vmax=5
highres_coords, center, particles_meta, plot=False, num_bins=100, rmin=0.002, rmax=5
)
ax1.loglog(log_radial_bins[:-1], bin_masses)
ax2.loglog(log_radial_bins[:-1], bin_densities)
plt.show()
cic_comparison(ref_data[0], pbh_data[0],center)
cic_comparison(ref_data[0], pbh_data[0], center)
if __name__ == '__main__':

ramses.py

@@ -1,4 +1,5 @@
from pathlib import Path
from typing import List
import numpy as np
import pynbody
@@ -14,7 +15,12 @@ def load_ramses_data(ramses_dir: Path):
coord_array: SimArray = s.dm["pos"]
a = s.properties["a"]
print("RAMSES a", a)
# p = Profile(s.gas, ndim=3)
# s.gas["pos"]-=
# fig,ax=create_figure()
# ax.plot(p['rbins'], p['density'], 'k')
# plt.show()
# exit()
masses = np.asarray(mass_array.in_units("1e10 Msol"))
high_res_mass = np.amin(np.unique(masses)) # get lowest mass of particles
is_high_res_particle = masses == high_res_mass
@@ -24,4 +30,47 @@ def load_ramses_data(ramses_dir: Path):
particles_meta = ParticlesMeta(particle_mass=high_res_mass)
center = np.median(hr_coordinates, axis=0)
return hr_coordinates, particles_meta, center
def get_slice_argument(extent: List[float], center: List[float], ramses_dir: Path, interpolation_method: str,
depth: float):
xmin, xmax, ymin, ymax = extent
_, _, zcenter = center
interpolate=interpolation_method=="linear"
arguments = {
"x": (xmin + xmax) / 2,
"y": (ymin + ymax) / 2,
"z": zcenter,
"w": xmax - xmin,
"h": ymax - ymin,
"d": depth,
"l": 14 if interpolate else 12
}
from paths import ramses_imager
args = [str(ramses_imager)]
for k, v in arguments.items():
args.append(f"-{k} {v}")
if interpolate:
args.append("-i")
args.append(str(ramses_dir / "info_00009.txt"))
return args, ramses_imager.parent
def load_slice_data(file: Path):
with file.open("rb") as infile:
np.fromfile(file=infile, dtype=np.int32, count=1)
[nx, ny] = np.fromfile(file=infile, dtype=np.int32, count=2)
np.fromfile(file=infile, dtype=np.int32, count=1)
np.fromfile(file=infile, dtype=np.int32, count=1)
data: np.ndarray = np.fromfile(file=infile, dtype=np.float32, count=nx * ny)
np.fromfile(file=infile, dtype=np.int32, count=1)
print("NEGATIVE", (data < 0).sum())
# np.fromfile(file=infile, dtype=np.int32, count=1)
# cm_per_px = np.fromfile(file=infile, dtype=np.float64, count=1)[0]
# np.fromfile(file=infile, dtype=np.int32, count=1)
return data.reshape((nx, ny))
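
The two new helpers are driven from the main comparison script roughly as follows: get_slice_argument builds the ramses_imager command line (positions in box fractions, so the caller divides by the 100 Mpc box size), the tool is run once per snapshot, and load_slice_data reads the resulting binary maps. Paths and values below are placeholders; the rhomap/Smap/Tmap filename pattern is the one used by the calling code:

from pathlib import Path
from subprocess import run

import numpy as np

from ramses import get_slice_argument, load_slice_data

snapshot_dir = Path("/path/to/ramses/output_00009")   # placeholder RAMSES output directory
center = np.array([50.0, 50.0, 50.0]) / 100           # box-fraction units, as in the caller above
extent = np.array([48.0, 52.0, 48.0, 52.0]) / 100     # xmin, xmax, ymin, ymax in box fractions

args, imager_dir = get_slice_argument(extent, center, snapshot_dir, "nearest", depth=.001)
run(args, cwd=imager_dir)   # ramses_imager writes its *_zproj_*.bin maps into imager_dir

density_grid = load_slice_data(imager_dir / "snapshot_rhomap_zproj_zobs-0p00.bin").T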

slices.py

@@ -1,7 +1,5 @@
from pathlib import Path
from typing import List, Tuple
import h5py
import matplotlib.pyplot as plt
import numpy as np
from matplotlib.colors import LogNorm
@@ -12,9 +10,9 @@ from utils import create_figure
def filter_3d(
coords: np.ndarray, data: np.ndarray,
extent: List[float]
) -> Tuple[np.ndarray, np.ndarray]:
coords: np.ndarray, extent: List[float], data: np.ndarray = None, zlimit=None
) -> Tuple[np.ndarray, np.ndarray] | np.ndarray:
filter = (
(extent[0] < coords[::, 0]) &
(coords[::, 0] < extent[1]) &
@@ -22,57 +20,60 @@
(extent[2] < coords[::, 1]) &
(coords[::, 1] < extent[3])
)
if zlimit:
filter = filter & (
(zlimit[0] < coords[::, 2]) &
(coords[::, 2] < zlimit[1])
)
print("before", coords.shape)
data = data[filter]
if data is not None:
data = data[filter]
coords = coords[filter]
print("after", coords.shape)
return coords, data
if data is not None:
return coords, data
return coords
def create_2d_slice(
input_file: Path, center: List[float], extent,
property="InternalEnergies", method="nearest"
) -> np.ndarray:
def create_2d_slice(center: List[float], extent, coords: np.ndarray, property_name: str, property_data: np.ndarray,
resolution: int,
method="nearest") -> np.ndarray:
cut_axis = 2 # Z
with h5py.File(input_file) as f:
pt0 = f["PartType0"]
coords = pt0["Coordinates"][:]
data = pt0[property if property != "Temperatures" else "InternalEnergies"][:]
coords, data = filter_3d(coords, data, extent)
if property == "Temperatures":
print("calculating temperatures")
data = np.array([calculate_T(u) for u in data])
xrange = np.linspace(extent[0],extent[1], 1000)
yrange = np.linspace(extent[2],extent[3], 1000)
gx, gy, gz = np.meshgrid(xrange, yrange, center[cut_axis])
print("interpolating")
grid = griddata(coords, data, (gx, gy, gz), method=method)[::, ::, 0]
return grid
print(grid.shape)
# stats, x_edge, y_edge, _ = binned_statistic_2d(
# coords_in_slice[::, x_axis],
# coords_in_slice[::, y_axis],
# data_in_slice,
# bins=500,
# statistic="mean"
# )
fig, ax = create_figure()
# stats = np.nan_to_num(stats)
print("plotting")
img = ax.imshow(
grid,
norm=LogNorm(),
interpolation="nearest",
origin="lower",
extent=[xrange[0], xrange[-1], yrange[0], yrange[-1]],
)
ax.set_title(input_file.parent.stem)
ax.set_xlabel(x_axis_label)
ax.set_ylabel(y_axis_label)
ax.set_aspect("equal")
fig.colorbar(img, label="Temperatures")
fig.tight_layout()
plt.show()
coords, property_data = filter_3d(coords, extent, property_data)
if property_name == "Temperatures":
print("calculating temperatures")
property_data = np.array([calculate_T(u) for u in property_data])
xrange = np.linspace(extent[0], extent[1], resolution)
yrange = np.linspace(extent[2], extent[3], resolution)
gx, gy, gz = np.meshgrid(xrange, yrange, center[cut_axis])
print("interpolating")
grid = griddata(coords, property_data, (gx, gy, gz), method=method)[::, ::, 0]
return grid
print(grid.shape)
# stats, x_edge, y_edge, _ = binned_statistic_2d(
# coords_in_slice[::, x_axis],
# coords_in_slice[::, y_axis],
# data_in_slice,
# bins=500,
# statistic="mean"
# )
fig, ax = create_figure()
# stats = np.nan_to_num(stats)
print("plotting")
img = ax.imshow(
grid,
norm=LogNorm(),
interpolation="nearest",
origin="lower",
extent=[xrange[0], xrange[-1], yrange[0], yrange[-1]],
)
ax.set_title(input_file.parent.stem)
ax.set_xlabel(x_axis_label)
ax.set_ylabel(y_axis_label)
ax.set_aspect("equal")
fig.colorbar(img, label="Temperatures")
fig.tight_layout()
plt.show()
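
filter_3d now also accepts a bare coordinate array plus an optional zlimit window, and create_2d_slice no longer opens the snapshot itself but interpolates caller-supplied property values onto a grid in the z = center[2] plane. A sketch of both call patterns as used in the main script; arrays, extent and resolution are placeholders:

import numpy as np

from slices import filter_3d, create_2d_slice

coords = np.random.random((10_000, 3)) * 100   # placeholder particle positions [Mpc]
densities = np.random.random(10_000)           # placeholder property values
center = [50.0, 50.0, 50.0]
extent = [48.0, 52.0, 48.0, 52.0]              # xmin, xmax, ymin, ymax

# coordinates only, additionally clipped to a thin window around the slice plane
coords_in_box = filter_3d(coords, extent, zlimit=(center[2] - .1, center[2] + .1))

# interpolate one property onto a 2D grid in the z = center[2] plane
grid = create_2d_slice(center, extent, coords=coords, property_name="Densities",
                       property_data=densities, resolution=100, method="nearest")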

utils.py

@@ -39,6 +39,9 @@ def read_swift_config(dir: Path):
def print_wall_time(dir: Path):
"""
Attention: This idea is flawed as it only shows the wall time of the last time the simulation was restarted
"""
with (dir / "swift.log").open() as f:
last_line = f.readlines()[-1]
print(last_line)