mirror of https://github.com/Findus23/halo_comparison.git synced 2024-09-19 16:03:50 +02:00

Changed layout of comparison figures: introduced sup(x/y)labels (fig.supxlabel/fig.supylabel) while keeping the old structure, resized the rowcolumn_labels to fit the other labels, and moved the comp information

Formatted everything with black
glatterf42 2022-08-10 16:26:30 +02:00
parent 60bd933469
commit 39bb626a42
25 changed files with 947 additions and 427 deletions


@ -33,10 +33,12 @@ from utils import figsize_from_page_fraction
# rc('ytick',direction='in')
# rc('legend',fontsize='x-large')
base_shape = {'u': [np.array([0, 1]), np.array([1, 0]), np.array([0, -1])],
'd': [np.array([0, -1]), np.array([-1, 0]), np.array([0, 1])],
'r': [np.array([1, 0]), np.array([0, 1]), np.array([-1, 0])],
'l': [np.array([-1, 0]), np.array([0, -1]), np.array([1, 0])]}
base_shape = {
"u": [np.array([0, 1]), np.array([1, 0]), np.array([0, -1])],
"d": [np.array([0, -1]), np.array([-1, 0]), np.array([0, 1])],
"r": [np.array([1, 0]), np.array([0, 1]), np.array([-1, 0])],
"l": [np.array([-1, 0]), np.array([0, -1]), np.array([1, 0])],
}
def hilbert_curve(order, orientation):
@ -44,26 +46,46 @@ def hilbert_curve(order, orientation):
Recursively creates the structure for a hilbert curve of given order
"""
if order > 1:
if orientation == 'u':
return hilbert_curve(order - 1, 'r') + [np.array([0, 1])] + \
hilbert_curve(order - 1, 'u') + [np.array([1, 0])] + \
hilbert_curve(order - 1, 'u') + [np.array([0, -1])] + \
hilbert_curve(order - 1, 'l')
elif orientation == 'd':
return hilbert_curve(order - 1, 'l') + [np.array([0, -1])] + \
hilbert_curve(order - 1, 'd') + [np.array([-1, 0])] + \
hilbert_curve(order - 1, 'd') + [np.array([0, 1])] + \
hilbert_curve(order - 1, 'r')
elif orientation == 'r':
return hilbert_curve(order - 1, 'u') + [np.array([1, 0])] + \
hilbert_curve(order - 1, 'r') + [np.array([0, 1])] + \
hilbert_curve(order - 1, 'r') + [np.array([-1, 0])] + \
hilbert_curve(order - 1, 'd')
if orientation == "u":
return (
hilbert_curve(order - 1, "r")
+ [np.array([0, 1])]
+ hilbert_curve(order - 1, "u")
+ [np.array([1, 0])]
+ hilbert_curve(order - 1, "u")
+ [np.array([0, -1])]
+ hilbert_curve(order - 1, "l")
)
elif orientation == "d":
return (
hilbert_curve(order - 1, "l")
+ [np.array([0, -1])]
+ hilbert_curve(order - 1, "d")
+ [np.array([-1, 0])]
+ hilbert_curve(order - 1, "d")
+ [np.array([0, 1])]
+ hilbert_curve(order - 1, "r")
)
elif orientation == "r":
return (
hilbert_curve(order - 1, "u")
+ [np.array([1, 0])]
+ hilbert_curve(order - 1, "r")
+ [np.array([0, 1])]
+ hilbert_curve(order - 1, "r")
+ [np.array([-1, 0])]
+ hilbert_curve(order - 1, "d")
)
else:
return hilbert_curve(order - 1, 'd') + [np.array([-1, 0])] + \
hilbert_curve(order - 1, 'l') + [np.array([0, -1])] + \
hilbert_curve(order - 1, 'l') + [np.array([1, 0])] + \
hilbert_curve(order - 1, 'u')
return (
hilbert_curve(order - 1, "d")
+ [np.array([-1, 0])]
+ hilbert_curve(order - 1, "l")
+ [np.array([0, -1])]
+ hilbert_curve(order - 1, "l")
+ [np.array([1, 0])]
+ hilbert_curve(order - 1, "u")
)
else:
return base_shape[orientation]
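Note: a quick sanity check of the recursion (a minimal sketch, assuming hilbert_curve and base_shape from this file are in scope): an order-n curve visits 4**n cells, so it must consist of 4**n - 1 unit steps.
import numpy as np
for n in range(1, 5):
    steps = hilbert_curve(n, "u")
    assert len(steps) == 4 ** n - 1
    # every step is a unit move along exactly one axis
    assert all(np.abs(s).sum() == 1 for s in steps)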
@ -88,15 +110,17 @@ def hilbert_curve(order, orientation):
order = 6
curve = hilbert_curve(order, 'u')
curve = hilbert_curve(order, "u")
curve = np.array(curve) * 4
cumulative_curve_int = np.array([np.sum(curve[:i], 0) for i in range(len(curve) + 1)])
cumulative_curve = (np.array([np.sum(curve[:i], 0) for i in range(len(curve) + 1)]) + 2) / 2 ** (order + 2)
cumulative_curve = (
np.array([np.sum(curve[:i], 0) for i in range(len(curve) + 1)]) + 2
) / 2 ** (order + 2)
# plot curve using plt
N = 2 ** (2 * order)
sublevel = order - 4
cmap = cm.get_cmap('jet')
cmap = cm.get_cmap("jet")
fig = plt.figure(figsize=figsize_from_page_fraction(height_to_width=1))
t = {}
@ -104,31 +128,38 @@ sublevel = 7
for i in range(2 ** (2 * sublevel)):
il = i * N // (2 ** (2 * sublevel))
ir = (i + 1) * N // 2 ** (2 * sublevel)
plt.plot(cumulative_curve[il:ir + 1, 0], cumulative_curve[il:ir + 1, 1], lw=0.5, c=cmap(i / 2 ** (2 * sublevel)))
plt.plot(
cumulative_curve[il : ir + 1, 0],
cumulative_curve[il : ir + 1, 1],
lw=0.5,
c=cmap(i / 2 ** (2 * sublevel)),
)
plt.xlabel('$x$')
plt.ylabel('$y$')
plt.xlabel("$x$")
plt.ylabel("$y$")
plt.tight_layout()
plt.savefig(Path(f"~/tmp/hilbert_indexcolor.eps").expanduser())
key = b'0123456789ABCDEF'
key = b"0123456789ABCDEF"
num = 123
print(siphash.SipHash_2_4(key, bytes(num)).hash())
order = 6
curve = hilbert_curve(order, 'u')
curve = hilbert_curve(order, "u")
curve = np.array(curve) * 4
cumulative_curve_int = np.array([np.sum(curve[:i], 0) for i in range(len(curve) + 1)])
cumulative_curve = (np.array([np.sum(curve[:i], 0) for i in range(len(curve) + 1)]) + 2) / 2 ** (order + 2)
cumulative_curve = (
np.array([np.sum(curve[:i], 0) for i in range(len(curve) + 1)]) + 2
) / 2 ** (order + 2)
# plot curve using plt
N = 2 ** (2 * order)
sublevel = order - 4
cmap = cm.get_cmap('jet')
cmap = cm.get_cmap("jet")
plt.figure()
key = b'0123456789ABCDEF'
key = b"0123456789ABCDEF"
fig = plt.figure(figsize=figsize_from_page_fraction(height_to_width=1))
t = {}
@ -137,10 +168,15 @@ for i in range(2 ** (2 * sublevel)):
il = i * N // (2 ** (2 * sublevel))
ir = (i + 1) * N // 2 ** (2 * sublevel)
sipkey = siphash.SipHash_2_4(key, bytes(il)).hash()
plt.plot(cumulative_curve[il:ir + 1, 0], cumulative_curve[il:ir + 1, 1], lw=0.5, c=cmap(sipkey / 2 ** 64))
plt.plot(
cumulative_curve[il : ir + 1, 0],
cumulative_curve[il : ir + 1, 1],
lw=0.5,
c=cmap(sipkey / 2 ** 64),
)
plt.xlabel('$x$')
plt.ylabel('$y$')
plt.xlabel("$x$")
plt.ylabel("$y$")
plt.tight_layout()
plt.savefig(Path(f"~/tmp/hilbert_indexcolor_scrambled.eps").expanduser())
plt.show()


@ -8,6 +8,7 @@ from typing import List
import matplotlib.pyplot as plt
import numpy as np
# two-fold upsampling -- https://cnx.org/contents/xsppCgXj@8.18:H_wA16rf@16/Upsampling
from matplotlib.axes import Axes
from matplotlib.figure import Figure
@ -69,12 +70,12 @@ def cascade_algorithm(h, g, maxit):
for it in range(maxit):
# perform repeated convolutions
phi_it = np.sqrt(2) * np.convolve(h_it, phi_it, mode='full')
phi_it = np.sqrt(2) * np.convolve(h_it, phi_it, mode="full")
if it != maxit - 1:
psi_it = np.sqrt(2) * np.convolve(h_it, psi_it, mode='full')
psi_it = np.sqrt(2) * np.convolve(h_it, psi_it, mode="full")
else:
psi_it = np.sqrt(2) * np.convolve(g_it, psi_it, mode='full')
psi_it = np.sqrt(2) * np.convolve(g_it, psi_it, mode="full")
# upsample the coefficients
h_it = upsample(h_it)
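Note: upsample itself lies outside this hunk. For reference, two-fold upsampling of a filter inserts a zero between consecutive taps; a minimal sketch (the file's actual implementation may differ at the boundaries):
import numpy as np
def upsample_sketch(h: np.ndarray) -> np.ndarray:
    # original taps at even indices, zeros in between
    out = np.zeros(2 * len(h) - 1)
    out[::2] = h
    return out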
@ -108,55 +109,173 @@ xdb2, phidb2, psidb2 = cascade_algorithm(h_DB2, g_DB2, maxit)
# DB3 -- http://wavelets.pybytes.com/wavelet/db3/
h_DB3 = np.array(
[0.3326705529509569, 0.8068915093133388, 0.4598775021193313, -0.13501102001039084, -0.08544127388224149,
0.035226291882100656])
[
0.3326705529509569,
0.8068915093133388,
0.4598775021193313,
-0.13501102001039084,
-0.08544127388224149,
0.035226291882100656,
]
)
g_DB3 = np.array(
[0.035226291882100656, 0.08544127388224149, -0.13501102001039084, -0.4598775021193313, 0.8068915093133388,
-0.3326705529509569])
[
0.035226291882100656,
0.08544127388224149,
-0.13501102001039084,
-0.4598775021193313,
0.8068915093133388,
-0.3326705529509569,
]
)
xdb3, phidb3, psidb3 = cascade_algorithm(h_DB3, g_DB3, maxit)
# DB4 -- http://wavelets.pybytes.com/wavelet/db4/
h_DB4 = np.array(
[0.23037781330885523, 0.7148465705525415, 0.6308807679295904, -0.02798376941698385, -0.18703481171888114,
0.030841381835986965, 0.032883011666982945, -0.010597401784997278])
[
0.23037781330885523,
0.7148465705525415,
0.6308807679295904,
-0.02798376941698385,
-0.18703481171888114,
0.030841381835986965,
0.032883011666982945,
-0.010597401784997278,
]
)
g_DB4 = np.array(
[-0.010597401784997278, -0.032883011666982945, 0.030841381835986965, 0.18703481171888114, -0.02798376941698385,
-0.6308807679295904, 0.7148465705525415, -0.23037781330885523])
[
-0.010597401784997278,
-0.032883011666982945,
0.030841381835986965,
0.18703481171888114,
-0.02798376941698385,
-0.6308807679295904,
0.7148465705525415,
-0.23037781330885523,
]
)
xdb4, phidb4, psidb4 = cascade_algorithm(h_DB4, g_DB4, maxit)
# DB8 -- http://wavelets.pybytes.com/wavelet/db8/
h_DB8 = np.array(
[0.05441584224308161, 0.3128715909144659, 0.6756307362980128, 0.5853546836548691, -0.015829105256023893,
-0.2840155429624281, 0.00047248457399797254, 0.128747426620186, -0.01736930100202211, -0.04408825393106472,
0.013981027917015516, 0.008746094047015655, -0.00487035299301066, -0.0003917403729959771, 0.0006754494059985568,
-0.00011747678400228192])
[
0.05441584224308161,
0.3128715909144659,
0.6756307362980128,
0.5853546836548691,
-0.015829105256023893,
-0.2840155429624281,
0.00047248457399797254,
0.128747426620186,
-0.01736930100202211,
-0.04408825393106472,
0.013981027917015516,
0.008746094047015655,
-0.00487035299301066,
-0.0003917403729959771,
0.0006754494059985568,
-0.00011747678400228192,
]
)
g_DB8 = np.array(
[-0.00011747678400228192, -0.0006754494059985568, -0.0003917403729959771, 0.00487035299301066, 0.008746094047015655,
-0.013981027917015516, -0.04408825393106472, 0.01736930100202211, 0.128747426620186, -0.00047248457399797254,
-0.2840155429624281, 0.015829105256023893, 0.5853546836548691, -0.6756307362980128, 0.3128715909144659,
-0.05441584224308161])
[
-0.00011747678400228192,
-0.0006754494059985568,
-0.0003917403729959771,
0.00487035299301066,
0.008746094047015655,
-0.013981027917015516,
-0.04408825393106472,
0.01736930100202211,
0.128747426620186,
-0.00047248457399797254,
-0.2840155429624281,
0.015829105256023893,
0.5853546836548691,
-0.6756307362980128,
0.3128715909144659,
-0.05441584224308161,
]
)
xdb8, phidb8, psidb8 = cascade_algorithm(h_DB8, g_DB8, maxit)
# DB16 --
h_DB16 = np.array(
[0.0031892209253436892, 0.03490771432362905, 0.1650642834886438, 0.43031272284545874, 0.6373563320829833,
0.44029025688580486, -0.08975108940236352, -0.3270633105274758, -0.02791820813292813, 0.21119069394696974,
0.027340263752899923, -0.13238830556335474, -0.006239722752156254, 0.07592423604445779, -0.007588974368642594,
-0.036888397691556774, 0.010297659641009963, 0.013993768859843242, -0.006990014563390751, -0.0036442796214883506,
0.00312802338120381, 0.00040789698084934395, -0.0009410217493585433, 0.00011424152003843815,
0.00017478724522506327, -6.103596621404321e-05, -1.394566898819319e-05, 1.133660866126152e-05,
-1.0435713423102517e-06, -7.363656785441815e-07, 2.3087840868545578e-07, -2.1093396300980412e-08])
g_DB16 = np.array([-2.1093396300980412e-08, -2.3087840868545578e-07, -7.363656785441815e-07, 1.0435713423102517e-06,
1.133660866126152e-05, 1.394566898819319e-05, -6.103596621404321e-05, -0.00017478724522506327,
0.00011424152003843815, 0.0009410217493585433, 0.00040789698084934395, -0.00312802338120381,
-0.0036442796214883506, 0.006990014563390751, 0.013993768859843242, -0.010297659641009963,
-0.036888397691556774, 0.007588974368642594, 0.07592423604445779, 0.006239722752156254,
-0.13238830556335474, -0.027340263752899923, 0.21119069394696974, 0.02791820813292813,
-0.3270633105274758, 0.08975108940236352, 0.44029025688580486, -0.6373563320829833,
0.43031272284545874, -0.1650642834886438, 0.03490771432362905, -0.0031892209253436892])
[
0.0031892209253436892,
0.03490771432362905,
0.1650642834886438,
0.43031272284545874,
0.6373563320829833,
0.44029025688580486,
-0.08975108940236352,
-0.3270633105274758,
-0.02791820813292813,
0.21119069394696974,
0.027340263752899923,
-0.13238830556335474,
-0.006239722752156254,
0.07592423604445779,
-0.007588974368642594,
-0.036888397691556774,
0.010297659641009963,
0.013993768859843242,
-0.006990014563390751,
-0.0036442796214883506,
0.00312802338120381,
0.00040789698084934395,
-0.0009410217493585433,
0.00011424152003843815,
0.00017478724522506327,
-6.103596621404321e-05,
-1.394566898819319e-05,
1.133660866126152e-05,
-1.0435713423102517e-06,
-7.363656785441815e-07,
2.3087840868545578e-07,
-2.1093396300980412e-08,
]
)
g_DB16 = np.array(
[
-2.1093396300980412e-08,
-2.3087840868545578e-07,
-7.363656785441815e-07,
1.0435713423102517e-06,
1.133660866126152e-05,
1.394566898819319e-05,
-6.103596621404321e-05,
-0.00017478724522506327,
0.00011424152003843815,
0.0009410217493585433,
0.00040789698084934395,
-0.00312802338120381,
-0.0036442796214883506,
0.006990014563390751,
0.013993768859843242,
-0.010297659641009963,
-0.036888397691556774,
0.007588974368642594,
0.07592423604445779,
0.006239722752156254,
-0.13238830556335474,
-0.027340263752899923,
0.21119069394696974,
0.02791820813292813,
-0.3270633105274758,
0.08975108940236352,
0.44029025688580486,
-0.6373563320829833,
0.43031272284545874,
-0.1650642834886438,
0.03490771432362905,
-0.0031892209253436892,
]
)
xdb16, phidb16, psidb16 = cascade_algorithm(h_DB16, g_DB16, maxit)
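Note: each (h, g) pair above satisfies the quadrature-mirror relation g[k] = (-1)^k h[N-1-k]; a quick check against the DB4 arrays defined above:
import numpy as np
g_expected = ((-1) ** np.arange(len(h_DB4))) * h_DB4[::-1]
assert np.allclose(g_DB4, g_expected)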
@ -164,14 +283,15 @@ xdb16, phidb16, psidb16 = cascade_algorithm(h_DB16, g_DB16, maxit)
fig: Figure
fig, ax = plt.subplots(
4, 2,
4,
2,
figsize=figsize_from_page_fraction(height_to_width=12 / 8),
# sharex="all", sharey="all"
)
labels = ['Haar', 'DB2', 'DB4', 'DB8', 'DB16']
labels = ["Haar", "DB2", "DB4", "DB8", "DB16"]
ax[0, 0].set_title('scaling functions $\\varphi$')
ax[0, 1].set_title('wavelets $\\psi$')
ax[0, 0].set_title("scaling functions $\\varphi$")
ax[0, 1].set_title("wavelets $\\psi$")
ax[0, 0].plot(xhaar, phihaar, lw=1)
ax[0, 1].plot(xhaar, psihaar, lw=1)
@ -188,7 +308,7 @@ ax[3, 1].plot(xdb8, psidb8, lw=1)
# ax[4, 0].plot(xdb16, phidb16, lw=1)
# ax[4, 1].plot(xdb16, psidb16, lw=1)
for a in ax.flatten():
a.set_xlabel('t')
a.set_xlabel("t")
def inset_label(ax: Axes, text: str):
@ -198,7 +318,7 @@ def inset_label(ax: Axes, text: str):
text,
horizontalalignment="left",
verticalalignment="bottom",
transform=ax.transAxes
transform=ax.transAxes,
)
@ -238,32 +358,63 @@ def fourier_wavelet(h, g, n):
# ax.plot([0, np.pi], [1., 1.], 'k:')
kh, fphih, fpsih = fourier_wavelet(h_Haar, g_Haar, 256)
ax.plot(kh, np.abs(fphih) ** 2, label=r'$\hat\varphi_\textrm{Haar}$', c="C0")
ax.plot(kh, np.abs(fpsih) ** 2, label=r'$\hat\psi_\textrm{Haar}$', c="C0", linestyle="dashed")
ax.plot(kh, np.abs(fphih) ** 2, label=r"$\hat\varphi_\textrm{Haar}$", c="C0")
ax.plot(
kh,
np.abs(fpsih) ** 2,
label=r"$\hat\psi_\textrm{Haar}$",
c="C0",
linestyle="dashed",
)
kdb2, fphidb2, fpsidb2 = fourier_wavelet(h_DB2, g_DB2, 256)
ax.plot(kdb2, np.abs(fphidb2) ** 2, label=r'$\hat\varphi_\textrm{DB2}$', c="C1")
ax.plot(kdb2, np.abs(fpsidb2) ** 2, label=r'$\hat\psi_\textrm{DB2}$', c="C1", linestyle="dashed")
ax.plot(kdb2, np.abs(fphidb2) ** 2, label=r"$\hat\varphi_\textrm{DB2}$", c="C1")
ax.plot(
kdb2,
np.abs(fpsidb2) ** 2,
label=r"$\hat\psi_\textrm{DB2}$",
c="C1",
linestyle="dashed",
)
kdb4, fphidb4, fpsidb4 = fourier_wavelet(h_DB4, g_DB4, 256)
ax.plot(kdb4, np.abs(fphidb4) ** 2, label=r'$\hat\varphi_\textrm{DB4}$', c="C2")
ax.plot(kdb4, np.abs(fpsidb4) ** 2, label=r'$\hat\psi_\textrm{DB4}$', c="C2", linestyle="dashed")
ax.plot(kdb4, np.abs(fphidb4) ** 2, label=r"$\hat\varphi_\textrm{DB4}$", c="C2")
ax.plot(
kdb4,
np.abs(fpsidb4) ** 2,
label=r"$\hat\psi_\textrm{DB4}$",
c="C2",
linestyle="dashed",
)
kdb8, fphidb8, fpsidb8 = fourier_wavelet(h_DB8, g_DB8, 256)
ax.plot(kdb8, np.abs(fphidb8) ** 2, label=r'$\hat\varphi_\textrm{DB8}$', c="C3")
ax.plot(kdb8, np.abs(fpsidb8) ** 2, label=r'$\hat\psi_\textrm{DB8}$', c="C3", linestyle="dashed")
ax.plot(kdb8, np.abs(fphidb8) ** 2, label=r"$\hat\varphi_\textrm{DB8}$", c="C3")
ax.plot(
kdb8,
np.abs(fpsidb8) ** 2,
label=r"$\hat\psi_\textrm{DB8}$",
c="C3",
linestyle="dashed",
)
# all k* are np.linspace(0, np.pi, 256), so we can also use them for shannon
def shannon(k):
y = np.zeros_like(k)
y[k > pi / 2] = 1
return y
ax.plot(kdb8, 1 - shannon(kdb8), label=r'$\hat\varphi_\textrm{shannon}$', c="C4")
ax.plot(kdb8, shannon(kdb8), label=r'$\hat\psi_\textrm{shannon}$', c="C4", linestyle="dashed")
ax.plot(kdb8, 1 - shannon(kdb8), label=r"$\hat\varphi_\textrm{shannon}$", c="C4")
ax.plot(
kdb8,
shannon(kdb8),
label=r"$\hat\psi_\textrm{shannon}$",
c="C4",
linestyle="dashed",
)
# ax.plot(kdb8, np.abs(fpsidb8) ** 2, label='$\\hat\\psi_{DB8}$', c="C3", linestyle="dashed")
# kdb16, fphidb16, fpsidb16 = fourier_wavelet(h_DB16, g_DB16, 256)
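Note: fourier_wavelet is defined outside this hunk. One plausible reading, consistent with the plots here, is that it samples the low- and high-pass transfer functions of the filter pair on k in [0, pi]; a hedged sketch, not the file's actual code:
import numpy as np
def fourier_wavelet_sketch(h, g, n):
    k = np.linspace(0, np.pi, n)
    # DTFTs of the filters, normalised so |fphi(0)| = 1 for orthonormal filters
    fphi = np.exp(-1j * np.outer(k, np.arange(len(h)))) @ h / np.sqrt(2)
    fpsi = np.exp(-1j * np.outer(k, np.arange(len(g)))) @ g / np.sqrt(2)
    return k, fphi, fpsi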
@ -282,10 +433,12 @@ leg1 = ax.legend(frameon=False, handles=philines, loc="center left")
leg2 = ax.legend(frameon=False, handles=psilines, loc="center right")
ax.add_artist(leg1)
ax.add_artist(leg2)
ax.set_xlabel('k')
ax.set_ylabel('P(k)')
ax.set_xlabel("k")
ax.set_ylabel("P(k)")
ax.set_xticks([0, pi / 2, pi])
ax.set_xticklabels(["0", r"$k_\textrm{coarse}^\textrm{ny}$", r"$k_\textrm{fine}^\textrm{ny}$"])
ax.set_xticklabels(
["0", r"$k_\textrm{coarse}^\textrm{ny}$", r"$k_\textrm{fine}^\textrm{ny}$"]
)
# plt.semilogy()
# plt.ylim([1e-4,2.0])


@ -32,7 +32,14 @@ mode = Mode.richings
def dir_name_to_parameter(dir_name: str):
return map(int, dir_name.lstrip("auriga6_halo").lstrip("richings21_").lstrip("bary_").lstrip("ramses_").split("_"))
return map(
int,
dir_name.lstrip("auriga6_halo")
.lstrip("richings21_")
.lstrip("bary_")
.lstrip("ramses_")
.split("_"),
)
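Note that str.lstrip removes any leading characters drawn from the given set, not a literal prefix, so this parsing only works while the directory names never start with stray characters from those sets. A safer variant (Python 3.9+, not part of this commit) would be:
def dir_name_to_parameter_safe(dir_name: str):
    # removeprefix only strips an exact leading match
    for prefix in ("auriga6_halo", "richings21_", "bary_", "ramses_"):
        dir_name = dir_name.removeprefix(prefix)
    return map(int, dir_name.split("_"))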
def levelmax_to_softening_length(levelmax: int) -> float:
@ -46,8 +53,8 @@ fig2: Figure = plt.figure(figsize=figsize_from_page_fraction())
ax2: Axes = fig2.gca()
for ax in [ax1, ax2]:
ax.set_xlabel(r'R [Mpc]')
ax1.set_ylabel(r'M [$10^{10} \mathrm{M}_\odot$]')
ax.set_xlabel(r"R [Mpc]")
ax1.set_ylabel(r"M [$10^{10} \mathrm{M}_\odot$]")
ax2.set_ylabel("density [$\\frac{10^{10} \\mathrm{M}_\\odot}{Mpc^3}$]")
part_numbers = []
@ -107,8 +114,10 @@ for dir in sorted(root_dir.glob("*")):
ideal_softening_length = levelmax_to_softening_length(levelmax)
if not np.isclose(softening_length, levelmax_to_softening_length(levelmax)):
raise ValueError(f"softening length for levelmax {levelmax} should be {ideal_softening_length} "
f"but is {softening_length}")
raise ValueError(
f"softening length for levelmax {levelmax} should be {ideal_softening_length} "
f"but is {softening_length}"
)
print(input_file)
if mode == Mode.richings and is_by_adrian:
h = 0.6777
@ -141,12 +150,16 @@ for dir in sorted(root_dir.glob("*")):
# halo = halos.loc[1]
center = np.array([halo.X, halo.Y, halo.Z])
log_radial_bins, bin_masses, bin_densities, center = halo_mass_profile(
df, center, particles_meta, plot=False, num_bins=100,
vmin=0.002, vmax=6.5
df, center, particles_meta, plot=False, num_bins=100, vmin=0.002, vmax=6.5
)
i_min_border = np.argmax(0.01 < log_radial_bins) # first bin outside of specific radius
i_min_border = np.argmax(
0.01 < log_radial_bins
) # first bin outside of specific radius
i_max_border = np.argmax(1.5 < log_radial_bins)
popt = fit_nfw(log_radial_bins[i_min_border:i_max_border], bin_densities[i_min_border:i_max_border]) # = rho_0, r_s
popt = fit_nfw(
log_radial_bins[i_min_border:i_max_border],
bin_densities[i_min_border:i_max_border],
) # = rho_0, r_s
print(popt)
# # Plot NFW profile
# ax.loglog(
@ -176,11 +189,11 @@ for dir in sorted(root_dir.glob("*")):
ref_log_radial_bins, ref_bin_masses, ref_bin_densities = data
mass_deviation: np.ndarray = np.abs(bin_masses - ref_bin_masses)
density_deviation: np.ndarray = np.abs(bin_densities - ref_bin_densities)
ax1.loglog(log_radial_bins[:-1], mass_deviation, c=f"C{i}",
linestyle="dotted")
ax1.loglog(log_radial_bins[:-1], mass_deviation, c=f"C{i}", linestyle="dotted")
ax2.loglog(log_radial_bins[:-1], density_deviation, c=f"C{i}",
linestyle="dotted")
ax2.loglog(
log_radial_bins[:-1], density_deviation, c=f"C{i}", linestyle="dotted"
)
accuracy = mass_deviation / ref_bin_masses
print(accuracy)
print("mean accuracy", accuracy.mean())
@ -209,11 +222,13 @@ for dir in sorted(root_dir.glob("*")):
vmin = min(vmin, rho.min())
vmax = max(vmax, rho.max())
images.append(Result(
images.append(
Result(
rho=rho,
title=str(dir.name),
levels=(levelmin, levelmin_TF, levelmax) if levelmin else None
))
levels=(levelmin, levelmin_TF, levelmax) if levelmin else None,
)
)
i += 1
# plot_cic(
# rho, extent,
@ -226,15 +241,21 @@ fig2.tight_layout()
# fig3: Figure = plt.figure(figsize=(9, 9))
# axes: List[Axes] = fig3.subplots(3, 3, sharex=True, sharey=True).flatten()
fig3: Figure = plt.figure(figsize=figsize_from_page_fraction(columns=2, height_to_width=1))
fig3: Figure = plt.figure(
figsize=figsize_from_page_fraction(columns=2, height_to_width=1)
)
axes: List[Axes] = fig3.subplots(3, 3, sharex=True, sharey=True).flatten()
for result, ax in zip(images, axes):
data = 1.1 + result.rho
vmin_scaled = 1.1 + vmin
vmax_scaled = 1.1 + vmax
img = ax.imshow(data.T, norm=LogNorm(vmin=vmin_scaled, vmax=vmax_scaled), extent=extent,
origin="lower")
img = ax.imshow(
data.T,
norm=LogNorm(vmin=vmin_scaled, vmax=vmax_scaled),
extent=extent,
origin="lower",
)
ax.set_title(result.title)
fig3.tight_layout()

cic.py

@ -39,10 +39,15 @@ def cic_deposit(X, Y, ngrid, periodic=True) -> np.ndarray:
def cic_range(
X: np.ndarray, Y: np.ndarray,
X: np.ndarray,
Y: np.ndarray,
ngrid: int,
xmin: float, xmax: float,
ymin: float, ymax: float, *args, **kwargs
xmin: float,
xmax: float,
ymin: float,
ymax: float,
*args,
**kwargs
) -> Tuple[np.ndarray, Extent]:
xrange = xmax - xmin
yrange = ymax - ymin
@ -57,16 +62,25 @@ def cic_range(
def cic_from_radius(
X: np.ndarray, Y: np.ndarray,
X: np.ndarray,
Y: np.ndarray,
ngrid: int,
x_center: float, y_center: float,
radius: float, *args, **kwargs
x_center: float,
y_center: float,
radius: float,
*args,
**kwargs
) -> Tuple[np.ndarray, Extent]:
return cic_range(
X, Y, ngrid,
x_center - radius, x_center + radius,
y_center - radius, y_center + radius,
*args, **kwargs
X,
Y,
ngrid,
x_center - radius,
x_center + radius,
y_center - radius,
y_center + radius,
*args,
**kwargs
)
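Note: cic_deposit, which these wrappers call, lies outside this hunk. A minimal sketch of cloud-in-cell deposition, assuming positions already scaled to [0, 1) (the real function also handles normalisation and the periodic flag):
import numpy as np
def cic_deposit_sketch(X, Y, ngrid):
    rho = np.zeros((ngrid, ngrid))
    x, y = np.asarray(X) * ngrid, np.asarray(Y) * ngrid
    ix, iy = x.astype(int), y.astype(int)
    dx, dy = x - ix, y - iy
    # spread each particle over its four surrounding cells with bilinear weights
    for sx, sy, w in (
        (0, 0, (1 - dx) * (1 - dy)),
        (1, 0, dx * (1 - dy)),
        (0, 1, (1 - dx) * dy),
        (1, 1, dx * dy),
    ):
        np.add.at(rho, ((ix + sx) % ngrid, (iy + sy) % ngrid), w)
    return rho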
@ -87,18 +101,21 @@ def plot_cic(rho: np.ndarray, extent: Extent, title: str):
data = np.log(data)
norm = plt.Normalize(vmin=data.min(), vmax=data.max())
image = cmap(norm(data.T))
plt.imsave((Path("~/tmp").expanduser() / title).with_suffix(".png"), image, origin="lower")
plt.imsave(
(Path("~/tmp").expanduser() / title).with_suffix(".png"), image, origin="lower"
)
# ax.hist2d(df.X, df.Y, bins=500, norm=LogNorm())
# ax.hist2d(df2.X, df2.Y, bins=1000, norm=LogNorm())
if __name__ == '__main__':
if __name__ == "__main__":
input_file = Path(sys.argv[1])
df_ref, _ = read_file(input_file)
rho, extent = cic_from_radius(df_ref.X.to_numpy(), df_ref.Y.to_numpy(), 1500, 48.8, 57, 1, periodic=False)
rho, extent = cic_from_radius(
df_ref.X.to_numpy(), df_ref.Y.to_numpy(), 1500, 48.8, 57, 1, periodic=False
)
# rho, extent = cic_range(df_ref.X.to_numpy(), df_ref.Y.to_numpy(), 800, 0, 85.47, 0, 85.47, periodic=False)
plot_cic(
rho, extent,
title=str(input_file.relative_to(input_file.parent.parent).name)
rho, extent, title=str(input_file.relative_to(input_file.parent.parent).name)
)


@ -16,7 +16,7 @@ for wf in ["DB2", "DB4", "DB8", "shannon"]:
plot=False,
plot3d=False,
velo_halos=True,
single=False
single=False,
)
except Exception as e:
traceback.print_exc()


@ -45,10 +45,16 @@ def apply_offset(value, offset):
def compare_halo_resolutions(
ref_waveform: str, comp_waveform: str,
reference_resolution: int, comparison_resolution: int,
plot=False, plot3d=False, plot_cic=False,
single=False, velo_halos=False, force=False
ref_waveform: str,
comp_waveform: str,
reference_resolution: int,
comparison_resolution: int,
plot=False,
plot3d=False,
plot_cic=False,
single=False,
velo_halos=False,
force=False,
):
reference_dir = base_dir / f"{ref_waveform}_{reference_resolution}_100"
comparison_dir = base_dir / f"{comp_waveform}_{comparison_resolution}_100/"
@ -68,14 +74,18 @@ def compare_halo_resolutions(
print("reading reference file")
df_ref, ref_meta = read_file(reference_dir / "output_0004.hdf5")
if velo_halos:
df_ref_halo, ref_halo_lookup, ref_unbound = read_velo_halo_particles(reference_dir)
df_ref_halo, ref_halo_lookup, ref_unbound = read_velo_halo_particles(
reference_dir
)
else:
df_ref_halo = read_halo_file(reference_dir / "fof_output_0004.hdf5")
print("reading comparison file")
df_comp, comp_meta = read_file(comparison_dir / "output_0004.hdf5")
if velo_halos:
df_comp_halo, comp_halo_lookup, comp_unbound = read_velo_halo_particles(comparison_dir)
df_comp_halo, comp_halo_lookup, comp_unbound = read_velo_halo_particles(
comparison_dir
)
else:
df_comp_halo = read_halo_file(comparison_dir / "fof_output_0004.hdf5")
@ -137,18 +147,22 @@ def compare_halo_resolutions(
print(f"{prev_len} => {after_len} (factor {prev_len / after_len:.2f})")
halo_distances = np.linalg.norm(
ref_halo[['X', 'Y', 'Z']].values
- df_comp_halo[['X', 'Y', 'Z']].values,
axis=1)
ref_halo[["X", "Y", "Z"]].values - df_comp_halo[["X", "Y", "Z"]].values,
axis=1,
)
# print(list(halo_distances))
print(f"find nearby halos (50x{ref_halo.Rvir:.1f})")
print(ref_halo[['X', 'Y', 'Z']].values)
print(ref_halo[["X", "Y", "Z"]].values)
# Find IDs of halos that are less than 50 Rvir away
nearby_halos = set(df_comp_halo.loc[halo_distances < ref_halo.Rvir * 50].index.to_list())
nearby_halos = set(
df_comp_halo.loc[halo_distances < ref_halo.Rvir * 50].index.to_list()
)
if len(nearby_halos) < 10:
print(f"only {len(nearby_halos)} halos, expanding to 150xRvir")
nearby_halos = set(df_comp_halo.loc[halo_distances < ref_halo.Rvir * 150].index.to_list())
nearby_halos = set(
df_comp_halo.loc[halo_distances < ref_halo.Rvir * 150].index.to_list()
)
counters.checking_150 += 1
if not nearby_halos:
@ -179,9 +193,13 @@ def compare_halo_resolutions(
if plot:
fig: Figure = plt.figure()
ax: Axes = fig.gca()
ax.scatter(apply_offset_to_list(halo_particles["X"], offset_x),
apply_offset_to_list(halo_particles["Y"], offset_y), s=1,
alpha=.3, label="Halo")
ax.scatter(
apply_offset_to_list(halo_particles["X"], offset_x),
apply_offset_to_list(halo_particles["Y"], offset_y),
s=1,
alpha=0.3,
label="Halo",
)
if plot_cic:
diameter = ref_halo["R_size"]
X = ref_halo["Xc"]
@ -207,6 +225,7 @@ def compare_halo_resolutions(
if plot3d:
from pyvista import Plotter
pl = Plotter()
plotdf3d(pl, halo_particles, color="#b3cde3") # light blue
pl.set_focus((ref_halo.X, ref_halo.Y, ref_halo.Z))
@ -223,7 +242,11 @@ def compare_halo_resolutions(
particle_ids_in_comp_halo = comp_halo_lookup[halo_id]
mass_factor_limit = 5
if not (1 / mass_factor_limit < (comp_halo_masses[halo_id] / ref_halo_mass) < mass_factor_limit):
if not (
1 / mass_factor_limit
< (comp_halo_masses[halo_id] / ref_halo_mass)
< mass_factor_limit
):
# print("mass not similar, skipping")
num_skipped_for_mass += 1
continue
@ -235,7 +258,10 @@ def compare_halo_resolutions(
# similarity = len(shared_particles) / len(union_particles)
similarity = len(shared_particles) / (
len(halo_particle_ids) + len(particle_ids_in_comp_halo) - len(shared_particles))
len(halo_particle_ids)
+ len(particle_ids_in_comp_halo)
- len(shared_particles)
)
# assert similarity_orig == similarity
# print(shared_size)
# if not similarity:
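This similarity is the Jaccard index |A ∩ B| / |A ∪ B|, written without materialising the union set. A tiny illustration:
a = {1, 2, 3, 4}
b = {3, 4, 5}
shared = a & b
assert len(shared) / (len(a) + len(b) - len(shared)) == len(a & b) / len(a | b)  # 0.4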
@ -247,11 +273,23 @@ def compare_halo_resolutions(
color = f"C{i + 1}"
comp_halo: pd.Series = df_comp_halo.loc[halo_id]
ax.scatter(apply_offset_to_list(df["X"], offset_x), apply_offset_to_list(df["Y"], offset_y), s=1,
alpha=.3, c=color)
circle = Circle((apply_offset(comp_halo.X, offset_x), apply_offset(comp_halo.Y, offset_y)),
comp_halo["Rvir"], zorder=10,
linewidth=1, edgecolor=color, fill=None
ax.scatter(
apply_offset_to_list(df["X"], offset_x),
apply_offset_to_list(df["Y"], offset_y),
s=1,
alpha=0.3,
c=color,
)
circle = Circle(
(
apply_offset(comp_halo.X, offset_x),
apply_offset(comp_halo.Y, offset_y),
),
comp_halo["Rvir"],
zorder=10,
linewidth=1,
edgecolor=color,
fill=None,
)
ax.add_artist(circle)
if plot3d:
@ -270,13 +308,16 @@ def compare_halo_resolutions(
comp_halo: pd.Series = df_comp_halo.loc[best_halo]
# merge the data of the two halos with fitting prefixes
halo_data = pd.concat([
ref_halo.add_prefix("ref_"),
comp_halo.add_prefix("comp_")
])
distance = linalg.norm(
np.array([ref_halo.X, ref_halo.Y, ref_halo.Z]) - np.array([comp_halo.X, comp_halo.Y, comp_halo.Z])
) / ref_halo.Rvir
halo_data = pd.concat(
[ref_halo.add_prefix("ref_"), comp_halo.add_prefix("comp_")]
)
distance = (
linalg.norm(
np.array([ref_halo.X, ref_halo.Y, ref_halo.Z])
- np.array([comp_halo.X, comp_halo.Y, comp_halo.Z])
)
/ ref_halo.Rvir
)
halo_data["distance"] = distance
halo_data["match"] = best_halo_match
halo_data["num_skipped_for_mass"] = num_skipped_for_mass
@ -285,7 +326,9 @@ def compare_halo_resolutions(
if plot:
print(f"plotting with offsets ({offset_x},{offset_y})")
# ax.legend()
ax.set_title(f"{reference_dir.name} vs. {comparison_dir.name} (Halo {index})")
ax.set_title(
f"{reference_dir.name} vs. {comparison_dir.name} (Halo {index})"
)
fig.savefig("out.png", dpi=300)
plt.show()
if plot3d:
@ -310,7 +353,7 @@ def precalculate_halo_membership(df_comp, df_comp_halo):
print_progress(i, len(df_comp_halo), halo["Sizes"])
size = int(halo["Sizes"])
halo_id = int(i)
halo_particles = df_comp.iloc[pointer:pointer + size]
halo_particles = df_comp.iloc[pointer : pointer + size]
# check_id = halo_particles["FOFGroupIDs"].to_numpy()
# assert (check_id == i).all()
@ -321,7 +364,7 @@ def precalculate_halo_membership(df_comp, df_comp_halo):
return comp_halo_lookup
if __name__ == '__main__':
if __name__ == "__main__":
compare_halo_resolutions(
ref_waveform="shannon",
comp_waveform="shannon",
@ -332,5 +375,5 @@ if __name__ == '__main__':
plot_cic=False,
velo_halos=True,
single=False,
force=True
force=True,
)


@ -21,32 +21,32 @@ from utils import figsize_from_page_fraction, rowcolumn_labels, waveforms, tex_f
G = 43.022682 # in Mpc (km/s)^2 / (10^10 Msun)
vmaxs = {
"Mvir": 52,
"Vmax": 93,
"cNFW": 31
}
vmaxs = {"Mvir": 52, "Vmax": 93, "cNFW": 31}
units = {
"distance": "Mpc",
"Mvir": r"10^{10} \textrm{M}_\odot",
"Vmax": r"\textrm{km} \textrm{s}^{-1}" # TODO
"Mvir": r"10^{10} \textrm{ M}_\odot",
"Vmax": r"\textrm{km } \textrm{s}^{-1}", # TODO
}
def concentration(row, halo_type: str) -> bool:
r_200crit = row[f'{halo_type}_R_200crit']
r_200crit = row[f"{halo_type}_R_200crit"]
if r_200crit <= 0:
cnfw = -1
colour = 'orange'
colour = "orange"
return False
# return cnfw, colour
r_size = row[f'{halo_type}_R_size'] # largest difference from center of mass to any halo particle
m_200crit = row[f'{halo_type}_Mass_200crit']
vmax = row[f'{halo_type}_Vmax'] # largest velocity coming from enclosed mass profile calculation
rmax = row[f'{halo_type}_Rmax']
npart = row[f'{halo_type}_npart']
r_size = row[
f"{halo_type}_R_size"
] # largest difference from center of mass to any halo particle
m_200crit = row[f"{halo_type}_Mass_200crit"]
vmax = row[
f"{halo_type}_Vmax"
] # largest velocity coming from enclosed mass profile calculation
rmax = row[f"{halo_type}_Rmax"]
npart = row[f"{halo_type}_npart"]
VmaxVvir2 = vmax ** 2 * r_200crit / (G * m_200crit)
if VmaxVvir2 <= 1.05:
if m_200crit == 0:
@ -59,7 +59,7 @@ def concentration(row, halo_type: str) -> bool:
# colour = 'white'
else:
if npart >= 100: # only calculate cnfw for groups with at least 100 particles
cnfw = row[f'{halo_type}_cNFW']
cnfw = row[f"{halo_type}_cNFW"]
return True
# colour = 'black'
else:
@ -91,12 +91,12 @@ def plot_comparison_hist2d(ax: Axes, file: Path, property: str):
max_x = max([max(df[x_col]), max(df[y_col])])
num_bins = 100
bins = np.geomspace(min_x, max_x, num_bins)
if property == 'cNFW':
if property == "cNFW":
rows = []
for i, row in df.iterrows():
comp_cnfw_normal = concentration(row, halo_type="comp")
ref_cnfw_normal = concentration(row, halo_type='ref')
ref_cnfw_normal = concentration(row, halo_type="ref")
cnfw_normal = comp_cnfw_normal and ref_cnfw_normal
if cnfw_normal:
rows.append(row)
@ -118,13 +118,10 @@ def plot_comparison_hist2d(ax: Axes, file: Path, property: str):
stds.append(std)
means = np.array(means)
stds = np.array(stds)
args = {
"color": "C2",
"zorder": 10
}
ax.fill_between(bins, means - stds, means + stds, alpha=.2, **args)
ax.plot(bins, means + stds, alpha=.5, **args)
ax.plot(bins, means - stds, alpha=.5, **args)
args = {"color": "C2", "zorder": 10}
ax.fill_between(bins, means - stds, means + stds, alpha=0.2, **args)
ax.plot(bins, means + stds, alpha=0.5, **args)
ax.plot(bins, means - stds, alpha=0.5, **args)
# ax_scatter.plot(bins, stds, label=f"{file.stem}")
if property in vmaxs:
@ -133,8 +130,13 @@ def plot_comparison_hist2d(ax: Axes, file: Path, property: str):
vmax = None
print("WARNING: vmax not set")
image: QuadMesh
_, _, _, image = ax.hist2d(df[x_col], df[y_col] / df[x_col], bins=(bins, np.linspace(0, 2, num_bins)),
norm=LogNorm(vmax=vmax), rasterized=True)
_, _, _, image = ax.hist2d(
df[x_col],
df[y_col] / df[x_col],
bins=(bins, np.linspace(0, 2, num_bins)),
norm=LogNorm(vmax=vmax),
rasterized=True,
)
# ax.plot([rep_x_left, rep_x_left], [mean - std, mean + std], c="C1")
# ax.annotate(
# text=f"std={std:.2f}", xy=(rep_x_left, mean + std),
@ -148,7 +150,9 @@ def plot_comparison_hist2d(ax: Axes, file: Path, property: str):
# ax.set_yscale("log")
ax.set_xlim(min(df[x_col]), max(df[y_col]))
ax.plot([min(df[x_col]), max(df[y_col])], [1, 1], linewidth=1, color="C1", zorder=10)
ax.plot(
[min(df[x_col]), max(df[y_col])], [1, 1], linewidth=1, color="C1", zorder=10
)
return x_col, y_col
# ax.set_title(file.name)
@ -193,7 +197,9 @@ def plot_comparison_hist(ax: Axes, file: Path, property: str, m_min=None, m_max=
ax.plot(bin_centers, hist_val, label=label)
else:
patches: List[Polygon]
hist_val, bin_edges, patches = ax.hist(df[property], bins=bins, histtype=histtype, label=label, density=density)
hist_val, bin_edges, patches = ax.hist(
df[property], bins=bins, histtype=histtype, label=label, density=density
)
comparisons_dir = base_dir / "comparisons"
@ -206,8 +212,10 @@ def compare_property(property, show: bool):
is_hist_property = property in hist_properties
fig: Figure
fig, axes = plt.subplots(
len(waveforms), len(comparisons),
sharey="all", sharex="all",
len(waveforms),
len(comparisons),
sharey="all",
sharex="all",
figsize=figsize_from_page_fraction(columns=2),
)
for i, waveform in enumerate(waveforms):
@ -227,24 +235,82 @@ def compare_property(property, show: bool):
}
x_col, y_col = plot_comparison_hist2d(ax, file, property)
lab_a, lab_b = x_labels[property]
unit = f"[{units[property]}]" if property in units and units[property] else ""
unit = (
f"[{units[property]}]"
if property in units and units[property]
else ""
)
if is_bottom_row:
if lab_b:
ax.set_xlabel(tex_fmt(r"$AA_{\textrm{BB},CC} DD$", lab_a, lab_b, ref_res, unit))
ax.set_xlabel(
tex_fmt(
r"$AA_{\textrm{BB},\textrm{ CC}} \textrm{ } DD$",
lab_a,
lab_b,
ref_res,
unit,
)
)
# fig.supxlabel(tex_fmt(r"$AA_{\textrm{BB},\textrm{ } CC} \textrm{ } DD$", lab_a, lab_b, ref_res, unit), fontsize='medium')
else:
ax.set_xlabel(tex_fmt(r"$AA_{BB} CC$", lab_a, ref_res, unit))
ax.set_xlabel(
tex_fmt(
r"$AA_{\textrm{BB}} \textrm{ } CC$",
lab_a,
ref_res,
unit,
)
)
# fig.supxlabel(tex_fmt(r"$AA_{BB} \textrm{ } CC$", lab_a, ref_res, unit), fontsize='medium')
if is_left_col:
if lab_b:
ax.set_ylabel(
tex_fmt(r"$AA_{\textrm{BB},\textrm{comp}} / AA_{\textrm{BB},\textrm{CC}}$",
lab_a, lab_b, ref_res))
# ax.set_ylabel(
# tex_fmt(r"$AA_{\textrm{BB},\textrm{comp}} \textrm{ } / \textrm{ } AA_{\textrm{BB},\textrm{CC}}$",
# lab_a, lab_b, ref_res))
# fig.text(0.015, 0.5, tex_fmt(r"$AA_{\textrm{BB},\textrm{ comp}} \textrm{ } / \textrm{ } AA_{\textrm{BB},\textrm{ CC}}$", lab_a, lab_b, ref_res), va='center', rotation='vertical', size='medium')
fig.supylabel(
tex_fmt(
r"$AA_{\textrm{BB},\textrm{ comp}} \textrm{ } / \textrm{ } AA_{\textrm{BB},\textrm{ CC}}$",
lab_a,
lab_b,
ref_res,
),
fontsize="medium",
fontvariant="small-caps",
)
else:
ax.set_ylabel(
tex_fmt(r"$AA_{\textrm{comp}} / AA_{\textrm{BB}}$",
lab_a, ref_res))
# ax.set_ylabel(
# tex_fmt(r"$AA_{\textrm{comp}} \textrm{ } / \textrm{ } AA_{\textrm{BB}}$",
# lab_a, ref_res))
# fig.text(0.015, 0.5, tex_fmt(r"$AA_{\textrm{comp}} \textrm{ } / \textrm{ } AA_{\textrm{BB}}$", lab_a, ref_res), va='center', rotation='vertical', size='medium')
fig.supylabel(
tex_fmt(
r"$AA_{\textrm{comp}} \textrm{ } / \textrm{ } AA_{\textrm{BB}}$",
lab_a,
ref_res,
),
fontsize="medium",
)
# ax.set_ylabel(f"{property}_{{comp}}/{property}_{ref_res}")
ax.text(
0.975,
0.9,
f"comp = {comp_res}",
horizontalalignment="right",
verticalalignment="top",
transform=ax.transAxes,
)
else:
if property == "match":
if not (is_bottom_row and is_left_col):
ax.text(
0.05,
0.9,
f"comp = {comp_res}",
horizontalalignment="left",
verticalalignment="top",
transform=ax.transAxes,
)
# mass_bins = np.geomspace(10, 30000, num_mass_bins)
plot_comparison_hist(ax, file, property)
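Note: tex_fmt comes from utils and is not shown in this diff; judging from the call sites it substitutes the placeholders AA, BB, CC, DD in order. A sketch of that behaviour (not the actual utils code):
def tex_fmt_sketch(template: str, *args) -> str:
    for placeholder, value in zip(("AA", "BB", "CC", "DD"), args):
        template = template.replace(placeholder, str(value))
    return template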
@ -257,18 +323,25 @@ def compare_property(property, show: bool):
ax.legend()
else:
ax.text(
0.05,
0.9,
f"comp = {comp_res}",
horizontalalignment="left",
verticalalignment="top",
transform=ax.transAxes,
)
plot_comparison_hist(ax, file, property)
x_labels = {
"match": "$J$",
"distance": "$D$ [$R_{vir}$]"
}
x_labels = {"match": "$J$", "distance": "$D$ [$R_\mathrm{{vir}}$]"}
if is_bottom_row:
ax.set_xlabel(x_labels[property])
if is_left_col:
if property == "match":
ax.set_ylabel(r"$p(J)$")
# ax.set_ylabel(r"$p(J)$")
fig.supylabel(r"$p(J)$", fontsize="medium")
else:
ax.set_ylabel(r"\# Halos")
# ax.set_ylabel(r"\# Halos")
fig.supylabel(r"\# Halos", fontsize="medium")
if property == "distance":
ax.set_xscale("log")
ax.set_yscale("log")
@ -278,11 +351,7 @@ def compare_property(property, show: bool):
last_ytick: YTick = ax.yaxis.get_major_ticks()[-1]
last_ytick.set_visible(False)
if property == "Mvir" and is_top_row:
particle_masses = {
256: 0.23524624,
512: 0.02940578,
1024: 0.0036757225
}
particle_masses = {256: 0.23524624, 512: 0.02940578, 1024: 0.0036757225}
partmass = particle_masses[ref_res]
def mass2partnum(mass: float) -> float:
@ -291,10 +360,12 @@ def compare_property(property, show: bool):
def partnum2mass(partnum: float) -> float:
return partnum * partmass
sec_ax = ax.secondary_xaxis("top", functions=(mass2partnum, partnum2mass))
sec_ax.set_xlabel(r"[\# \textrm{particles}]")
sec_ax = ax.secondary_xaxis(
"top", functions=(mass2partnum, partnum2mass)
)
sec_ax.set_xlabel(r"\textrm{Halo Size }[\# \textrm{particles}]")
rowcolumn_labels(axes, comparisons, isrow=False)
# rowcolumn_labels(axes, comparisons, isrow=False)
rowcolumn_labels(axes, waveforms, isrow=True)
fig.tight_layout()
fig.subplots_adjust(hspace=0)
@ -315,5 +386,5 @@ def main():
compare_property(property, show=len(argv) == 2)
if __name__ == '__main__':
if __name__ == "__main__":
main()


@ -17,20 +17,22 @@ const_boltzmann_k_cgs = 1.380649e-16
def calculate_gas_internal_energy(omegab, hubble_param_, zstart_):
astart_ = 1.0 / (1.0 + zstart_)
if fabs(1.0 - gamma) > 1e-7:
npol = 1.0 / (gamma - 1.)
npol = 1.0 / (gamma - 1.0)
else:
npol = 1.0
unitv = 1e5
adec = 1.0 / (160. * (omegab * hubble_param_ * hubble_param_ / 0.022) ** (2.0 / 5.0))
if (astart_ < adec):
adec = 1.0 / (
160.0 * (omegab * hubble_param_ * hubble_param_ / 0.022) ** (2.0 / 5.0)
)
if astart_ < adec:
Tini = Tcmb0 / astart_
else:
Tini = Tcmb0 / astart_ / astart_ * adec
print("Tini", Tini)
if Tini > 1.e4:
mu = 4.0 / (8. - 5. * YHe)
if Tini > 1.0e4:
mu = 4.0 / (8.0 - 5.0 * YHe)
else:
mu = 4.0 / (1. + 3. * (1. - YHe))
mu = 4.0 / (1.0 + 3.0 * (1.0 - YHe))
print("mu", mu)
ceint_ = 1.3806e-16 / 1.6726e-24 * Tini * npol / mu / unitv / unitv
print("ceint", ceint_)
@ -50,40 +52,45 @@ def fix_initial_conditions():
zstart = f["Header"].attrs["Redshift"]
boxsize = f["Header"].attrs["BoxSize"]
levelmax = f["Header"].attrs["Music_levelmax"]
internal_energy = calculate_gas_internal_energy(omegab=omegab, hubble_param_=h, zstart_=zstart)
smoothing_length = calculate_smoothing_length(boxsize=boxsize, hubble_param_=h, levelmax=levelmax)
internal_energy = calculate_gas_internal_energy(
omegab=omegab, hubble_param_=h, zstart_=zstart
)
smoothing_length = calculate_smoothing_length(
boxsize=boxsize, hubble_param_=h, levelmax=levelmax
)
# exit()
bary_mass = f["Header"].attrs["MassTable"][0]
bary_count = f["Header"].attrs["NumPart_Total"][0]
print("mass table", f["Header"].attrs["MassTable"])
pt1 = f["PartType0"]
masses_column = pt1.create_dataset(
"Masses",
data=np.full(bary_count, bary_mass),
compression='gzip'
"Masses", data=np.full(bary_count, bary_mass), compression="gzip"
)
smoothing_length_column = pt1.create_dataset(
"SmoothingLength",
data=np.full(bary_count, smoothing_length),
compression='gzip'
compression="gzip",
)
internal_energy_column = pt1.create_dataset(
"InternalEnergy",
data=np.full(bary_count, internal_energy),
compression='gzip'
compression="gzip",
)
hydro_gamma_minus_one = gamma - 1
const_primordial_He_fraction_cgs = 0.248
hydrogen_mass_function = 1 - const_primordial_He_fraction_cgs
mu_neutral = 4. / (1. + 3. * hydrogen_mass_function)
mu_ionised = 4. / (8. - 5. * (1. - hydrogen_mass_function))
T_transition = 1.e4
mu_neutral = 4.0 / (1.0 + 3.0 * hydrogen_mass_function)
mu_ionised = 4.0 / (8.0 - 5.0 * (1.0 - hydrogen_mass_function))
T_transition = 1.0e4
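(As a sanity check on the constants above: with a primordial helium mass fraction of 0.248, mu_neutral = 4 / (1 + 3 * 0.752) ≈ 1.23 and mu_ionised = 4 / (8 - 5 * 0.248) ≈ 0.59, the familiar mean molecular weights of neutral and fully ionised primordial gas.)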
@njit
def calculate_T(u):
T_over_mu = hydro_gamma_minus_one * u * const_proton_mass_cgs / const_boltzmann_k_cgs
T_over_mu = (
hydro_gamma_minus_one * u * const_proton_mass_cgs / const_boltzmann_k_cgs
)
if T_over_mu > (T_transition + 1) / mu_ionised:
return T_over_mu / mu_ionised
elif T_over_mu < (T_transition - 1) / mu_neutral:
@ -109,6 +116,6 @@ def add_temperature_column():
)
if __name__ == '__main__':
if __name__ == "__main__":
# fix_initial_conditions()
add_temperature_column()


@ -5,11 +5,14 @@ print("digraph G {")
with open(argv[1]) as f:
next(f)
for line in f:
if line.startswith("#"): continue
if line.startswith("#"):
continue
cols = line.split()
if len(cols) < 5: continue
if len(cols) < 5:
continue
progenitor = int(cols[1])
descendant = int(cols[3])
if descendant==-1: continue
if descendant == -1:
continue
print(f" {progenitor} -> {descendant};")
print("}")


@ -2,6 +2,7 @@ from math import log
from pathlib import Path
import numpy as np
# from colossus.cosmology import cosmology
# from colossus.lss import mass_function
from matplotlib import pyplot as plt
@ -14,7 +15,7 @@ from utils import print_progress, figsize_from_page_fraction
def counts_without_inf(number_halos):
with np.errstate(divide='ignore', invalid='ignore'):
with np.errstate(divide="ignore", invalid="ignore"):
number_halos_inverse = 1 / np.sqrt(number_halos)
number_halos_inverse[np.abs(number_halos_inverse) == np.inf] = 0
@ -41,20 +42,37 @@ def monofonic_tests():
# halos.to_csv("weird_halos.csv")
halo_masses: np.ndarray = halos["Mvir"].to_numpy()
Ns, deltas, left_edges, number_densities, lower_error_limit, upper_error_limit = halo_mass_function(
halo_masses)
(
Ns,
deltas,
left_edges,
number_densities,
lower_error_limit,
upper_error_limit,
) = halo_mass_function(halo_masses)
ax.set_xscale("log")
ax.set_yscale("log")
# ax.bar(centers, number_densities, width=widths, log=True, fill=False)
name = f"{waveform} {resolution}"
ax.step(left_edges, number_densities, where="post", color=f"C{i}", linestyle=linestyles[j], label=name)
ax.step(
left_edges,
number_densities,
where="post",
color=f"C{i}",
linestyle=linestyles[j],
label=name,
)
ax.fill_between(
left_edges,
lower_error_limit,
upper_error_limit, alpha=.5, linewidth=0, step='post')
upper_error_limit,
alpha=0.5,
linewidth=0,
step="post",
)
# break
# break
@ -73,7 +91,7 @@ def halo_mass_function(halo_masses, num_bins=30, sim_volume=100 ** 3):
Ns = []
deltas = []
for bin_id in range(num_bins):
print_progress(bin_id+1, num_bins)
print_progress(bin_id + 1, num_bins)
mass_low = bins[bin_id]
mass_high = bins[bin_id + 1]
counter = 0
@ -102,7 +120,14 @@ def halo_mass_function(halo_masses, num_bins=30, sim_volume=100 ** 3):
lower_error_limit = number_densities - counts_without_inf(Ns) / sim_volume / deltas
upper_error_limit = number_densities + counts_without_inf(Ns) / sim_volume / deltas
return Ns, deltas, left_edges, number_densities, lower_error_limit, upper_error_limit
return (
Ns,
deltas,
left_edges,
number_densities,
lower_error_limit,
upper_error_limit,
)
def hmf_from_rockstar_tree(file: Path):
@ -118,11 +143,14 @@ def hmf_from_rockstar_tree(file: Path):
# agora_box_h = 0.702
# masses /= agora_box_h
box_size = 85.47
Ns, deltas, left_edges, number_densities, lower_error_limit, upper_error_limit = halo_mass_function(
masses,
num_bins=50,
sim_volume=box_size ** 3
)
(
Ns,
deltas,
left_edges,
number_densities,
lower_error_limit,
upper_error_limit,
) = halo_mass_function(masses, num_bins=50, sim_volume=box_size ** 3)
fig: Figure = plt.figure()
ax: Axes = fig.gca()
@ -131,29 +159,35 @@ def hmf_from_rockstar_tree(file: Path):
ax.set_xlabel("Halo Mass [$M_\\odot$]")
ax.set_ylabel("Number Density [$\\textrm{\\#}/Mpc^3/dlogM$]")
ax.step(left_edges, number_densities, where="post")
plank_cosmo = cosmology.cosmologies['planck18']
plank_cosmo = cosmology.cosmologies["planck18"]
auriga_cosmo = {
"sigma8": 0.807,
"H0": 70.2,
"Om0": 0.272,
"Ob0": 0.0455,
"ns": 0.961
"ns": 0.961,
}
cosmology.addCosmology('aurigaCosmo', params={**plank_cosmo, **auriga_cosmo})
cosmology.setCosmology('aurigaCosmo')
cosmology.addCosmology("aurigaCosmo", params={**plank_cosmo, **auriga_cosmo})
cosmology.setCosmology("aurigaCosmo")
print(cosmology.getCurrent())
mfunc = mass_function.massFunction(left_edges, 1, mdef='vir', model='tinker08', q_out='dndlnM')
mfunc = mass_function.massFunction(
left_edges, 1, mdef="vir", model="tinker08", q_out="dndlnM"
)
ax.plot(left_edges, mfunc)
ax.fill_between(
left_edges,
lower_error_limit,
upper_error_limit, alpha=.5, linewidth=0, step='post')
upper_error_limit,
alpha=0.5,
linewidth=0,
step="post",
)
plt.show()
if __name__ == '__main__':
if __name__ == "__main__":
monofonic_tests()
# hmf_from_rockstar_tree(Path(argv[1]))


@ -15,8 +15,15 @@ def V(r):
return 4 / 3 * np.pi * r ** 3
def halo_mass_profile(particles: pd.DataFrame, center: np.ndarray,
particles_meta: ParticlesMeta, vmin: float, vmax: float, plot=False, num_bins=30):
def halo_mass_profile(
particles: pd.DataFrame,
center: np.ndarray,
particles_meta: ParticlesMeta,
vmin: float,
vmax: float,
plot=False,
num_bins=30,
):
center = find_center(particles, center)
positions = particles[["X", "Y", "Z"]].to_numpy()
distances = np.linalg.norm(positions - center, axis=1)
@ -50,7 +57,7 @@ def halo_mass_profile(particles: pd.DataFrame, center: np.ndarray,
ax.loglog(log_radial_bins[:-1], bin_masses, label="counts")
ax2.loglog(log_radial_bins[:-1], bin_densities, label="densities", c="C1")
# ax.set_xlabel(r'R / R$_\mathrm{group}$')
ax.set_ylabel(r'M [$10^{10} \mathrm{M}_\odot$]')
ax.set_ylabel(r"M [$10^{10} \mathrm{M}_\odot$]")
ax2.set_ylabel("density [$\\frac{10^{10} \\mathrm{M}_\\odot}{Mpc^3}$]")
plt.legend()
plt.show()
@ -58,7 +65,7 @@ def halo_mass_profile(particles: pd.DataFrame, center: np.ndarray,
return log_radial_bins, bin_masses, bin_densities, center
if __name__ == '__main__':
if __name__ == "__main__":
input_file = Path(sys.argv[1])
df, particles_meta = read_file(input_file)
df_halos = read_halo_file(input_file.with_name("fof_" + input_file.name))


@ -27,10 +27,8 @@ def in_area(coords: Coords, xobj, yobj, zobj, factor=1.3) -> bool:
radius *= factor
return (
(xcenter - radius < xobj < xcenter + radius)
and
(ycenter - radius < yobj < ycenter + radius)
and
(zcenter - radius < zobj < zcenter + radius)
and (ycenter - radius < yobj < ycenter + radius)
and (zcenter - radius < zobj < zcenter + radius)
)
@ -40,8 +38,12 @@ def main():
initial_halo_id = int(argv[1])
if has_1024_simulations:
resolutions.append(1024)
fig: Figure = plt.figure(figsize=figsize_from_page_fraction(columns=2, height_to_width=1))
axes: List[List[Axes]] = fig.subplots(len(waveforms), len(resolutions), sharex="row", sharey="row")
fig: Figure = plt.figure(
figsize=figsize_from_page_fraction(columns=2, height_to_width=1)
)
axes: List[List[Axes]] = fig.subplots(
len(waveforms), len(resolutions), sharex="row", sharey="row"
)
with h5py.File(vis_datafile) as vis_out:
halo_group = vis_out[str(initial_halo_id)]
@ -62,8 +64,13 @@ def main():
vmax_scaled = (vmax + offset) * mass
rho = (rho + offset) * mass
extent = coord_to_2d_extent(coords)
img = ax.imshow(rho.T, norm=LogNorm(vmin=vmin_scaled, vmax=vmax_scaled), extent=extent,
origin="lower",cmap="Greys")
img = ax.imshow(
rho.T,
norm=LogNorm(vmin=vmin_scaled, vmax=vmax_scaled),
extent=extent,
origin="lower",
cmap="Greys",
)
found_main_halo = False
for halo_id, halo in halos.iterrows():
if halo["Vmax"] > 75:
@ -79,8 +86,12 @@ def main():
print("plotting main halo")
circle = Circle(
(halo.X, halo.Y),
halo["Rvir"], zorder=10,
linewidth=1, edgecolor=color, fill=None, alpha=.2
halo["Rvir"],
zorder=10,
linewidth=1,
edgecolor=color,
fill=None,
alpha=0.2,
)
ax.add_artist(circle)
# assert found_main_halo
@ -92,9 +103,11 @@ def main():
if j == 0:
scalebar = AnchoredSizeBar(
ax.transData,
1, '1 Mpc', 'lower left',
1,
"1 Mpc",
"lower left",
# pad=0.1,
color='white',
color="white",
frameon=False,
# size_vertical=1
)
@ -116,5 +129,5 @@ def main():
plt.show()
if __name__ == '__main__':
if __name__ == "__main__":
main()


@ -20,7 +20,9 @@ Coords = Tuple[float, float, float, float] # radius, X, Y, Z
def load_halo_data(waveform: str, resolution: int, halo_id: int, coords: Coords):
dir = base_dir / f"{waveform}_{resolution}_100"
df, meta = read_file(dir / "output_0004.hdf5")
df_halo, halo_lookup, unbound = read_velo_halo_particles(dir, skip_halo_particle_ids=all_in_area)
df_halo, halo_lookup, unbound = read_velo_halo_particles(
dir, skip_halo_particle_ids=all_in_area
)
halo = df_halo.loc[halo_id]
if coords:
@ -43,12 +45,29 @@ def load_halo_data(waveform: str, resolution: int, halo_id: int, coords: Coords)
return halo, halo_particles, meta, coords
def get_comp_id(ref_waveform: str, reference_resolution: int, comp_waveform: str, comp_resolution: int):
def get_comp_id(
ref_waveform: str,
reference_resolution: int,
comp_waveform: str,
comp_resolution: int,
):
return f"{ref_waveform}_{reference_resolution}_100_{comp_waveform}_{comp_resolution}_100_velo.csv"
def map_halo_id(halo_id: int, ref_waveform: str, reference_resolution: int, comp_waveform: str, comp_resolution: int):
file = base_dir / "comparisons" / get_comp_id(ref_waveform, reference_resolution, comp_waveform, comp_resolution)
def map_halo_id(
halo_id: int,
ref_waveform: str,
reference_resolution: int,
comp_waveform: str,
comp_resolution: int,
):
file = (
base_dir
/ "comparisons"
/ get_comp_id(
ref_waveform, reference_resolution, comp_waveform, comp_resolution
)
)
print("opening", file)
df = pd.read_csv(file)
mapping = {}
@ -93,9 +112,16 @@ def main():
halo_id = initial_halo_id
first_halo = False
else:
halo_id = map_halo_id(initial_halo_id, ref_waveform, ref_resolution, waveform, resolution)
halo, halo_particles, meta, image_coords = load_halo_data(waveform, resolution, halo_id,
coords[waveform])
halo_id = map_halo_id(
initial_halo_id,
ref_waveform,
ref_resolution,
waveform,
resolution,
)
halo, halo_particles, meta, image_coords = load_halo_data(
waveform, resolution, halo_id, coords[waveform]
)
if not coords[waveform]:
coords[waveform] = image_coords
print(coords[waveform])
@ -104,19 +130,30 @@ def main():
# sleep(100)
radius, X, Y, Z = coords[waveform]
rho, _ = cic_from_radius(
halo_particles.X.to_numpy(), halo_particles.Y.to_numpy(),
1000, X, Y, radius, periodic=False)
halo_particles.X.to_numpy(),
halo_particles.Y.to_numpy(),
1000,
X,
Y,
radius,
periodic=False,
)
rhos[(waveform, resolution)] = rho
vmin = min(rho.min(), vmin)
vmax = max(rho.max(), vmax)
dataset_group = halo_group.create_group(f"{waveform}_{resolution}")
dataset_group.create_dataset("rho", data=rho, compression='gzip', compression_opts=5)
dataset_group.create_dataset(
"rho", data=rho, compression="gzip", compression_opts=5
)
dataset_group.create_dataset("coords", data=coords[waveform])
dataset_group.create_dataset("mass", data=meta.particle_mass)
dataset_group.create_dataset("halo_id", data=halo_id)
imsave(rho, f"out_halo{initial_halo_id}_{waveform}_{resolution}_{halo_id}.png")
imsave(
rho,
f"out_halo{initial_halo_id}_{waveform}_{resolution}_{halo_id}.png",
)
halo_group.create_dataset("vmin_vmax", data=[vmin, vmax])
if __name__ == '__main__':
if __name__ == "__main__":
main()


@ -44,10 +44,12 @@ def main():
else:
all_data = np.vstack(column_data[column])
out_column = outpart.create_dataset(column, data=all_data, compression='gzip' if column == "Masses" else None)
out_column = outpart.create_dataset(
column, data=all_data, compression="gzip" if column == "Masses" else None
)
print(len(out_column))
f_out.close()
if __name__ == '__main__':
if __name__ == "__main__":
main()


@ -14,7 +14,9 @@ comparisons_dir = base_dir / "comparisons"
def read(mode, ref_res, comp_res):
df = pd.read_csv(comparisons_dir / get_comp_id(waveform, ref_res, waveform, comp_res))
df = pd.read_csv(
comparisons_dir / get_comp_id(waveform, ref_res, waveform, comp_res)
)
# df = pd.read_csv(f"{mode}_{ref_res}_100_{mode}_{comp_res}_100.csv")
print(min(df.ref_Mvir), max(df.ref_Mvir))
@ -23,7 +25,9 @@ def read(mode, ref_res, comp_res):
for i in range(num_bins):
values = np.where(digits == i + 1)
in_bin = df.iloc[values]
matches = np.array(in_bin.match) # TODO: or instead fraction of halos that are matching?
matches = np.array(
in_bin.match
) # TODO: or instead fraction of halos that are matching?
bin_means.append(matches.mean())
return bin_means
@ -48,7 +52,9 @@ ax.set_yticklabels(["{:.2f}".format(a) for a in bins])
for x in range(data.shape[0]):
for y in range(data.shape[1]):
text = ax.text(y, x, "{:.2f}".format(data[x, y]), ha="center", va="center", color="w")
text = ax.text(
y, x, "{:.2f}".format(data[x, y]), ha="center", va="center", color="w"
)
# print(data)
p = ax.imshow(data, origin="lower", vmin=0.5, vmax=1)

nfw.py

@ -9,8 +9,12 @@ def nfw(r, rho_0, r_s):
def fit_nfw(radius, densities):
popt, pcov = curve_fit(
nfw, radius, densities,
verbose=1, method="trf", max_nfev=1000,
bounds=([0, 0], [inf, 1])
nfw,
radius,
densities,
verbose=1,
method="trf",
max_nfev=1000,
bounds=([0, 0], [inf, 1]),
)
return popt
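For context, nfw is the standard Navarro-Frenk-White profile; spelled out to match the signature in the hunk header above (the body itself is outside this diff):
def nfw(r, rho_0, r_s):
    # rho(r) = rho_0 / ((r/r_s) * (1 + r/r_s)**2)
    return rho_0 / ((r / r_s) * (1 + r / r_s) ** 2)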


@ -26,5 +26,5 @@ for i, profile in enumerate(density_profiles):
sleep(1)
color = "red" if is_odd_halo else "lightgray"
plt.loglog(bin_edges, profile, color=color, alpha=.1)
plt.loglog(bin_edges, profile, color=color, alpha=0.1)
plt.show()


@ -28,7 +28,9 @@ def all_children(df: DataFrame, id: int):
yield from all_children(df, sh)
def particles_in_halo(offsets: Dict[int, int], particle_ids: np.ndarray) -> HaloParticleMapping:
def particles_in_halo(
offsets: Dict[int, int], particle_ids: np.ndarray
) -> HaloParticleMapping:
"""
get mapping from halo ID to particle ID set by using the offset and a lookup in the particle array
"""
@ -80,7 +82,12 @@ def cached_particles_in_halo(file: Path, *args, **kwargs) -> HaloParticleMapping
print("saving to cache")
with h5py.File(file, "w") as data_file:
for key, valueset in halo_particle_ids.items():
data_file.create_dataset(str(key), data=list(valueset), compression='gzip', compression_opts=5)
data_file.create_dataset(
str(key),
data=list(valueset),
compression="gzip",
compression_opts=5,
)
return halo_particle_ids
@ -116,7 +123,9 @@ def read_velo_halos(directory: Path, veloname="vroutput"):
"offset_unbound": group_catalog["Offset_unbound"],
"parent_halo_id": group_catalog["Parent_halo_ID"],
}
df = pd.DataFrame({**data, **scalar_properties}) # create dataframe from two merged dicts
df = pd.DataFrame(
{**data, **scalar_properties}
) # create dataframe from two merged dicts
df.index += 1 # Halo IDs start at 1
return df
@ -153,7 +162,9 @@ def read_velo_halo_particles(
else:
print("look up unbound particle IDs")
halo_unbound_offsets = dict(df["offset_unbound"])
halo_particle_unbound_ids = particles_in_halo(halo_unbound_offsets, particle_ids_unbound)
halo_particle_unbound_ids = particles_in_halo(
halo_unbound_offsets, particle_ids_unbound
)
return df, halo_particle_ids, halo_particle_unbound_ids
@ -179,7 +190,9 @@ def main():
Nres = 512
directory = base_dir / f"{waveform}_{Nres}_100"
df_halo, halo_particle_ids, halo_particle_unbound_ids = read_velo_halo_particles(directory)
df_halo, halo_particle_ids, halo_particle_unbound_ids = read_velo_halo_particles(
directory
)
particles, meta = read_file(directory)
HALO = 1000
while True:
@ -196,5 +209,5 @@ def main():
HALO += 1
if __name__ == '__main__':
if __name__ == "__main__":
main()


@ -24,12 +24,18 @@ def read_file(file: Path) -> Tuple[pd.DataFrame, ParticlesMeta]:
masses = reference_file["PartType1"]["Masses"]
if not np.all(masses == masses[0]):
raise ValueError("only equal mass particles are supported for now")
df = pd.DataFrame(reference_file["PartType1"]["Coordinates"], columns=["X", "Y", "Z"])
df = pd.DataFrame(
reference_file["PartType1"]["Coordinates"], columns=["X", "Y", "Z"]
)
if has_fof:
df2 = pd.DataFrame(reference_file["PartType1"]["FOFGroupIDs"], columns=["FOFGroupIDs"]).astype("category")
df2 = pd.DataFrame(
reference_file["PartType1"]["FOFGroupIDs"], columns=["FOFGroupIDs"]
).astype("category")
df = df.merge(df2, "outer", left_index=True, right_index=True)
del df2
df3 = pd.DataFrame(reference_file["PartType1"]["ParticleIDs"], columns=["ParticleIDs"])
df3 = pd.DataFrame(
reference_file["PartType1"]["ParticleIDs"], columns=["ParticleIDs"]
)
df = df.merge(df3, "outer", left_index=True, right_index=True)
del df3
@@ -37,9 +43,7 @@ def read_file(file: Path) -> Tuple[pd.DataFrame, ParticlesMeta]:
if has_fof:
print("sorting")
df.sort_values("FOFGroupIDs", inplace=True)
meta = ParticlesMeta(
particle_mass=masses[0]
)
meta = ParticlesMeta(particle_mass=masses[0])
print("saving cache")
with meta_cache_file.open("wb") as f:
pickle.dump(meta, f)


@@ -25,7 +25,9 @@ class IDScaler:
mult = orig * self.N
for shift in self.shifts:
variant = mult + shift
yield ((variant[0] * self.Nres_max) + variant[1]) * self.Nres_max + variant[2]
yield ((variant[0] * self.Nres_max) + variant[1]) * self.Nres_max + variant[
2
]
def downscale(self, particle_ID: int):
orig = self.original_position(self.Nres_max, particle_ID)
@@ -40,7 +42,9 @@ def test():
Nres_1 = 128
Nres_2 = 256
test_particle_id = ((test_particle[0] * Nres_1) + test_particle[1]) * Nres_1 + test_particle[2]
test_particle_id = (
(test_particle[0] * Nres_1) + test_particle[1]
) * Nres_1 + test_particle[2]
print(test_particle_id)
scaler = IDScaler(Nres_1, Nres_2)
@@ -61,7 +65,9 @@ def benchmark():
test_particle = np.random.randint(0, 127, size=(3, 10_000_000))
for part in test_particle:
test_particle_id = ((test_particle[0] * Nres_1) + test_particle[1]) * Nres_1 + test_particle[2]
test_particle_id = (
(test_particle[0] * Nres_1) + test_particle[1]
) * Nres_1 + test_particle[2]
particle_ID_1_converted = scaler.upscale(test_particle_id)
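
The ID arithmetic being reformatted here is a flattened 3D grid index, id = ((x * N) + y) * N + z; its inverse is two divmods. A quick self-check (the N value is arbitrary):

def flatten(x, y, z, N):
    # row-major flattening of a 3D grid position into a particle ID
    return ((x * N) + y) * N + z

def unflatten(particle_id, N):
    xy, z = divmod(particle_id, N)
    x, y = divmod(xy, N)
    return x, y, z

N = 128
assert unflatten(flatten(5, 17, 99, N), N) == (5, 17, 99)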


@@ -10,7 +10,9 @@ from scipy.interpolate import griddata
from utils import create_figure
def create_2d_slice(input_file: Path, center: List[float], property: str, axis="Z", thickness=3):
def create_2d_slice(
input_file: Path, center: List[float], property: str, axis="Z", thickness=3
):
axis_names = ["X", "Y", "Z"]
cut_axis = axis_names.index(axis)
with h5py.File(input_file) as f:
@@ -24,11 +26,7 @@ def create_2d_slice(input_file: Path, center: List[float], property: str, axis="
# coords_in_slice = coords[in_slice]
# data_in_slice = data[in_slice]
print("stats")
other_axis = {
"X": ("Y", "Z"),
"Y": ("X", "Z"),
"Z": ("X", "Y")
}
other_axis = {"X": ("Y", "Z"), "Y": ("X", "Z"), "Z": ("X", "Y")}
x_axis_label, y_axis_label = other_axis[axis]
x_axis = axis_names.index(x_axis_label)
y_axis = axis_names.index(y_axis_label)
@@ -36,12 +34,7 @@ def create_2d_slice(input_file: Path, center: List[float], property: str, axis="
yrange = np.linspace(coords[::, y_axis].min(), coords[::, y_axis].max(), 1000)
gx, gy, gz = np.meshgrid(xrange, yrange, center[cut_axis])
print("interpolating")
grid = griddata(
coords,
data,
(gx, gy, gz),
method="linear"
)[::, ::, 0]
grid = griddata(coords, data, (gx, gy, gz), method="linear")[::, ::, 0]
print(grid.shape)
# stats, x_edge, y_edge, _ = binned_statistic_2d(
# coords_in_slice[::, x_axis],
@@ -56,8 +49,8 @@ def create_2d_slice(input_file: Path, center: List[float], property: str, axis="
img = ax.imshow(
grid.T,
norm=LogNorm(),
interpolation='nearest',
extent=[xrange[0], xrange[-1], yrange[0], yrange[-1]]
interpolation="nearest",
extent=[xrange[0], xrange[-1], yrange[0], yrange[-1]],
)
ax.set_title(input_file.parent.stem)
ax.set_xlabel(x_axis_label)
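
The interpolation step above reduces to a single scipy.interpolate.griddata call over scattered points; a minimal 2D sketch with synthetic data (the real code works on a 3D slab and uses a LogNorm):

import numpy as np
import matplotlib.pyplot as plt
from scipy.interpolate import griddata

rng = np.random.default_rng(42)
coords = rng.random((500, 2))                   # scattered sample positions
data = np.sin(4 * coords[:, 0]) * coords[:, 1]  # values at those positions

xrange = np.linspace(0, 1, 200)
yrange = np.linspace(0, 1, 200)
gx, gy = np.meshgrid(xrange, yrange)
grid = griddata(coords, data, (gx, gy), method="linear")  # NaN outside the hull

fig, ax = plt.subplots()
ax.imshow(
    grid.T,
    interpolation="nearest",
    origin="lower",
    extent=[xrange[0], xrange[-1], yrange[0], yrange[-1]],
)
plt.show()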


@@ -11,84 +11,108 @@ from paths import base_dir, spectra_dir
from spectra_plot import waveforms
def run_spectra(waveform: str, resolution_1: int, resolution_2: int, Lbox: int, time: str):
def run_spectra(
waveform: str, resolution_1: int, resolution_2: int, Lbox: int, time: str
):
print("starting")
setup_1 = f'{waveform}_{resolution_1}_{Lbox}'
setup_2 = f'{waveform}_{resolution_2}_{Lbox}'
setup_1 = f"{waveform}_{resolution_1}_{Lbox}"
setup_2 = f"{waveform}_{resolution_2}_{Lbox}"
# #For ICs: time == 'ics'
if time == 'ics':
output_file = base_dir / f'spectra/{waveform}_{Lbox}/{waveform}_{Lbox}_ics_{resolution_1}_{resolution_2}_cross_spectrum.txt'
if time == "ics":
output_file = (
base_dir
/ f"spectra/{waveform}_{Lbox}/{waveform}_{Lbox}_ics_{resolution_1}_{resolution_2}_cross_spectrum.txt"
)
if output_file.exists():
print(f'{output_file} already exists, skipping.')
print(f"{output_file} already exists, skipping.")
return
subprocess.run([
subprocess.run(
[
str(spectra),
'--ngrid',
'2048',
'--format=4', # This seems to work, but is not as readable
'--output',
str(base_dir / f'spectra/{waveform}_{Lbox}/{waveform}_{Lbox}_ics_{resolution_1}_{resolution_2}'),
'--input',
str(base_dir / f'{setup_1}/ics_{setup_1}.hdf5'),
'--input',
str(base_dir / f'{setup_2}/ics_{setup_2}.hdf5')
], check=True)
"--ngrid",
"2048",
"--format=4", # This seems to work, but is not as readable
"--output",
str(
base_dir
/ f"spectra/{waveform}_{Lbox}/{waveform}_{Lbox}_ics_{resolution_1}_{resolution_2}"
),
"--input",
str(base_dir / f"{setup_1}/ics_{setup_1}.hdf5"),
"--input",
str(base_dir / f"{setup_2}/ics_{setup_2}.hdf5"),
],
check=True,
)
# #For evaluation of results at redshift z=1: time == 'z=1' | NOT ADAPTED FOR VSC5 YET!
elif time == 'z=1':
output_file = base_dir / f'spectra/{waveform}_{Lbox}/{waveform}_{Lbox}_a2_{resolution_1}_{resolution_2}_cross_spectrum.txt'
elif time == "z=1":
output_file = (
base_dir
/ f"spectra/{waveform}_{Lbox}/{waveform}_{Lbox}_a2_{resolution_1}_{resolution_2}_cross_spectrum.txt"
)
if output_file.exists():
print(f'{output_file} already exists, skipping.')
print(f"{output_file} already exists, skipping.")
return
subprocess.run([
subprocess.run(
[
str(spectra),
'--ngrid',
'1024',
'--format=3',
'--output',
str(base_dir / f'spectra/{waveform}_{Lbox}/{waveform}_{Lbox}_a2_{resolution_1}_{resolution_2}'),
'--input',
str(base_dir / f'{setup_1}/output_0002.hdf5'),
'--input',
str(base_dir / f'{setup_2}/output_0002.hdf5')
], check=True)
"--ngrid",
"1024",
"--format=3",
"--output",
str(
base_dir
/ f"spectra/{waveform}_{Lbox}/{waveform}_{Lbox}_a2_{resolution_1}_{resolution_2}"
),
"--input",
str(base_dir / f"{setup_1}/output_0002.hdf5"),
"--input",
str(base_dir / f"{setup_2}/output_0002.hdf5"),
],
check=True,
)
# #For evaluation of final results: time == 'end'
elif time == 'end':
output_file = base_dir / f'spectra/{waveform}_{Lbox}/{waveform}_{Lbox}_a4_{resolution_1}_{resolution_2}_cross_spectrum.txt'
elif time == "end":
output_file = (
base_dir
/ f"spectra/{waveform}_{Lbox}/{waveform}_{Lbox}_a4_{resolution_1}_{resolution_2}_cross_spectrum.txt"
)
if output_file.exists():
print(f'{output_file} already exists, skipping.')
print(f"{output_file} already exists, skipping.")
return
subprocess.run([
subprocess.run(
[
str(spectra),
'--ngrid',
'2048',
'--format=3',
'--output',
str(base_dir / f'spectra/{waveform}_{Lbox}/{waveform}_{Lbox}_a4_{resolution_1}_{resolution_2}'),
'--input',
str(base_dir / f'{setup_1}/output_0004.hdf5'),
'--input',
str(base_dir / f'{setup_2}/output_0004.hdf5')
], check=True)
"--ngrid",
"2048",
"--format=3",
"--output",
str(
base_dir
/ f"spectra/{waveform}_{Lbox}/{waveform}_{Lbox}_a4_{resolution_1}_{resolution_2}"
),
"--input",
str(base_dir / f"{setup_1}/output_0004.hdf5"),
"--input",
str(base_dir / f"{setup_2}/output_0004.hdf5"),
],
check=True,
)
else:
raise ValueError(f"invalid time ({time})")
print("end")
def power_run(resolutions: list, Lbox: int, time: str):
args = []
for waveform in waveforms:
for resolution in resolutions:
args.append((
waveform,
resolution,
resolution,
Lbox,
time
))
args.append((waveform, resolution, resolution, Lbox, time))
return args
@@ -96,28 +120,22 @@ def cross_run(resolutions: list, Lbox: int, time: str):
args = []
for waveform in waveforms:
for res1, res2 in itertools.combinations(resolutions, 2):
args.append((
waveform,
res1,
res2,
Lbox,
time
))
args.append((waveform, res1, res2, Lbox, time))
return args
if __name__ == '__main__':
# input("are you sure you want to run this? This might need a large amount of memory")
if __name__ == "__main__":
# input("are you sure you want to run this? This might need a large amount of memory")
Lbox = 100
resolutions = [128, 256, 512, 1024]
spectra = spectra_dir / 'spectra'
spectra = spectra_dir / "spectra"
time = argv[1]
if argv[2] == 'power':
if argv[2] == "power":
args = power_run(resolutions=resolutions, Lbox=Lbox, time=time)
elif argv[2] == 'cross':
elif argv[2] == "cross":
args = cross_run(resolutions=resolutions, Lbox=Lbox, time=time)
else:
raise ValueError("missing argv[2] (power|cross)")


@@ -82,7 +82,10 @@ def create_plot(mode):
fig: Figure
combination_list = list(itertools.combinations(resolutions, 2))
fig, axes = plt.subplots(
len(waveforms), 3, sharex=True, sharey=True,
len(waveforms),
3,
sharex=True,
sharey=True,
figsize=figsize_from_page_fraction(columns=2),
)
crossings = np.zeros((len(waveforms), len(combination_list)))
@@ -94,7 +97,7 @@ def create_plot(mode):
# TODO: better names
ax_ics: "ics",
ax_z1: "z=1",
ax_end: "z=0"
ax_end: "z=0",
}
bottom_row = i == len(waveforms) - 1
top_row = i == 0
@@ -118,7 +121,9 @@ def create_plot(mode):
verticalalignment="top",
transform=ax.transAxes,
)
for j, res in enumerate(resolutions[:-1] if mode == "cross" else resolutions):
for j, res in enumerate(
resolutions[:-1] if mode == "cross" else resolutions
):
ax.axvline(
k0 * res,
color=colors[j],
@@ -129,26 +134,34 @@ def create_plot(mode):
# ax.set_yticklabels([])
if mode == "power":
ax_ics.set_ylabel("$\\mathrm{{P}}_\\mathrm{{X}}$ / $\\mathrm{{P}}_{{1024}}$")
ax_ics.set_ylabel(
"$\\mathrm{{P}}_\\mathrm{{X}}$ / $\\mathrm{{P}}_{{1024}}$"
)
for j, resolution in enumerate(resolutions):
ics_data = spectra_data(waveform, resolution, resolution, Lbox, "ics")
ics_k = ics_data["k [Mpc]"]
ics_p1 = ics_data["P1"]
comp_data = spectra_data(waveform, resolutions[-1], resolutions[-1], Lbox, "ics")
comp_data = spectra_data(
waveform, resolutions[-1], resolutions[-1], Lbox, "ics"
)
comp_p1 = comp_data["P1"]
ics_p1 /= comp_p1
end_data = spectra_data(waveform, resolution, resolution, Lbox, "end")
end_k = end_data["k [Mpc]"]
end_p1 = end_data["P1"]
comp_data = spectra_data(waveform, resolutions[-1], resolutions[-1], Lbox, "end")
comp_data = spectra_data(
waveform, resolutions[-1], resolutions[-1], Lbox, "end"
)
comp_p1 = comp_data["P1"]
end_p1 /= comp_p1
z1_data = spectra_data(waveform, resolution, resolution, Lbox, "z=1")
z1_k = z1_data["k [Mpc]"]
z1_p1 = z1_data["P1"]
comp_data = spectra_data(waveform, resolutions[-1], resolutions[-1], Lbox, 'z=1')
comp_data = spectra_data(
waveform, resolutions[-1], resolutions[-1], Lbox, "z=1"
)
comp_p1 = comp_data["P1"]
z1_p1 /= comp_p1
@@ -158,8 +171,7 @@ def create_plot(mode):
for ax in [ax_ics, ax_z1, ax_end]:
ax.set_ylim(0.9, 1.10)
ax.set_axisbelow(True)
ax.grid(color='black', linestyle=':', linewidth=0.5, alpha=0.5)
ax.grid(color="black", linestyle=":", linewidth=0.5, alpha=0.5)
# fig.suptitle(f"Power Spectra {time}") #Not needed for paper
# fig.tight_layout()
@@ -168,39 +180,47 @@ def create_plot(mode):
ax_ics.set_ylabel("C")
# ax_end.set_ylabel("C")
for j, (res1, res2) in enumerate(combination_list):
ics_data = spectra_data(waveform, res1, res2, Lbox, 'ics')
ics_data = spectra_data(waveform, res1, res2, Lbox, "ics")
ics_k = ics_data["k [Mpc]"]
ics_pcross = ics_data["Pcross"]
ax_ics.semilogx(ics_k, ics_pcross, color=colors[j + 3], label=f'{res1} vs {res2}')
ax_ics.semilogx(
ics_k, ics_pcross, color=colors[j + 3], label=f"{res1} vs {res2}"
)
z1_data = spectra_data(waveform, res1, res2, Lbox, 'z=1')
z1_data = spectra_data(waveform, res1, res2, Lbox, "z=1")
z1_k = z1_data["k [Mpc]"]
z1_pcross = z1_data["Pcross"]
ax_z1.semilogx(z1_k, z1_pcross, color=colors[j + 3], label=f'{res1} vs {res2}')
ax_z1.semilogx(
z1_k, z1_pcross, color=colors[j + 3], label=f"{res1} vs {res2}"
)
end_data = spectra_data(waveform, res1, res2, Lbox, 'end')
end_data = spectra_data(waveform, res1, res2, Lbox, "end")
end_k = end_data["k [Mpc]"]
end_pcross = end_data["Pcross"]
ax_end.semilogx(end_k, end_pcross, color=colors[j + 3], label=f'{res1} vs {res2}')
ax_end.semilogx(
end_k, end_pcross, color=colors[j + 3], label=f"{res1} vs {res2}"
)
# #Put this here to enable changing time of crossing measurement more easily
smaller_res = min(res1, res2)
crossing_index = np.searchsorted(end_k.to_list(), k0 * smaller_res) # change here
crossing_index = np.searchsorted(
end_k.to_list(), k0 * smaller_res
) # change here
crossing_value = end_pcross[crossing_index] # and here
crossings[i][j] = crossing_value
for ax in [ax_ics, ax_z1, ax_end]:
ax.set_axisbelow(True)
ax.grid(color='black', linestyle=':', linewidth=0.5, alpha=0.5)
ax.grid(color="black", linestyle=":", linewidth=0.5, alpha=0.5)
ax_end.set_xlim(right=k0 * resolutions[-1])
ax_end.set_ylim(0.8, 1.02)
if bottom_row:
# ax_z1.legend()
ax_ics.legend(loc='lower left')
ax_ics.legend(loc="lower left")
if not bottom_row:
last_xtick: XTick = ax_ics.yaxis.get_major_ticks()[0]
last_xtick.set_visible(False)
@@ -212,7 +232,7 @@ def create_plot(mode):
# print(crossings_df.to_markdown())
print(crossings_df.to_latex())
fig.tight_layout()
fig.subplots_adjust(wspace=0,hspace=0)
fig.subplots_adjust(wspace=0, hspace=0)
fig.savefig(Path(f"~/tmp/spectra_{mode}.pdf").expanduser())


@@ -42,11 +42,12 @@ def plotdf3d(pl: Plotter, df: DataFrame, color="white"):
glrenderer = vtk.vtkOpenGLRenderer.SafeDownCast(renderer)
glrenderer.SetPass(blur_pass)
def df_to_coords(df: pd.DataFrame):
return df[["X", "Y", "Z"]].to_numpy()
if __name__ == '__main__':
if __name__ == "__main__":
# HALO = 1
# reference_dir = base_dir / "shannon_512_100"
# df, _ = read_file(reference_dir / "output_0004.hdf5")


@@ -12,7 +12,11 @@ waveforms = ["DB2", "DB4", "DB8", "shannon"]
def print_progress(i, total, extra_data=""):
print(f"{i} of {total} ({extra_data})" + " " * 20, end="\r" if i != total else "\n", flush=True)
print(
f"{i} of {total} ({extra_data})" + " " * 20,
end="\r" if i != total else "\n",
flush=True,
)
def memory_usage(df: pd.DataFrame):
@@ -35,7 +39,7 @@ def read_swift_config(dir: Path):
def print_wall_time(dir: Path):
with(dir / "swift.log").open() as f:
with (dir / "swift.log").open() as f:
last_line = f.readlines()[-1]
print(last_line)
assert "main: done. Bye." in last_line
@@ -66,17 +70,24 @@ def rowcolumn_labels(axes, labels, isrow: bool, pad=5) -> None:
xy = (0, 0.5)
xytext = (-ax.yaxis.labelpad - pad, 0)
xycoords = ax.yaxis.label
ha = 'right'
va = 'center'
ha = "right"
va = "center"
else:
xy = (0.5, 1)
xytext = (0, pad)
xycoords = 'axes fraction'
ha = 'center'
va = 'baseline'
ax.annotate(label, xy=xy, xytext=xytext,
xycoords=xycoords, textcoords='offset points',
size='large', ha=ha, va=va)
xycoords = "axes fraction"
ha = "center"
va = "baseline"
ax.annotate(
label,
xy=xy,
xytext=xytext,
xycoords=xycoords,
textcoords="offset points",
size="medium",
ha=ha,
va=va,
)
def tex_fmt(format_str: str, *args) -> str:
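
The annotate call in rowcolumn_labels places row labels relative to the y-axis label artist and column labels in axes-fraction coordinates; a standalone usage sketch against a plain grid (the label text is arbitrary):

import matplotlib.pyplot as plt

fig, axes = plt.subplots(2, 3, sharex=True, sharey=True)
for ax, label in zip(axes[0], ["ics", "z=1", "z=0"]):  # column titles
    ax.annotate(
        label, xy=(0.5, 1), xytext=(0, 5),
        xycoords="axes fraction", textcoords="offset points",
        size="medium", ha="center", va="baseline",
    )
for ax, label in zip(axes[:, 0], ["DB2", "shannon"]):  # row titles
    ax.annotate(
        label, xy=(0, 0.5), xytext=(-ax.yaxis.labelpad - 5, 0),
        xycoords=ax.yaxis.label,  # an Artist is a valid coordinate system
        textcoords="offset points",
        size="medium", ha="right", va="center",
    )
plt.show()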