mirror of https://github.com/Findus23/halo_comparison.git

Changed layout of comparison figures: introduced sup(x/y)label but kept the old structure, changed the size of rowcolumn_labels to fit the other labels, and changed the position of the comp information

Formatted everything with black
glatterf42 2022-08-10 16:26:30 +02:00
parent 60bd933469
commit 39bb626a42
25 changed files with 947 additions and 427 deletions

View file

@@ -33,10 +33,12 @@ from utils import figsize_from_page_fraction
# rc('ytick',direction='in')
# rc('legend',fontsize='x-large')
base_shape = {
    "u": [np.array([0, 1]), np.array([1, 0]), np.array([0, -1])],
    "d": [np.array([0, -1]), np.array([-1, 0]), np.array([0, 1])],
    "r": [np.array([1, 0]), np.array([0, 1]), np.array([-1, 0])],
    "l": [np.array([-1, 0]), np.array([0, -1]), np.array([1, 0])],
}


def hilbert_curve(order, orientation):
@@ -44,26 +46,46 @@ def hilbert_curve(order, orientation):
    """
    Recursively creates the structure for a hilbert curve of given order
    """
    if order > 1:
        if orientation == "u":
            return (
                hilbert_curve(order - 1, "r")
                + [np.array([0, 1])]
                + hilbert_curve(order - 1, "u")
                + [np.array([1, 0])]
                + hilbert_curve(order - 1, "u")
                + [np.array([0, -1])]
                + hilbert_curve(order - 1, "l")
            )
        elif orientation == "d":
            return (
                hilbert_curve(order - 1, "l")
                + [np.array([0, -1])]
                + hilbert_curve(order - 1, "d")
                + [np.array([-1, 0])]
                + hilbert_curve(order - 1, "d")
                + [np.array([0, 1])]
                + hilbert_curve(order - 1, "r")
            )
        elif orientation == "r":
            return (
                hilbert_curve(order - 1, "u")
                + [np.array([1, 0])]
                + hilbert_curve(order - 1, "r")
                + [np.array([0, 1])]
                + hilbert_curve(order - 1, "r")
                + [np.array([-1, 0])]
                + hilbert_curve(order - 1, "d")
            )
        else:
            return (
                hilbert_curve(order - 1, "d")
                + [np.array([-1, 0])]
                + hilbert_curve(order - 1, "l")
                + [np.array([0, -1])]
                + hilbert_curve(order - 1, "l")
                + [np.array([1, 0])]
                + hilbert_curve(order - 1, "u")
            )
    else:
        return base_shape[orientation]
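For orientation, an illustrative note (not part of the diff): at order 1 the recursion bottoms out and simply returns the three step vectors of the requested base shape; the plotting code below turns such step lists into vertex coordinates by cumulative summation.

# illustrative: the base case of the recursion
steps = hilbert_curve(1, "u")
# -> [array([0, 1]), array([1, 0]), array([0, -1])], i.e. up, right, down,
# tracing the four cells of the first-order Hilbert curve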
@@ -88,15 +110,17 @@ def hilbert_curve(order, orientation):
order = 6
curve = hilbert_curve(order, "u")
curve = np.array(curve) * 4
cumulative_curve_int = np.array([np.sum(curve[:i], 0) for i in range(len(curve) + 1)])
cumulative_curve = (
    np.array([np.sum(curve[:i], 0) for i in range(len(curve) + 1)]) + 2
) / 2 ** (order + 2)

# plot curve using plt
N = 2 ** (2 * order)
sublevel = order - 4
cmap = cm.get_cmap("jet")

fig = plt.figure(figsize=figsize_from_page_fraction(height_to_width=1))
t = {}
@@ -104,31 +128,38 @@ sublevel = 7
for i in range(2 ** (2 * sublevel)):
    il = i * N // (2 ** (2 * sublevel))
    ir = (i + 1) * N // 2 ** (2 * sublevel)
    plt.plot(
        cumulative_curve[il : ir + 1, 0],
        cumulative_curve[il : ir + 1, 1],
        lw=0.5,
        c=cmap(i / 2 ** (2 * sublevel)),
    )
plt.xlabel("$x$")
plt.ylabel("$y$")
plt.tight_layout()
plt.savefig(Path(f"~/tmp/hilbert_indexcolor.eps").expanduser())

key = b"0123456789ABCDEF"
num = 123
print(siphash.SipHash_2_4(key, bytes(num)).hash())

order = 6
curve = hilbert_curve(order, "u")
curve = np.array(curve) * 4
cumulative_curve_int = np.array([np.sum(curve[:i], 0) for i in range(len(curve) + 1)])
cumulative_curve = (
    np.array([np.sum(curve[:i], 0) for i in range(len(curve) + 1)]) + 2
) / 2 ** (order + 2)

# plot curve using plt
N = 2 ** (2 * order)
sublevel = order - 4
cmap = cm.get_cmap("jet")
plt.figure()
key = b"0123456789ABCDEF"

fig = plt.figure(figsize=figsize_from_page_fraction(height_to_width=1))
t = {}
@@ -137,10 +168,15 @@ for i in range(2 ** (2 * sublevel)):
    il = i * N // (2 ** (2 * sublevel))
    ir = (i + 1) * N // 2 ** (2 * sublevel)
    sipkey = siphash.SipHash_2_4(key, bytes(il)).hash()
    plt.plot(
        cumulative_curve[il : ir + 1, 0],
        cumulative_curve[il : ir + 1, 1],
        lw=0.5,
        c=cmap(sipkey / 2 ** 64),
    )
plt.xlabel("$x$")
plt.ylabel("$y$")
plt.tight_layout()
plt.savefig(Path(f"~/tmp/hilbert_indexcolor_scrambled.eps").expanduser())
plt.show()
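A note on the scrambled colouring above (an observation on the code, not part of the diff): SipHash-2-4 maps its input to a pseudo-random 64-bit integer, so sipkey / 2 ** 64 lies in [0, 1) and decorrelates the colours of neighbouring curve segments. Beware that bytes(il) builds il zero bytes rather than encoding the integer, so the hash effectively depends only on the length of the input; each index still receives a deterministic pseudo-random colour:

key = b"0123456789ABCDEF"  # 16-byte SipHash key
assert bytes(3) == b"\x00\x00\x00"  # bytes(n) yields n zero bytes, not digits
colour = siphash.SipHash_2_4(key, bytes(3)).hash() / 2 ** 64  # in [0, 1)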

View file

@@ -8,6 +8,7 @@ from typing import List

import matplotlib.pyplot as plt
import numpy as np

# two-fold upsampling -- https://cnx.org/contents/xsppCgXj@8.18:H_wA16rf@16/Upsampling
from matplotlib.axes import Axes
from matplotlib.figure import Figure
@@ -69,12 +70,12 @@ def cascade_algorithm(h, g, maxit):
    for it in range(maxit):
        # perform repeated convolutions
        phi_it = np.sqrt(2) * np.convolve(h_it, phi_it, mode="full")
        if it != maxit - 1:
            psi_it = np.sqrt(2) * np.convolve(h_it, psi_it, mode="full")
        else:
            psi_it = np.sqrt(2) * np.convolve(g_it, psi_it, mode="full")

        # upsample the coefficients
        h_it = upsample(h_it)
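upsample is not shown in this hunk; a minimal sketch consistent with the two-fold upsampling reference in the imports (an assumption, not necessarily the repository's implementation):

def upsample(h):
    # insert a zero between consecutive filter taps:
    # [h0, h1, h2] -> [h0, 0, h1, 0, h2]
    out = np.zeros(2 * len(h) - 1)
    out[::2] = h
    return out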
@@ -108,55 +109,173 @@ xdb2, phidb2, psidb2 = cascade_algorithm(h_DB2, g_DB2, maxit)
# DB3 -- http://wavelets.pybytes.com/wavelet/db3/
h_DB3 = np.array(
    [
        0.3326705529509569,
        0.8068915093133388,
        0.4598775021193313,
        -0.13501102001039084,
        -0.08544127388224149,
        0.035226291882100656,
    ]
)
g_DB3 = np.array(
    [
        0.035226291882100656,
        0.08544127388224149,
        -0.13501102001039084,
        -0.4598775021193313,
        0.8068915093133388,
        -0.3326705529509569,
    ]
)

xdb3, phidb3, psidb3 = cascade_algorithm(h_DB3, g_DB3, maxit)
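The analysis/synthesis pairs above are quadrature mirror filters: each g is the time-reversed h with alternating signs. A quick illustrative check (not part of the diff):

n = np.arange(len(h_DB3))
assert np.allclose(g_DB3, (-1) ** n * h_DB3[::-1])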
# DB4 -- http://wavelets.pybytes.com/wavelet/db4/
h_DB4 = np.array(
    [
        0.23037781330885523,
        0.7148465705525415,
        0.6308807679295904,
        -0.02798376941698385,
        -0.18703481171888114,
        0.030841381835986965,
        0.032883011666982945,
        -0.010597401784997278,
    ]
)
g_DB4 = np.array(
    [
        -0.010597401784997278,
        -0.032883011666982945,
        0.030841381835986965,
        0.18703481171888114,
        -0.02798376941698385,
        -0.6308807679295904,
        0.7148465705525415,
        -0.23037781330885523,
    ]
)

xdb4, phidb4, psidb4 = cascade_algorithm(h_DB4, g_DB4, maxit)

# DB8 -- http://wavelets.pybytes.com/wavelet/db8/
h_DB8 = np.array(
    [
        0.05441584224308161,
        0.3128715909144659,
        0.6756307362980128,
        0.5853546836548691,
        -0.015829105256023893,
        -0.2840155429624281,
        0.00047248457399797254,
        0.128747426620186,
        -0.01736930100202211,
        -0.04408825393106472,
        0.013981027917015516,
        0.008746094047015655,
        -0.00487035299301066,
        -0.0003917403729959771,
        0.0006754494059985568,
        -0.00011747678400228192,
    ]
)
g_DB8 = np.array(
    [
        -0.00011747678400228192,
        -0.0006754494059985568,
        -0.0003917403729959771,
        0.00487035299301066,
        0.008746094047015655,
        -0.013981027917015516,
        -0.04408825393106472,
        0.01736930100202211,
        0.128747426620186,
        -0.00047248457399797254,
        -0.2840155429624281,
        0.015829105256023893,
        0.5853546836548691,
        -0.6756307362980128,
        0.3128715909144659,
        -0.05441584224308161,
    ]
)

xdb8, phidb8, psidb8 = cascade_algorithm(h_DB8, g_DB8, maxit)
# DB16 --
h_DB16 = np.array(
    [
        0.0031892209253436892,
        0.03490771432362905,
        0.1650642834886438,
        0.43031272284545874,
        0.6373563320829833,
        0.44029025688580486,
        -0.08975108940236352,
        -0.3270633105274758,
        -0.02791820813292813,
        0.21119069394696974,
        0.027340263752899923,
        -0.13238830556335474,
        -0.006239722752156254,
        0.07592423604445779,
        -0.007588974368642594,
        -0.036888397691556774,
        0.010297659641009963,
        0.013993768859843242,
        -0.006990014563390751,
        -0.0036442796214883506,
        0.00312802338120381,
        0.00040789698084934395,
        -0.0009410217493585433,
        0.00011424152003843815,
        0.00017478724522506327,
        -6.103596621404321e-05,
        -1.394566898819319e-05,
        1.133660866126152e-05,
        -1.0435713423102517e-06,
        -7.363656785441815e-07,
        2.3087840868545578e-07,
        -2.1093396300980412e-08,
    ]
)
g_DB16 = np.array(
    [
        -2.1093396300980412e-08,
        -2.3087840868545578e-07,
        -7.363656785441815e-07,
        1.0435713423102517e-06,
        1.133660866126152e-05,
        1.394566898819319e-05,
        -6.103596621404321e-05,
        -0.00017478724522506327,
        0.00011424152003843815,
        0.0009410217493585433,
        0.00040789698084934395,
        -0.00312802338120381,
        -0.0036442796214883506,
        0.006990014563390751,
        0.013993768859843242,
        -0.010297659641009963,
        -0.036888397691556774,
        0.007588974368642594,
        0.07592423604445779,
        0.006239722752156254,
        -0.13238830556335474,
        -0.027340263752899923,
        0.21119069394696974,
        0.02791820813292813,
        -0.3270633105274758,
        0.08975108940236352,
        0.44029025688580486,
        -0.6373563320829833,
        0.43031272284545874,
        -0.1650642834886438,
        0.03490771432362905,
        -0.0031892209253436892,
    ]
)

xdb16, phidb16, psidb16 = cascade_algorithm(h_DB16, g_DB16, maxit)
@@ -164,14 +283,15 @@ xdb16, phidb16, psidb16 = cascade_algorithm(h_DB16, g_DB16, maxit)
fig: Figure
fig, ax = plt.subplots(
    4,
    2,
    figsize=figsize_from_page_fraction(height_to_width=12 / 8),
    # sharex="all", sharey="all"
)
labels = ["Haar", "DB2", "DB4", "DB8", "DB16"]
ax[0, 0].set_title("scaling functions $\\varphi$")
ax[0, 1].set_title("wavelets $\\psi$")

ax[0, 0].plot(xhaar, phihaar, lw=1)
ax[0, 1].plot(xhaar, psihaar, lw=1)
@@ -188,7 +308,7 @@ ax[3, 1].plot(xdb8, psidb8, lw=1)
# ax[4, 0].plot(xdb16, phidb16, lw=1)
# ax[4, 1].plot(xdb16, psidb16, lw=1)

for a in ax.flatten():
    a.set_xlabel("t")


def inset_label(ax: Axes, text: str):
@@ -198,7 +318,7 @@ def inset_label(ax: Axes, text: str):
        text,
        horizontalalignment="left",
        verticalalignment="bottom",
        transform=ax.transAxes,
    )
@@ -238,32 +358,63 @@ def fourier_wavelet(h, g, n):
# ax.plot([0, np.pi], [1., 1.], 'k:')

kh, fphih, fpsih = fourier_wavelet(h_Haar, g_Haar, 256)
ax.plot(kh, np.abs(fphih) ** 2, label=r"$\hat\varphi_\textrm{Haar}$", c="C0")
ax.plot(
    kh,
    np.abs(fpsih) ** 2,
    label=r"$\hat\psi_\textrm{Haar}$",
    c="C0",
    linestyle="dashed",
)

kdb2, fphidb2, fpsidb2 = fourier_wavelet(h_DB2, g_DB2, 256)
ax.plot(kdb2, np.abs(fphidb2) ** 2, label=r"$\hat\varphi_\textrm{DB2}$", c="C1")
ax.plot(
    kdb2,
    np.abs(fpsidb2) ** 2,
    label=r"$\hat\psi_\textrm{DB2}$",
    c="C1",
    linestyle="dashed",
)

kdb4, fphidb4, fpsidb4 = fourier_wavelet(h_DB4, g_DB4, 256)
ax.plot(kdb4, np.abs(fphidb4) ** 2, label=r"$\hat\varphi_\textrm{DB4}$", c="C2")
ax.plot(
    kdb4,
    np.abs(fpsidb4) ** 2,
    label=r"$\hat\psi_\textrm{DB4}$",
    c="C2",
    linestyle="dashed",
)

kdb8, fphidb8, fpsidb8 = fourier_wavelet(h_DB8, g_DB8, 256)
ax.plot(kdb8, np.abs(fphidb8) ** 2, label=r"$\hat\varphi_\textrm{DB8}$", c="C3")
ax.plot(
    kdb8,
    np.abs(fpsidb8) ** 2,
    label=r"$\hat\psi_\textrm{DB8}$",
    c="C3",
    linestyle="dashed",
)

# all k* are np.linspace(0, np.pi, 256), so we can also use them for shannon
def shannon(k):
    y = np.zeros_like(k)
    y[k > pi / 2] = 1
    return y


ax.plot(kdb8, 1 - shannon(kdb8), label=r"$\hat\varphi_\textrm{shannon}$", c="C4")
ax.plot(
    kdb8,
    shannon(kdb8),
    label=r"$\hat\psi_\textrm{shannon}$",
    c="C4",
    linestyle="dashed",
)
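In this convention the Shannon pair acts as an ideal brick-wall filter bank: the scaling function passes everything below the coarse-grid Nyquist frequency (k < π/2 on this axis) and the wavelet passes everything above it, so the two curves partition [0, π] exactly.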
# ax.plot(kdb8, np.abs(fpsidb8) ** 2, label='$\\hat\\psi_{DB8}$', c="C3", linestyle="dashed")
# kdb16, fphidb16, fpsidb16 = fourier_wavelet(h_DB16, g_DB16, 256)
@@ -282,10 +433,12 @@ leg1 = ax.legend(frameon=False, handles=philines, loc="center left")
leg2 = ax.legend(frameon=False, handles=psilines, loc="center right")
ax.add_artist(leg1)
ax.add_artist(leg2)
ax.set_xlabel("k")
ax.set_ylabel("P(k)")
ax.set_xticks([0, pi / 2, pi])
ax.set_xticklabels(
    ["0", r"$k_\textrm{coarse}^\textrm{ny}$", r"$k_\textrm{fine}^\textrm{ny}$"]
)
# plt.semilogy()
# plt.ylim([1e-4,2.0])

View file

@@ -32,7 +32,14 @@ mode = Mode.richings
def dir_name_to_parameter(dir_name: str):
    return map(
        int,
        dir_name.lstrip("auriga6_halo")
        .lstrip("richings21_")
        .lstrip("bary_")
        .lstrip("ramses_")
        .split("_"),
    )
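A caution on the code above (an observation, not part of the diff): str.lstrip removes leading characters drawn from the given set, not a literal prefix, so digits contained in the set can also be eaten from the start of the remaining name:

assert "richings21_128_8_12".lstrip("richings21_") == "8_8_12"
# the leading "12" of "128" is stripped too, because "1" and "2" are in the set

This works as intended only for names whose numeric part begins with a digit outside the stripped character sets.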
def levelmax_to_softening_length(levelmax: int) -> float:
@@ -46,8 +53,8 @@ fig2: Figure = plt.figure(figsize=figsize_from_page_fraction())
ax2: Axes = fig2.gca()

for ax in [ax1, ax2]:
    ax.set_xlabel(r"R [Mpc]")
ax1.set_ylabel(r"M [$10^{10} \mathrm{M}_\odot$]")
ax2.set_ylabel("density [$\\frac{10^{10} \\mathrm{M}_\\odot}{Mpc^3}$]")

part_numbers = []
@@ -107,8 +114,10 @@ for dir in sorted(root_dir.glob("*")):
    ideal_softening_length = levelmax_to_softening_length(levelmax)
    if not np.isclose(softening_length, levelmax_to_softening_length(levelmax)):
        raise ValueError(
            f"softening length for levelmax {levelmax} should be {ideal_softening_length} "
            f"but is {softening_length}"
        )
    print(input_file)
    if mode == Mode.richings and is_by_adrian:
        h = 0.6777
@@ -141,12 +150,16 @@ for dir in sorted(root_dir.glob("*")):
    # halo = halos.loc[1]
    center = np.array([halo.X, halo.Y, halo.Z])
    log_radial_bins, bin_masses, bin_densities, center = halo_mass_profile(
        df, center, particles_meta, plot=False, num_bins=100, vmin=0.002, vmax=6.5
    )
    i_min_border = np.argmax(
        0.01 < log_radial_bins
    )  # first bin outside of specific radius
    i_max_border = np.argmax(1.5 < log_radial_bins)
    popt = fit_nfw(
        log_radial_bins[i_min_border:i_max_border],
        bin_densities[i_min_border:i_max_border],
    )  # = rho_0, r_s
    print(popt)
    # # Plot NFW profile
    # ax.loglog(
@@ -176,11 +189,11 @@ for dir in sorted(root_dir.glob("*")):
        ref_log_radial_bins, ref_bin_masses, ref_bin_densities = data
        mass_deviation: np.ndarray = np.abs(bin_masses - ref_bin_masses)
        density_deviation: np.ndarray = np.abs(bin_densities - ref_bin_densities)
        ax1.loglog(log_radial_bins[:-1], mass_deviation, c=f"C{i}", linestyle="dotted")

        ax2.loglog(
            log_radial_bins[:-1], density_deviation, c=f"C{i}", linestyle="dotted"
        )
        accuracy = mass_deviation / ref_bin_masses
        print(accuracy)
        print("mean accuracy", accuracy.mean())
@@ -209,11 +222,13 @@ for dir in sorted(root_dir.glob("*")):
        vmin = min(vmin, rho.min())
        vmax = max(vmax, rho.max())
        images.append(
            Result(
                rho=rho,
                title=str(dir.name),
                levels=(levelmin, levelmin_TF, levelmax) if levelmin else None,
            )
        )
        i += 1
    # plot_cic(
    #     rho, extent,
@@ -226,15 +241,21 @@ fig2.tight_layout()
# fig3: Figure = plt.figure(figsize=(9, 9))
# axes: List[Axes] = fig3.subplots(3, 3, sharex=True, sharey=True).flatten()
fig3: Figure = plt.figure(
    figsize=figsize_from_page_fraction(columns=2, height_to_width=1)
)
axes: List[Axes] = fig3.subplots(3, 3, sharex=True, sharey=True).flatten()

for result, ax in zip(images, axes):
    data = 1.1 + result.rho
    vmin_scaled = 1.1 + vmin
    vmax_scaled = 1.1 + vmax
    img = ax.imshow(
        data.T,
        norm=LogNorm(vmin=vmin_scaled, vmax=vmax_scaled),
        extent=extent,
        origin="lower",
    )
    ax.set_title(result.title)

fig3.tight_layout()

cic.py
View file

@@ -39,10 +39,15 @@ def cic_deposit(X, Y, ngrid, periodic=True) -> np.ndarray:
def cic_range(
    X: np.ndarray,
    Y: np.ndarray,
    ngrid: int,
    xmin: float,
    xmax: float,
    ymin: float,
    ymax: float,
    *args,
    **kwargs
) -> Tuple[np.ndarray, Extent]:
    xrange = xmax - xmin
    yrange = ymax - ymin
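cic_deposit itself lies outside this hunk; for context, a minimal sketch of 2-D cloud-in-cell deposition with periodic wrapping (an illustrative assumption, not the repository's implementation):

import numpy as np

def cic_deposit_sketch(X, Y, ngrid):
    # X, Y assumed already rescaled to [0, 1)
    rho = np.zeros((ngrid, ngrid))
    x = X * ngrid - 0.5
    y = Y * ngrid - 0.5
    i, j = np.floor(x).astype(int), np.floor(y).astype(int)
    dx, dy = x - i, y - j
    # each particle spreads over its four nearest cells,
    # weighted by the overlap of a cell-sized cloud
    for a, wa in ((0, 1 - dx), (1, dx)):
        for b, wb in ((0, 1 - dy), (1, dy)):
            np.add.at(rho, ((i + a) % ngrid, (j + b) % ngrid), wa * wb)
    return rho / rho.mean() - 1  # overdensity contrast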
@@ -57,16 +62,25 @@ def cic_range(
def cic_from_radius(
    X: np.ndarray,
    Y: np.ndarray,
    ngrid: int,
    x_center: float,
    y_center: float,
    radius: float,
    *args,
    **kwargs
) -> Tuple[np.ndarray, Extent]:
    return cic_range(
        X,
        Y,
        ngrid,
        x_center - radius,
        x_center + radius,
        y_center - radius,
        y_center + radius,
        *args,
        **kwargs
    )
@@ -87,18 +101,21 @@ def plot_cic(rho: np.ndarray, extent: Extent, title: str):
    data = np.log(data)
    norm = plt.Normalize(vmin=data.min(), vmax=data.max())
    image = cmap(norm(data.T))
    plt.imsave(
        (Path("~/tmp").expanduser() / title).with_suffix(".png"), image, origin="lower"
    )

    # ax.hist2d(df.X, df.Y, bins=500, norm=LogNorm())
    # ax.hist2d(df2.X, df2.Y, bins=1000, norm=LogNorm())


if __name__ == "__main__":
    input_file = Path(sys.argv[1])
    df_ref, _ = read_file(input_file)
    rho, extent = cic_from_radius(
        df_ref.X.to_numpy(), df_ref.Y.to_numpy(), 1500, 48.8, 57, 1, periodic=False
    )
    # rho, extent = cic_range(df_ref.X.to_numpy(), df_ref.Y.to_numpy(), 800, 0, 85.47, 0, 85.47, periodic=False)

    plot_cic(
        rho, extent, title=str(input_file.relative_to(input_file.parent.parent).name)
    )

View file

@@ -16,7 +16,7 @@ for wf in ["DB2", "DB4", "DB8", "shannon"]:
                plot=False,
                plot3d=False,
                velo_halos=True,
                single=False,
            )
        except Exception as e:
            traceback.print_exc()

View file

@@ -45,10 +45,16 @@ def apply_offset(value, offset):
def compare_halo_resolutions(
    ref_waveform: str,
    comp_waveform: str,
    reference_resolution: int,
    comparison_resolution: int,
    plot=False,
    plot3d=False,
    plot_cic=False,
    single=False,
    velo_halos=False,
    force=False,
):
    reference_dir = base_dir / f"{ref_waveform}_{reference_resolution}_100"
    comparison_dir = base_dir / f"{comp_waveform}_{comparison_resolution}_100/"
@@ -68,14 +74,18 @@ def compare_halo_resolutions(
    print("reading reference file")
    df_ref, ref_meta = read_file(reference_dir / "output_0004.hdf5")
    if velo_halos:
        df_ref_halo, ref_halo_lookup, ref_unbound = read_velo_halo_particles(
            reference_dir
        )
    else:
        df_ref_halo = read_halo_file(reference_dir / "fof_output_0004.hdf5")

    print("reading comparison file")
    df_comp, comp_meta = read_file(comparison_dir / "output_0004.hdf5")
    if velo_halos:
        df_comp_halo, comp_halo_lookup, comp_unbound = read_velo_halo_particles(
            comparison_dir
        )
    else:
        df_comp_halo = read_halo_file(comparison_dir / "fof_output_0004.hdf5")
@@ -137,18 +147,22 @@ def compare_halo_resolutions(
        print(f"{prev_len} => {after_len} (factor {prev_len / after_len:.2f})")

        halo_distances = np.linalg.norm(
            ref_halo[["X", "Y", "Z"]].values - df_comp_halo[["X", "Y", "Z"]].values,
            axis=1,
        )
        # print(list(halo_distances))

        print(f"find nearby halos (50x{ref_halo.Rvir:.1f})")
        print(ref_halo[["X", "Y", "Z"]].values)
        # Find IDs of halos that are less than 50 Rvir away
        nearby_halos = set(
            df_comp_halo.loc[halo_distances < ref_halo.Rvir * 50].index.to_list()
        )
        if len(nearby_halos) < 10:
            print(f"only {len(nearby_halos)} halos, expanding to 150xRvir")
            nearby_halos = set(
                df_comp_halo.loc[halo_distances < ref_halo.Rvir * 150].index.to_list()
            )
            counters.checking_150 += 1

        if not nearby_halos:
@@ -179,9 +193,13 @@ def compare_halo_resolutions(
        if plot:
            fig: Figure = plt.figure()
            ax: Axes = fig.gca()
            ax.scatter(
                apply_offset_to_list(halo_particles["X"], offset_x),
                apply_offset_to_list(halo_particles["Y"], offset_y),
                s=1,
                alpha=0.3,
                label="Halo",
            )
        if plot_cic:
            diameter = ref_halo["R_size"]
            X = ref_halo["Xc"]
@@ -207,6 +225,7 @@ def compare_halo_resolutions(
        if plot3d:
            from pyvista import Plotter

            pl = Plotter()
            plotdf3d(pl, halo_particles, color="#b3cde3")  # light blue
            pl.set_focus((ref_halo.X, ref_halo.Y, ref_halo.Z))
@@ -223,7 +242,11 @@ def compare_halo_resolutions(
            particle_ids_in_comp_halo = comp_halo_lookup[halo_id]
            mass_factor_limit = 5

            if not (
                1 / mass_factor_limit
                < (comp_halo_masses[halo_id] / ref_halo_mass)
                < mass_factor_limit
            ):
                # print("mass not similar, skipping")
                num_skipped_for_mass += 1
                continue
@@ -235,7 +258,10 @@ def compare_halo_resolutions(
            # similarity = len(shared_particles) / len(union_particles)
            similarity = len(shared_particles) / (
                len(halo_particle_ids)
                + len(particle_ids_in_comp_halo)
                - len(shared_particles)
            )
            # assert similarity_orig == similarity
            # print(shared_size)
            # if not similarity:
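The similarity above is the Jaccard index of the two particle-ID sets (the quantity plotted later as $J$); an equivalent set formulation, illustrative:

a, b = {1, 2, 3, 4}, {3, 4, 5}
shared = a & b
jaccard = len(shared) / (len(a) + len(b) - len(shared))
assert jaccard == len(a & b) / len(a | b)  # 2 / 5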
@@ -247,12 +273,24 @@ def compare_halo_resolutions(
                color = f"C{i + 1}"
                comp_halo: pd.Series = df_comp_halo.loc[halo_id]

                ax.scatter(
                    apply_offset_to_list(df["X"], offset_x),
                    apply_offset_to_list(df["Y"], offset_y),
                    s=1,
                    alpha=0.3,
                    c=color,
                )
                circle = Circle(
                    (
                        apply_offset(comp_halo.X, offset_x),
                        apply_offset(comp_halo.Y, offset_y),
                    ),
                    comp_halo["Rvir"],
                    zorder=10,
                    linewidth=1,
                    edgecolor=color,
                    fill=None,
                )
                ax.add_artist(circle)
            if plot3d:
                plotdf3d(pl, df, color="#fed9a6")  # light orange
@@ -270,13 +308,16 @@ def compare_halo_resolutions(
        comp_halo: pd.Series = df_comp_halo.loc[best_halo]

        # merge the data of the two halos with fitting prefixes
        halo_data = pd.concat(
            [ref_halo.add_prefix("ref_"), comp_halo.add_prefix("comp_")]
        )
        distance = (
            linalg.norm(
                np.array([ref_halo.X, ref_halo.Y, ref_halo.Z])
                - np.array([comp_halo.X, comp_halo.Y, comp_halo.Z])
            )
            / ref_halo.Rvir
        )
        halo_data["distance"] = distance
        halo_data["match"] = best_halo_match
        halo_data["num_skipped_for_mass"] = num_skipped_for_mass
@@ -285,7 +326,9 @@ def compare_halo_resolutions(
        if plot:
            print(f"plotting with offsets ({offset_x},{offset_y})")
            # ax.legend()
            ax.set_title(
                f"{reference_dir.name} vs. {comparison_dir.name} (Halo {index})"
            )
            fig.savefig("out.png", dpi=300)
            plt.show()
        if plot3d:
@@ -310,7 +353,7 @@ def precalculate_halo_membership(df_comp, df_comp_halo):
        print_progress(i, len(df_comp_halo), halo["Sizes"])
        size = int(halo["Sizes"])
        halo_id = int(i)
        halo_particles = df_comp.iloc[pointer : pointer + size]

        # check_id = halo_particles["FOFGroupIDs"].to_numpy()
        # assert (check_id == i).all()
    return comp_halo_lookup


if __name__ == "__main__":
    compare_halo_resolutions(
        ref_waveform="shannon",
        comp_waveform="shannon",
@@ -332,5 +375,5 @@ if __name__ == '__main__':
        plot_cic=False,
        velo_halos=True,
        single=False,
        force=True,
    )

View file

@@ -21,32 +21,32 @@ from utils import figsize_from_page_fraction, rowcolumn_labels, waveforms, tex_f
G = 43.022682  # in Mpc (km/s)^2 / (10^10 Msun)

vmaxs = {"Mvir": 52, "Vmax": 93, "cNFW": 31}

units = {
    "distance": "Mpc",
    "Mvir": r"10^{10} \textrm{ M}_\odot",
    "Vmax": r"\textrm{km } \textrm{s}^{-1}",  # TODO
}


def concentration(row, halo_type: str) -> bool:
    r_200crit = row[f"{halo_type}_R_200crit"]
    if r_200crit <= 0:
        cnfw = -1
        colour = "orange"
        return False
        # return cnfw, colour

    r_size = row[
        f"{halo_type}_R_size"
    ]  # largest difference from center of mass to any halo particle
    m_200crit = row[f"{halo_type}_Mass_200crit"]
    vmax = row[
        f"{halo_type}_Vmax"
    ]  # largest velocity coming from enclosed mass profile calculation
    rmax = row[f"{halo_type}_Rmax"]
    npart = row[f"{halo_type}_npart"]
    VmaxVvir2 = vmax ** 2 * r_200crit / (G * m_200crit)
    if VmaxVvir2 <= 1.05:
        if m_200crit == 0:
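For reference (a derivation from the line above, not part of the diff): with the virial velocity defined via Vvir² = G·M200crit / R200crit, VmaxVvir2 is exactly (Vmax/Vvir)², which the 1.05 threshold compares against.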
@@ -59,7 +59,7 @@ def concentration(row, halo_type: str) -> bool:
            # colour = 'white'
    else:
        if npart >= 100:  # only calculate cnfw for groups with more than 100 particles
            cnfw = row[f"{halo_type}_cNFW"]
            return True
            # colour = 'black'
        else:
@@ -91,12 +91,12 @@ def plot_comparison_hist2d(ax: Axes, file: Path, property: str):
    max_x = max([max(df[x_col]), max(df[y_col])])
    num_bins = 100
    bins = np.geomspace(min_x, max_x, num_bins)
    if property == "cNFW":
        rows = []
        for i, row in df.iterrows():
            comp_cnfw_normal = concentration(row, halo_type="comp")
            ref_cnfw_normal = concentration(row, halo_type="ref")
            cnfw_normal = comp_cnfw_normal and ref_cnfw_normal
            if cnfw_normal:
                rows.append(row)
@@ -118,13 +118,10 @@ def plot_comparison_hist2d(ax: Axes, file: Path, property: str):
        stds.append(std)
    means = np.array(means)
    stds = np.array(stds)
    args = {"color": "C2", "zorder": 10}
    ax.fill_between(bins, means - stds, means + stds, alpha=0.2, **args)
    ax.plot(bins, means + stds, alpha=0.5, **args)
    ax.plot(bins, means - stds, alpha=0.5, **args)

    # ax_scatter.plot(bins, stds, label=f"{file.stem}")
    if property in vmaxs:
@@ -133,8 +130,13 @@ def plot_comparison_hist2d(ax: Axes, file: Path, property: str):
        vmax = None
        print("WARNING: vmax not set")
    image: QuadMesh
    _, _, _, image = ax.hist2d(
        df[x_col],
        df[y_col] / df[x_col],
        bins=(bins, np.linspace(0, 2, num_bins)),
        norm=LogNorm(vmax=vmax),
        rasterized=True,
    )
    # ax.plot([rep_x_left, rep_x_left], [mean - std, mean + std], c="C1")
    # ax.annotate(
    #     text=f"std={std:.2f}", xy=(rep_x_left, mean + std),
@@ -148,7 +150,9 @@ def plot_comparison_hist2d(ax: Axes, file: Path, property: str):
    # ax.set_yscale("log")
    ax.set_xlim(min(df[x_col]), max(df[y_col]))
    ax.plot(
        [min(df[x_col]), max(df[y_col])], [1, 1], linewidth=1, color="C1", zorder=10
    )
    return x_col, y_col
    # ax.set_title(file.name)
@@ -193,7 +197,9 @@ def plot_comparison_hist(ax: Axes, file: Path, property: str, m_min=None, m_max=
        ax.plot(bin_centers, hist_val, label=label)
    else:
        patches: List[Polygon]
        hist_val, bin_edges, patches = ax.hist(
            df[property], bins=bins, histtype=histtype, label=label, density=density
        )


comparisons_dir = base_dir / "comparisons"
@@ -206,8 +212,10 @@ def compare_property(property, show: bool):
    is_hist_property = property in hist_properties
    fig: Figure
    fig, axes = plt.subplots(
        len(waveforms),
        len(comparisons),
        sharey="all",
        sharex="all",
        figsize=figsize_from_page_fraction(columns=2),
    )
    for i, waveform in enumerate(waveforms):
@@ -227,24 +235,82 @@ def compare_property(property, show: bool):
                }
                x_col, y_col = plot_comparison_hist2d(ax, file, property)
                lab_a, lab_b = x_labels[property]
                unit = (
                    f"[{units[property]}]"
                    if property in units and units[property]
                    else ""
                )
                if is_bottom_row:
                    if lab_b:
                        ax.set_xlabel(
                            tex_fmt(
                                r"$AA_{\textrm{BB},\textrm{ CC}} \textrm{ } DD$",
                                lab_a,
                                lab_b,
                                ref_res,
                                unit,
                            )
                        )
                        # fig.supxlabel(tex_fmt(r"$AA_{\textrm{BB},\textrm{ } CC} \textrm{ } DD$", lab_a, lab_b, ref_res, unit), fontsize='medium')
                    else:
                        ax.set_xlabel(
                            tex_fmt(
                                r"$AA_{\textrm{BB}} \textrm{ } CC$",
                                lab_a,
                                ref_res,
                                unit,
                            )
                        )
                        # fig.supxlabel(tex_fmt(r"$AA_{BB} \textrm{ } CC$", lab_a, ref_res, unit), fontsize='medium')
                if is_left_col:
                    if lab_b:
                        # ax.set_ylabel(
                        #     tex_fmt(r"$AA_{\textrm{BB},\textrm{comp}} \textrm{ } / \textrm{ } AA_{\textrm{BB},\textrm{CC}}$",
                        #             lab_a, lab_b, ref_res))
                        # fig.text(0.015, 0.5, tex_fmt(r"$AA_{\textrm{BB},\textrm{ comp}} \textrm{ } / \textrm{ } AA_{\textrm{BB},\textrm{ CC}}$", lab_a, lab_b, ref_res), va='center', rotation='vertical', size='medium')
                        fig.supylabel(
                            tex_fmt(
                                r"$AA_{\textrm{BB},\textrm{ comp}} \textrm{ } / \textrm{ } AA_{\textrm{BB},\textrm{ CC}}$",
                                lab_a,
                                lab_b,
                                ref_res,
                            ),
                            fontsize="medium",
                            fontvariant="small-caps",
                        )
                    else:
                        # ax.set_ylabel(
                        #     tex_fmt(r"$AA_{\textrm{comp}} \textrm{ } / \textrm{ } AA_{\textrm{BB}}$",
                        #             lab_a, ref_res))
                        # fig.text(0.015, 0.5, tex_fmt(r"$AA_{\textrm{comp}} \textrm{ } / \textrm{ } AA_{\textrm{BB}}$", lab_a, ref_res), va='center', rotation='vertical', size='medium')
                        fig.supylabel(
                            tex_fmt(
                                r"$AA_{\textrm{comp}} \textrm{ } / \textrm{ } AA_{\textrm{BB}}$",
                                lab_a,
                                ref_res,
                            ),
                            fontsize="medium",
                        )
                # ax.set_ylabel(f"{property}_{{comp}}/{property}_{ref_res}")
                ax.text(
                    0.975,
                    0.9,
                    f"comp = {comp_res}",
                    horizontalalignment="right",
                    verticalalignment="top",
                    transform=ax.transAxes,
                )
            else:
                if property == "match":
                    if not (is_bottom_row and is_left_col):
                        ax.text(
                            0.05,
                            0.9,
                            f"comp = {comp_res}",
                            horizontalalignment="left",
                            verticalalignment="top",
                            transform=ax.transAxes,
                        )
                    # mass_bins = np.geomspace(10, 30000, num_mass_bins)
                    plot_comparison_hist(ax, file, property)
@@ -257,18 +323,25 @@ def compare_property(property, show: bool):
                        ax.legend()
                else:
                    ax.text(
                        0.05,
                        0.9,
                        f"comp = {comp_res}",
                        horizontalalignment="left",
                        verticalalignment="top",
                        transform=ax.transAxes,
                    )
                    plot_comparison_hist(ax, file, property)
                x_labels = {"match": "$J$", "distance": "$D$ [$R_\mathrm{{vir}}$]"}
                if is_bottom_row:
                    ax.set_xlabel(x_labels[property])
                if is_left_col:
                    if property == "match":
                        # ax.set_ylabel(r"$p(J)$")
                        fig.supylabel(r"$p(J)$", fontsize="medium")
                    else:
                        # ax.set_ylabel(r"\# Halos")
                        fig.supylabel(r"\# Halos", fontsize="medium")
                if property == "distance":
                    ax.set_xscale("log")
                    ax.set_yscale("log")
@@ -278,11 +351,7 @@ def compare_property(property, show: bool):
                    last_ytick: YTick = ax.yaxis.get_major_ticks()[-1]
                    last_ytick.set_visible(False)
            if property == "Mvir" and is_top_row:
                particle_masses = {256: 0.23524624, 512: 0.02940578, 1024: 0.0036757225}
                partmass = particle_masses[ref_res]

                def mass2partnum(mass: float) -> float:
                    return mass / partmass

                def partnum2mass(partnum: float) -> float:
                    return partnum * partmass

                sec_ax = ax.secondary_xaxis(
                    "top", functions=(mass2partnum, partnum2mass)
                )
                sec_ax.set_xlabel(r"\textrm{Halo Size }[\# \textrm{particles}]")
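As a worked example of this axis conversion (illustrative): at ref_res = 512 the particle mass is 0.02940578 in units of 10^10 M_sun, so a halo of Mvir = 1 in these units corresponds to mass2partnum(1) ≈ 34 particles.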
    # rowcolumn_labels(axes, comparisons, isrow=False)
    rowcolumn_labels(axes, waveforms, isrow=True)
    fig.tight_layout()
    fig.subplots_adjust(hspace=0)
@@ -315,5 +386,5 @@ def main():
        compare_property(property, show=len(argv) == 2)


if __name__ == "__main__":
    main()

View file

@@ -17,20 +17,22 @@ const_boltzmann_k_cgs = 1.380649e-16
def calculate_gas_internal_energy(omegab, hubble_param_, zstart_):
    astart_ = 1.0 / (1.0 + zstart_)
    if fabs(1.0 - gamma) > 1e-7:
        npol = 1.0 / (gamma - 1.0)
    else:
        npol = 1.0
    unitv = 1e5
    adec = 1.0 / (
        160.0 * (omegab * hubble_param_ * hubble_param_ / 0.022) ** (2.0 / 5.0)
    )
    if astart_ < adec:
        Tini = Tcmb0 / astart_
    else:
        Tini = Tcmb0 / astart_ / astart_ * adec
    print("Tini", Tini)
    if Tini > 1.0e4:
        mu = 4.0 / (8.0 - 5.0 * YHe)
    else:
        mu = 4.0 / (1.0 + 3.0 * (1.0 - YHe))
    print("mu", mu)
    ceint_ = 1.3806e-16 / 1.6726e-24 * Tini * npol / mu / unitv / unitv
    print("ceint", ceint_)
@@ -50,40 +52,45 @@ def fix_initial_conditions():
    zstart = f["Header"].attrs["Redshift"]
    boxsize = f["Header"].attrs["BoxSize"]
    levelmax = f["Header"].attrs["Music_levelmax"]
    internal_energy = calculate_gas_internal_energy(
        omegab=omegab, hubble_param_=h, zstart_=zstart
    )
    smoothing_length = calculate_smoothing_length(
        boxsize=boxsize, hubble_param_=h, levelmax=levelmax
    )
    # exit()
    bary_mass = f["Header"].attrs["MassTable"][0]
    bary_count = f["Header"].attrs["NumPart_Total"][0]
    print("mass table", f["Header"].attrs["MassTable"])
    pt1 = f["PartType0"]
    masses_column = pt1.create_dataset(
        "Masses", data=np.full(bary_count, bary_mass), compression="gzip"
    )
    smoothing_length_column = pt1.create_dataset(
        "SmoothingLength",
        data=np.full(bary_count, smoothing_length),
        compression="gzip",
    )
    internal_energy_column = pt1.create_dataset(
        "InternalEnergy",
        data=np.full(bary_count, internal_energy),
        compression="gzip",
    )


hydro_gamma_minus_one = gamma - 1
const_primordial_He_fraction_cgs = 0.248
hydrogen_mass_function = 1 - const_primordial_He_fraction_cgs
mu_neutral = 4.0 / (1.0 + 3.0 * hydrogen_mass_function)
mu_ionised = 4.0 / (8.0 - 5.0 * (1.0 - hydrogen_mass_function))
T_transition = 1.0e4


@njit
def calculate_T(u):
    T_over_mu = (
        hydro_gamma_minus_one * u * const_proton_mass_cgs / const_boltzmann_k_cgs
    )
    if T_over_mu > (T_transition + 1) / mu_ionised:
        return T_over_mu / mu_ionised
    elif T_over_mu < (T_transition - 1) / mu_neutral:
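The conversion in calculate_T follows from the ideal-gas relation u = k_B T / ((γ − 1) μ m_p): solving for T/μ gives T_over_mu = (γ − 1) u m_p / k_B, after which the mean molecular weight is chosen according to whether the gas sits above or below the ionisation transition temperature.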
@@ -109,6 +116,6 @@ def add_temperature_column():
    )


if __name__ == "__main__":
    # fix_initial_conditions()
    add_temperature_column()

View file

@@ -5,11 +5,14 @@ print("digraph G {")
with open(argv[1]) as f:
    next(f)
    for line in f:
        if line.startswith("#"):
            continue
        cols = line.split()
        if len(cols) < 5:
            continue
        progenitor = int(cols[1])
        descendant = int(cols[3])
        if descendant == -1:
            continue
        print(f"  {progenitor} -> {descendant};")

print("}")

View file

@@ -2,6 +2,7 @@ from math import log
from pathlib import Path

import numpy as np

# from colossus.cosmology import cosmology
# from colossus.lss import mass_function
from matplotlib import pyplot as plt
@@ -14,7 +15,7 @@ from utils import print_progress, figsize_from_page_fraction
def counts_without_inf(number_halos):
    with np.errstate(divide="ignore", invalid="ignore"):
        number_halos_inverse = 1 / np.sqrt(number_halos)
        number_halos_inverse[np.abs(number_halos_inverse) == np.inf] = 0
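(Empty bins would give 1/√0 = inf here; zeroing those entries means bins with no halos simply get no error band in the plots below.)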
@@ -41,20 +42,37 @@ def monofonic_tests():
            # halos.to_csv("weird_halos.csv")

            halo_masses: np.ndarray = halos["Mvir"].to_numpy()

            (
                Ns,
                deltas,
                left_edges,
                number_densities,
                lower_error_limit,
                upper_error_limit,
            ) = halo_mass_function(halo_masses)

            ax.set_xscale("log")
            ax.set_yscale("log")

            # ax.bar(centers, number_densities, width=widths, log=True, fill=False)
            name = f"{waveform} {resolution}"
            ax.step(
                left_edges,
                number_densities,
                where="post",
                color=f"C{i}",
                linestyle=linestyles[j],
                label=name,
            )

            ax.fill_between(
                left_edges,
                lower_error_limit,
                upper_error_limit,
                alpha=0.5,
                linewidth=0,
                step="post",
            )
            # break
        # break
@@ -73,7 +91,7 @@ def halo_mass_function(halo_masses, num_bins=30, sim_volume=100 ** 3):
    Ns = []
    deltas = []
    for bin_id in range(num_bins):
        print_progress(bin_id + 1, num_bins)
        mass_low = bins[bin_id]
        mass_high = bins[bin_id + 1]
        counter = 0
@@ -102,7 +120,14 @@ def halo_mass_function(halo_masses, num_bins=30, sim_volume=100 ** 3):
    lower_error_limit = number_densities - counts_without_inf(Ns) / sim_volume / deltas
    upper_error_limit = number_densities + counts_without_inf(Ns) / sim_volume / deltas

    return (
        Ns,
        deltas,
        left_edges,
        number_densities,
        lower_error_limit,
        upper_error_limit,
    )
def hmf_from_rockstar_tree(file: Path):
@@ -118,11 +143,14 @@ def hmf_from_rockstar_tree(file: Path):
    # agora_box_h = 0.702
    # masses /= agora_box_h
    box_size = 85.47
    (
        Ns,
        deltas,
        left_edges,
        number_densities,
        lower_error_limit,
        upper_error_limit,
    ) = halo_mass_function(masses, num_bins=50, sim_volume=box_size ** 3)

    fig: Figure = plt.figure()
    ax: Axes = fig.gca()
@@ -131,29 +159,35 @@ def hmf_from_rockstar_tree(file: Path):
     ax.set_xlabel("Halo Mass [$M_\\odot$]")
     ax.set_ylabel("Number Density [$\\textrm{\\#}/Mpc^3/dlogM$]")
     ax.step(left_edges, number_densities, where="post")
-    plank_cosmo = cosmology.cosmologies['planck18']
+    plank_cosmo = cosmology.cosmologies["planck18"]
     auriga_cosmo = {
         "sigma8": 0.807,
         "H0": 70.2,
         "Om0": 0.272,
         "Ob0": 0.0455,
-        "ns": 0.961
+        "ns": 0.961,
     }
-    cosmology.addCosmology('aurigaCosmo', params={**plank_cosmo, **auriga_cosmo})
-    cosmology.setCosmology('aurigaCosmo')
+    cosmology.addCosmology("aurigaCosmo", params={**plank_cosmo, **auriga_cosmo})
+    cosmology.setCosmology("aurigaCosmo")
     print(cosmology.getCurrent())
-    mfunc = mass_function.massFunction(left_edges, 1, mdef='vir', model='tinker08', q_out='dndlnM')
+    mfunc = mass_function.massFunction(
+        left_edges, 1, mdef="vir", model="tinker08", q_out="dndlnM"
+    )
     ax.plot(left_edges, mfunc)
     ax.fill_between(
         left_edges,
         lower_error_limit,
-        upper_error_limit, alpha=.5, linewidth=0, step='post')
+        upper_error_limit,
+        alpha=0.5,
+        linewidth=0,
+        step="post",
+    )
     plt.show()


-if __name__ == '__main__':
+if __name__ == "__main__":
     monofonic_tests()
     # hmf_from_rockstar_tree(Path(argv[1]))
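Note: halo_mass_function (diffed above) estimates the halo mass function dn/dlogM by counting halos in logarithmic mass bins and dividing by the box volume and the bin width. A compact, self-contained sketch of that idea — the binning choices and names here are illustrative, not the repository's exact implementation:

import numpy as np

def hmf_sketch(halo_masses: np.ndarray, num_bins: int = 30, sim_volume: float = 100 ** 3):
    # logarithmic mass bins spanning the halo catalogue
    bins = np.geomspace(halo_masses.min(), halo_masses.max(), num_bins + 1)
    Ns, edges = np.histogram(halo_masses, bins=bins)
    deltas = np.diff(np.log10(edges))  # bin widths in log10(M)
    number_densities = Ns / sim_volume / deltas  # halos per Mpc^3 per dex
    poisson_error = np.sqrt(Ns) / sim_volume / deltas  # 1-sigma counting noise
    return edges[:-1], number_densities, poisson_error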

View file

@@ -15,8 +15,15 @@ def V(r):
     return 4 / 3 * np.pi * r ** 3


-def halo_mass_profile(particles: pd.DataFrame, center: np.ndarray,
-                      particles_meta: ParticlesMeta, vmin: float, vmax: float, plot=False, num_bins=30):
+def halo_mass_profile(
+    particles: pd.DataFrame,
+    center: np.ndarray,
+    particles_meta: ParticlesMeta,
+    vmin: float,
+    vmax: float,
+    plot=False,
+    num_bins=30,
+):
     center = find_center(particles, center)
     positions = particles[["X", "Y", "Z"]].to_numpy()
     distances = np.linalg.norm(positions - center, axis=1)
@@ -50,7 +57,7 @@ def halo_mass_profile(particles: pd.DataFrame, center: np.ndarray,
         ax.loglog(log_radial_bins[:-1], bin_masses, label="counts")
         ax2.loglog(log_radial_bins[:-1], bin_densities, label="densities", c="C1")
         # ax.set_xlabel(r'R / R$_\mathrm{group}$')
-        ax.set_ylabel(r'M [$10^{10} \mathrm{M}_\odot$]')
+        ax.set_ylabel(r"M [$10^{10} \mathrm{M}_\odot$]")
         ax2.set_ylabel("density [$\\frac{10^{10} \\mathrm{M}_\\odot}{Mpc^3}$]")
         plt.legend()
         plt.show()
@@ -58,7 +65,7 @@ def halo_mass_profile(particles: pd.DataFrame, center: np.ndarray,
     return log_radial_bins, bin_masses, bin_densities, center


-if __name__ == '__main__':
+if __name__ == "__main__":
     input_file = Path(sys.argv[1])
     df, particles_meta = read_file(input_file)
     df_halos = read_halo_file(input_file.with_name("fof_" + input_file.name))

View file

@@ -26,11 +26,9 @@ def in_area(coords: Coords, xobj, yobj, zobj, factor=1.3) -> bool:
     radius, xcenter, ycenter, zcenter = coords
     radius *= factor
     return (
-            (xcenter - radius < xobj < xcenter + radius)
-            and
-            (ycenter - radius < yobj < ycenter + radius)
-            and
-            (zcenter - radius < zobj < zcenter + radius)
+        (xcenter - radius < xobj < xcenter + radius)
+        and (ycenter - radius < yobj < ycenter + radius)
+        and (zcenter - radius < zobj < zcenter + radius)
     )
@@ -40,8 +38,12 @@ def main():
     initial_halo_id = int(argv[1])
     if has_1024_simulations:
         resolutions.append(1024)
-    fig: Figure = plt.figure(figsize=figsize_from_page_fraction(columns=2, height_to_width=1))
-    axes: List[List[Axes]] = fig.subplots(len(waveforms), len(resolutions), sharex="row", sharey="row")
+    fig: Figure = plt.figure(
+        figsize=figsize_from_page_fraction(columns=2, height_to_width=1)
+    )
+    axes: List[List[Axes]] = fig.subplots(
+        len(waveforms), len(resolutions), sharex="row", sharey="row"
+    )

     with h5py.File(vis_datafile) as vis_out:
         halo_group = vis_out[str(initial_halo_id)]
@@ -62,8 +64,13 @@ def main():
             vmax_scaled = (vmax + offset) * mass
             rho = (rho + offset) * mass
             extent = coord_to_2d_extent(coords)
-            img = ax.imshow(rho.T, norm=LogNorm(vmin=vmin_scaled, vmax=vmax_scaled), extent=extent,
-                            origin="lower",cmap="Greys")
+            img = ax.imshow(
+                rho.T,
+                norm=LogNorm(vmin=vmin_scaled, vmax=vmax_scaled),
+                extent=extent,
+                origin="lower",
+                cmap="Greys",
+            )
             found_main_halo = False
             for halo_id, halo in halos.iterrows():
                 if halo["Vmax"] > 75:
@@ -79,8 +86,12 @@ def main():
                         print("plotting main halo")
                     circle = Circle(
                         (halo.X, halo.Y),
-                        halo["Rvir"], zorder=10,
-                        linewidth=1, edgecolor=color, fill=None, alpha=.2
+                        halo["Rvir"],
+                        zorder=10,
+                        linewidth=1,
+                        edgecolor=color,
+                        fill=None,
+                        alpha=0.2,
                     )
                     ax.add_artist(circle)
             # assert found_main_halo
@@ -92,9 +103,11 @@ def main():
             if j == 0:
                 scalebar = AnchoredSizeBar(
                     ax.transData,
-                    1, '1 Mpc', 'lower left',
+                    1,
+                    "1 Mpc",
+                    "lower left",
                     # pad=0.1,
-                    color='white',
+                    color="white",
                     frameon=False,
                     # size_vertical=1
                 )
@@ -116,5 +129,5 @@ def main():
     plt.show()


-if __name__ == '__main__':
+if __name__ == "__main__":
     main()
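Note: the scalebar in the hunks above uses matplotlib's AnchoredSizeBar; a minimal standalone usage matching the arguments in this diff (the figure/axes setup here is ours, not the repository's):

import matplotlib.pyplot as plt
from mpl_toolkits.axes_grid1.anchored_artists import AnchoredSizeBar

fig, ax = plt.subplots()
# a bar spanning 1 data unit, labelled "1 Mpc", anchored in the lower left, no frame
scalebar = AnchoredSizeBar(
    ax.transData, 1, "1 Mpc", "lower left", color="white", frameon=False
)
ax.add_artist(scalebar)
plt.show()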

View file

@@ -20,7 +20,9 @@ Coords = Tuple[float, float, float, float]  # radius, X, Y, Z
 def load_halo_data(waveform: str, resolution: int, halo_id: int, coords: Coords):
     dir = base_dir / f"{waveform}_{resolution}_100"
     df, meta = read_file(dir / "output_0004.hdf5")
-    df_halo, halo_lookup, unbound = read_velo_halo_particles(dir, skip_halo_particle_ids=all_in_area)
+    df_halo, halo_lookup, unbound = read_velo_halo_particles(
+        dir, skip_halo_particle_ids=all_in_area
+    )
     halo = df_halo.loc[halo_id]
     if coords:
@@ -43,12 +45,29 @@ def load_halo_data(waveform: str, resolution: int, halo_id: int, coords: Coords)
     return halo, halo_particles, meta, coords


-def get_comp_id(ref_waveform: str, reference_resolution: int, comp_waveform: str, comp_resolution: int):
+def get_comp_id(
+    ref_waveform: str,
+    reference_resolution: int,
+    comp_waveform: str,
+    comp_resolution: int,
+):
     return f"{ref_waveform}_{reference_resolution}_100_{comp_waveform}_{comp_resolution}_100_velo.csv"


-def map_halo_id(halo_id: int, ref_waveform: str, reference_resolution: int, comp_waveform: str, comp_resolution: int):
-    file = base_dir / "comparisons" / get_comp_id(ref_waveform, reference_resolution, comp_waveform, comp_resolution)
+def map_halo_id(
+    halo_id: int,
+    ref_waveform: str,
+    reference_resolution: int,
+    comp_waveform: str,
+    comp_resolution: int,
+):
+    file = (
+        base_dir
+        / "comparisons"
+        / get_comp_id(
+            ref_waveform, reference_resolution, comp_waveform, comp_resolution
+        )
+    )
     print("opening", file)
     df = pd.read_csv(file)
     mapping = {}
@@ -93,9 +112,16 @@ def main():
                     halo_id = initial_halo_id
                     first_halo = False
                 else:
-                    halo_id = map_halo_id(initial_halo_id, ref_waveform, ref_resolution, waveform, resolution)
-                halo, halo_particles, meta, image_coords = load_halo_data(waveform, resolution, halo_id,
-                                                                          coords[waveform])
+                    halo_id = map_halo_id(
+                        initial_halo_id,
+                        ref_waveform,
+                        ref_resolution,
+                        waveform,
+                        resolution,
+                    )
+                halo, halo_particles, meta, image_coords = load_halo_data(
+                    waveform, resolution, halo_id, coords[waveform]
+                )
                 if not coords[waveform]:
                     coords[waveform] = image_coords
                     print(coords[waveform])
@@ -104,19 +130,30 @@ def main():
                 # sleep(100)
                 radius, X, Y, Z = coords[waveform]
                 rho, _ = cic_from_radius(
-                    halo_particles.X.to_numpy(), halo_particles.Y.to_numpy(),
-                    1000, X, Y, radius, periodic=False)
+                    halo_particles.X.to_numpy(),
+                    halo_particles.Y.to_numpy(),
+                    1000,
+                    X,
+                    Y,
+                    radius,
+                    periodic=False,
+                )
                 rhos[(waveform, resolution)] = rho
                 vmin = min(rho.min(), vmin)
                 vmax = max(rho.max(), vmax)
                 dataset_group = halo_group.create_group(f"{waveform}_{resolution}")
-                dataset_group.create_dataset("rho", data=rho, compression='gzip', compression_opts=5)
+                dataset_group.create_dataset(
+                    "rho", data=rho, compression="gzip", compression_opts=5
+                )
                 dataset_group.create_dataset("coords", data=coords[waveform])
                 dataset_group.create_dataset("mass", data=meta.particle_mass)
                 dataset_group.create_dataset("halo_id", data=halo_id)
-                imsave(rho, f"out_halo{initial_halo_id}_{waveform}_{resolution}_{halo_id}.png")
+                imsave(
+                    rho,
+                    f"out_halo{initial_halo_id}_{waveform}_{resolution}_{halo_id}.png",
+                )
             halo_group.create_dataset("vmin_vmax", data=[vmin, vmax])


-if __name__ == '__main__':
+if __name__ == "__main__":
     main()

View file

@@ -44,10 +44,12 @@ def main():
         else:
             all_data = np.vstack(column_data[column])
-        out_column = outpart.create_dataset(column, data=all_data, compression='gzip' if column == "Masses" else None)
+        out_column = outpart.create_dataset(
+            column, data=all_data, compression="gzip" if column == "Masses" else None
+        )
         print(len(out_column))
     f_out.close()


-if __name__ == '__main__':
+if __name__ == "__main__":
     main()

View file

@ -14,7 +14,9 @@ comparisons_dir = base_dir / "comparisons"
def read(mode, ref_res, comp_res): def read(mode, ref_res, comp_res):
df = pd.read_csv(comparisons_dir / get_comp_id(waveform, ref_res, waveform, comp_res)) df = pd.read_csv(
comparisons_dir / get_comp_id(waveform, ref_res, waveform, comp_res)
)
# df = pd.read_csv(f"{mode}_{ref_res}_100_{mode}_{comp_res}_100.csv") # df = pd.read_csv(f"{mode}_{ref_res}_100_{mode}_{comp_res}_100.csv")
print(min(df.ref_Mvir), max(df.ref_Mvir)) print(min(df.ref_Mvir), max(df.ref_Mvir))
@@ -23,7 +25,9 @@ def read(mode, ref_res, comp_res):
     for i in range(num_bins):
         values = np.where(digits == i + 1)
         in_bin = df.iloc[values]
-        matches = np.array(in_bin.match)  # TODO: or instead fraction of halos that are matching?
+        matches = np.array(
+            in_bin.match
+        )  # TODO: or instead fraction of halos that are matching?
         bin_means.append(matches.mean())
     return bin_means
@ -48,7 +52,9 @@ ax.set_yticklabels(["{:.2f}".format(a) for a in bins])
for x in range(data.shape[0]): for x in range(data.shape[0]):
for y in range(data.shape[1]): for y in range(data.shape[1]):
text = ax.text(y, x, "{:.2f}".format(data[x, y]), ha="center", va="center", color="w") text = ax.text(
y, x, "{:.2f}".format(data[x, y]), ha="center", va="center", color="w"
)
# print(data) # print(data)
p = ax.imshow(data, origin="lower", vmin=0.5, vmax=1) p = ax.imshow(data, origin="lower", vmin=0.5, vmax=1)

nfw.py
View file

@@ -9,8 +9,12 @@ def nfw(r, rho_0, r_s):
 def fit_nfw(radius, densities):
     popt, pcov = curve_fit(
-        nfw, radius, densities,
-        verbose=1, method="trf", max_nfev=1000,
-        bounds=([0, 0], [inf, 1])
+        nfw,
+        radius,
+        densities,
+        verbose=1,
+        method="trf",
+        max_nfev=1000,
+        bounds=([0, 0], [inf, 1]),
     )
     return popt
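Note: fit_nfw fits the nfw function defined earlier in nfw.py, whose body is not shown in this hunk. The standard Navarro–Frenk–White profile matching the signature in the hunk header would be (a sketch, not the repository's verbatim code):

def nfw(r, rho_0, r_s):
    # NFW density profile: rho(r) = rho_0 / ((r / r_s) * (1 + r / r_s) ** 2)
    return rho_0 / ((r / r_s) * (1 + r / r_s) ** 2)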

View file

@@ -26,5 +26,5 @@ for i, profile in enumerate(density_profiles):
         sleep(1)
     color = "red" if is_odd_halo else "lightgray"
-    plt.loglog(bin_edges, profile, color=color, alpha=.1)
+    plt.loglog(bin_edges, profile, color=color, alpha=0.1)
 plt.show()

View file

@@ -28,7 +28,9 @@ def all_children(df: DataFrame, id: int):
         yield from all_children(df, sh)


-def particles_in_halo(offsets: Dict[int, int], particle_ids: np.ndarray) -> HaloParticleMapping:
+def particles_in_halo(
+    offsets: Dict[int, int], particle_ids: np.ndarray
+) -> HaloParticleMapping:
     """
     get mapping from halo ID to particle ID set by using the offset and a lookup in the particle array
     """
@@ -64,7 +66,7 @@ def cached_particles_in_halo(file: Path, *args, **kwargs) -> HaloParticleMapping
     Unfortunatly this is magnitudes slower than doing the calculation itself as HDF5 is not
     intended for 100K small datasets making this whole function pointless.
     """
     if file.exists():
         print("loading from cache")
@@ -80,7 +82,12 @@ def cached_particles_in_halo(file: Path, *args, **kwargs) -> HaloParticleMapping
     print("saving to cache")
     with h5py.File(file, "w") as data_file:
         for key, valueset in halo_particle_ids.items():
-            data_file.create_dataset(str(key), data=list(valueset), compression='gzip', compression_opts=5)
+            data_file.create_dataset(
+                str(key),
+                data=list(valueset),
+                compression="gzip",
+                compression_opts=5,
+            )
     return halo_particle_ids
@ -116,13 +123,15 @@ def read_velo_halos(directory: Path, veloname="vroutput"):
"offset_unbound": group_catalog["Offset_unbound"], "offset_unbound": group_catalog["Offset_unbound"],
"parent_halo_id": group_catalog["Parent_halo_ID"], "parent_halo_id": group_catalog["Parent_halo_ID"],
} }
df = pd.DataFrame({**data, **scalar_properties}) # create dataframe from two merged dicts df = pd.DataFrame(
{**data, **scalar_properties}
) # create dataframe from two merged dicts
df.index += 1 # Halo IDs start at 1 df.index += 1 # Halo IDs start at 1
return df return df
def read_velo_halo_particles( def read_velo_halo_particles(
directory: Path, skip_halo_particle_ids=False, skip_unbound=True directory: Path, skip_halo_particle_ids=False, skip_unbound=True
) -> Tuple[DataFrame, Optional[HaloParticleMapping], Optional[HaloParticleMapping]]: ) -> Tuple[DataFrame, Optional[HaloParticleMapping], Optional[HaloParticleMapping]]:
""" """
This reads the output files of VELOCIraptor This reads the output files of VELOCIraptor
@@ -153,7 +162,9 @@ def read_velo_halo_particles(
     else:
         print("look up unbound particle IDs")
         halo_unbound_offsets = dict(df["offset_unbound"])
-        halo_particle_unbound_ids = particles_in_halo(halo_unbound_offsets, particle_ids_unbound)
+        halo_particle_unbound_ids = particles_in_halo(
+            halo_unbound_offsets, particle_ids_unbound
+        )
     return df, halo_particle_ids, halo_particle_unbound_ids
@@ -179,7 +190,9 @@ def main():
     Nres = 512
     directory = base_dir / f"{waveform}_{Nres}_100"
-    df_halo, halo_particle_ids, halo_particle_unbound_ids = read_velo_halo_particles(directory)
+    df_halo, halo_particle_ids, halo_particle_unbound_ids = read_velo_halo_particles(
+        directory
+    )
     particles, meta = read_file(directory)
     HALO = 1000
     while True:
@@ -196,5 +209,5 @@ def main():
             HALO += 1


-if __name__ == '__main__':
+if __name__ == "__main__":
     main()

View file

@@ -24,12 +24,18 @@ def read_file(file: Path) -> Tuple[pd.DataFrame, ParticlesMeta]:
         masses = reference_file["PartType1"]["Masses"]
         if not np.all(masses == masses[0]):
             raise ValueError("only equal mass particles are supported for now")
-        df = pd.DataFrame(reference_file["PartType1"]["Coordinates"], columns=["X", "Y", "Z"])
+        df = pd.DataFrame(
+            reference_file["PartType1"]["Coordinates"], columns=["X", "Y", "Z"]
+        )
         if has_fof:
-            df2 = pd.DataFrame(reference_file["PartType1"]["FOFGroupIDs"], columns=["FOFGroupIDs"]).astype("category")
+            df2 = pd.DataFrame(
+                reference_file["PartType1"]["FOFGroupIDs"], columns=["FOFGroupIDs"]
+            ).astype("category")
             df = df.merge(df2, "outer", left_index=True, right_index=True)
             del df2
-        df3 = pd.DataFrame(reference_file["PartType1"]["ParticleIDs"], columns=["ParticleIDs"])
+        df3 = pd.DataFrame(
+            reference_file["PartType1"]["ParticleIDs"], columns=["ParticleIDs"]
+        )
         df = df.merge(df3, "outer", left_index=True, right_index=True)
         del df3
@@ -37,9 +43,7 @@ def read_file(file: Path) -> Tuple[pd.DataFrame, ParticlesMeta]:
         if has_fof:
             print("sorting")
             df.sort_values("FOFGroupIDs", inplace=True)
-        meta = ParticlesMeta(
-            particle_mass=masses[0]
-        )
+        meta = ParticlesMeta(particle_mass=masses[0])
         print("saving cache")
         with meta_cache_file.open("wb") as f:
             pickle.dump(meta, f)

View file

@@ -25,7 +25,9 @@ class IDScaler:
         mult = orig * self.N
         for shift in self.shifts:
             variant = mult + shift
-            yield ((variant[0] * self.Nres_max) + variant[1]) * self.Nres_max + variant[2]
+            yield ((variant[0] * self.Nres_max) + variant[1]) * self.Nres_max + variant[
+                2
+            ]

     def downscale(self, particle_ID: int):
         orig = self.original_position(self.Nres_max, particle_ID)
@@ -40,7 +42,9 @@ def test():
     Nres_1 = 128
     Nres_2 = 256
-    test_particle_id = ((test_particle[0] * Nres_1) + test_particle[1]) * Nres_1 + test_particle[2]
+    test_particle_id = (
+        (test_particle[0] * Nres_1) + test_particle[1]
+    ) * Nres_1 + test_particle[2]
     print(test_particle_id)

     scaler = IDScaler(Nres_1, Nres_2)
@@ -61,7 +65,9 @@ def benchmark():
     test_particle = np.random.randint(0, 127, size=(3, 10_000_000))
     for part in test_particle:
-        test_particle_id = ((test_particle[0] * Nres_1) + test_particle[1]) * Nres_1 + test_particle[2]
+        test_particle_id = (
+            (test_particle[0] * Nres_1) + test_particle[1]
+        ) * Nres_1 + test_particle[2]
         particle_ID_1_converted = scaler.upscale(test_particle_id)
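Note: the expression in the hunks above flattens a 3D grid index into a single particle ID in row-major order. A sketch of the encoding and its inverse (the helper names are ours, for illustration):

def encode_id(x: int, y: int, z: int, nres: int) -> int:
    # row-major flattening of an (nres, nres, nres) grid position
    return (x * nres + y) * nres + z

def decode_id(particle_id: int, nres: int):
    # inverse of encode_id
    x, rest = divmod(particle_id, nres * nres)
    y, z = divmod(rest, nres)
    return x, y, z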

View file

@@ -10,7 +10,9 @@ from scipy.interpolate import griddata
 from utils import create_figure


-def create_2d_slice(input_file: Path, center: List[float], property: str, axis="Z", thickness=3):
+def create_2d_slice(
+    input_file: Path, center: List[float], property: str, axis="Z", thickness=3
+):
     axis_names = ["X", "Y", "Z"]
     cut_axis = axis_names.index(axis)
     with h5py.File(input_file) as f:
@@ -24,11 +26,7 @@ def create_2d_slice(input_file: Path, center: List[float], property: str, axis="
     # coords_in_slice = coords[in_slice]
     # data_in_slice = data[in_slice]
     print("stats")
-    other_axis = {
-        "X": ("Y", "Z"),
-        "Y": ("X", "Z"),
-        "Z": ("X", "Y")
-    }
+    other_axis = {"X": ("Y", "Z"), "Y": ("X", "Z"), "Z": ("X", "Y")}
     x_axis_label, y_axis_label = other_axis[axis]
     x_axis = axis_names.index(x_axis_label)
     y_axis = axis_names.index(y_axis_label)
@@ -36,12 +34,7 @@ def create_2d_slice(input_file: Path, center: List[float], property: str, axis="
     yrange = np.linspace(coords[::, y_axis].min(), coords[::, y_axis].max(), 1000)
     gx, gy, gz = np.meshgrid(xrange, yrange, center[cut_axis])
     print("interpolating")
-    grid = griddata(
-        coords,
-        data,
-        (gx, gy, gz),
-        method="linear"
-    )[::, ::, 0]
+    grid = griddata(coords, data, (gx, gy, gz), method="linear")[::, ::, 0]
     print(grid.shape)
     # stats, x_edge, y_edge, _ = binned_statistic_2d(
     #     coords_in_slice[::, x_axis],
@@ -56,8 +49,8 @@ def create_2d_slice(input_file: Path, center: List[float], property: str, axis="
     img = ax.imshow(
         grid.T,
         norm=LogNorm(),
-        interpolation='nearest',
-        extent=[xrange[0], xrange[-1], yrange[0], yrange[-1]]
+        interpolation="nearest",
+        extent=[xrange[0], xrange[-1], yrange[0], yrange[-1]],
     )
     ax.set_title(input_file.parent.stem)
     ax.set_xlabel(x_axis_label)

View file

@@ -11,84 +11,108 @@ from paths import base_dir, spectra_dir
 from spectra_plot import waveforms


-def run_spectra(waveform: str, resolution_1: int, resolution_2: int, Lbox: int, time: str):
+def run_spectra(
+    waveform: str, resolution_1: int, resolution_2: int, Lbox: int, time: str
+):
     print("starting")
-    setup_1 = f'{waveform}_{resolution_1}_{Lbox}'
-    setup_2 = f'{waveform}_{resolution_2}_{Lbox}'
+    setup_1 = f"{waveform}_{resolution_1}_{Lbox}"
+    setup_2 = f"{waveform}_{resolution_2}_{Lbox}"

     # #For ICs: time == 'ics'
-    if time == 'ics':
-        output_file = base_dir / f'spectra/{waveform}_{Lbox}/{waveform}_{Lbox}_ics_{resolution_1}_{resolution_2}_cross_spectrum.txt'
+    if time == "ics":
+        output_file = (
+            base_dir
+            / f"spectra/{waveform}_{Lbox}/{waveform}_{Lbox}_ics_{resolution_1}_{resolution_2}_cross_spectrum.txt"
+        )
         if output_file.exists():
-            print(f'{output_file} already exists, skipping.')
+            print(f"{output_file} already exists, skipping.")
             return
-        subprocess.run([
-            str(spectra),
-            '--ngrid',
-            '2048',
-            '--format=4',  # This seems to work, but is not as readable
-            '--output',
-            str(base_dir / f'spectra/{waveform}_{Lbox}/{waveform}_{Lbox}_ics_{resolution_1}_{resolution_2}'),
-            '--input',
-            str(base_dir / f'{setup_1}/ics_{setup_1}.hdf5'),
-            '--input',
-            str(base_dir / f'{setup_2}/ics_{setup_2}.hdf5')
-        ], check=True)
+        subprocess.run(
+            [
+                str(spectra),
+                "--ngrid",
+                "2048",
+                "--format=4",  # This seems to work, but is not as readable
+                "--output",
+                str(
+                    base_dir
+                    / f"spectra/{waveform}_{Lbox}/{waveform}_{Lbox}_ics_{resolution_1}_{resolution_2}"
+                ),
+                "--input",
+                str(base_dir / f"{setup_1}/ics_{setup_1}.hdf5"),
+                "--input",
+                str(base_dir / f"{setup_2}/ics_{setup_2}.hdf5"),
+            ],
+            check=True,
+        )

     # #For evaluation of results at redshift z=1: time == 'z=1' | NOT ADAPTED FOR VSC5 YET!
-    elif time == 'z=1':
-        output_file = base_dir / f'spectra/{waveform}_{Lbox}/{waveform}_{Lbox}_a2_{resolution_1}_{resolution_2}_cross_spectrum.txt'
+    elif time == "z=1":
+        output_file = (
+            base_dir
+            / f"spectra/{waveform}_{Lbox}/{waveform}_{Lbox}_a2_{resolution_1}_{resolution_2}_cross_spectrum.txt"
+        )
         if output_file.exists():
-            print(f'{output_file} already exists, skipping.')
+            print(f"{output_file} already exists, skipping.")
             return
-        subprocess.run([
-            str(spectra),
-            '--ngrid',
-            '1024',
-            '--format=3',
-            '--output',
-            str(base_dir / f'spectra/{waveform}_{Lbox}/{waveform}_{Lbox}_a2_{resolution_1}_{resolution_2}'),
-            '--input',
-            str(base_dir / f'{setup_1}/output_0002.hdf5'),
-            '--input',
-            str(base_dir / f'{setup_2}/output_0002.hdf5')
-        ], check=True)
+        subprocess.run(
+            [
+                str(spectra),
+                "--ngrid",
+                "1024",
+                "--format=3",
+                "--output",
+                str(
+                    base_dir
+                    / f"spectra/{waveform}_{Lbox}/{waveform}_{Lbox}_a2_{resolution_1}_{resolution_2}"
+                ),
+                "--input",
+                str(base_dir / f"{setup_1}/output_0002.hdf5"),
+                "--input",
+                str(base_dir / f"{setup_2}/output_0002.hdf5"),
+            ],
+            check=True,
+        )

     # #For evaluation of final results: time == 'end'
-    elif time == 'end':
-        output_file = base_dir / f'spectra/{waveform}_{Lbox}/{waveform}_{Lbox}_a4_{resolution_1}_{resolution_2}_cross_spectrum.txt'
+    elif time == "end":
+        output_file = (
+            base_dir
+            / f"spectra/{waveform}_{Lbox}/{waveform}_{Lbox}_a4_{resolution_1}_{resolution_2}_cross_spectrum.txt"
+        )
         if output_file.exists():
-            print(f'{output_file} already exists, skipping.')
+            print(f"{output_file} already exists, skipping.")
             return
-        subprocess.run([
-            str(spectra),
-            '--ngrid',
-            '2048',
-            '--format=3',
-            '--output',
-            str(base_dir / f'spectra/{waveform}_{Lbox}/{waveform}_{Lbox}_a4_{resolution_1}_{resolution_2}'),
-            '--input',
-            str(base_dir / f'{setup_1}/output_0004.hdf5'),
-            '--input',
-            str(base_dir / f'{setup_2}/output_0004.hdf5')
-        ], check=True)
+        subprocess.run(
+            [
+                str(spectra),
+                "--ngrid",
+                "2048",
+                "--format=3",
+                "--output",
+                str(
+                    base_dir
+                    / f"spectra/{waveform}_{Lbox}/{waveform}_{Lbox}_a4_{resolution_1}_{resolution_2}"
+                ),
+                "--input",
+                str(base_dir / f"{setup_1}/output_0004.hdf5"),
+                "--input",
+                str(base_dir / f"{setup_2}/output_0004.hdf5"),
+            ],
+            check=True,
+        )
     else:
         raise ValueError(f"invalid time ({time})")
     print("end")


 def power_run(resolutions: list, Lbox: int, time: str):
     args = []
     for waveform in waveforms:
         for resolution in resolutions:
-            args.append((
-                waveform,
-                resolution,
-                resolution,
-                Lbox,
-                time
-            ))
+            args.append((waveform, resolution, resolution, Lbox, time))
     return args
@@ -96,28 +120,22 @@ def cross_run(resolutions: list, Lbox: int, time: str):
     args = []
     for waveform in waveforms:
         for res1, res2 in itertools.combinations(resolutions, 2):
-            args.append((
-                waveform,
-                res1,
-                res2,
-                Lbox,
-                time
-            ))
+            args.append((waveform, res1, res2, Lbox, time))
     return args


-if __name__ == '__main__':
+if __name__ == "__main__":
     # input("are you sure you want to run this? This might need a large amount of memory")
     Lbox = 100
     resolutions = [128, 256, 512, 1024]
-    spectra = spectra_dir / 'spectra'
+    spectra = spectra_dir / "spectra"
     time = argv[1]
-    if argv[2] == 'power':
+    if argv[2] == "power":
         args = power_run(resolutions=resolutions, Lbox=Lbox, time=time)
-    elif argv[2] == 'cross':
+    elif argv[2] == "cross":
         args = cross_run(resolutions=resolutions, Lbox=Lbox, time=time)
     else:
         raise ValueError("missing argv[2] (power|cross)")
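Note: power_run pairs each resolution with itself, while cross_run uses itertools.combinations to build every unordered pair of distinct resolutions; for the resolution list above that yields six comparisons per waveform:

import itertools

resolutions = [128, 256, 512, 1024]
print(list(itertools.combinations(resolutions, 2)))
# [(128, 256), (128, 512), (128, 1024), (256, 512), (256, 1024), (512, 1024)]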

View file

@ -38,7 +38,7 @@ colors = [f"C{i}" for i in range(10)]
def spectra_data( def spectra_data(
waveform: str, resolution_1: int, resolution_2: int, Lbox: int, time: str waveform: str, resolution_1: int, resolution_2: int, Lbox: int, time: str
): ):
dir = base_dir / f"spectra/{waveform}_{Lbox}" dir = base_dir / f"spectra/{waveform}_{Lbox}"
@@ -82,7 +82,10 @@ def create_plot(mode):
     fig: Figure
     combination_list = list(itertools.combinations(resolutions, 2))
     fig, axes = plt.subplots(
-        len(waveforms), 3, sharex=True, sharey=True,
+        len(waveforms),
+        3,
+        sharex=True,
+        sharey=True,
         figsize=figsize_from_page_fraction(columns=2),
     )
     crossings = np.zeros((len(waveforms), len(combination_list)))
@@ -94,7 +97,7 @@ def create_plot(mode):
             # TODO: better names
             ax_ics: "ics",
             ax_z1: "z=1",
-            ax_end: "z=0"
+            ax_end: "z=0",
         }
         bottom_row = i == len(waveforms) - 1
         top_row = i == 0
@@ -118,7 +121,9 @@ def create_plot(mode):
                 verticalalignment="top",
                 transform=ax.transAxes,
             )
-            for j, res in enumerate(resolutions[:-1] if mode == "cross" else resolutions):
+            for j, res in enumerate(
+                resolutions[:-1] if mode == "cross" else resolutions
+            ):
                 ax.axvline(
                     k0 * res,
                     color=colors[j],
@@ -129,26 +134,34 @@ def create_plot(mode):
             # ax.set_yticklabels([])
         if mode == "power":
-            ax_ics.set_ylabel("$\\mathrm{{P}}_\\mathrm{{X}}$ / $\\mathrm{{P}}_{{1024}}$")
+            ax_ics.set_ylabel(
+                "$\\mathrm{{P}}_\\mathrm{{X}}$ / $\\mathrm{{P}}_{{1024}}$"
+            )
             for j, resolution in enumerate(resolutions):
                 ics_data = spectra_data(waveform, resolution, resolution, Lbox, "ics")
                 ics_k = ics_data["k [Mpc]"]
                 ics_p1 = ics_data["P1"]
-                comp_data = spectra_data(waveform, resolutions[-1], resolutions[-1], Lbox, "ics")
+                comp_data = spectra_data(
+                    waveform, resolutions[-1], resolutions[-1], Lbox, "ics"
+                )
                 comp_p1 = comp_data["P1"]
                 ics_p1 /= comp_p1
                 end_data = spectra_data(waveform, resolution, resolution, Lbox, "end")
                 end_k = end_data["k [Mpc]"]
                 end_p1 = end_data["P1"]
-                comp_data = spectra_data(waveform, resolutions[-1], resolutions[-1], Lbox, "end")
+                comp_data = spectra_data(
+                    waveform, resolutions[-1], resolutions[-1], Lbox, "end"
+                )
                 comp_p1 = comp_data["P1"]
                 end_p1 /= comp_p1
                 z1_data = spectra_data(waveform, resolution, resolution, Lbox, "z=1")
                 z1_k = z1_data["k [Mpc]"]
                 z1_p1 = z1_data["P1"]
-                comp_data = spectra_data(waveform, resolutions[-1], resolutions[-1], Lbox, 'z=1')
+                comp_data = spectra_data(
+                    waveform, resolutions[-1], resolutions[-1], Lbox, "z=1"
+                )
                 comp_p1 = comp_data["P1"]
                 z1_p1 /= comp_p1
@@ -158,8 +171,7 @@ def create_plot(mode):
             for ax in [ax_ics, ax_z1, ax_end]:
                 ax.set_ylim(0.9, 1.10)
                 ax.set_axisbelow(True)
-                ax.grid(color='black', linestyle=':', linewidth=0.5, alpha=0.5)
+                ax.grid(color="black", linestyle=":", linewidth=0.5, alpha=0.5)
             # fig.suptitle(f"Power Spectra {time}") #Not needed for paper
             # fig.tight_layout()
@@ -168,39 +180,47 @@ def create_plot(mode):
                 ax_ics.set_ylabel("C")
                 # ax_end.set_ylabel("C")
             for j, (res1, res2) in enumerate(combination_list):
-                ics_data = spectra_data(waveform, res1, res2, Lbox, 'ics')
+                ics_data = spectra_data(waveform, res1, res2, Lbox, "ics")
                 ics_k = ics_data["k [Mpc]"]
                 ics_pcross = ics_data["Pcross"]
-                ax_ics.semilogx(ics_k, ics_pcross, color=colors[j + 3], label=f'{res1} vs {res2}')
-                z1_data = spectra_data(waveform, res1, res2, Lbox, 'z=1')
+                ax_ics.semilogx(
+                    ics_k, ics_pcross, color=colors[j + 3], label=f"{res1} vs {res2}"
+                )
+                z1_data = spectra_data(waveform, res1, res2, Lbox, "z=1")
                 z1_k = z1_data["k [Mpc]"]
                 z1_pcross = z1_data["Pcross"]
-                ax_z1.semilogx(z1_k, z1_pcross, color=colors[j + 3], label=f'{res1} vs {res2}')
-                end_data = spectra_data(waveform, res1, res2, Lbox, 'end')
+                ax_z1.semilogx(
+                    z1_k, z1_pcross, color=colors[j + 3], label=f"{res1} vs {res2}"
+                )
+                end_data = spectra_data(waveform, res1, res2, Lbox, "end")
                 end_k = end_data["k [Mpc]"]
                 end_pcross = end_data["Pcross"]
-                ax_end.semilogx(end_k, end_pcross, color=colors[j + 3], label=f'{res1} vs {res2}')
+                ax_end.semilogx(
+                    end_k, end_pcross, color=colors[j + 3], label=f"{res1} vs {res2}"
+                )
                 # #Put this here to enable changing time of crossing measurement more easily
                 smaller_res = min(res1, res2)
-                crossing_index = np.searchsorted(end_k.to_list(), k0 * smaller_res)  # change here
+                crossing_index = np.searchsorted(
+                    end_k.to_list(), k0 * smaller_res
+                )  # change here
                 crossing_value = end_pcross[crossing_index]  # and here
                 crossings[i][j] = crossing_value
             for ax in [ax_ics, ax_z1, ax_end]:
                 ax.set_axisbelow(True)
-                ax.grid(color='black', linestyle=':', linewidth=0.5, alpha=0.5)
+                ax.grid(color="black", linestyle=":", linewidth=0.5, alpha=0.5)
             ax_end.set_xlim(right=k0 * resolutions[-1])
             ax_end.set_ylim(0.8, 1.02)
             if bottom_row:
                 # ax_z1.legend()
-                ax_ics.legend(loc='lower left')
+                ax_ics.legend(loc="lower left")
         if not bottom_row:
             last_xtick: XTick = ax_ics.yaxis.get_major_ticks()[0]
             last_xtick.set_visible(False)
@@ -212,7 +232,7 @@ def create_plot(mode):
     # print(crossings_df.to_markdown())
     print(crossings_df.to_latex())
     fig.tight_layout()
-    fig.subplots_adjust(wspace=0,hspace=0)
+    fig.subplots_adjust(wspace=0, hspace=0)
     fig.savefig(Path(f"~/tmp/spectra_{mode}.pdf").expanduser())

View file

@ -42,11 +42,12 @@ def plotdf3d(pl: Plotter, df: DataFrame, color="white"):
glrenderer = vtk.vtkOpenGLRenderer.SafeDownCast(renderer) glrenderer = vtk.vtkOpenGLRenderer.SafeDownCast(renderer)
glrenderer.SetPass(blur_pass) glrenderer.SetPass(blur_pass)
def df_to_coords(df: pd.DataFrame): def df_to_coords(df: pd.DataFrame):
return df[["X", "Y", "Z"]].to_numpy() return df[["X", "Y", "Z"]].to_numpy()
if __name__ == '__main__': if __name__ == "__main__":
# HALO = 1 # HALO = 1
# reference_dir = base_dir / "shannon_512_100" # reference_dir = base_dir / "shannon_512_100"
# df, _ = read_file(reference_dir / "output_0004.hdf5") # df, _ = read_file(reference_dir / "output_0004.hdf5")

View file

@ -12,7 +12,11 @@ waveforms = ["DB2", "DB4", "DB8", "shannon"]
def print_progress(i, total, extra_data=""): def print_progress(i, total, extra_data=""):
print(f"{i} of {total} ({extra_data})" + " " * 20, end="\r" if i != total else "\n", flush=True) print(
f"{i} of {total} ({extra_data})" + " " * 20,
end="\r" if i != total else "\n",
flush=True,
)
def memory_usage(df: pd.DataFrame): def memory_usage(df: pd.DataFrame):
@@ -35,7 +39,7 @@ def read_swift_config(dir: Path):
 def print_wall_time(dir: Path):
-    with(dir / "swift.log").open() as f:
+    with (dir / "swift.log").open() as f:
         last_line = f.readlines()[-1]
     print(last_line)
     assert "main: done. Bye." in last_line
@@ -66,17 +70,24 @@ def rowcolumn_labels(axes, labels, isrow: bool, pad=5) -> None:
             xy = (0, 0.5)
             xytext = (-ax.yaxis.labelpad - pad, 0)
             xycoords = ax.yaxis.label
-            ha = 'right'
-            va = 'center'
+            ha = "right"
+            va = "center"
         else:
             xy = (0.5, 1)
             xytext = (0, pad)
-            xycoords = 'axes fraction'
-            ha = 'center'
-            va = 'baseline'
-        ax.annotate(label, xy=xy, xytext=xytext,
-                    xycoords=xycoords, textcoords='offset points',
-                    size='large', ha=ha, va=va)
+            xycoords = "axes fraction"
+            ha = "center"
+            va = "baseline"
+        ax.annotate(
+            label,
+            xy=xy,
+            xytext=xytext,
+            xycoords=xycoords,
+            textcoords="offset points",
+            size="medium",
+            ha=ha,
+            va=va,
+        )


 def tex_fmt(format_str: str, *args) -> str: