Mirror of https://github.com/Findus23/halo_comparison.git

Changed layout of comparison figures: introduced sup(x/y)label while keeping the old structure, resized rowcolumn_labels to match the other labels, and moved the comp information

Formatted everything with black
glatterf42 2022-08-10 16:26:30 +02:00
parent 60bd933469
commit 39bb626a42
25 changed files with 947 additions and 427 deletions


@@ -33,10 +33,12 @@ from utils import figsize_from_page_fraction
# rc('ytick',direction='in')
# rc('legend',fontsize='x-large')
base_shape = {'u': [np.array([0, 1]), np.array([1, 0]), np.array([0, -1])],
'd': [np.array([0, -1]), np.array([-1, 0]), np.array([0, 1])],
'r': [np.array([1, 0]), np.array([0, 1]), np.array([-1, 0])],
'l': [np.array([-1, 0]), np.array([0, -1]), np.array([1, 0])]}
base_shape = {
"u": [np.array([0, 1]), np.array([1, 0]), np.array([0, -1])],
"d": [np.array([0, -1]), np.array([-1, 0]), np.array([0, 1])],
"r": [np.array([1, 0]), np.array([0, 1]), np.array([-1, 0])],
"l": [np.array([-1, 0]), np.array([0, -1]), np.array([1, 0])],
}
def hilbert_curve(order, orientation):
@@ -44,26 +46,46 @@ def hilbert_curve(order, orientation):
Recursively creates the structure for a hilbert curve of given order
"""
if order > 1:
if orientation == 'u':
return hilbert_curve(order - 1, 'r') + [np.array([0, 1])] + \
hilbert_curve(order - 1, 'u') + [np.array([1, 0])] + \
hilbert_curve(order - 1, 'u') + [np.array([0, -1])] + \
hilbert_curve(order - 1, 'l')
elif orientation == 'd':
return hilbert_curve(order - 1, 'l') + [np.array([0, -1])] + \
hilbert_curve(order - 1, 'd') + [np.array([-1, 0])] + \
hilbert_curve(order - 1, 'd') + [np.array([0, 1])] + \
hilbert_curve(order - 1, 'r')
elif orientation == 'r':
return hilbert_curve(order - 1, 'u') + [np.array([1, 0])] + \
hilbert_curve(order - 1, 'r') + [np.array([0, 1])] + \
hilbert_curve(order - 1, 'r') + [np.array([-1, 0])] + \
hilbert_curve(order - 1, 'd')
if orientation == "u":
return (
hilbert_curve(order - 1, "r")
+ [np.array([0, 1])]
+ hilbert_curve(order - 1, "u")
+ [np.array([1, 0])]
+ hilbert_curve(order - 1, "u")
+ [np.array([0, -1])]
+ hilbert_curve(order - 1, "l")
)
elif orientation == "d":
return (
hilbert_curve(order - 1, "l")
+ [np.array([0, -1])]
+ hilbert_curve(order - 1, "d")
+ [np.array([-1, 0])]
+ hilbert_curve(order - 1, "d")
+ [np.array([0, 1])]
+ hilbert_curve(order - 1, "r")
)
elif orientation == "r":
return (
hilbert_curve(order - 1, "u")
+ [np.array([1, 0])]
+ hilbert_curve(order - 1, "r")
+ [np.array([0, 1])]
+ hilbert_curve(order - 1, "r")
+ [np.array([-1, 0])]
+ hilbert_curve(order - 1, "d")
)
else:
return hilbert_curve(order - 1, 'd') + [np.array([-1, 0])] + \
hilbert_curve(order - 1, 'l') + [np.array([0, -1])] + \
hilbert_curve(order - 1, 'l') + [np.array([1, 0])] + \
hilbert_curve(order - 1, 'u')
return (
hilbert_curve(order - 1, "d")
+ [np.array([-1, 0])]
+ hilbert_curve(order - 1, "l")
+ [np.array([0, -1])]
+ hilbert_curve(order - 1, "l")
+ [np.array([1, 0])]
+ hilbert_curve(order - 1, "u")
)
else:
return base_shape[orientation]
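
A quick property check of this recursion (a sketch for illustration, not part of the commit; it assumes numpy is imported as np as in the rest of the file): an order-n curve is a list of 4**n - 1 unit steps joining 4**n cells, since f(n) = 4 * f(n - 1) + 3 with f(1) = 3, and accumulating the steps should visit every cell exactly once.

steps = hilbert_curve(3, "u")
assert len(steps) == 4 ** 3 - 1  # 63 unit steps
points = np.cumsum([np.array([0, 0])] + steps, axis=0)  # 64 visited cells
assert len(np.unique(points, axis=0)) == 4 ** 3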
@@ -88,15 +110,17 @@ def hilbert_curve(order, orientation):
order = 6
curve = hilbert_curve(order, 'u')
curve = hilbert_curve(order, "u")
curve = np.array(curve) * 4
cumulative_curve_int = np.array([np.sum(curve[:i], 0) for i in range(len(curve) + 1)])
cumulative_curve = (np.array([np.sum(curve[:i], 0) for i in range(len(curve) + 1)]) + 2) / 2 ** (order + 2)
cumulative_curve = (
np.array([np.sum(curve[:i], 0) for i in range(len(curve) + 1)]) + 2
) / 2 ** (order + 2)
# plot curve using plt
N = 2 ** (2 * order)
sublevel = order - 4
cmap = cm.get_cmap('jet')
cmap = cm.get_cmap("jet")
fig = plt.figure(figsize=figsize_from_page_fraction(height_to_width=1))
t = {}
@@ -104,31 +128,38 @@ sublevel = 7
for i in range(2 ** (2 * sublevel)):
il = i * N // (2 ** (2 * sublevel))
ir = (i + 1) * N // 2 ** (2 * sublevel)
plt.plot(cumulative_curve[il:ir + 1, 0], cumulative_curve[il:ir + 1, 1], lw=0.5, c=cmap(i / 2 ** (2 * sublevel)))
plt.plot(
cumulative_curve[il : ir + 1, 0],
cumulative_curve[il : ir + 1, 1],
lw=0.5,
c=cmap(i / 2 ** (2 * sublevel)),
)
plt.xlabel('$x$')
plt.ylabel('$y$')
plt.xlabel("$x$")
plt.ylabel("$y$")
plt.tight_layout()
plt.savefig(Path(f"~/tmp/hilbert_indexcolor.eps").expanduser())
key = b'0123456789ABCDEF'
key = b"0123456789ABCDEF"
num = 123
print(siphash.SipHash_2_4(key, bytes(num)).hash())
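
Note on the snippet above: SipHash-2-4 is a keyed 64-bit hash, so .hash() returns an integer in [0, 2**64); dividing by 2 ** 64 (done further below) maps it into [0, 1), the input range of a matplotlib colormap, which is what turns the smooth Hilbert-index coloring into the scrambled variant. As an aside, bytes(num) with an integer argument produces num zero bytes rather than an encoding of num, so the hash input here is a run of zero bytes whose length varies with the index; the colors still scramble, but something like str(num).encode() may have been intended.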
order = 6
curve = hilbert_curve(order, 'u')
curve = hilbert_curve(order, "u")
curve = np.array(curve) * 4
cumulative_curve_int = np.array([np.sum(curve[:i], 0) for i in range(len(curve) + 1)])
cumulative_curve = (np.array([np.sum(curve[:i], 0) for i in range(len(curve) + 1)]) + 2) / 2 ** (order + 2)
cumulative_curve = (
np.array([np.sum(curve[:i], 0) for i in range(len(curve) + 1)]) + 2
) / 2 ** (order + 2)
# plot curve using plt
N = 2 ** (2 * order)
sublevel = order - 4
cmap = cm.get_cmap('jet')
cmap = cm.get_cmap("jet")
plt.figure()
key = b'0123456789ABCDEF'
key = b"0123456789ABCDEF"
fig = plt.figure(figsize=figsize_from_page_fraction(height_to_width=1))
t = {}
@@ -137,10 +168,15 @@ for i in range(2 ** (2 * sublevel)):
il = i * N // (2 ** (2 * sublevel))
ir = (i + 1) * N // 2 ** (2 * sublevel)
sipkey = siphash.SipHash_2_4(key, bytes(il)).hash()
plt.plot(cumulative_curve[il:ir + 1, 0], cumulative_curve[il:ir + 1, 1], lw=0.5, c=cmap(sipkey / 2 ** 64))
plt.plot(
cumulative_curve[il : ir + 1, 0],
cumulative_curve[il : ir + 1, 1],
lw=0.5,
c=cmap(sipkey / 2 ** 64),
)
plt.xlabel('$x$')
plt.ylabel('$y$')
plt.xlabel("$x$")
plt.ylabel("$y$")
plt.tight_layout()
plt.savefig(Path(f"~/tmp/hilbert_indexcolor_scrambled.eps").expanduser())
plt.show()


@@ -8,6 +8,7 @@ from typing import List
import matplotlib.pyplot as plt
import numpy as np
# two-fold upsampling -- https://cnx.org/contents/xsppCgXj@8.18:H_wA16rf@16/Upsampling
from matplotlib.axes import Axes
from matplotlib.figure import Figure
@@ -69,12 +70,12 @@ def cascade_algorithm(h, g, maxit):
for it in range(maxit):
# perform repeated convolutions
phi_it = np.sqrt(2) * np.convolve(h_it, phi_it, mode='full')
phi_it = np.sqrt(2) * np.convolve(h_it, phi_it, mode="full")
if it != maxit - 1:
psi_it = np.sqrt(2) * np.convolve(h_it, psi_it, mode='full')
psi_it = np.sqrt(2) * np.convolve(h_it, psi_it, mode="full")
else:
psi_it = np.sqrt(2) * np.convolve(g_it, psi_it, mode='full')
psi_it = np.sqrt(2) * np.convolve(g_it, psi_it, mode="full")
# upsample the coefficients
h_it = upsample(h_it)
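
upsample is defined earlier in this file and not shown in the hunk. For orientation, a minimal sketch of the two-fold upsampling described by the linked reference (an assumption; the actual implementation may differ):

import numpy as np

def upsample_sketch(h: np.ndarray) -> np.ndarray:
    # insert a zero between consecutive coefficients: out[2n] = h[n]
    out = np.zeros(2 * len(h) - 1)
    out[::2] = h
    return out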
@@ -108,55 +109,173 @@ xdb2, phidb2, psidb2 = cascade_algorithm(h_DB2, g_DB2, maxit)
# DB3 -- http://wavelets.pybytes.com/wavelet/db3/
h_DB3 = np.array(
[0.3326705529509569, 0.8068915093133388, 0.4598775021193313, -0.13501102001039084, -0.08544127388224149,
0.035226291882100656])
[
0.3326705529509569,
0.8068915093133388,
0.4598775021193313,
-0.13501102001039084,
-0.08544127388224149,
0.035226291882100656,
]
)
g_DB3 = np.array(
[0.035226291882100656, 0.08544127388224149, -0.13501102001039084, -0.4598775021193313, 0.8068915093133388,
-0.3326705529509569])
[
0.035226291882100656,
0.08544127388224149,
-0.13501102001039084,
-0.4598775021193313,
0.8068915093133388,
-0.3326705529509569,
]
)
xdb3, phidb3, psidb3 = cascade_algorithm(h_DB3, g_DB3, maxit)
# DB4 -- http://wavelets.pybytes.com/wavelet/db4/
h_DB4 = np.array(
[0.23037781330885523, 0.7148465705525415, 0.6308807679295904, -0.02798376941698385, -0.18703481171888114,
0.030841381835986965, 0.032883011666982945, -0.010597401784997278])
[
0.23037781330885523,
0.7148465705525415,
0.6308807679295904,
-0.02798376941698385,
-0.18703481171888114,
0.030841381835986965,
0.032883011666982945,
-0.010597401784997278,
]
)
g_DB4 = np.array(
[-0.010597401784997278, -0.032883011666982945, 0.030841381835986965, 0.18703481171888114, -0.02798376941698385,
-0.6308807679295904, 0.7148465705525415, -0.23037781330885523])
[
-0.010597401784997278,
-0.032883011666982945,
0.030841381835986965,
0.18703481171888114,
-0.02798376941698385,
-0.6308807679295904,
0.7148465705525415,
-0.23037781330885523,
]
)
xdb4, phidb4, psidb4 = cascade_algorithm(h_DB4, g_DB4, maxit)
# DB8 -- http://wavelets.pybytes.com/wavelet/db8/
h_DB8 = np.array(
[0.05441584224308161, 0.3128715909144659, 0.6756307362980128, 0.5853546836548691, -0.015829105256023893,
-0.2840155429624281, 0.00047248457399797254, 0.128747426620186, -0.01736930100202211, -0.04408825393106472,
0.013981027917015516, 0.008746094047015655, -0.00487035299301066, -0.0003917403729959771, 0.0006754494059985568,
-0.00011747678400228192])
[
0.05441584224308161,
0.3128715909144659,
0.6756307362980128,
0.5853546836548691,
-0.015829105256023893,
-0.2840155429624281,
0.00047248457399797254,
0.128747426620186,
-0.01736930100202211,
-0.04408825393106472,
0.013981027917015516,
0.008746094047015655,
-0.00487035299301066,
-0.0003917403729959771,
0.0006754494059985568,
-0.00011747678400228192,
]
)
g_DB8 = np.array(
[-0.00011747678400228192, -0.0006754494059985568, -0.0003917403729959771, 0.00487035299301066, 0.008746094047015655,
-0.013981027917015516, -0.04408825393106472, 0.01736930100202211, 0.128747426620186, -0.00047248457399797254,
-0.2840155429624281, 0.015829105256023893, 0.5853546836548691, -0.6756307362980128, 0.3128715909144659,
-0.05441584224308161])
[
-0.00011747678400228192,
-0.0006754494059985568,
-0.0003917403729959771,
0.00487035299301066,
0.008746094047015655,
-0.013981027917015516,
-0.04408825393106472,
0.01736930100202211,
0.128747426620186,
-0.00047248457399797254,
-0.2840155429624281,
0.015829105256023893,
0.5853546836548691,
-0.6756307362980128,
0.3128715909144659,
-0.05441584224308161,
]
)
xdb8, phidb8, psidb8 = cascade_algorithm(h_DB8, g_DB8, maxit)
# DB16 --
# DB16 --
h_DB16 = np.array(
[0.0031892209253436892, 0.03490771432362905, 0.1650642834886438, 0.43031272284545874, 0.6373563320829833,
0.44029025688580486, -0.08975108940236352, -0.3270633105274758, -0.02791820813292813, 0.21119069394696974,
0.027340263752899923, -0.13238830556335474, -0.006239722752156254, 0.07592423604445779, -0.007588974368642594,
-0.036888397691556774, 0.010297659641009963, 0.013993768859843242, -0.006990014563390751, -0.0036442796214883506,
0.00312802338120381, 0.00040789698084934395, -0.0009410217493585433, 0.00011424152003843815,
0.00017478724522506327, -6.103596621404321e-05, -1.394566898819319e-05, 1.133660866126152e-05,
-1.0435713423102517e-06, -7.363656785441815e-07, 2.3087840868545578e-07, -2.1093396300980412e-08])
g_DB16 = np.array([-2.1093396300980412e-08, -2.3087840868545578e-07, -7.363656785441815e-07, 1.0435713423102517e-06,
1.133660866126152e-05, 1.394566898819319e-05, -6.103596621404321e-05, -0.00017478724522506327,
0.00011424152003843815, 0.0009410217493585433, 0.00040789698084934395, -0.00312802338120381,
-0.0036442796214883506, 0.006990014563390751, 0.013993768859843242, -0.010297659641009963,
-0.036888397691556774, 0.007588974368642594, 0.07592423604445779, 0.006239722752156254,
-0.13238830556335474, -0.027340263752899923, 0.21119069394696974, 0.02791820813292813,
-0.3270633105274758, 0.08975108940236352, 0.44029025688580486, -0.6373563320829833,
0.43031272284545874, -0.1650642834886438, 0.03490771432362905, -0.0031892209253436892])
[
0.0031892209253436892,
0.03490771432362905,
0.1650642834886438,
0.43031272284545874,
0.6373563320829833,
0.44029025688580486,
-0.08975108940236352,
-0.3270633105274758,
-0.02791820813292813,
0.21119069394696974,
0.027340263752899923,
-0.13238830556335474,
-0.006239722752156254,
0.07592423604445779,
-0.007588974368642594,
-0.036888397691556774,
0.010297659641009963,
0.013993768859843242,
-0.006990014563390751,
-0.0036442796214883506,
0.00312802338120381,
0.00040789698084934395,
-0.0009410217493585433,
0.00011424152003843815,
0.00017478724522506327,
-6.103596621404321e-05,
-1.394566898819319e-05,
1.133660866126152e-05,
-1.0435713423102517e-06,
-7.363656785441815e-07,
2.3087840868545578e-07,
-2.1093396300980412e-08,
]
)
g_DB16 = np.array(
[
-2.1093396300980412e-08,
-2.3087840868545578e-07,
-7.363656785441815e-07,
1.0435713423102517e-06,
1.133660866126152e-05,
1.394566898819319e-05,
-6.103596621404321e-05,
-0.00017478724522506327,
0.00011424152003843815,
0.0009410217493585433,
0.00040789698084934395,
-0.00312802338120381,
-0.0036442796214883506,
0.006990014563390751,
0.013993768859843242,
-0.010297659641009963,
-0.036888397691556774,
0.007588974368642594,
0.07592423604445779,
0.006239722752156254,
-0.13238830556335474,
-0.027340263752899923,
0.21119069394696974,
0.02791820813292813,
-0.3270633105274758,
0.08975108940236352,
0.44029025688580486,
-0.6373563320829833,
0.43031272284545874,
-0.1650642834886438,
0.03490771432362905,
-0.0031892209253436892,
]
)
xdb16, phidb16, psidb16 = cascade_algorithm(h_DB16, g_DB16, maxit)
@@ -164,14 +283,15 @@ xdb16, phidb16, psidb16 = cascade_algorithm(h_DB16, g_DB16, maxit)
fig: Figure
fig, ax = plt.subplots(
4, 2,
4,
2,
figsize=figsize_from_page_fraction(height_to_width=12 / 8),
# sharex="all", sharey="all"
)
labels = ['Haar', 'DB2', 'DB4', 'DB8', 'DB16']
labels = ["Haar", "DB2", "DB4", "DB8", "DB16"]
ax[0, 0].set_title('scaling functions $\\varphi$')
ax[0, 1].set_title('wavelets $\\psi$')
ax[0, 0].set_title("scaling functions $\\varphi$")
ax[0, 1].set_title("wavelets $\\psi$")
ax[0, 0].plot(xhaar, phihaar, lw=1)
ax[0, 1].plot(xhaar, psihaar, lw=1)
@@ -188,7 +308,7 @@ ax[3, 1].plot(xdb8, psidb8, lw=1)
# ax[4, 0].plot(xdb16, phidb16, lw=1)
# ax[4, 1].plot(xdb16, psidb16, lw=1)
for a in ax.flatten():
a.set_xlabel('t')
a.set_xlabel("t")
def inset_label(ax: Axes, text: str):
@@ -198,7 +318,7 @@ def inset_label(ax: Axes, text: str):
text,
horizontalalignment="left",
verticalalignment="bottom",
transform=ax.transAxes
transform=ax.transAxes,
)
@@ -238,32 +358,63 @@ def fourier_wavelet(h, g, n):
# ax.plot([0, np.pi], [1., 1.], 'k:')
kh, fphih, fpsih = fourier_wavelet(h_Haar, g_Haar, 256)
ax.plot(kh, np.abs(fphih) ** 2, label=r'$\hat\varphi_\textrm{Haar}$', c="C0")
ax.plot(kh, np.abs(fpsih) ** 2, label=r'$\hat\psi_\textrm{Haar}$', c="C0", linestyle="dashed")
ax.plot(kh, np.abs(fphih) ** 2, label=r"$\hat\varphi_\textrm{Haar}$", c="C0")
ax.plot(
kh,
np.abs(fpsih) ** 2,
label=r"$\hat\psi_\textrm{Haar}$",
c="C0",
linestyle="dashed",
)
kdb2, fphidb2, fpsidb2 = fourier_wavelet(h_DB2, g_DB2, 256)
ax.plot(kdb2, np.abs(fphidb2) ** 2, label=r'$\hat\varphi_\textrm{DB2}$', c="C1")
ax.plot(kdb2, np.abs(fpsidb2) ** 2, label=r'$\hat\psi_\textrm{DB2}$', c="C1", linestyle="dashed")
ax.plot(kdb2, np.abs(fphidb2) ** 2, label=r"$\hat\varphi_\textrm{DB2}$", c="C1")
ax.plot(
kdb2,
np.abs(fpsidb2) ** 2,
label=r"$\hat\psi_\textrm{DB2}$",
c="C1",
linestyle="dashed",
)
kdb4, fphidb4, fpsidb4 = fourier_wavelet(h_DB4, g_DB4, 256)
ax.plot(kdb4, np.abs(fphidb4) ** 2, label=r'$\hat\varphi_\textrm{DB4}$', c="C2")
ax.plot(kdb4, np.abs(fpsidb4) ** 2, label=r'$\hat\psi_\textrm{DB4}$', c="C2", linestyle="dashed")
ax.plot(kdb4, np.abs(fphidb4) ** 2, label=r"$\hat\varphi_\textrm{DB4}$", c="C2")
ax.plot(
kdb4,
np.abs(fpsidb4) ** 2,
label=r"$\hat\psi_\textrm{DB4}$",
c="C2",
linestyle="dashed",
)
kdb8, fphidb8, fpsidb8 = fourier_wavelet(h_DB8, g_DB8, 256)
ax.plot(kdb8, np.abs(fphidb8) ** 2, label=r'$\hat\varphi_\textrm{DB8}$', c="C3")
ax.plot(kdb8, np.abs(fpsidb8) ** 2, label=r'$\hat\psi_\textrm{DB8}$', c="C3", linestyle="dashed")
ax.plot(kdb8, np.abs(fphidb8) ** 2, label=r"$\hat\varphi_\textrm{DB8}$", c="C3")
ax.plot(
kdb8,
np.abs(fpsidb8) ** 2,
label=r"$\hat\psi_\textrm{DB8}$",
c="C3",
linestyle="dashed",
)
# all k* are np.linspace(0, np.pi, 256), so we can also use them for shannon
def shannon(k):
y = np.zeros_like(k)
y[k > pi / 2] = 1
return y
ax.plot(kdb8, 1 - shannon(kdb8), label=r'$\hat\varphi_\textrm{shannon}$', c="C4")
ax.plot(kdb8, shannon(kdb8), label=r'$\hat\psi_\textrm{shannon}$', c="C4", linestyle="dashed")
ax.plot(kdb8, 1 - shannon(kdb8), label=r"$\hat\varphi_\textrm{shannon}$", c="C4")
ax.plot(
kdb8,
shannon(kdb8),
label=r"$\hat\psi_\textrm{shannon}$",
c="C4",
linestyle="dashed",
)
# ax.plot(kdb8, np.abs(fpsidb8) ** 2, label='$\\hat\\psi_{DB8}$', c="C3", linestyle="dashed")
# kdb16, fphidb16, fpsidb16 = fourier_wavelet(h_DB16, g_DB16, 256)
@@ -282,10 +433,12 @@ leg1 = ax.legend(frameon=False, handles=philines, loc="center left")
leg2 = ax.legend(frameon=False, handles=psilines, loc="center right")
ax.add_artist(leg1)
ax.add_artist(leg2)
ax.set_xlabel('k')
ax.set_ylabel('P(k)')
ax.set_xlabel("k")
ax.set_ylabel("P(k)")
ax.set_xticks([0, pi / 2, pi])
ax.set_xticklabels(["0", r"$k_\textrm{coarse}^\textrm{ny}$", r"$k_\textrm{fine}^\textrm{ny}$"])
ax.set_xticklabels(
["0", r"$k_\textrm{coarse}^\textrm{ny}$", r"$k_\textrm{fine}^\textrm{ny}$"]
)
# plt.semilogy()
# plt.ylim([1e-4,2.0])


@@ -32,7 +32,14 @@ mode = Mode.richings
def dir_name_to_parameter(dir_name: str):
return map(int, dir_name.lstrip("auriga6_halo").lstrip("richings21_").lstrip("bary_").lstrip("ramses_").split("_"))
return map(
int,
dir_name.lstrip("auriga6_halo")
.lstrip("richings21_")
.lstrip("bary_")
.lstrip("ramses_")
.split("_"),
)
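
A caution on the chained lstrip calls above (behavior unchanged by this commit): str.lstrip removes a set of characters, not a literal prefix, so it can eat into the payload whenever the first payload character happens to occur in the prefix string. A quick illustration with a hypothetical directory name, and the prefix-safe alternative available since Python 3.9:

"richings21_128_64".lstrip("richings21_")        # -> "8_64" (the "12" is eaten too)
"richings21_128_64".removeprefix("richings21_")  # -> "128_64"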
def levelmax_to_softening_length(levelmax: int) -> float:
@@ -46,8 +53,8 @@ fig2: Figure = plt.figure(figsize=figsize_from_page_fraction())
ax2: Axes = fig2.gca()
for ax in [ax1, ax2]:
ax.set_xlabel(r'R [Mpc]')
ax1.set_ylabel(r'M [$10^{10} \mathrm{M}_\odot$]')
ax.set_xlabel(r"R [Mpc]")
ax1.set_ylabel(r"M [$10^{10} \mathrm{M}_\odot$]")
ax2.set_ylabel("density [$\\frac{10^{10} \\mathrm{M}_\\odot}{Mpc^3}$]")
part_numbers = []
@@ -107,8 +114,10 @@ for dir in sorted(root_dir.glob("*")):
ideal_softening_length = levelmax_to_softening_length(levelmax)
if not np.isclose(softening_length, levelmax_to_softening_length(levelmax)):
raise ValueError(f"softening length for levelmax {levelmax} should be {ideal_softening_length} "
f"but is {softening_length}")
raise ValueError(
f"softening length for levelmax {levelmax} should be {ideal_softening_length} "
f"but is {softening_length}"
)
print(input_file)
if mode == Mode.richings and is_by_adrian:
h = 0.6777
@@ -141,12 +150,16 @@ for dir in sorted(root_dir.glob("*")):
# halo = halos.loc[1]
center = np.array([halo.X, halo.Y, halo.Z])
log_radial_bins, bin_masses, bin_densities, center = halo_mass_profile(
df, center, particles_meta, plot=False, num_bins=100,
vmin=0.002, vmax=6.5
df, center, particles_meta, plot=False, num_bins=100, vmin=0.002, vmax=6.5
)
i_min_border = np.argmax(0.01 < log_radial_bins) # first bin outside of specific radius
i_min_border = np.argmax(
0.01 < log_radial_bins
) # first bin outside of specific radius
i_max_border = np.argmax(1.5 < log_radial_bins)
popt = fit_nfw(log_radial_bins[i_min_border:i_max_border], bin_densities[i_min_border:i_max_border]) # = rho_0, r_s
popt = fit_nfw(
log_radial_bins[i_min_border:i_max_border],
bin_densities[i_min_border:i_max_border],
) # = rho_0, r_s
print(popt)
# # Plot NFW profile
# ax.loglog(
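
For context, fit_nfw lives elsewhere in the repo; per the inline comment, popt holds (rho_0, r_s) of a Navarro-Frenk-White profile. A sketch of the profile being fitted (the standard NFW form, not the repo's exact code):

import numpy as np

def nfw_density(r: np.ndarray, rho_0: float, r_s: float) -> np.ndarray:
    # rho(r) = rho_0 / ((r / r_s) * (1 + r / r_s) ** 2)
    x = r / r_s
    return rho_0 / (x * (1 + x) ** 2)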
@@ -176,11 +189,11 @@ for dir in sorted(root_dir.glob("*")):
ref_log_radial_bins, ref_bin_masses, ref_bin_densities = data
mass_deviation: np.ndarray = np.abs(bin_masses - ref_bin_masses)
density_deviation: np.ndarray = np.abs(bin_densities - ref_bin_densities)
ax1.loglog(log_radial_bins[:-1], mass_deviation, c=f"C{i}",
linestyle="dotted")
ax1.loglog(log_radial_bins[:-1], mass_deviation, c=f"C{i}", linestyle="dotted")
ax2.loglog(log_radial_bins[:-1], density_deviation, c=f"C{i}",
linestyle="dotted")
ax2.loglog(
log_radial_bins[:-1], density_deviation, c=f"C{i}", linestyle="dotted"
)
accuracy = mass_deviation / ref_bin_masses
print(accuracy)
print("mean accuracy", accuracy.mean())
@@ -209,11 +222,13 @@ for dir in sorted(root_dir.glob("*")):
vmin = min(vmin, rho.min())
vmax = max(vmax, rho.max())
images.append(Result(
rho=rho,
title=str(dir.name),
levels=(levelmin, levelmin_TF, levelmax) if levelmin else None
))
images.append(
Result(
rho=rho,
title=str(dir.name),
levels=(levelmin, levelmin_TF, levelmax) if levelmin else None,
)
)
i += 1
# plot_cic(
# rho, extent,
@@ -226,15 +241,21 @@ fig2.tight_layout()
# fig3: Figure = plt.figure(figsize=(9, 9))
# axes: List[Axes] = fig3.subplots(3, 3, sharex=True, sharey=True).flatten()
fig3: Figure = plt.figure(figsize=figsize_from_page_fraction(columns=2, height_to_width=1))
fig3: Figure = plt.figure(
figsize=figsize_from_page_fraction(columns=2, height_to_width=1)
)
axes: List[Axes] = fig3.subplots(3, 3, sharex=True, sharey=True).flatten()
for result, ax in zip(images, axes):
data = 1.1 + result.rho
vmin_scaled = 1.1 + vmin
vmax_scaled = 1.1 + vmax
img = ax.imshow(data.T, norm=LogNorm(vmin=vmin_scaled, vmax=vmax_scaled), extent=extent,
origin="lower")
img = ax.imshow(
data.T,
norm=LogNorm(vmin=vmin_scaled, vmax=vmax_scaled),
extent=extent,
origin="lower",
)
ax.set_title(result.title)
fig3.tight_layout()

cic.py

@@ -39,10 +39,15 @@ def cic_deposit(X, Y, ngrid, periodic=True) -> np.ndarray:
def cic_range(
X: np.ndarray, Y: np.ndarray,
ngrid: int,
xmin: float, xmax: float,
ymin: float, ymax: float, *args, **kwargs
X: np.ndarray,
Y: np.ndarray,
ngrid: int,
xmin: float,
xmax: float,
ymin: float,
ymax: float,
*args,
**kwargs
) -> Tuple[np.ndarray, Extent]:
xrange = xmax - xmin
yrange = ymax - ymin
@@ -57,16 +62,25 @@ def cic_range(
def cic_from_radius(
X: np.ndarray, Y: np.ndarray,
ngrid: int,
x_center: float, y_center: float,
radius: float, *args, **kwargs
X: np.ndarray,
Y: np.ndarray,
ngrid: int,
x_center: float,
y_center: float,
radius: float,
*args,
**kwargs
) -> Tuple[np.ndarray, Extent]:
return cic_range(
X, Y, ngrid,
x_center - radius, x_center + radius,
y_center - radius, y_center + radius,
*args, **kwargs
X,
Y,
ngrid,
x_center - radius,
x_center + radius,
y_center - radius,
y_center + radius,
*args,
**kwargs
)
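
cic_deposit itself is defined above this hunk; only its call sites are reformatted here. For orientation, a minimal cloud-in-cell sketch (the standard scheme, assumed rather than copied from the repo), for positions already rescaled to the unit square:

import numpy as np

def cic_deposit_sketch(X: np.ndarray, Y: np.ndarray, ngrid: int) -> np.ndarray:
    # split each particle's unit mass bilinearly over its 4 nearest cells
    rho = np.zeros((ngrid, ngrid))
    x = X * ngrid - 0.5
    y = Y * ngrid - 0.5
    ix, iy = np.floor(x).astype(int), np.floor(y).astype(int)
    dx, dy = x - ix, y - iy
    for ox, oy, w in [
        (0, 0, (1 - dx) * (1 - dy)),
        (1, 0, dx * (1 - dy)),
        (0, 1, (1 - dx) * dy),
        (1, 1, dx * dy),
    ]:
        np.add.at(rho, ((ix + ox) % ngrid, (iy + oy) % ngrid), w)  # periodic wrap
    return rho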
@@ -87,18 +101,21 @@ def plot_cic(rho: np.ndarray, extent: Extent, title: str):
data = np.log(data)
norm = plt.Normalize(vmin=data.min(), vmax=data.max())
image = cmap(norm(data.T))
plt.imsave((Path("~/tmp").expanduser() / title).with_suffix(".png"), image, origin="lower")
plt.imsave(
(Path("~/tmp").expanduser() / title).with_suffix(".png"), image, origin="lower"
)
# ax.hist2d(df.X, df.Y, bins=500, norm=LogNorm())
# ax.hist2d(df2.X, df2.Y, bins=1000, norm=LogNorm())
if __name__ == '__main__':
if __name__ == "__main__":
input_file = Path(sys.argv[1])
df_ref, _ = read_file(input_file)
rho, extent = cic_from_radius(df_ref.X.to_numpy(), df_ref.Y.to_numpy(), 1500, 48.8, 57, 1, periodic=False)
rho, extent = cic_from_radius(
df_ref.X.to_numpy(), df_ref.Y.to_numpy(), 1500, 48.8, 57, 1, periodic=False
)
# rho, extent = cic_range(df_ref.X.to_numpy(), df_ref.Y.to_numpy(), 800, 0, 85.47, 0, 85.47, periodic=False)
plot_cic(
rho, extent,
title=str(input_file.relative_to(input_file.parent.parent).name)
rho, extent, title=str(input_file.relative_to(input_file.parent.parent).name)
)


@@ -16,7 +16,7 @@ for wf in ["DB2", "DB4", "DB8", "shannon"]:
plot=False,
plot3d=False,
velo_halos=True,
single=False
single=False,
)
except Exception as e:
traceback.print_exc()


@@ -45,10 +45,16 @@ def apply_offset(value, offset):
def compare_halo_resolutions(
ref_waveform: str, comp_waveform: str,
reference_resolution: int, comparison_resolution: int,
plot=False, plot3d=False, plot_cic=False,
single=False, velo_halos=False, force=False
ref_waveform: str,
comp_waveform: str,
reference_resolution: int,
comparison_resolution: int,
plot=False,
plot3d=False,
plot_cic=False,
single=False,
velo_halos=False,
force=False,
):
reference_dir = base_dir / f"{ref_waveform}_{reference_resolution}_100"
comparison_dir = base_dir / f"{comp_waveform}_{comparison_resolution}_100/"
@@ -68,14 +74,18 @@ def compare_halo_resolutions(
print("reading reference file")
df_ref, ref_meta = read_file(reference_dir / "output_0004.hdf5")
if velo_halos:
df_ref_halo, ref_halo_lookup, ref_unbound = read_velo_halo_particles(reference_dir)
df_ref_halo, ref_halo_lookup, ref_unbound = read_velo_halo_particles(
reference_dir
)
else:
df_ref_halo = read_halo_file(reference_dir / "fof_output_0004.hdf5")
print("reading comparison file")
df_comp, comp_meta = read_file(comparison_dir / "output_0004.hdf5")
if velo_halos:
df_comp_halo, comp_halo_lookup, comp_unbound = read_velo_halo_particles(comparison_dir)
df_comp_halo, comp_halo_lookup, comp_unbound = read_velo_halo_particles(
comparison_dir
)
else:
df_comp_halo = read_halo_file(comparison_dir / "fof_output_0004.hdf5")
@@ -137,18 +147,22 @@ def compare_halo_resolutions(
print(f"{prev_len} => {after_len} (factor {prev_len / after_len:.2f})")
halo_distances = np.linalg.norm(
ref_halo[['X', 'Y', 'Z']].values
- df_comp_halo[['X', 'Y', 'Z']].values,
axis=1)
ref_halo[["X", "Y", "Z"]].values - df_comp_halo[["X", "Y", "Z"]].values,
axis=1,
)
# print(list(halo_distances))
print(f"find nearby halos (50x{ref_halo.Rvir:.1f})")
print(ref_halo[['X', 'Y', 'Z']].values)
print(ref_halo[["X", "Y", "Z"]].values)
# Find IDs of halos that are less than 50 Rvir away
nearby_halos = set(df_comp_halo.loc[halo_distances < ref_halo.Rvir * 50].index.to_list())
nearby_halos = set(
df_comp_halo.loc[halo_distances < ref_halo.Rvir * 50].index.to_list()
)
if len(nearby_halos) < 10:
print(f"only {len(nearby_halos)} halos, expanding to 150xRvir")
nearby_halos = set(df_comp_halo.loc[halo_distances < ref_halo.Rvir * 150].index.to_list())
nearby_halos = set(
df_comp_halo.loc[halo_distances < ref_halo.Rvir * 150].index.to_list()
)
counters.checking_150 += 1
if not nearby_halos:
@@ -179,9 +193,13 @@ def compare_halo_resolutions(
if plot:
fig: Figure = plt.figure()
ax: Axes = fig.gca()
ax.scatter(apply_offset_to_list(halo_particles["X"], offset_x),
apply_offset_to_list(halo_particles["Y"], offset_y), s=1,
alpha=.3, label="Halo")
ax.scatter(
apply_offset_to_list(halo_particles["X"], offset_x),
apply_offset_to_list(halo_particles["Y"], offset_y),
s=1,
alpha=0.3,
label="Halo",
)
if plot_cic:
diameter = ref_halo["R_size"]
X = ref_halo["Xc"]
@@ -207,6 +225,7 @@ def compare_halo_resolutions(
if plot3d:
from pyvista import Plotter
pl = Plotter()
plotdf3d(pl, halo_particles, color="#b3cde3") # light blue
pl.set_focus((ref_halo.X, ref_halo.Y, ref_halo.Z))
@@ -223,7 +242,11 @@ def compare_halo_resolutions(
particle_ids_in_comp_halo = comp_halo_lookup[halo_id]
mass_factor_limit = 5
if not (1 / mass_factor_limit < (comp_halo_masses[halo_id] / ref_halo_mass) < mass_factor_limit):
if not (
1 / mass_factor_limit
< (comp_halo_masses[halo_id] / ref_halo_mass)
< mass_factor_limit
):
# print("mass not similar, skipping")
num_skipped_for_mass += 1
continue
@@ -235,7 +258,10 @@ def compare_halo_resolutions(
# similarity = len(shared_particles) / len(union_particles)
similarity = len(shared_particles) / (
len(halo_particle_ids) + len(particle_ids_in_comp_halo) - len(shared_particles))
len(halo_particle_ids)
+ len(particle_ids_in_comp_halo)
- len(shared_particles)
)
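
This expression is the Jaccard index J = |A ∩ B| / |A ∪ B|, expanded via |A ∪ B| = |A| + |B| - |A ∩ B|; it is the quantity plotted as $J$ in the "match" comparison figures. A tiny worked example:

a = {1, 2, 3, 4}
b = {3, 4, 5}
shared = a & b  # {3, 4}
similarity = len(shared) / (len(a) + len(b) - len(shared))
assert similarity == 2 / 5  # 2 shared out of 5 distinct particles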
# assert similarity_orig == similarity
# print(shared_size)
# if not similarity:
@@ -247,12 +273,24 @@ def compare_halo_resolutions(
color = f"C{i + 1}"
comp_halo: pd.Series = df_comp_halo.loc[halo_id]
ax.scatter(apply_offset_to_list(df["X"], offset_x), apply_offset_to_list(df["Y"], offset_y), s=1,
alpha=.3, c=color)
circle = Circle((apply_offset(comp_halo.X, offset_x), apply_offset(comp_halo.Y, offset_y)),
comp_halo["Rvir"], zorder=10,
linewidth=1, edgecolor=color, fill=None
)
ax.scatter(
apply_offset_to_list(df["X"], offset_x),
apply_offset_to_list(df["Y"], offset_y),
s=1,
alpha=0.3,
c=color,
)
circle = Circle(
(
apply_offset(comp_halo.X, offset_x),
apply_offset(comp_halo.Y, offset_y),
),
comp_halo["Rvir"],
zorder=10,
linewidth=1,
edgecolor=color,
fill=None,
)
ax.add_artist(circle)
if plot3d:
plotdf3d(pl, df, color="#fed9a6") # light orange
@@ -270,13 +308,16 @@ def compare_halo_resolutions(
comp_halo: pd.Series = df_comp_halo.loc[best_halo]
# merge the data of the two halos with fitting prefixes
halo_data = pd.concat([
ref_halo.add_prefix("ref_"),
comp_halo.add_prefix("comp_")
])
distance = linalg.norm(
np.array([ref_halo.X, ref_halo.Y, ref_halo.Z]) - np.array([comp_halo.X, comp_halo.Y, comp_halo.Z])
) / ref_halo.Rvir
halo_data = pd.concat(
[ref_halo.add_prefix("ref_"), comp_halo.add_prefix("comp_")]
)
distance = (
linalg.norm(
np.array([ref_halo.X, ref_halo.Y, ref_halo.Z])
- np.array([comp_halo.X, comp_halo.Y, comp_halo.Z])
)
/ ref_halo.Rvir
)
halo_data["distance"] = distance
halo_data["match"] = best_halo_match
halo_data["num_skipped_for_mass"] = num_skipped_for_mass
@@ -285,7 +326,9 @@ def compare_halo_resolutions(
if plot:
print(f"plotting with offsets ({offset_x},{offset_y})")
# ax.legend()
ax.set_title(f"{reference_dir.name} vs. {comparison_dir.name} (Halo {index})")
ax.set_title(
f"{reference_dir.name} vs. {comparison_dir.name} (Halo {index})"
)
fig.savefig("out.png", dpi=300)
plt.show()
if plot3d:
@@ -310,7 +353,7 @@ def precalculate_halo_membership(df_comp, df_comp_halo):
print_progress(i, len(df_comp_halo), halo["Sizes"])
size = int(halo["Sizes"])
halo_id = int(i)
halo_particles = df_comp.iloc[pointer:pointer + size]
halo_particles = df_comp.iloc[pointer : pointer + size]
# check_id = halo_particles["FOFGroupIDs"].to_numpy()
# assert (check_id == i).all()
@@ -321,7 +364,7 @@ def precalculate_halo_membership(df_comp, df_comp_halo):
return comp_halo_lookup
if __name__ == '__main__':
if __name__ == "__main__":
compare_halo_resolutions(
ref_waveform="shannon",
comp_waveform="shannon",
@@ -332,5 +375,5 @@ if __name__ == '__main__':
plot_cic=False,
velo_halos=True,
single=False,
force=True
force=True,
)


@@ -21,32 +21,32 @@ from utils import figsize_from_page_fraction, rowcolumn_labels, waveforms, tex_f
G = 43.022682 # in Mpc (km/s)^2 / (10^10 Msun)
vmaxs = {
"Mvir": 52,
"Vmax": 93,
"cNFW": 31
}
vmaxs = {"Mvir": 52, "Vmax": 93, "cNFW": 31}
units = {
"distance": "Mpc",
"Mvir": r"10^{10} \textrm{M}_\odot",
"Vmax": r"\textrm{km} \textrm{s}^{-1}" # TODO
"Mvir": r"10^{10} \textrm{ M}_\odot",
"Vmax": r"\textrm{km } \textrm{s}^{-1}", # TODO
}
def concentration(row, halo_type: str) -> bool:
r_200crit = row[f'{halo_type}_R_200crit']
r_200crit = row[f"{halo_type}_R_200crit"]
if r_200crit <= 0:
cnfw = -1
colour = 'orange'
colour = "orange"
return False
# return cnfw, colour
r_size = row[f'{halo_type}_R_size'] # largest difference from center of mass to any halo particle
m_200crit = row[f'{halo_type}_Mass_200crit']
vmax = row[f'{halo_type}_Vmax'] # largest velocity coming from enclosed mass profile calculation
rmax = row[f'{halo_type}_Rmax']
npart = row[f'{halo_type}_npart']
r_size = row[
f"{halo_type}_R_size"
] # largest difference from center of mass to any halo particle
m_200crit = row[f"{halo_type}_Mass_200crit"]
vmax = row[
f"{halo_type}_Vmax"
] # largest velocity coming from enclosed mass profile calculation
rmax = row[f"{halo_type}_Rmax"]
npart = row[f"{halo_type}_npart"]
VmaxVvir2 = vmax ** 2 * r_200crit / (G * m_200crit)
if VmaxVvir2 <= 1.05:
if m_200crit == 0:
@@ -59,7 +59,7 @@ def concentration(row, halo_type: str) -> bool:
# colour = 'white'
else:
if npart >= 100: # only calculate cnfw for groups with more than 100 particles
cnfw = row[f'{halo_type}_cNFW']
cnfw = row[f"{halo_type}_cNFW"]
return True
# colour = 'black'
else:
@@ -91,12 +91,12 @@ def plot_comparison_hist2d(ax: Axes, file: Path, property: str):
max_x = max([max(df[x_col]), max(df[y_col])])
num_bins = 100
bins = np.geomspace(min_x, max_x, num_bins)
if property == 'cNFW':
if property == "cNFW":
rows = []
for i, row in df.iterrows():
comp_cnfw_normal = concentration(row, halo_type="comp")
ref_cnfw_normal = concentration(row, halo_type='ref')
ref_cnfw_normal = concentration(row, halo_type="ref")
cnfw_normal = comp_cnfw_normal and ref_cnfw_normal
if cnfw_normal:
rows.append(row)
@@ -118,13 +118,10 @@ def plot_comparison_hist2d(ax: Axes, file: Path, property: str):
stds.append(std)
means = np.array(means)
stds = np.array(stds)
args = {
"color": "C2",
"zorder": 10
}
ax.fill_between(bins, means - stds, means + stds, alpha=.2, **args)
ax.plot(bins, means + stds, alpha=.5, **args)
ax.plot(bins, means - stds, alpha=.5, **args)
args = {"color": "C2", "zorder": 10}
ax.fill_between(bins, means - stds, means + stds, alpha=0.2, **args)
ax.plot(bins, means + stds, alpha=0.5, **args)
ax.plot(bins, means - stds, alpha=0.5, **args)
# ax_scatter.plot(bins, stds, label=f"{file.stem}")
if property in vmaxs:
@@ -133,8 +130,13 @@ def plot_comparison_hist2d(ax: Axes, file: Path, property: str):
vmax = None
print("WARNING: vmax not set")
image: QuadMesh
_, _, _, image = ax.hist2d(df[x_col], df[y_col] / df[x_col], bins=(bins, np.linspace(0, 2, num_bins)),
norm=LogNorm(vmax=vmax), rasterized=True)
_, _, _, image = ax.hist2d(
df[x_col],
df[y_col] / df[x_col],
bins=(bins, np.linspace(0, 2, num_bins)),
norm=LogNorm(vmax=vmax),
rasterized=True,
)
# ax.plot([rep_x_left, rep_x_left], [mean - std, mean + std], c="C1")
# ax.annotate(
# text=f"std={std:.2f}", xy=(rep_x_left, mean + std),
@@ -148,7 +150,9 @@ def plot_comparison_hist2d(ax: Axes, file: Path, property: str):
# ax.set_yscale("log")
ax.set_xlim(min(df[x_col]), max(df[y_col]))
ax.plot([min(df[x_col]), max(df[y_col])], [1, 1], linewidth=1, color="C1", zorder=10)
ax.plot(
[min(df[x_col]), max(df[y_col])], [1, 1], linewidth=1, color="C1", zorder=10
)
return x_col, y_col
# ax.set_title(file.name)
@@ -193,7 +197,9 @@ def plot_comparison_hist(ax: Axes, file: Path, property: str, m_min=None, m_max=
ax.plot(bin_centers, hist_val, label=label)
else:
patches: List[Polygon]
hist_val, bin_edges, patches = ax.hist(df[property], bins=bins, histtype=histtype, label=label, density=density)
hist_val, bin_edges, patches = ax.hist(
df[property], bins=bins, histtype=histtype, label=label, density=density
)
comparisons_dir = base_dir / "comparisons"
@@ -206,8 +212,10 @@ def compare_property(property, show: bool):
is_hist_property = property in hist_properties
fig: Figure
fig, axes = plt.subplots(
len(waveforms), len(comparisons),
sharey="all", sharex="all",
len(waveforms),
len(comparisons),
sharey="all",
sharex="all",
figsize=figsize_from_page_fraction(columns=2),
)
for i, waveform in enumerate(waveforms):
@@ -227,24 +235,82 @@ def compare_property(property, show: bool):
}
x_col, y_col = plot_comparison_hist2d(ax, file, property)
lab_a, lab_b = x_labels[property]
unit = f"[{units[property]}]" if property in units and units[property] else ""
unit = (
f"[{units[property]}]"
if property in units and units[property]
else ""
)
if is_bottom_row:
if lab_b:
ax.set_xlabel(tex_fmt(r"$AA_{\textrm{BB},CC} DD$", lab_a, lab_b, ref_res, unit))
ax.set_xlabel(
tex_fmt(
r"$AA_{\textrm{BB},\textrm{ CC}} \textrm{ } DD$",
lab_a,
lab_b,
ref_res,
unit,
)
)
# fig.supxlabel(tex_fmt(r"$AA_{\textrm{BB},\textrm{ } CC} \textrm{ } DD$", lab_a, lab_b, ref_res, unit), fontsize='medium')
else:
ax.set_xlabel(tex_fmt(r"$AA_{BB} CC$", lab_a, ref_res, unit))
ax.set_xlabel(
tex_fmt(
r"$AA_{\textrm{BB}} \textrm{ } CC$",
lab_a,
ref_res,
unit,
)
)
# fig.supxlabel(tex_fmt(r"$AA_{BB} \textrm{ } CC$", lab_a, ref_res, unit), fontsize='medium')
if is_left_col:
if lab_b:
ax.set_ylabel(
tex_fmt(r"$AA_{\textrm{BB},\textrm{comp}} / AA_{\textrm{BB},\textrm{CC}}$",
lab_a, lab_b, ref_res))
# ax.set_ylabel(
# tex_fmt(r"$AA_{\textrm{BB},\textrm{comp}} \textrm{ } / \textrm{ } AA_{\textrm{BB},\textrm{CC}}$",
# lab_a, lab_b, ref_res))
# fig.text(0.015, 0.5, tex_fmt(r"$AA_{\textrm{BB},\textrm{ comp}} \textrm{ } / \textrm{ } AA_{\textrm{BB},\textrm{ CC}}$", lab_a, lab_b, ref_res), va='center', rotation='vertical', size='medium')
fig.supylabel(
tex_fmt(
r"$AA_{\textrm{BB},\textrm{ comp}} \textrm{ } / \textrm{ } AA_{\textrm{BB},\textrm{ CC}}$",
lab_a,
lab_b,
ref_res,
),
fontsize="medium",
fontvariant="small-caps",
)
else:
ax.set_ylabel(
tex_fmt(r"$AA_{\textrm{comp}} / AA_{\textrm{BB}}$",
lab_a, ref_res))
# ax.set_ylabel(
# tex_fmt(r"$AA_{\textrm{comp}} \textrm{ } / \textrm{ } AA_{\textrm{BB}}$",
# lab_a, ref_res))
# fig.text(0.015, 0.5, tex_fmt(r"$AA_{\textrm{comp}} \textrm{ } / \textrm{ } AA_{\textrm{BB}}$", lab_a, ref_res), va='center', rotation='vertical', size='medium')
fig.supylabel(
tex_fmt(
r"$AA_{\textrm{comp}} \textrm{ } / \textrm{ } AA_{\textrm{BB}}$",
lab_a,
ref_res,
),
fontsize="medium",
)
# ax.set_ylabel(f"{property}_{{comp}}/{property}_{ref_res}")
ax.text(
0.975,
0.9,
f"comp = {comp_res}",
horizontalalignment="right",
verticalalignment="top",
transform=ax.transAxes,
)
else:
if property == "match":
if not (is_bottom_row and is_left_col):
ax.text(
0.05,
0.9,
f"comp = {comp_res}",
horizontalalignment="left",
verticalalignment="top",
transform=ax.transAxes,
)
# mass_bins = np.geomspace(10, 30000, num_mass_bins)
plot_comparison_hist(ax, file, property)
@@ -257,18 +323,25 @@ def compare_property(property, show: bool):
ax.legend()
else:
ax.text(
0.05,
0.9,
f"comp = {comp_res}",
horizontalalignment="left",
verticalalignment="top",
transform=ax.transAxes,
)
plot_comparison_hist(ax, file, property)
x_labels = {
"match": "$J$",
"distance": "$D$ [$R_{vir}$]"
}
x_labels = {"match": "$J$", "distance": "$D$ [$R_\mathrm{{vir}}$]"}
if is_bottom_row:
ax.set_xlabel(x_labels[property])
if is_left_col:
if property == "match":
ax.set_ylabel(r"$p(J)$")
# ax.set_ylabel(r"$p(J)$")
fig.supylabel(r"$p(J)$", fontsize="medium")
else:
ax.set_ylabel(r"\# Halos")
# ax.set_ylabel(r"\# Halos")
fig.supylabel(r"\# Halos", fontsize="medium")
if property == "distance":
ax.set_xscale("log")
ax.set_yscale("log")
@@ -278,11 +351,7 @@ def compare_property(property, show: bool):
last_ytick: YTick = ax.yaxis.get_major_ticks()[-1]
last_ytick.set_visible(False)
if property == "Mvir" and is_top_row:
particle_masses = {
256: 0.23524624,
512: 0.02940578,
1024: 0.0036757225
}
particle_masses = {256: 0.23524624, 512: 0.02940578, 1024: 0.0036757225}
partmass = particle_masses[ref_res]
def mass2partnum(mass: float) -> float:
@@ -291,10 +360,12 @@ def compare_property(property, show: bool):
def partnum2mass(partnum: float) -> float:
return partnum * partmass
sec_ax = ax.secondary_xaxis("top", functions=(mass2partnum, partnum2mass))
sec_ax.set_xlabel(r"[\# \textrm{particles}]")
sec_ax = ax.secondary_xaxis(
"top", functions=(mass2partnum, partnum2mass)
)
sec_ax.set_xlabel(r"\textrm{Halo Size }[\# \textrm{particles}]")
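
A quick consistency check on the hard-coded particle masses above (illustration only): for a fixed box mass they scale as the inverse cube of the linear resolution, i.e. one eighth per doubling of the resolution, which the three values satisfy:

particle_masses = {256: 0.23524624, 512: 0.02940578, 1024: 0.0036757225}
assert abs(particle_masses[512] - particle_masses[256] / 8) < 1e-12
assert abs(particle_masses[1024] - particle_masses[512] / 8) < 1e-12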
rowcolumn_labels(axes, comparisons, isrow=False)
# rowcolumn_labels(axes, comparisons, isrow=False)
rowcolumn_labels(axes, waveforms, isrow=True)
fig.tight_layout()
fig.subplots_adjust(hspace=0)
@@ -315,5 +386,5 @@ def main():
compare_property(property, show=len(argv) == 2)
if __name__ == '__main__':
if __name__ == "__main__":
main()


@@ -17,20 +17,22 @@ const_boltzmann_k_cgs = 1.380649e-16
def calculate_gas_internal_energy(omegab, hubble_param_, zstart_):
astart_ = 1.0 / (1.0 + zstart_)
if fabs(1.0 - gamma) > 1e-7:
npol = 1.0 / (gamma - 1.)
npol = 1.0 / (gamma - 1.0)