New plots (#85)

* Update verbosity messages

* Update verbosity messages

* Update more verbosity flags

* Update the iterator settings

* Add basic plots

* Update verbosity flags

* Update arg parser

* Update plots

* Remove some older code

* Fix some definitions

* Update plots

* Update plotting

* Update plots

* Add support functions

* Update nb

* Improve plots, move back to scripts

* Update plots

* pep8

* Add max overlap plot

* Add blank line

* Upload changes

* Update changes

* Add weighted stats

* Remove

* Add import

* Add Max's matching

* Edit submission

* Add paths to Max's matching

* Fix matching

* Edit submission

* Edit plot

* Add max overlap separation plot

* Add periodic distance

* Update overlap summaries

* Add nsim0 for Max matching

* Add Max's agreement plot

* Add Quijote for Max method

* Update plotting

* Update name
Authored by Richard Stiskalek on 2023-08-18 19:20:47 +01:00; committed by GitHub
parent ca3772ac6f
commit 8e3127f4d9
10 changed files with 1343 additions and 2100 deletions


@@ -14,17 +14,15 @@
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
from argparse import ArgumentParser
from gc import collect
from os.path import join
import matplotlib as mpl
import matplotlib.pyplot as plt
from mpl_toolkits.axes_grid1.inset_locator import inset_axes
import numpy
import scienceplots # noqa
from cache_to_disk import cache_to_disk, delete_disk_caches_for_function
from scipy.stats import kendalltau
from tqdm import trange, tqdm
from tqdm import tqdm
import plt_utils
@@ -36,11 +34,7 @@ except ModuleNotFoundError:
import csiborgtools
###############################################################################
# IC overlap plotting #
###############################################################################
def open_cat(nsim: int, simname: str):
def open_cat(nsim, simname):
paths = csiborgtools.read.Paths(**csiborgtools.paths_glamdring)
if simname == "csiborg":
@@ -56,648 +50,17 @@ def open_cat(nsim: int, simname: str):
return cat
def open_cats(nsims, simname):
catxs = [None] * len(nsims)
@cache_to_disk(7)
def get_overlap(simname, nsim0):
"""
Calculate the summed overlap and probability of no match for a single
reference simulation.
for i, nsim in enumerate(tqdm(nsims, desc="Opening catalogues")):
catxs[i] = open_cat(nsim, simname)
Parameters
----------
simname : str
Simulation name.
nsim0 : int
Simulation index.
Returns
-------
mass : 1-dimensional array
Mass of halos in the reference simulation.
hindxs : 1-dimensional array
Halo indices in the reference simulation.
max_overlap : 2-dimensional array
Maximum overlap for each halo in the reference simulation.
summed_overlap : 2-dimensional array
Summed overlap for each halo in the reference simulation.
prob_nomatch : 2-dimensional array
Probability of no match for each halo in the reference simulation.
"""
paths = csiborgtools.read.Paths(**csiborgtools.paths_glamdring)
nsimxs = csiborgtools.read.get_cross_sims(simname, nsim0, paths,
smoothed=True)
cat0 = open_cat(nsim0)
catxs = []
print("Opening catalogues...", flush=True)
for nsimx in tqdm(nsimxs):
catxs.append(open_cat(nsimx))
reader = csiborgtools.read.NPairsOverlap(cat0, catxs, paths)
mass = reader.cat0("totpartmass")
hindxs = reader.cat0("index")
summed_overlap = reader.summed_overlap(True)
max_overlap = reader.max_overlap(True)
prob_nomatch = reader.prob_nomatch(True)
return mass, hindxs, max_overlap, summed_overlap, prob_nomatch
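The three 2-dimensional arrays returned above all have shape (n_haloes, n_cross_simulations), which is how the plotting routines below consume them. A minimal, purely illustrative sketch of that pattern (the simulation index is hypothetical, and the function is renamed to `get_overlap_summary` elsewhere in this diff):
mass, hindxs, max_overlap, summed_overlap, prob_nomatch = get_overlap("csiborg", 7444)
# Average over the cross simulations (axis 1) to get a per-halo statistic.
mean_max_overlap = numpy.nanmean(max_overlap, axis=1)
mean_prob_nomatch = numpy.nanmean(prob_nomatch, axis=1)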
@cache_to_disk(7)
def get_max_key(simname, nsim0, key):
"""
Get the value of a maximum overlap halo's property.
Parameters
----------
simname : str
Simulation name.
nsim0 : int
Reference simulation index.
key : str
Property to get.
Returns
-------
mass0 : 1-dimensional array
Mass of the reference haloes.
key_val : 1-dimensional array
Value of the property of the reference haloes.
max_overlap : 2-dimensional array
Maximum overlap of the reference haloes.
stat : 2-dimensional array
Value of the property of the maximum overlap halo.
"""
paths = csiborgtools.read.Paths(**csiborgtools.paths_glamdring)
nsimxs = csiborgtools.read.get_cross_sims(simname, nsim0, paths,
smoothed=True)
nsimxs = nsimxs
cat0 = open_cat(nsim0)
catxs = []
print("Opening catalogues...", flush=True)
for nsimx in tqdm(nsimxs):
catxs.append(open_cat(nsimx))
reader = csiborgtools.read.NPairsOverlap(cat0, catxs, paths)
mass0 = reader.cat0("totpartmass")
key_val = reader.cat0(key)
max_overlap = reader.max_overlap(True)
stat = reader.max_overlap_key(key, True)
return mass0, key_val, max_overlap, stat
def plot_mass_vs_pairoverlap(nsim0, nsimx):
"""
Plot the pair overlap of a reference simulation with a single cross
simulation as a function of the reference halo mass.
Parameters
----------
nsim0 : int
Reference simulation index.
nsimx : int
Cross simulation index.
"""
paths = csiborgtools.read.Paths(**csiborgtools.paths_glamdring)
cat0 = open_cat(nsim0)
catx = open_cat(nsimx)
reader = csiborgtools.read.PairOverlap(cat0, catx, paths)
x = reader.copy_per_match("totpartmass")
y = reader.overlap(True)
x = numpy.log10(numpy.concatenate(x))
y = numpy.concatenate(y)
with plt.style.context(plt_utils.mplstyle):
plt.figure()
plt.hexbin(x, y, mincnt=1, bins="log",
gridsize=50)
plt.colorbar(label="Counts in bins")
plt.xlabel(r"$\log M_{\rm tot} ~ [M_\odot / h]$")
plt.ylabel("Pair overlap")
plt.ylim(0., 1.)
plt.tight_layout()
for ext in ["png"]:
fout = join(plt_utils.fout, f"mass_vs_pair_overlap{nsim0}.{ext}")
print(f"Saving to `{fout}`.")
plt.savefig(fout, dpi=plt_utils.dpi, bbox_inches="tight")
plt.close()
def plot_mass_vs_maxpairoverlap(nsim0, nsimx):
"""
Plot the maximum pair overlap of the reference simulation's haloes with a
single cross simulation.
Parameters
----------
nsim0 : int
Reference simulation index.
nsimx : int
Cross simulation index.
"""
paths = csiborgtools.read.Paths(**csiborgtools.paths_glamdring)
cat0 = open_cat(nsim0)
catx = open_cat(nsimx)
reader = csiborgtools.read.PairOverlap(cat0, catx, paths)
x = numpy.log10(cat0["totpartmass"])
y = reader.overlap(True)
def get_max(y_):
if len(y_) == 0:
return numpy.nan
return numpy.max(y_)
y = numpy.array([get_max(y_) for y_ in y])
with plt.style.context(plt_utils.mplstyle):
plt.figure()
plt.hexbin(x, y, mincnt=1, bins="log",
gridsize=50)
plt.colorbar(label="Counts in bins")
plt.xlabel(r"$\log M_{\rm tot} ~ [M_\odot / h]$")
plt.ylabel("Maximum pair overlap")
plt.ylim(0., 1.)
plt.tight_layout()
for ext in ["png"]:
fout = join(plt_utils.fout, f"mass_vs_maxpairoverlap{nsim0}.{ext}")
print(f"Saving to `{fout}`.")
plt.savefig(fout, dpi=plt_utils.dpi, bbox_inches="tight")
plt.close()
def plot_mass_vsmedmaxoverlap(nsim0):
"""
Plot the mean maximum overlap of the reference simulation's haloes with all
the cross simulations.
Parameters
----------
nsim0 : int
Reference simulation index.
"""
x, __, max_overlap, __, __ = get_overlap("csiborg", nsim0)
for i in trange(max_overlap.shape[0]):
if numpy.sum(numpy.isnan(max_overlap[i, :])) > 0:
max_overlap[i, :] = numpy.nan
x = numpy.log10(x)
with plt.style.context(plt_utils.mplstyle):
fig, axs = plt.subplots(ncols=3, figsize=(3.5 * 2, 2.625))
im1 = axs[0].hexbin(x, numpy.nanmean(max_overlap, axis=1), gridsize=30,
mincnt=1, bins="log")
im2 = axs[1].hexbin(x, numpy.nanstd(max_overlap, axis=1), gridsize=30,
mincnt=1, bins="log")
im3 = axs[2].hexbin(numpy.nanmean(max_overlap, axis=1),
numpy.nanstd(max_overlap, axis=1), gridsize=30,
C=x, reduce_C_function=numpy.nanmean)
axs[0].set_xlabel(r"$\log M_{\rm tot} ~ [M_\odot / h]$")
axs[0].set_ylabel(r"Mean max. pair overlap")
axs[1].set_xlabel(r"$\log M_{\rm tot} ~ [M_\odot / h]$")
axs[1].set_ylabel(r"Uncertainty of max. pair overlap")
axs[2].set_xlabel(r"Mean max. pair overlap")
axs[2].set_ylabel(r"Uncertainty of max. pair overlap")
label = ["Bin counts", "Bin counts", r"$\log M_{\rm tot} / M_\odot$"]
ims = [im1, im2, im3]
for i in range(3):
axins = inset_axes(axs[i], width="100%", height="5%",
loc='upper center', borderpad=-0.75)
fig.colorbar(ims[i], cax=axins, orientation="horizontal",
label=label[i])
axins.xaxis.tick_top()
axins.xaxis.set_tick_params(labeltop=True)
axins.xaxis.set_label_position("top")
fig.tight_layout()
for ext in ["png"]:
fout = join(plt_utils.fout, f"maxpairoverlap_{nsim0}.{ext}")
print(f"Saving to `{fout}`.")
fig.savefig(fout, dpi=plt_utils.dpi, bbox_inches="tight")
plt.close()
def plot_summed_overlap_vs_mass(nsim0):
"""
Plot the summed overlap and the probability of no match for a single
reference simulation as a function of the reference halo mass, along with
their comparison.
Parameters
----------
nsim0 : int
Simulation index.
Returns
-------
None
"""
x, __, __, summed_overlap, prob_nomatch = get_overlap("csiborg", nsim0)
del __
collect()
for i in trange(summed_overlap.shape[0]):
if numpy.sum(numpy.isnan(summed_overlap[i, :])) > 0:
summed_overlap[i, :] = numpy.nan
x = numpy.log10(x)
mean_overlap = numpy.nanmean(summed_overlap, axis=1)
std_overlap = numpy.nanstd(summed_overlap, axis=1)
mean_prob_nomatch = numpy.nanmean(prob_nomatch, axis=1)
mask = mean_overlap > 0
x = x[mask]
mean_overlap = mean_overlap[mask]
std_overlap = std_overlap[mask]
mean_prob_nomatch = mean_prob_nomatch[mask]
with plt.style.context(plt_utils.mplstyle):
fig, axs = plt.subplots(ncols=3, figsize=(3.5 * 2, 2.625))
im1 = axs[0].hexbin(x, mean_overlap, mincnt=1, bins="log",
gridsize=30)
im2 = axs[1].hexbin(x, std_overlap, mincnt=1, bins="log",
gridsize=30)
im3 = axs[2].scatter(1 - mean_overlap, mean_prob_nomatch, c=x, s=2,
rasterized=True)
t = numpy.linspace(0.3, 1, 100)
axs[2].plot(t, t, color="red", linestyle="--")
axs[0].set_ylim(0.)
axs[1].set_ylim(0.)
axs[0].set_xlabel(r"$\log M_{\rm tot} ~ [M_\odot / h]$")
axs[0].set_ylabel("Mean summed overlap")
axs[1].set_xlabel(r"$\log M_{\rm tot} ~ [M_\odot / h]$")
axs[1].set_ylabel("Uncertainty of summed overlap")
axs[2].set_xlabel(r"$1 - $ mean summed overlap")
axs[2].set_ylabel("Mean prob. of no match")
label = ["Bin counts", "Bin counts",
r"$\log M_{\rm tot} ~ [M_\odot / h]$"]
ims = [im1, im2, im3]
for i in range(3):
axins = inset_axes(axs[i], width="100%", height="5%",
loc='upper center', borderpad=-0.75)
fig.colorbar(ims[i], cax=axins, orientation="horizontal",
label=label[i])
axins.xaxis.tick_top()
axins.xaxis.set_tick_params(labeltop=True)
axins.xaxis.set_label_position("top")
fig.tight_layout()
for ext in ["png"]:
fout = join(plt_utils.fout, f"overlap_stat_{nsim0}.{ext}")
print(f"Saving to `{fout}`.")
fig.savefig(fout, dpi=plt_utils.dpi, bbox_inches="tight")
plt.close()
def plot_mass_vs_separation(nsim0, nsimx, plot_std=False, min_overlap=0.0):
"""
Plot the mass of a reference halo against the weighted separation of
its counterparts.
Parameters
----------
nsim0 : int
Reference simulation index.
nsimx : int
Cross simulation index.
plot_std : bool, optional
Whether to plot the standard deviation instead of the mean.
min_overlap : float, optional
Minimum overlap to consider.
Returns
-------
None
"""
paths = csiborgtools.read.Paths(**csiborgtools.paths_glamdring)
cat0 = open_cat(nsim0)
catx = open_cat(nsimx)
reader = csiborgtools.read.PairOverlap(cat0, catx, paths,
maxdist=155)
mass = numpy.log10(reader.cat0("totpartmass"))
dist = reader.dist(in_initial=False, norm_kind="r200c")
overlap = reader.overlap(True)
dist = csiborgtools.read.weighted_stats(dist, overlap,
min_weight=min_overlap)
mask = numpy.isfinite(dist[:, 0])
mass = mass[mask]
dist = dist[mask, :]
dist = numpy.log10(dist)
if not plot_std:
p = numpy.polyfit(mass, dist[:, 0], 1)
else:
p = numpy.polyfit(mass, dist[:, 1], 1)
xrange = numpy.linspace(numpy.min(mass), numpy.max(mass), 1000)
txt = r"$m = {}$, $c = {}$".format(*plt_utils.latex_float(*p, n=3))
with plt.style.context(plt_utils.mplstyle):
fig, ax = plt.subplots()
ax.set_title(txt, fontsize="small")
if not plot_std:
cx = ax.hexbin(mass, dist[:, 0], mincnt=1, bins="log", gridsize=50)
ax.set_ylabel(r"$\log \langle \Delta R / R_{\rm 200c}\rangle$")
else:
cx = ax.hexbin(mass, dist[:, 1], mincnt=1, bins="log", gridsize=50)
ax.set_ylabel(
r"$\delta \log \langle \Delta R / R_{\rm 200c}\rangle$")
ax.plot(xrange, numpy.polyval(p, xrange), color="red",
linestyle="--")
fig.colorbar(cx, label="Bin counts")
ax.set_xlabel(r"$\log M_{\rm tot} ~ [M_\odot / h]$")
ax.set_ylabel(r"$\log \langle \Delta R / R_{\rm 200c}\rangle$")
fig.tight_layout()
for ext in ["png"]:
fout = join(plt_utils.fout,
f"mass_vs_sep_{nsim0}_{nsimx}_{min_overlap}.{ext}")
if plot_std:
fout = fout.replace(f".{ext}", f"_std.{ext}")
print(f"Saving to `{fout}`.")
fig.savefig(fout, dpi=plt_utils.dpi, bbox_inches="tight")
plt.close()
def plot_maxoverlap_mass(nsim0):
"""
Plot the mass of the reference haloes against the mass of the maximum
overlap haloes.
Parameters
----------
nsim0 : int
Reference simulation index.
"""
mass0, __, __, stat = get_max_key("csiborg", nsim0, "totpartmass")
mu = numpy.mean(stat, axis=1)
std = numpy.std(numpy.log10(stat), axis=1)
mu = numpy.log10(mu)
mass0 = numpy.log10(mass0)
with plt.style.context(plt_utils.mplstyle):
fig, axs = plt.subplots(ncols=2, figsize=(3.5 * 1.75, 2.625))
im0 = axs[0].hexbin(mass0, mu, mincnt=1, bins="log", gridsize=50)
im1 = axs[1].hexbin(mass0, std, mincnt=1, bins="log", gridsize=50)
m = numpy.isfinite(mass0) & numpy.isfinite(mu)
print("True to expectation corr: ", kendalltau(mass0[m], mu[m]))
t = numpy.linspace(*numpy.percentile(mass0, [0, 100]), 1000)
axs[0].plot(t, t, color="red", linestyle="--")
axs[0].plot(t, t + 0.2, color="red", linestyle="--", alpha=0.5)
axs[0].plot(t, t - 0.2, color="red", linestyle="--", alpha=0.5)
axs[0].set_xlabel(r"$\log M_{\rm tot} ~ [M_\odot / h]$")
axs[1].set_xlabel(r"$\log M_{\rm tot} ~ [M_\odot / h]$")
axs[0].set_ylabel(
r"Max. overlap mean of $\log M_{\rm tot} ~ [M_\odot / h]$")
axs[1].set_ylabel(
r"Max. overlap std. of $\log M_{\rm tot} ~ [M_\odot / h]$")
ims = [im0, im1]
for i in range(2):
axins = inset_axes(axs[i], width="100%", height="5%",
loc='upper center', borderpad=-0.75)
fig.colorbar(ims[i], cax=axins, orientation="horizontal",
label="Bin counts")
axins.xaxis.tick_top()
axins.xaxis.set_tick_params(labeltop=True)
axins.xaxis.set_label_position("top")
fig.tight_layout()
for ext in ["png"]:
fout = join(plt_utils.fout,
f"max_totpartmass_{nsim0}.{ext}")
print(f"Saving to `{fout}`.")
fig.savefig(fout, dpi=plt_utils.dpi, bbox_inches="tight")
plt.close()
def plot_maxoverlapstat(nsim0, key):
"""
Plot the mass of the reference haloes against the value of the maximum
overlap statistic.
Parameters
----------
nsim0 : int
Reference simulation index.
key : str
Property to get.
"""
assert key != "totpartmass"
mass0, key_val, __, stat = get_max_key("csiborg", nsim0, key)
xlabels = {"lambda200c": r"\log \lambda_{\rm 200c}"}
key_label = xlabels.get(key, key)
mass0 = numpy.log10(mass0)
key_val = numpy.log10(key_val)
mu = numpy.mean(stat, axis=1)
std = numpy.std(numpy.log10(stat), axis=1)
mu = numpy.log10(mu)
with plt.style.context(plt_utils.mplstyle):
fig, axs = plt.subplots(ncols=3, figsize=(3.5 * 2, 2.625))
im0 = axs[0].hexbin(mass0, mu, mincnt=1, bins="log", gridsize=30)
im1 = axs[1].hexbin(mass0, std, mincnt=1, bins="log", gridsize=30)
im2 = axs[2].hexbin(key_val, mu, mincnt=1, bins="log", gridsize=30)
m = numpy.isfinite(key_val) & numpy.isfinite(mu)
print("True to expectation corr: ", kendalltau(key_val[m], mu[m]))
axs[0].set_xlabel(r"$\log M_{\rm tot} ~ [M_\odot / h]$")
axs[0].set_ylabel(r"Max. overlap mean of ${}$".format(key_label))
axs[1].set_xlabel(r"$\log M_{\rm tot} ~ [M_\odot / h]$")
axs[1].set_ylabel(r"Max. overlap std. of ${}$".format(key_label))
axs[2].set_xlabel(r"${}$".format(key_label))
axs[2].set_ylabel(r"Max. overlap mean of ${}$".format(key_label))
ims = [im0, im1, im2]
for i in range(3):
axins = inset_axes(axs[i], width="100%", height="5%",
loc='upper center', borderpad=-0.75)
fig.colorbar(ims[i], cax=axins, orientation="horizontal",
label="Bin counts")
axins.xaxis.tick_top()
axins.xaxis.set_tick_params(labeltop=True)
axins.xaxis.set_label_position("top")
fig.tight_layout()
for ext in ["png"]:
fout = join(plt_utils.fout,
f"max_{key}_{nsim0}.{ext}")
print(f"Saving to `{fout}`.")
fig.savefig(fout, dpi=plt_utils.dpi, bbox_inches="tight")
plt.close()
@cache_to_disk(7)
def get_expected_mass(simname, nsim0, min_overlap):
"""
Get the expected mass of a reference halo given its overlap with halos
from other simulations.
Parameters
----------
simname : str
Simulation name.
nsim0 : int
Reference simulation index.
min_overlap : float
Minimum overlap to consider between a pair of haloes.
Returns
-------
mass : 1-dimensional array
Mass of the reference haloes.
mu : 1-dimensional array
Expected mass of the matched haloes.
std : 1-dimensional array
Standard deviation of the expected mass of the matched haloes.
prob_nomatch : 2-dimensional array
Probability of not matching the reference halo.
"""
paths = csiborgtools.read.Paths(**csiborgtools.paths_glamdring)
nsimxs = csiborgtools.read.get_cross_sims(simname, nsim0, paths,
smoothed=True)
nsimxs = nsimxs
cat0 = open_cat(nsim0)
catxs = []
print("Opening catalogues...", flush=True)
for nsimx in tqdm(nsimxs):
catxs.append(open_cat(nsimx))
reader = csiborgtools.read.NPairsOverlap(cat0, catxs, paths)
mass = reader.cat0("totpartmass")
mu, std = reader.counterpart_mass(True, overlap_threshold=min_overlap,
in_log=False, return_full=False)
prob_nomatch = reader.prob_nomatch(True)
return mass, mu, std, prob_nomatch
def plot_mass_vs_expected_mass(nsim0, min_overlap=0, max_prob_nomatch=1):
"""
Plot the mass of a reference halo against the expected mass of its
counterparts.
Parameters
----------
nsim0 : int
Reference simulation index.
min_overlap : float, optional
Minimum overlap between a pair of haloes to consider.
max_prob_nomatch : float, optional
Maximum probability of no match to consider.
"""
mass, mu, std, prob_nomatch = get_expected_mass("csiborg", nsim0,
min_overlap)
std = std / mu / numpy.log(10)
mass = numpy.log10(mass)
mu = numpy.log10(mu)
prob_nomatch = numpy.nanmedian(prob_nomatch, axis=1)
mask = numpy.isfinite(mass) & numpy.isfinite(mu)
mask &= (prob_nomatch < max_prob_nomatch)
with plt.style.context(plt_utils.mplstyle):
fig, axs = plt.subplots(ncols=3, figsize=(3.5 * 2, 2.625))
im0 = axs[0].hexbin(mass[mask], mu[mask], mincnt=1, bins="log",
gridsize=50,)
im1 = axs[1].hexbin(mass[mask], std[mask], mincnt=1, bins="log",
gridsize=50)
im2 = axs[2].hexbin(1 - prob_nomatch[mask], mass[mask] - mu[mask],
gridsize=50, C=mass[mask],
reduce_C_function=numpy.nanmedian)
axs[2].axhline(0, color="red", linestyle="--", alpha=0.5)
axs[0].set_xlabel(r"True $\log M_{\rm tot} ~ [M_\odot / h]$")
axs[0].set_ylabel(r"Expected $\log M_{\rm tot} ~ [M_\odot / h]$")
axs[1].set_xlabel(r"True $\log M_{\rm tot} ~ [M_\odot / h]$")
axs[1].set_ylabel(r"Std. of $\sigma_{\log M_{\rm tot}}$")
axs[2].set_xlabel(r"1 - median prob. of no match")
axs[2].set_ylabel(r"$\log M_{\rm tot} - \log M_{\rm tot, exp}$")
t = numpy.linspace(*numpy.percentile(mass[mask], [0, 100]), 1000)
axs[0].plot(t, t, color="red", linestyle="--")
axs[0].plot(t, t + 0.2, color="red", linestyle="--", alpha=0.5)
axs[0].plot(t, t - 0.2, color="red", linestyle="--", alpha=0.5)
ims = [im0, im1, im2]
labels = ["Bin counts", "Bin counts",
r"$\log M_{\rm tot} ~ [M_\odot / h]$"]
for i in range(3):
axins = inset_axes(axs[i], width="100%", height="5%",
loc='upper center', borderpad=-0.75)
fig.colorbar(ims[i], cax=axins, orientation="horizontal",
label=labels[i])
axins.xaxis.tick_top()
axins.xaxis.set_tick_params(labeltop=True)
axins.xaxis.set_label_position("top")
fig.tight_layout()
for ext in ["png"]:
fout = join(plt_utils.fout,
f"mass_vs_expmass_{nsim0}_{max_prob_nomatch}.{ext}")
print(f"Saving to `{fout}`.")
fig.savefig(fout, dpi=plt_utils.dpi, bbox_inches="tight")
plt.close()
###############################################################################
# Nearest neighbour plotting #
###############################################################################
return catxs
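A one-line usage sketch of the new `open_cats` helper (the simulation indices below are hypothetical):
catxs = open_cats([7468, 7492], "csiborg")  # list of opened cross-simulation catalogues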
def read_dist(simname, run, kind, kwargs):
"""
Read PDF/CDF of a nearest neighbour distribution.
Parameters
----------
simname : str
Simulation name. Must be either `csiborg` or `quijote`.
run : str
Run name.
kind : str
Kind of distribution. Must be either `pdf` or `cdf`.
kwargs : dict
Nearest neighbour reader keyword arguments.
Returns
-------
dist : 2-dimensional array
Distribution of distances in radial and neighbour bins.
"""
paths = csiborgtools.read.Paths(**kwargs["paths_kind"])
reader = csiborgtools.read.NearestNeighbourReader(**kwargs, paths=paths)
@@ -707,26 +70,6 @@ def read_dist(simname, run, kind, kwargs):
def pull_cdf(x, fid_cdf, test_cdf):
"""
Pull a CDF so that it matches the fiducial CDF at 0.5. Rescales the x-axis,
while keeping the corresponding CDF values fixed.
Parameters
----------
x : 1-dimensional array
The x-axis of the CDF.
fid_cdf : 1-dimensional array
The fiducial CDF.
test_cdf : 1-dimensional array
The test CDF to be pulled.
Returns
-------
xnew : 1-dimensional array
The new x-axis of the test CDF.
test_cdf : 1-dimensional array
The new test CDF.
"""
xnew = x * numpy.interp(0.5, fid_cdf, x) / numpy.interp(0.5, test_cdf, x)
return xnew, test_cdf
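A small worked example of what the pull does, using made-up piecewise-linear CDFs:
x = numpy.linspace(0., 10., 101)
fid_cdf = numpy.clip(x / 5., 0., 1.)   # fiducial CDF crosses 0.5 at x = 2.5
test_cdf = numpy.clip(x / 8., 0., 1.)  # test CDF crosses 0.5 at x = 4.0
xnew, cdf = pull_cdf(x, fid_cdf, test_cdf)
# xnew = x * 2.5 / 4.0, so the pulled test CDF now crosses 0.5 at x = 2.5,
# matching the fiducial CDF, while the CDF values themselves are unchanged.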
@@ -1360,7 +703,7 @@ def plot_kl_vs_overlap(runs, nsim, kwargs, runs_to_mass, plot_std=True,
for run in runs:
nn_data = nn_reader.read_single("csiborg", run, nsim, nobs=None)
nn_hindxs = nn_data["ref_hindxs"]
mass, overlap_hindxs, __, summed_overlap, prob_nomatch = get_overlap("csiborg", nsim) # noqa
mass, overlap_hindxs, __, summed_overlap, prob_nomatch = get_overlap_summary("csiborg", nsim) # noqa
# We need to match the hindxs between the two.
hind2overlap_array = {hind: i for i, hind in enumerate(overlap_hindxs)}
@@ -1457,8 +800,8 @@ if __name__ == "__main__":
"mass009": (14.0, 14.4), # There is no upper limit.
}
# cached_funcs = ["get_overlap", "read_dist", "make_kl", "make_ks"]
cached_funcs = ["get_max_key"]
# cached_funcs = ["get_overlap_summary", "read_dist", "make_kl", "make_ks"]
cached_funcs = ["get_property_maxoverlap"]
if args.clean:
for func in cached_funcs:
print(f"Cleaning cache for function {func}.")

File diff suppressed because it is too large.


@@ -15,6 +15,7 @@
import numpy
from scipy.stats import binned_statistic
from scipy.special import erf
dpi = 600
fout = "../plots/"
@@ -56,38 +57,74 @@ def latex_float(*floats, n=2):
return latex_floats
def binned_trend(x, y, weights, bins):
"""
Calculate the weighted mean and standard deviation of `y` in bins of `x`.
def nan_weighted_average(arr, weights=None, axis=None):
if weights is None:
weights = numpy.ones_like(arr)
Parameters
----------
x : 1-dimensional array
The x-coordinates of the data points.
y : 1-dimensional array
The y-coordinates of the data points.
weights : 1-dimensional array
The weights of the data points.
bins : 1-dimensional array
The bin edges.
valid_entries = ~numpy.isnan(arr)
Returns
-------
stat_x : 1-dimensional array
The x-coordinates of the binned data points.
stat_mu : 1-dimensional array
The weighted mean of `y` in bins of `x`.
stat_std : 1-dimensional array
The weighted standard deviation of `y` in bins of `x`.
"""
stat_mu, __, __ = binned_statistic(x, y * weights, bins=bins,
statistic="sum")
stat_std, __, __ = binned_statistic(x, y * weights, bins=bins,
statistic=numpy.var)
stat_w, __, __ = binned_statistic(x, weights, bins=bins, statistic="sum")
# Set NaN entries in arr to 0 for computation
arr = numpy.where(valid_entries, arr, 0)
stat_x = (bins[1:] + bins[:-1]) / 2
stat_mu /= stat_w
stat_std /= stat_w
stat_std = numpy.sqrt(stat_std)
return stat_x, stat_mu, stat_std
# Set weights of NaN entries to 0
weights = numpy.where(valid_entries, weights, 0)
# Compute the weighted sum and the sum of weights along the axis
weighted_sum = numpy.sum(arr * weights, axis=axis)
sum_weights = numpy.sum(weights, axis=axis)
return weighted_sum / sum_weights
def nan_weighted_std(arr, weights=None, axis=None, ddof=0):
if weights is None:
weights = numpy.ones_like(arr)
valid_entries = ~numpy.isnan(arr)
# Set NaN entries in arr to 0 for computation
arr = numpy.where(valid_entries, arr, 0)
# Set weights of NaN entries to 0
weights = numpy.where(valid_entries, weights, 0)
# Calculate weighted mean
weighted_mean = numpy.sum(
arr * weights, axis=axis) / numpy.sum(weights, axis=axis)
# Calculate the weighted variance
variance = numpy.sum(
weights * (arr - numpy.expand_dims(weighted_mean, axis))**2, axis=axis)
variance /= numpy.sum(weights, axis=axis) - ddof
return numpy.sqrt(variance)
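A quick sanity check of the two NaN-aware helpers above, using made-up values; NaN entries are given zero weight, so they drop out of both statistics:
arr = numpy.array([[1., 2., numpy.nan],
                   [4., numpy.nan, 6.]])
weights = numpy.array([[1., 3., 1.],
                       [1., 1., 1.]])
print(nan_weighted_average(arr, weights, axis=1))  # approx. [1.75, 5.0]
print(nan_weighted_std(arr, weights, axis=1))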
def compute_error_bars(x, y, xbins, sigma):
bin_indices = numpy.digitize(x, xbins)
y_medians = numpy.array([numpy.median(y[bin_indices == i])
for i in range(1, len(xbins))])
lower_pct = 100 * 0.5 * (1 - erf(sigma / numpy.sqrt(2)))
upper_pct = 100 - lower_pct
y_lower = numpy.full(len(y_medians), numpy.nan)
y_upper = numpy.full(len(y_medians), numpy.nan)
for i in range(len(y_medians)):
if numpy.sum(bin_indices == i + 1) == 0:
continue
y_lower[i] = numpy.percentile(y[bin_indices == i + 1], lower_pct)
y_upper[i] = numpy.percentile(y[bin_indices == i + 1], upper_pct)
yerr = (y_medians - numpy.array(y_lower), numpy.array(y_upper) - y_medians)
return y_medians, yerr
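For `sigma=1` the `erf` expression above selects roughly the 16th and 84th percentiles. A hedged sketch of how the output could be fed to `plt.errorbar` (the data and bin edges are made up):
import matplotlib.pyplot as plt
rng = numpy.random.default_rng(42)
x = rng.uniform(13.0, 15.0, 5000)       # e.g. log halo masses
y = x + rng.normal(0.0, 0.2, size=5000)
xbins = numpy.linspace(13.0, 15.0, 11)
xmid = 0.5 * (xbins[1:] + xbins[:-1])   # bin centres, one per median
y_med, yerr = compute_error_bars(x, y, xbins, sigma=1)
plt.errorbar(xmid, y_med, yerr=yerr, marker="o", ls="none", capsize=3)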
def normalize_hexbin(hb):
hexagon_counts = hb.get_array()
normalized_counts = hexagon_counts / hexagon_counts.sum()
hb.set_array(normalized_counts)
hb.set_clim(normalized_counts.min(), normalized_counts.max())
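A minimal sketch of how this helper might be applied to an existing hexbin so that the colour scale shows fractions rather than raw counts (random data, purely illustrative):
import matplotlib.pyplot as plt
rng = numpy.random.default_rng(0)
fig, ax = plt.subplots()
hb = ax.hexbin(rng.normal(size=10000), rng.normal(size=10000),
               gridsize=30, mincnt=1)
normalize_hexbin(hb)  # counts become fractions of the total; colour limits updated
fig.colorbar(hb, ax=ax, label="Fraction of points per hexagon")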