Mirror of https://github.com/Richard-Sti/csiborgtools_public.git
Synced 2025-05-13 22:21:12 +00:00
Plots of VF (#134)
* Add VF plots
* Update nb
* Add CMB velocity note
* rm nb
* Add option to return all likelihoods
* Add simulation weights
* Update nb
* Add bulkflow
* Update nb
* Add values of beta
* Update imports
* Update imports
* Add paths to Carrick and Lilow fields
* Add Carrick and Lilow fields
* Add support for more fields
* Update bulkflow comp
* Update nb
* Update script
This commit is contained in:
parent 7dad6885e8
commit c6f49790bf
13 changed files with 1208 additions and 2680 deletions
@@ -21,16 +21,16 @@ The script is not parallelized in any way but it should not take very long, the
main bottleneck is reading the data from disk.
"""
from argparse import ArgumentParser
from os.path import join
from datetime import datetime
from gc import collect
from os.path import join

import csiborgtools
import numpy as np
from astropy import units as u
from astropy.coordinates import CartesianRepresentation, SkyCoord
from tqdm import tqdm

from datetime import datetime


###############################################################################
#                  Read in information about the simulation                   #
###############################################################################

@@ -132,7 +132,7 @@ def get_particles(reader, boxsize, get_velocity=True, verbose=True):


###############################################################################
#                        Main & command line interface                        #
#                                    Main                                     #
###############################################################################


@@ -201,7 +201,7 @@ def main_csiborg(args, folder):
             cumulative_velocity=cumulative_velocity)


def main_csiborg2X(args, folder):
def main_from_field(args, folder):
    """Bulk flow in the Manticore boxes provided by Stuart."""
    paths = csiborgtools.read.Paths(**csiborgtools.paths_glamdring)
    boxsize = csiborgtools.simname2boxsize(args.simname)
@@ -214,7 +214,14 @@ def main_csiborg2X(args, folder):
    cumulative_vel_y = np.zeros_like(cumulative_vel_x)
    cumulative_vel_z = np.zeros_like(cumulative_vel_x)
    for i, nsim in enumerate(tqdm(nsims, desc="Simulations")):
        reader = csiborgtools.read.CSiBORG2XField(nsim, paths)
        if args.simname == "csiborg2X":
            reader = csiborgtools.read.CSiBORG2XField(nsim, paths)
        elif args.simname == "Carrick2015":
            reader = csiborgtools.read.Carrick2015Field(paths)
        elif args.simname == "Lilow2024":
            reader = csiborgtools.read.Lilow2024Field(paths)
        else:
            raise ValueError(f"Unknown simname: `{args.simname}`.")

        density_field = reader.density_field()
        velocity_field = reader.velocity_field()
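The simname-to-reader dispatch above could equally be written as a small factory helper; a minimal sketch, assuming only the csiborgtools reader classes already imported by this script (the helper name itself is hypothetical):

def get_field_reader(simname, nsim, paths):
    """Hypothetical helper mirroring the if/elif ladder above."""
    if simname == "csiborg2X":
        # Only the constrained boxes are indexed by `nsim`.
        return csiborgtools.read.CSiBORG2XField(nsim, paths)
    readers = {"Carrick2015": csiborgtools.read.Carrick2015Field,
               "Lilow2024": csiborgtools.read.Lilow2024Field}
    if simname not in readers:
        raise ValueError(f"Unknown simname: `{simname}`.")
    return readers[simname](paths)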
@@ -229,6 +236,20 @@ def main_csiborg2X(args, folder):
        cumulative_vel_z[i, :], __ = csiborgtools.field.field_enclosed_mass(
            velocity_field[2], distances, boxsize, verbose=False)

    if args.simname in ["Carrick2015", "Lilow2024"]:
        # The Carrick+2015 and Lilow+2024 boxes are in galactic coordinates,
        # so we need to convert the bulk flow vector to the RA/dec Cartesian
        # representation.
        galactic_cartesian = CartesianRepresentation(
            cumulative_vel_x, cumulative_vel_y, cumulative_vel_z,
            unit=u.km/u.s)
        galactic_coord = SkyCoord(galactic_cartesian, frame='galactic')
        icrs_cartesian = galactic_coord.icrs.cartesian

        cumulative_vel_x = icrs_cartesian.x.to(u.km/u.s).value
        cumulative_vel_y = icrs_cartesian.y.to(u.km/u.s).value
        cumulative_vel_z = icrs_cartesian.z.to(u.km/u.s).value

    cumulative_vel = np.stack(
        [cumulative_vel_x, cumulative_vel_y, cumulative_vel_z], axis=-1)
    cumulative_vel /= cumulative_volume[..., None]
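A self-contained sketch of the frame rotation applied above; the velocity vector is a made-up example, and the assertion only checks that the Galactic-to-ICRS transformation is a pure rotation, so the bulk-flow amplitude is unchanged:

import numpy as np
from astropy import units as u
from astropy.coordinates import CartesianRepresentation, SkyCoord

vel_gal = np.array([100.0, -50.0, 25.0])  # toy bulk-flow vector in km/s
rep = CartesianRepresentation(*vel_gal, unit=u.km / u.s)
icrs_cart = SkyCoord(rep, frame="galactic").icrs.cartesian
vel_icrs = np.array([icrs_cart.x.to(u.km / u.s).value,
                     icrs_cart.y.to(u.km / u.s).value,
                     icrs_cart.z.to(u.km / u.s).value])
# The rotation preserves the magnitude of the vector.
assert np.isclose(np.linalg.norm(vel_icrs), np.linalg.norm(vel_gal))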
@@ -236,20 +257,28 @@ def main_csiborg2X(args, folder):
    # Finally save the output
    fname = f"enclosed_mass_{args.simname}.npz"
    fname = join(folder, fname)
    print(f"Saving to `{fname}`.")
    np.savez(fname, enclosed_mass=cumulative_mass, distances=distances,
             cumulative_velocity=cumulative_vel,
             enclosed_volume=cumulative_volume)
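For reference, a short sketch of turning the saved shells back into a bulk-flow amplitude; the array layout of `cumulative_velocity` (realisations x radial shells x 3 Cartesian components) is an assumption inferred from how the arrays are filled above:

import numpy as np

data = np.load(fname)  # the `enclosed_mass_<simname>.npz` file written above
distances = data["distances"]
cumulative_velocity = data["cumulative_velocity"]
# Bulk-flow magnitude within each distance shell, for every realisation.
bulk_flow_amplitude = np.linalg.norm(cumulative_velocity, axis=-1)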


###############################################################################
#                            Command line interface                           #
###############################################################################


if __name__ == "__main__":
    parser = ArgumentParser()
    parser.add_argument("--simname", type=str, help="Simulation name.",
                        choices=["csiborg1", "csiborg2_main", "csiborg2_varysmall", "csiborg2_random", "borg1", "borg2", "borg2_all", "csiborg2X"]) # noqa
                        choices=["csiborg1", "csiborg2_main", "csiborg2_varysmall", "csiborg2_random", # noqa
                                 "borg1", "borg2", "borg2_all", "csiborg2X", "Carrick2015", # noqa
                                 "Lilow2024"]) # noqa
    args = parser.parse_args()

    folder = "/mnt/extraspace/rstiskalek/csiborg_postprocessing/field_shells"
    if args.simname == "csiborg2X":
        main_csiborg2X(args, folder)
    if args.simname in ["csiborg2X", "Carrick2015", "Lilow2024"]:
        main_from_field(args, folder)
    elif "csiborg" in args.simname:
        main_csiborg(args, folder)
    elif "borg" in args.simname:
@@ -48,18 +48,18 @@ def parse_args():
ARGS = parse_args()
# This must be done before we import JAX etc.
from numpyro import set_host_device_count, set_platform # noqa

set_platform(ARGS.device) # noqa
set_host_device_count(ARGS.ndevice) # noqa

import sys # noqa
from os.path import join # noqa

import csiborgtools # noqa
import jax # noqa
from h5py import File # noqa
from mpi4py import MPI # noqa
from numpyro.infer import MCMC, NUTS, init_to_median # noqa

import csiborgtools # noqa
from numpyro.infer import MCMC, NUTS, Predictive, init_to_median # noqa


def print_variables(names, variables):
@@ -113,6 +113,19 @@ def get_harmonic_evidence(samples, log_posterior, nchains_harmonic, epoch_num):
        data, log_posterior, return_flow_samples=False, epochs_num=epoch_num)


def get_simulation_weights(samples, model, model_kwargs):
    """Get the weights per posterior sample for each simulation."""
    predictive = Predictive(model, samples)
    ll_all = predictive(
        jax.random.PRNGKey(1), store_ll_all=True, **model_kwargs)["ll_all"]

    # Multiply the likelihoods of the individual galaxies (sum in log-space).
    ll_per_simulation = jax.numpy.sum(ll_all, axis=-1)
    # Normalize by summing the likelihood over the simulations.
    norm = jax.scipy.special.logsumexp(ll_per_simulation, axis=-1)
    return ll_per_simulation - norm[:, None]
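A toy numerical check of the normalisation above; the shape of ll_all (posterior samples x simulations x galaxies) is an assumption inferred from the axes being summed over:

import jax.numpy as jnp
from jax.scipy.special import logsumexp

ll_all = jnp.zeros((4, 3, 100))         # dummy per-galaxy log-likelihoods
ll_per_sim = jnp.sum(ll_all, axis=-1)   # joint log-likelihood per simulation
log_weights = ll_per_sim - logsumexp(ll_per_sim, axis=-1)[:, None]
# The weights sum to one over the simulations for every posterior sample.
print(jnp.exp(log_weights).sum(axis=-1))  # -> [1. 1. 1. 1.]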


def run_model(model, nsteps, nburn, model_kwargs, out_folder, sample_beta,
              calculate_evidence, nchains_harmonic, epoch_num, kwargs_print):
    """Run the NumPyro model and save output to a file."""
@@ -128,6 +141,7 @@ def run_model(model, nsteps, nburn, model_kwargs, out_folder, sample_beta,

    mcmc.run(rng_key, extra_fields=("potential_energy",), **model_kwargs)
    samples = mcmc.get_samples()
    simulation_weights = get_simulation_weights(samples, model, model_kwargs)

    log_posterior = -mcmc.get_extra_fields()["potential_energy"]
    log_likelihood = samples.pop("ll_values")
@@ -141,13 +155,13 @@ def run_model(model, nsteps, nburn, model_kwargs, out_folder, sample_beta,

    if calculate_evidence:
        print("Calculating the evidence using `harmonic`.", flush=True)
        ln_evidence, ln_evidence_err = get_harmonic_evidence(
        neg_ln_evidence, neg_ln_evidence_err = get_harmonic_evidence(
            samples, log_posterior, nchains_harmonic, epoch_num)
        print(f"{'ln(Z)':<20} {ln_evidence}")
        print(f"{'ln(Z) error':<20} {ln_evidence_err}")
        print(f"{'-ln(Z)':<20} {neg_ln_evidence}")
        print(f"{'-ln(Z) error':<20} {neg_ln_evidence_err}")
    else:
        ln_evidence = jax.numpy.nan
        ln_evidence_err = (jax.numpy.nan, jax.numpy.nan)
        neg_ln_evidence = jax.numpy.nan
        neg_ln_evidence_err = (jax.numpy.nan, jax.numpy.nan)

    fname = f"samples_{ARGS.simname}_{ARGS.catalogue}_ksmooth{ARGS.ksmooth}.hdf5" # noqa
    if ARGS.ksim is not None:
@@ -167,13 +181,14 @@ def run_model(model, nsteps, nburn, model_kwargs, out_folder, sample_beta,
        # Write log likelihood and posterior
        f.create_dataset("log_likelihood", data=log_likelihood)
        f.create_dataset("log_posterior", data=log_posterior)
        f.create_dataset("simulation_weights", data=simulation_weights)

        # Write goodness of fit
        grp = f.create_group("gof")
        grp.create_dataset("BIC", data=BIC)
        grp.create_dataset("AIC", data=AIC)
        grp.create_dataset("lnZ", data=ln_evidence)
        grp.create_dataset("lnZ_err", data=ln_evidence_err)
        grp.create_dataset("neg_lnZ", data=neg_ln_evidence)
        grp.create_dataset("neg_lnZ_err", data=neg_ln_evidence_err)

    fname_summary = fname.replace(".hdf5", ".txt")
    print(f"Saving summary to `{fname_summary}`.")
@@ -188,8 +203,8 @@ def run_model(model, nsteps, nburn, model_kwargs, out_folder, sample_beta,
        print("HMC summary:")
        print(f"{'BIC':<20} {BIC}")
        print(f"{'AIC':<20} {AIC}")
        print(f"{'ln(Z)':<20} {ln_evidence}")
        print(f"{'ln(Z) error':<20} {ln_evidence_err}")
        print(f"{'-ln(Z)':<20} {neg_ln_evidence}")
        print(f"{'-ln(Z) error':<20} {neg_ln_evidence_err}")
        mcmc.print_summary(exclude_deterministic=False)
        sys.stdout = original_stdout

@@ -238,7 +253,7 @@ if __name__ == "__main__":
    print_variables(
        calibration_hyperparams.keys(), calibration_hyperparams.values())

    if ARGS.catalogue in ["LOSS", "Foundation", "Pantheon+", "Pantheon+_groups"]: # noqa
    if ARGS.catalogue in ["LOSS", "Foundation", "Pantheon+", "Pantheon+_groups", "Pantheon+_zSN"]: # noqa
        distmod_hyperparams = {"e_mu_mean": 0.1, "e_mu_std": 0.05,
                               "mag_cal_mean": -18.25, "mag_cal_std": 0.5,
                               "alpha_cal_mean": 0.148, "alpha_cal_std": 0.05,
@@ -44,10 +44,10 @@ COMMENT

# Submit a job for each combination of simname, catalogue, ksim
# for simname in "Lilow2024" "CF4" "CF4gp" "csiborg2_main" "csiborg2X"; do
for simname in "Lilow2024" "Carrick2015" "CF4" "CF4gp"; do
for simname in "Lilow2024"; do
# for simname in "csiborg1" "csiborg2_main" "csiborg2X"; do
    for catalogue in "LOSS" "Foundation" "2MTF" "Pantheon+" "Pantheon+_groups" "Pantheon+_zSN" "SFI_gals"; do
    # for catalogue in "2MTF" "Pantheon+"; do
    # for catalogue in "LOSS" "Foundation" "2MTF" "Pantheon+" "Pantheon+_groups" "Pantheon+_zSN" "SFI_gals"; do
    for catalogue in "LOSS"; do
        # for ksim in 0 1 2; do
        # for ksim in 0 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20; do
        for ksim in "none"; do