More flow preparation & Olympics (#143)

* Add more comments

* Add flow paths

* Simplify paths

* Update default arguments

* Update paths

* Update param names

* Update some of the scripts for reading files

* Add the Mike method option

* Update plotting

* Update fnames

* Simplify things

* Make more default options

* Add print

* Update

* Downsample CF4

* Update numpyro selection

* Add selection fitting nb

* Add coeffs

* Update script

* Add nb

* Add label

* Increase number of steps

* Update default params

* Add more labels

* Improve file name

* Update nb

* Fix little bug

* Remove import

* Update scales

* Update labels

* Add script

* Update script

* Add more

* Add more labels

* Add script

* Add submit

* Update spacing

* Update submit scripts

* Update script

* Update defaults

* Update defaults

* Update nb

* Update test

* Update imports

* Add script

* Add support for Indranil void

* Add a dipole

* Update nb

* Update submit

* Update Om0

* Add final

* Update default params

* Fix bug

* Add option to fix to LG frame

* Add Vext label

* Add Vext label

* Update script

* Rm fixed LG

* rm LG stuff

* Update script

* Update bulk flow plotting

* Update nb

* Add no field option

* Update defaults

* Update nb

* Update script

* Update nb

* Update nb

* Add names to plots

* Update nb

* Update plot

* Add more latex names

* Update default

* Update nb

* Update nb

* Add plane slicing

* Add nb with slices

* Update nb

* Update script

* Update nb

* Update nb
Richard Stiskalek 2024-09-11 08:45:42 +02:00 committed by GitHub
parent 3d1e1c0ae3
commit 2b938c112c
20 changed files with 3106 additions and 379 deletions

scripts/field_prop/clear.sh Executable file

@@ -0,0 +1,2 @@
cm="rm *.out"
$cm


@@ -396,10 +396,17 @@ if __name__ == "__main__":
parser.add_argument("--grid", type=int, help="Grid resolution.")
args = parser.parse_args()
rmax = 300
dr = 0.5
rmax = 200
if args.catalogue == "CF4_GroupAll":
dr = 1
else:
dr = 0.75
# smooth_scales = [0, 2, 4, 6, 8]
smooth_scales = [0]
print(f"Running catalogue {args.catalogue} for simulation {args.simname}.")
comm = MPI.COMM_WORLD
paths = csiborgtools.read.Paths(**csiborgtools.paths_glamdring)
nsims = get_nsims(args, paths)


@@ -1,17 +1,26 @@
nthreads=1
memory=64
on_login=1
on_login=${1}
queue="berg"
env="/mnt/users/rstiskalek/csiborgtools/venv_csiborg/bin/python"
file="field_los.py"
nsims="-1"
# These are only for CB
MAS="SPH"
grid=1024
for simname in "Lilow2024"; do
for catalogue in "CF4_TFR"; do
if [ "$on_login" != "1" ] && [ "$on_login" != "0" ]
then
echo "'on_login' (1) must be either 0 or 1."
exit 1
fi
# for simname in "csiborg1" "csiborg2_main" "csiborg2X" "Lilow2024" "Carrick2015" "CF4"; do
for simname in "csiborg2_main"; do
for catalogue in "2MTF" "SFI_gals" "CF4_TFR"; do
pythoncm="$env $file --catalogue $catalogue --nsims $nsims --simname $simname --MAS $MAS --grid $grid"
if [ $on_login -eq 1 ]; then
echo $pythoncm
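A hedged usage note on this submit script: the login flag is now read from the first positional argument, so (assuming the conventional name field_los.sh, which is not shown in this diff) "bash field_los.sh 1" only echoes each assembled python command for inspection, while "bash field_los.sh 0" is expected to hand the commands to the batch queue; the submission branch itself lies outside the hunk shown here.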


@@ -0,0 +1,95 @@
# Copyright (C) 2024 Richard Stiskalek
# This program is free software; you can redistribute it and/or modify it
# under the terms of the GNU General Public License as published by the
# Free Software Foundation; either version 3 of the License, or (at your
# option) any later version.
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General
# Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
"""
MPI script to interpolate the density and velocity fields along the line of
sight.
"""
from os.path import join
import csiborgtools
import numpy as np
from astropy.coordinates import SkyCoord, angular_separation
from h5py import File
from mpi4py import MPI
from scipy.interpolate import RegularGridInterpolator
from field_los import get_los
def interpolate_indranil_void(kind, RA, dec, rmax, dr, dump_folder, catalogue):
fdir = "/mnt/extraspace/rstiskalek/catalogs"
if kind == "exp":
fname = join(fdir, "v_pec_EXP_IndranilVoid.dat")
elif kind == "gauss":
fname = join(fdir, "v_pec_GAUSS_IndranilVoid.dat")
else:
raise ValueError("Invalid void kind.")
# These are only velocities.
data = np.loadtxt(fname)
fname_out = join(dump_folder, f"los_{catalogue}_IndranilVoid_{kind}.hdf5")
r_grid = np.arange(0, 251)
phi_grid = np.arange(0, 181)
r_eval = np.arange(0, rmax, dr).astype(float) / 0.674
model_axis = SkyCoord(l=117, b=4, frame='galactic', unit='deg').icrs
coords = SkyCoord(ra=RA, dec=dec, unit='deg').icrs
# Get angular separation in degrees
phi = angular_separation(coords.ra.rad, coords.dec.rad,
model_axis.ra.rad, model_axis.dec.rad)
phi *= 180 / np.pi
# Get the interpolator
f = RegularGridInterpolator((r_grid, phi_grid), data.T)
# Get the dummy x-values to evaluate for each LOS
x_dummy = np.ones((len(r_eval), 2))
x_dummy[:, 0] = r_eval
result = np.full((len(RA), len(r_eval)), np.nan)
for i in range(len(RA)):
x_dummy[:, 1] = phi[i]
result[i] = f(x_dummy)
# Write the output, homogeneous density.
density = np.ones_like(result)
print(f"Writing to `{fname_out}`.")
with File(fname_out, 'w') as f_out:
f_out.create_dataset("rdist_0", data=r_eval * 0.674)
f_out.create_dataset("density_0", data=density)
f_out.create_dataset("velocity_0", data=result)
###############################################################################
# Command line interface #
###############################################################################
if __name__ == "__main__":
kind = "exp"
rmax = 165
dr = 1
comm = MPI.COMM_WORLD
paths = csiborgtools.read.Paths(**csiborgtools.paths_glamdring)
out_folder = "/mnt/extraspace/rstiskalek/csiborg_postprocessing/field_los"
for catalogue in ["LOSS", "Foundation", "2MTF", "SFI_gals", "CF4_TFR", "CF4_GroupAll"]: # noqa
print(f"Running kind `{kind}` for catalogue `{catalogue}`.")
RA, dec = get_los(catalogue, "", comm).T
interpolate_indranil_void(
kind, RA, dec, rmax, dr, out_folder, catalogue)
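
The core of this new script is a 2-D interpolation of the tabulated void velocity profile over a (radius, angle-from-axis) grid. Below is a minimal, self-contained sketch of that pattern; the velocity table is synthetic random data standing in for the v_pec_*_IndranilVoid.dat files, and the angular separation is fixed by hand rather than computed with astropy's angular_separation.

import numpy as np
from scipy.interpolate import RegularGridInterpolator

# Synthetic stand-in for the tabulated void velocity profile.
r_grid = np.arange(0, 251)      # radial grid points
phi_grid = np.arange(0, 181)    # angle from the void axis [deg]
velocity = np.random.default_rng(0).normal(size=(r_grid.size, phi_grid.size))

# 2-D interpolator over (radius, angle); in the script above the loaded
# table is transposed so that radius is likewise the first axis.
f = RegularGridInterpolator((r_grid, phi_grid), velocity)

# Evaluate a single line of sight: fixed angular separation, varying radius.
r_eval = np.arange(0, 165, 1.0)
phi_los = 23.5                  # example separation in degrees
points = np.column_stack([r_eval, np.full_like(r_eval, phi_los)])
v_los = f(points)               # velocity sampled along this line of sight
print(v_los[:5])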

scripts/flow/clear.sh Executable file

@@ -0,0 +1,2 @@
cm="rm *.out"
$cm


@@ -34,7 +34,7 @@ def parse_args():
help="Simulation name.")
parser.add_argument("--catalogue", type=str, required=True,
help="PV catalogues.")
parser.add_argument("--ksmooth", type=int, default=1,
parser.add_argument("--ksmooth", type=int, default=0,
help="Smoothing index.")
parser.add_argument("--ksim", type=none_or_int, default=None,
help="IC iteration number. If 'None', all IC realizations are used.") # noqa
@@ -61,6 +61,7 @@ import sys
from os.path import join # noqa
import csiborgtools # noqa
from csiborgtools import fprint # noqa
import jax # noqa
from h5py import File # noqa
from numpyro.infer import MCMC, NUTS, init_to_median # noqa
@@ -72,7 +73,7 @@ def print_variables(names, variables):
print(flush=True)
def get_models(get_model_kwargs, toy_selection, verbose=True):
def get_models(get_model_kwargs, mag_selection, verbose=True):
"""Load the data and create the NumPyro models."""
paths = csiborgtools.read.Paths(**csiborgtools.paths_glamdring)
folder = "/mnt/extraspace/rstiskalek/catalogs/"
@@ -111,7 +112,7 @@ def get_models(get_model_kwargs, toy_selection, verbose=True):
cat, fpath, paths,
ksmooth=ARGS.ksmooth)
models[i] = csiborgtools.flow.get_model(
loader, toy_selection=toy_selection[i], **get_model_kwargs)
loader, mag_selection=mag_selection[i], **get_model_kwargs)
print(f"\n{'Num. radial steps':<20} {len(loader.rdist)}\n", flush=True)
return models
@@ -127,9 +128,15 @@ def get_harmonic_evidence(samples, log_posterior, nchains_harmonic, epoch_num):
data, log_posterior, return_flow_samples=False, epochs_num=epoch_num)
def run_model(model, nsteps, nburn, model_kwargs, out_folder, sample_beta,
calculate_harmonic, nchains_harmonic, epoch_num, kwargs_print):
def run_model(model, nsteps, nburn, model_kwargs, out_folder,
calculate_harmonic, nchains_harmonic, epoch_num, kwargs_print,
fname_kwargs):
"""Run the NumPyro model and save output to a file."""
paths = csiborgtools.read.Paths(**csiborgtools.paths_glamdring)
fname = paths.flow_validation(out_folder, ARGS.simname, ARGS.catalogue,
**fname_kwargs)
try:
ndata = sum(model.ndata for model in model_kwargs["models"])
except AttributeError as e:
@@ -159,13 +166,6 @@ def run_model(model, nsteps, nburn, model_kwargs, out_folder, sample_beta,
neg_ln_evidence = jax.numpy.nan
neg_ln_evidence_err = (jax.numpy.nan, jax.numpy.nan)
fname = f"samples_{ARGS.simname}_{'+'.join(ARGS.catalogue)}_ksmooth{ARGS.ksmooth}.hdf5" # noqa
if ARGS.ksim is not None:
fname = fname.replace(".hdf5", f"_nsim{ARGS.ksim}.hdf5")
if sample_beta:
fname = fname.replace(".hdf5", "_sample_beta.hdf5")
fname = join(out_folder, fname)
print(f"Saving results to `{fname}`.")
with File(fname, "w") as f:
@@ -209,7 +209,7 @@ def run_model(model, nsteps, nburn, model_kwargs, out_folder, sample_beta,
def get_distmod_hyperparams(catalogue, sample_alpha, sample_mag_dipole):
alpha_min = -1.0
alpha_max = 3.0
alpha_max = 10.0
if catalogue in ["LOSS", "Foundation", "Pantheon+", "Pantheon+_groups", "Pantheon+_zSN"]: # noqa
return {"e_mu_min": 0.001, "e_mu_max": 1.0,
@@ -224,7 +224,6 @@ def get_distmod_hyperparams(catalogue, sample_alpha, sample_mag_dipole):
"a_mean": -21., "a_std": 5.0,
"b_mean": -5.95, "b_std": 4.0,
"c_mean": 0., "c_std": 20.0,
"sample_curvature": False,
"a_dipole_mean": 0., "a_dipole_std": 1.0,
"sample_a_dipole": sample_mag_dipole,
"alpha_min": alpha_min, "alpha_max": alpha_max,
@@ -242,14 +241,27 @@ def get_distmod_hyperparams(catalogue, sample_alpha, sample_mag_dipole):
raise ValueError(f"Unsupported catalogue: `{ARGS.catalogue}`.")
def get_toy_selection(toy_selection, catalogue):
if not toy_selection:
def get_toy_selection(catalogue):
"""Toy magnitude selection coefficients."""
if catalogue == "SFI_gals":
kind = "soft"
# m1, m2, a
coeffs = [11.467, 12.906, -0.231]
elif "CF4_TFR" in catalogue and "_i" in catalogue:
kind = "soft"
coeffs = [13.043, 14.423, -0.129]
elif "CF4_TFR" in catalogue and "w1" in catalogue:
kind = "soft"
coeffs = [11.731, 14.189, -0.118]
elif catalogue == "2MTF":
kind = "hard"
coeffs = 11.25
else:
fprint(f"found no selection coefficients for {catalogue}.")
return None
if catalogue == "SFI_gals":
return [1.221e+01, 1.297e+01, -2.708e-01]
else:
raise ValueError(f"Unsupported catalogue: `{ARGS.catalogue}`.")
return {"kind": kind,
"coeffs": coeffs}
if __name__ == "__main__":
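
To make the new selection plumbing concrete, here is a small sketch (an illustration only, assuming a hypothetical run with ARGS.catalogue = ["2MTF", "SFI_gals"] and inference_method = "bayes") of what the per-catalogue mag_selection list ends up holding, with values taken from get_toy_selection above:

# Result of [get_toy_selection(cat) for cat in ["2MTF", "SFI_gals"]]
mag_selection = [
    {"kind": "hard", "coeffs": 11.25},                     # 2MTF: hard magnitude limit
    {"kind": "soft", "coeffs": [11.467, 12.906, -0.231]},  # SFI_gals: soft selection (m1, m2, a)
]
# Each entry is later forwarded as mag_selection=mag_selection[i] to
# csiborgtools.flow.get_model for the matching catalogue.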
@@ -262,43 +274,63 @@ if __name__ == "__main__":
# Fixed user parameters #
###########################################################################
nsteps = 1000
nburn = 500
zcmb_min = 0
# `None` means default behaviour
nsteps = 10_000
nburn = 2_000
zcmb_min = None
zcmb_max = 0.05
nchains_harmonic = 10
num_epochs = 50
inference_method = "bayes"
calculate_harmonic = True if inference_method == "mike" else False
maxmag_selection = None
sample_alpha = False
sample_beta = True
inference_method = "mike"
mag_selection = None
sample_alpha = True
sample_beta = None
sample_Vmono = False
sample_mag_dipole = False
toy_selection = True
calculate_harmonic = False if inference_method == "bayes" else True
if toy_selection and inference_method == "mike":
raise ValueError("Toy selection is not supported with `mike` inference.") # noqa
if nsteps % nchains_harmonic != 0:
raise ValueError(
"The number of steps must be divisible by the number of chains.")
fname_kwargs = {"inference_method": inference_method,
"smooth": ARGS.ksmooth,
"nsim": ARGS.ksim,
"zcmb_min": zcmb_min,
"zcmb_max": zcmb_max,
"mag_selection": mag_selection,
"sample_alpha": sample_alpha,
"sample_beta": sample_beta,
"sample_Vmono": sample_Vmono,
"sample_mag_dipole": sample_mag_dipole,
}
main_params = {"nsteps": nsteps, "nburn": nburn,
"zcmb_min": zcmb_min,
"zcmb_max": zcmb_max,
"maxmag_selection": maxmag_selection,
"mag_selection": mag_selection,
"calculate_harmonic": calculate_harmonic,
"nchains_harmonic": nchains_harmonic,
"num_epochs": num_epochs,
"inference_method": inference_method,
"sample_mag_dipole": sample_mag_dipole,
"toy_selection": toy_selection}
}
print_variables(main_params.keys(), main_params.values())
calibration_hyperparams = {"Vext_min": -1000, "Vext_max": 1000,
if sample_beta is None:
sample_beta = ARGS.simname == "Carrick2015"
if mag_selection and inference_method != "bayes":
raise ValueError("Magnitude selection is only supported with `bayes` inference.") # noqa
if inference_method != "bayes":
mag_selection = [None] * len(ARGS.catalogue)
elif mag_selection is None or mag_selection:
mag_selection = [get_toy_selection(cat) for cat in ARGS.catalogue]
if nsteps % nchains_harmonic != 0:
raise ValueError(
"The number of steps must be divisible by the number of chains.")
calibration_hyperparams = {"Vext_min": -3000, "Vext_max": 3000,
"Vmono_min": -1000, "Vmono_max": 1000,
"beta_min": -1.0, "beta_max": 3.0,
"beta_min": -10.0, "beta_max": 10.0,
"sigma_v_min": 1.0, "sigma_v_max": 750.,
"sample_Vmono": sample_Vmono,
"sample_beta": sample_beta,
@@ -315,15 +347,11 @@ if __name__ == "__main__":
kwargs_print = (main_params, calibration_hyperparams,
*distmod_hyperparams_per_catalogue)
###########################################################################
get_model_kwargs = {"zcmb_min": zcmb_min, "zcmb_max": zcmb_max,
"maxmag_selection": maxmag_selection}
toy_selection = [get_toy_selection(toy_selection, cat)
for cat in ARGS.catalogue]
models = get_models(get_model_kwargs, toy_selection)
get_model_kwargs = {"zcmb_min": zcmb_min, "zcmb_max": zcmb_max}
models = get_models(get_model_kwargs, mag_selection)
model_kwargs = {
"models": models,
"field_calibration_hyperparams": calibration_hyperparams,
@@ -334,5 +362,5 @@ if __name__ == "__main__":
model = csiborgtools.flow.PV_validation_model
run_model(model, nsteps, nburn, model_kwargs, out_folder,
calibration_hyperparams["sample_beta"], calculate_harmonic,
nchains_harmonic, num_epochs, kwargs_print)
calculate_harmonic, nchains_harmonic, num_epochs, kwargs_print,
fname_kwargs)


@@ -1,5 +1,5 @@
#!/bin/bash
memory=7
memory=14
on_login=${1}
queue=${2}
ndevice=1
@@ -37,12 +37,19 @@ else
fi
# for simname in "Lilow2024" "CF4" "CF4gp" "csiborg1" "csiborg2_main" "csiborg2X"; do
for simname in "Carrick2015"; do
for catalogue in "SFI_gals"; do
# for catalogue in "CF4_TFR_i"; do
# for ksim in 0 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20; do
for simname in "Carrick2015" "csiborg2_main"; do
# for simname in "csiborg2_main" "csiborg2X" ; do
# for simname in "Carrick2015" "Lilow2024" "csiborg2_main" "csiborg2X" "CF4"; do
# for simname in "Carrick2015" "csiborg2X" "csiborg2_main"; do
# for simname in "Carrick2015"; do
# for catalogue in "LOSS" "Foundation" "2MTF" "SFI_gals" "CF4_TFR_i" "CF4_TFR_w1"; do
for catalogue in "2MTF" "SFI_gals" "CF4_TFR_i"; do
# for catalogue in "2MTF" "SFI" "CF4_TFR_not2MTForSFI_i"; do
# for catalogue in "2MTF" "SFI_gals" "CF4_TFR_i"; do
# for catalogue in "CF4_TFR_w1"; do
# for catalogue in "CF4_GroupAll"; do
for ksim in "none"; do
for ksmooth in 1 2 3 4; do
pythoncm="$env $file --catalogue $catalogue --simname $simname --ksim $ksim --ksmooth $ksmooth --ndevice $ndevice --device $device"
if [ "$on_login" == "1" ]; then
@@ -65,3 +72,5 @@ for simname in "Carrick2015"; do
done
done
done
done
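As a hedged usage note (this submit script's filename is not shown in the diff): the login flag is the first positional argument and the queue the second, so a dry run would look roughly like "bash <submit_script>.sh 1 <queue>", which with on_login set to "1" only prints each constructed pythoncm instead of submitting it.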


@@ -1,4 +1,4 @@
from argparse import ArgumentParser, ArgumentTypeError
from argparse import ArgumentParser
def parse_args():
@@ -10,7 +10,7 @@ def parse_args():
ARGS = parse_args()
# This must be done before we import JAX etc.
from numpyro import set_host_device_count, set_platform # noqa
from numpyro import set_platform # noqa
set_platform(ARGS.device) # noqa
@@ -30,8 +30,8 @@ def get_harmonic_evidence(samples, log_posterior, nchains_harmonic, epoch_num):
data, log_posterior, return_flow_samples=False, epochs_num=epoch_num)
ndim = 250
nsamples = 100_000
ndim = 150
nsamples = 50_000
nchains_split = 10
loc = jnp.zeros(ndim)
cov = jnp.eye(ndim)
@@ -42,10 +42,6 @@ X = gen.multivariate_normal(loc, cov, size=nsamples)
samples = {f"x_{i}": X[:, i] for i in range(ndim)}
logprob = multivariate_normal(loc, cov).logpdf(X)
neg_lnZ_laplace, neg_lnZ_laplace_error = csiborgtools.laplace_evidence(
samples, logprob, nchains_split)
print(f"neg_lnZ_laplace: {neg_lnZ_laplace} +/- {neg_lnZ_laplace_error}")
neg_lnZ_harmonic, neg_lnZ_harmonic_error = get_harmonic_evidence(
samples, logprob, nchains_split, epoch_num=30)
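
For reference, this test is a null check: logprob is the normalized log-density of the same N(loc, cov) that generated the samples, so the true evidence is Z = ∫ N(x; loc, cov) dx = 1 and neg_lnZ = 0, and both the Laplace and harmonic estimates printed here should be consistent with zero up to Monte Carlo and training error.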