Add more MANTICORE (#148)

* Add path to fields

* Add new Manticore field support

* Downsample Manticore to 20 snapshots

* Add manticore snapshot

* Remove downsampling

* Add Manticore

* Add names of manti

* Add manti

* Simplify

* Update script

* Update CF4 ICs

* Update LOS settings

* Update nb

* Update imports

* Reorganise funcs

* Update script

* Add basic void model
Richard Stiskalek 2024-09-21 14:48:23 +02:00 committed by GitHub
parent c2bc5a6398
commit 4fa0e04f6e
13 changed files with 980 additions and 566 deletions

View File

@@ -12,9 +12,9 @@
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
from .flow_model import (DataLoader, PV_LogLikelihood, PV_validation_model, # noqa
dist2redshift, get_model, # noqa
from .io import (DataLoader, get_model, read_absolute_calibration, # noqa
radial_velocity_los) # noqa
from .flow_model import (PV_LogLikelihood, PV_validation_model, dist2redshift, # noqa
Observed2CosmologicalRedshift, predict_zobs, # noqa
project_Vext, radial_velocity_los, # noqa
stack_pzosmo_over_realizations) # noqa
project_Vext, stack_pzosmo_over_realizations) # noqa
from .selection import ToyMagnitudeSelection # noqa

View File

@@ -26,289 +26,21 @@ from abc import ABC, abstractmethod
import numpy as np
from astropy import units as u
from astropy.cosmology import FlatLambdaCDM, z_at_value
from h5py import File
from jax import jit
from jax import numpy as jnp
from jax.scipy.special import logsumexp, erf
from numpyro import factor, sample, plate
from numpyro.distributions import Normal, Uniform, MultivariateNormal
from jax.scipy.special import erf, logsumexp
from numpyro import factor, plate, sample
from numpyro.distributions import MultivariateNormal, Normal, Uniform
from quadax import simpson
from tqdm import trange
from ..params import SPEED_OF_LIGHT
from ..utils import fprint
from .selection import toy_log_magnitude_selection
from ..params import SPEED_OF_LIGHT, simname2Omega_m
from ..utils import fprint, radec_to_galactic, radec_to_supergalactic
H0 = 100 # km / s / Mpc
###############################################################################
# Data loader #
###############################################################################
class DataLoader:
"""
Data loader for the line of sight (LOS) interpolated fields and the
corresponding catalogues.
Parameters
----------
simname : str
Simulation name.
ksim : int or list of int
Index of the simulation to read in (not the IC index).
catalogue : str
Name of the catalogue with LOS objects.
catalogue_fpath : str
Path to the LOS catalogue file.
paths : csiborgtools.read.Paths
Paths object.
ksmooth : int, optional
Smoothing index.
store_full_velocity : bool, optional
Whether to store the full 3D velocity field. Otherwise stores only
the radial velocity.
verbose : bool, optional
Verbose flag.
"""
def __init__(self, simname, ksim, catalogue, catalogue_fpath, paths,
ksmooth=None, store_full_velocity=False, verbose=True):
fprint("reading the catalogue,", verbose)
self._cat = self._read_catalogue(catalogue, catalogue_fpath)
self._catname = catalogue
fprint("reading the interpolated field.", verbose)
self._field_rdist, self._los_density, self._los_velocity = self._read_field( # noqa
simname, ksim, catalogue, ksmooth, paths)
if len(self._cat) != self._los_density.shape[1]:
raise ValueError("The number of objects in the catalogue does not "
"match the number of objects in the field.")
fprint("calculating the radial velocity.", verbose)
nobject = self._los_density.shape[1]
dtype = self._los_density.dtype
if simname in ["Carrick2015", "Lilow2024"]:
# Carrick+2015 and Lilow+2024 are in galactic coordinates
d1, d2 = radec_to_galactic(self._cat["RA"], self._cat["DEC"])
elif simname in ["CF4", "CLONES"]:
# CF4 is in supergalactic coordinates
d1, d2 = radec_to_supergalactic(self._cat["RA"], self._cat["DEC"])
else:
d1, d2 = self._cat["RA"], self._cat["DEC"]
num_sims = len(self._los_density)
if "IndranilVoid" in simname:
self._los_radial_velocity = self._los_velocity
self._los_velocity = None
else:
radvel = np.empty(
(num_sims, nobject, len(self._field_rdist)), dtype)
for k in range(num_sims):
for i in range(nobject):
radvel[k, i, :] = radial_velocity_los(
self._los_velocity[k, :, i, ...], d1[i], d2[i])
self._los_radial_velocity = radvel
if not store_full_velocity:
self._los_velocity = None
self._Omega_m = simname2Omega_m(simname)
# Normalize the CSiBORG & CLONES density by the mean matter density
if "csiborg" in simname or simname == "CLONES":
cosmo = FlatLambdaCDM(H0=H0, Om0=self._Omega_m)
mean_rho_matter = cosmo.critical_density0.to("Msun/kpc^3").value
mean_rho_matter *= self._Omega_m
self._los_density /= mean_rho_matter
# Since Carrick+2015 and CF4 provide `rho / <rho> - 1`
if simname in ["Carrick2015", "CF4", "CF4gp"]:
self._los_density += 1
# But some CF4 delta values are < -1. Check that CF4 really reports
# this.
if simname in ["CF4", "CF4gp"]:
self._los_density = np.clip(self._los_density, 1e-2, None,)
# Lilow+2024 outside of the range data is NaN. Replace it with some
# finite values. This is OK because the PV tracers are not so far.
if simname == "Lilow2024":
self._los_density[np.isnan(self._los_density)] = 1.
self._los_radial_velocity[np.isnan(self._los_radial_velocity)] = 0.
self._mask = np.ones(len(self._cat), dtype=bool)
self._catname = catalogue
@property
def cat(self):
"""The distance indicators catalogue (structured array)."""
return self._cat[self._mask]
@property
def catname(self):
"""Catalogue name."""
return self._catname
@property
def rdist(self):
"""Radial distances at which the field was interpolated."""
return self._field_rdist
@property
def los_density(self):
"""
Density field along the line of sight `(n_sims, n_objects, n_steps)`
"""
return self._los_density[:, self._mask, ...]
@property
def los_velocity(self):
"""
Velocity field along the line of sight `(n_sims, 3, n_objects,
n_steps)`.
"""
if self._los_velocity is None:
raise ValueError("The 3D velocities were not stored.")
return self._los_velocity[:, :, self._mask, ...]
@property
def los_radial_velocity(self):
"""
Radial velocity along the line of sight `(n_sims, n_objects, n_steps)`.
"""
return self._los_radial_velocity[:, self._mask, ...]
def _read_field(self, simname, ksims, catalogue, ksmooth, paths):
nsims = paths.get_ics(simname)
if isinstance(ksims, int):
ksims = [ksims]
# For no-field read in Carrick+2015 but then zero it.
if simname == "no_field":
simname = "Carrick2015"
to_wipe = True
else:
to_wipe = False
if not all(0 <= ksim < len(nsims) for ksim in ksims):
raise ValueError(f"Invalid simulation index: `{ksims}`")
if "Pantheon+" in catalogue:
fpath = paths.field_los(simname, "Pantheon+")
elif "CF4_TFR" in catalogue:
fpath = paths.field_los(simname, "CF4_TFR")
else:
fpath = paths.field_los(simname, catalogue)
los_density = [None] * len(ksims)
los_velocity = [None] * len(ksims)
for n, ksim in enumerate(ksims):
nsim = nsims[ksim]
with File(fpath, 'r') as f:
has_smoothed = True if f[f"density_{nsim}"].ndim > 2 else False
if has_smoothed and (ksmooth is None or not isinstance(ksmooth, int)): # noqa
raise ValueError("The output contains smoothed field but "
"`ksmooth` is None. It must be provided.")
indx = (..., ksmooth) if has_smoothed else (...)
los_density[n] = f[f"density_{nsim}"][indx]
los_velocity[n] = f[f"velocity_{nsim}"][indx]
rdist = f[f"rdist_{nsim}"][...]
los_density = np.stack(los_density)
los_velocity = np.stack(los_velocity)
if to_wipe:
los_density = np.ones_like(los_density)
los_velocity = np.zeros_like(los_velocity)
return rdist, los_density, los_velocity
def _read_catalogue(self, catalogue, catalogue_fpath):
if catalogue == "A2":
with File(catalogue_fpath, 'r') as f:
dtype = [(key, np.float32) for key in f.keys()]
arr = np.empty(len(f["RA"]), dtype=dtype)
for key in f.keys():
arr[key] = f[key][:]
elif catalogue in ["LOSS", "Foundation", "SFI_gals", "2MTF",
"Pantheon+", "SFI_gals_masked", "SFI_groups",
"Pantheon+_groups", "Pantheon+_groups_zSN",
"Pantheon+_zSN"]:
with File(catalogue_fpath, 'r') as f:
if "Pantheon+" in catalogue:
grp = f["Pantheon+"]
else:
grp = f[catalogue]
dtype = [(key, np.float32) for key in grp.keys()]
arr = np.empty(len(grp["RA"]), dtype=dtype)
for key in grp.keys():
arr[key] = grp[key][:]
elif "CB2_" in catalogue:
with File(catalogue_fpath, 'r') as f:
dtype = [(key, np.float32) for key in f.keys()]
arr = np.empty(len(f["RA"]), dtype=dtype)
for key in f.keys():
arr[key] = f[key][:]
elif "UPGLADE" in catalogue:
with File(catalogue_fpath, 'r') as f:
dtype = [(key, np.float32) for key in f.keys()]
arr = np.empty(len(f["RA"]), dtype=dtype)
for key in f.keys():
if key == "mask":
continue
arr[key] = f[key][:]
elif catalogue in ["CF4_GroupAll"] or "CF4_TFR" in catalogue:
with File(catalogue_fpath, 'r') as f:
dtype = [(key, np.float32) for key in f.keys()]
dtype += [("DEC", np.float32)]
arr = np.empty(len(f["RA"]), dtype=dtype)
for key in f.keys():
arr[key] = f[key][:]
arr["DEC"] = arr["DE"]
if "CF4_TFR" in catalogue:
arr["RA"] *= 360 / 24
else:
raise ValueError(f"Unknown catalogue: `{catalogue}`.")
return arr
###############################################################################
# Supplementary flow functions #
###############################################################################
def radial_velocity_los(los_velocity, ra, dec):
"""
Calculate the radial velocity along the LOS from the 3D velocity
along the LOS `(3, n_steps)`.
"""
types = (float, np.float32, np.float64)
if not isinstance(ra, types) and not isinstance(dec, types):
raise ValueError("RA and dec must be floats.")
if los_velocity.ndim != 2 and los_velocity.shape[0] != 3:
raise ValueError("The shape of `los_velocity` must be (3, n_steps).")
ra_rad, dec_rad = np.deg2rad(ra), np.deg2rad(dec)
vx, vy, vz = los_velocity
return (vx * np.cos(ra_rad) * np.cos(dec_rad)
+ vy * np.sin(ra_rad) * np.cos(dec_rad)
+ vz * np.sin(dec_rad))
###############################################################################
# JAX Flow model #
###############################################################################
@@ -992,235 +724,6 @@ def PV_validation_model(models, distmod_hyperparams_per_model,
factor("ll", ll)
###############################################################################
# Shortcut to create a model #
###############################################################################
def read_absolute_calibration(kind, data_length, calibration_fpath):
"""
Read the absolute calibration for the CF4 TFR sample from LEDA but
preprocessed by me.
Parameters
----------
kind : str
Calibration kind: `Cepheids`, `TRGB`, `SBF`, ...
data_length : int
Number of samples in CF4 TFR (should be 9,788).
calibration_fpath : str
Path to the preprocessed calibration file.
Returns
-------
data : 3-dimensional array of shape (data_length, max_calib, 2)
Absolute calibration data.
with_calibration : 1-dimensional array of shape (data_length)
Whether the sample has a calibration.
length_calibration : 1-dimensional array of shape (data_length)
Number of calibration points per sample.
"""
data = {}
with File(calibration_fpath, 'r') as f:
for key in f[kind].keys():
x = f[kind][key][:]
# Get rid of points without uncertainties
x = x[~np.isnan(x[:, 1])]
data[key] = x
max_calib = max(len(val) for val in data.values())
out = np.full((data_length, max_calib, 2), np.nan)
with_calibration = np.full(data_length, False)
length_calibration = np.full(data_length, 0)
for i in data.keys():
out[int(i), :len(data[i]), :] = data[i]
with_calibration[int(i)] = True
length_calibration[int(i)] = len(data[i])
return out, with_calibration, length_calibration
def get_model(loader, zcmb_min=None, zcmb_max=None, mag_selection=None,
absolute_calibration=None, calibration_fpath=None):
"""
Get a model and extract the relevant data from the loader.
Parameters
----------
loader : DataLoader
DataLoader instance.
zcmb_min : float, optional
Minimum observed redshift in the CMB frame to include.
zcmb_max : float, optional
Maximum observed redshift in the CMB frame to include.
mag_selection : dict, optional
Magnitude selection parameters.
add_absolute_calibration : bool, optional
Whether to add an absolute calibration for CF4 TFRs.
calibration_fpath : str, optional
Returns
-------
model : NumPyro model
"""
zcmb_min = 0.0 if zcmb_min is None else zcmb_min
zcmb_max = np.infty if zcmb_max is None else zcmb_max
los_overdensity = loader.los_density
los_velocity = loader.los_radial_velocity
kind = loader._catname
if absolute_calibration is not None and "CF4_TFR_" not in kind:
raise ValueError("Absolute calibration supported only for the CF4 TFR sample.") # noqa
if kind in ["LOSS", "Foundation"]:
keys = ["RA", "DEC", "z_CMB", "mB", "x1", "c", "e_mB", "e_x1", "e_c"]
RA, dec, zCMB, mag, x1, c, e_mag, e_x1, e_c = (
loader.cat[k] for k in keys)
e_zCMB = None
mask = (zCMB < zcmb_max) & (zCMB > zcmb_min)
calibration_params = {"mag": mag[mask], "x1": x1[mask], "c": c[mask],
"e_mag": e_mag[mask], "e_x1": e_x1[mask],
"e_c": e_c[mask]}
model = PV_LogLikelihood(
los_overdensity[:, mask], los_velocity[:, mask],
RA[mask], dec[mask], zCMB[mask], e_zCMB, calibration_params,
None, mag_selection, loader.rdist, loader._Omega_m, "SN",
name=kind)
elif "Pantheon+" in kind:
keys = ["RA", "DEC", "zCMB", "mB", "x1", "c", "biasCor_m_b", "mBERR",
"x1ERR", "cERR", "biasCorErr_m_b", "zCMB_SN", "zCMB_Group",
"zCMBERR"]
RA, dec, zCMB, mB, x1, c, bias_corr_mB, e_mB, e_x1, e_c, e_bias_corr_mB, zCMB_SN, zCMB_Group, e_zCMB = (loader.cat[k] for k in keys) # noqa
mB -= bias_corr_mB
e_mB = np.sqrt(e_mB**2 + e_bias_corr_mB**2)
mask = (zCMB < zcmb_max) & (zCMB > zcmb_min)
if kind == "Pantheon+_groups":
mask &= np.isfinite(zCMB_Group)
if kind == "Pantheon+_groups_zSN":
mask &= np.isfinite(zCMB_Group)
zCMB = zCMB_SN
if kind == "Pantheon+_zSN":
zCMB = zCMB_SN
calibration_params = {"mag": mB[mask], "x1": x1[mask], "c": c[mask],
"e_mag": e_mB[mask], "e_x1": e_x1[mask],
"e_c": e_c[mask]}
model = PV_LogLikelihood(
los_overdensity[:, mask], los_velocity[:, mask],
RA[mask], dec[mask], zCMB[mask], e_zCMB[mask], calibration_params,
None, mag_selection, loader.rdist, loader._Omega_m, "SN",
name=kind)
elif kind in ["SFI_gals", "2MTF", "SFI_gals_masked"]:
keys = ["RA", "DEC", "z_CMB", "mag", "eta", "e_mag", "e_eta"]
RA, dec, zCMB, mag, eta, e_mag, e_eta = (loader.cat[k] for k in keys)
mask = (zCMB < zcmb_max) & (zCMB > zcmb_min)
calibration_params = {"mag": mag[mask], "eta": eta[mask],
"e_mag": e_mag[mask], "e_eta": e_eta[mask]}
model = PV_LogLikelihood(
los_overdensity[:, mask], los_velocity[:, mask],
RA[mask], dec[mask], zCMB[mask], None, calibration_params, None,
mag_selection, loader.rdist, loader._Omega_m, "TFR", name=kind)
elif "CF4_TFR_" in kind:
# The full name can be e.g. "CF4_TFR_not2MTForSFI_i" or "CF4_TFR_i".
band = kind.split("_")[-1]
if band not in ['g', 'r', 'i', 'z', 'w1', 'w2']:
raise ValueError(f"Band `{band}` not recognized.")
keys = ["RA", "DEC", "Vcmb", f"{band}", "lgWmxi", "elgWi",
"not_matched_to_2MTF_or_SFI", "Qs", "Qw"]
RA, dec, z_obs, mag, eta, e_eta, not_matched_to_2MTF_or_SFI, Qs, Qw = (
loader.cat[k] for k in keys)
l, b = radec_to_galactic(RA, dec)
not_matched_to_2MTF_or_SFI = not_matched_to_2MTF_or_SFI.astype(bool)
# NOTE: fiducial uncertainty until we can get the actual values.
e_mag = 0.05 * np.ones_like(mag)
z_obs /= SPEED_OF_LIGHT
eta -= 2.5
fprint("selecting only galaxies with mag > 5 and eta > -0.3.")
mask = (mag > 5) & (eta > -0.3)
fprint("selecting only galaxies with |b| > 7.5.")
mask &= np.abs(b) > 7.5
mask &= (z_obs < zcmb_max) & (z_obs > zcmb_min)
if "not2MTForSFI" in kind:
mask &= not_matched_to_2MTF_or_SFI
elif "2MTForSFI" in kind:
mask &= ~not_matched_to_2MTF_or_SFI
fprint("employing a quality cut on the galaxies.")
if "w" in band:
mask &= Qw == 5
else:
mask &= Qs == 5
# Read the absolute calibration
if absolute_calibration is not None:
CF4_length = len(RA)
distmod, with_calibration, length_calibration = read_absolute_calibration( # noqa
"Cepheids", CF4_length, calibration_fpath)
distmod = distmod[mask]
with_calibration = with_calibration[mask]
length_calibration = length_calibration[mask]
fprint(f"found {np.sum(with_calibration)} galaxies with absolute calibration.") # noqa
distmod = distmod[with_calibration]
length_calibration = length_calibration[with_calibration]
abs_calibration_params = {
"calibration_distmod": distmod,
"data_with_calibration": with_calibration,
"length_calibration": length_calibration}
else:
abs_calibration_params = None
calibration_params = {"mag": mag[mask], "eta": eta[mask],
"e_mag": e_mag[mask], "e_eta": e_eta[mask]}
model = PV_LogLikelihood(
los_overdensity[:, mask], los_velocity[:, mask],
RA[mask], dec[mask], z_obs[mask], None, calibration_params,
abs_calibration_params, mag_selection, loader.rdist,
loader._Omega_m, "TFR", name=kind)
elif kind in ["CF4_GroupAll"]:
# Note, this for some reason works terribly.
keys = ["RA", "DE", "Vcmb", "DMzp", "eDM"]
RA, dec, zCMB, mu, e_mu = (loader.cat[k] for k in keys)
zCMB /= SPEED_OF_LIGHT
mask = (zCMB < zcmb_max) & (zCMB > zcmb_min) & np.isfinite(mu)
# The distance moduli in CF4 are most likely given assuming h = 0.75
mu += 5 * np.log10(0.75)
calibration_params = {"mu": mu[mask], "e_mu": e_mu[mask]}
model = PV_LogLikelihood(
los_overdensity[:, mask], los_velocity[:, mask],
RA[mask], dec[mask], zCMB[mask], None, calibration_params, None,
mag_selection, loader.rdist, loader._Omega_m, "simple",
name=kind)
else:
raise ValueError(f"Catalogue `{kind}` not recognized.")
fprint(f"selected {np.sum(mask)}/{len(mask)} galaxies in catalogue `{kind}`") # noqa
return model
###############################################################################
# Predicting z_cosmo from z_obs #
###############################################################################

csiborgtools/flow/io.py (new file, 518 lines)
View File

@@ -0,0 +1,518 @@
# Copyright (C) 2024 Richard Stiskalek
# This program is free software; you can redistribute it and/or modify it
# under the terms of the GNU General Public License as published by the
# Free Software Foundation; either version 3 of the License, or (at your
# option) any later version.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General
# Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
import numpy as np
from astropy.cosmology import FlatLambdaCDM
from h5py import File
from ..params import SPEED_OF_LIGHT, simname2Omega_m
from ..utils import fprint, radec_to_galactic, radec_to_supergalactic
from .flow_model import PV_LogLikelihood
H0 = 100 # km / s / Mpc
##############################################################################
# Data loader #
###############################################################################
class DataLoader:
"""
Data loader for the line of sight (LOS) interpolated fields and the
corresponding catalogues.
Parameters
----------
simname : str
Simulation name.
ksim : int or list of int
Index of the simulation to read in (not the IC index).
catalogue : str
Name of the catalogue with LOS objects.
catalogue_fpath : str
Path to the LOS catalogue file.
paths : csiborgtools.read.Paths
Paths object.
ksmooth : int, optional
Smoothing index.
store_full_velocity : bool, optional
Whether to store the full 3D velocity field. Otherwise stores only
the radial velocity.
verbose : bool, optional
Verbose flag.
"""
def __init__(self, simname, ksim, catalogue, catalogue_fpath, paths,
ksmooth=None, store_full_velocity=False, verbose=True):
fprint("reading the catalogue,", verbose)
self._cat = self._read_catalogue(catalogue, catalogue_fpath)
self._catname = catalogue
fprint("reading the interpolated field.", verbose)
self._field_rdist, self._los_density, self._los_velocity = self._read_field( # noqa
simname, ksim, catalogue, ksmooth, paths)
if len(self._cat) != self._los_density.shape[1]:
raise ValueError("The number of objects in the catalogue does not "
"match the number of objects in the field.")
fprint("calculating the radial velocity.", verbose)
nobject = self._los_density.shape[1]
dtype = self._los_density.dtype
if simname in ["Carrick2015", "Lilow2024"]:
# Carrick+2015 and Lilow+2024 are in galactic coordinates
d1, d2 = radec_to_galactic(self._cat["RA"], self._cat["DEC"])
elif simname in ["CF4", "CLONES"]:
# CF4 and CLONES are in supergalactic coordinates
d1, d2 = radec_to_supergalactic(self._cat["RA"], self._cat["DEC"])
else:
d1, d2 = self._cat["RA"], self._cat["DEC"]
num_sims = len(self._los_density)
if "IndranilVoid" in simname:
self._los_radial_velocity = self._los_velocity
self._los_velocity = None
else:
radvel = np.empty(
(num_sims, nobject, len(self._field_rdist)), dtype)
for k in range(num_sims):
for i in range(nobject):
radvel[k, i, :] = radial_velocity_los(
self._los_velocity[k, :, i, ...], d1[i], d2[i])
self._los_radial_velocity = radvel
if not store_full_velocity:
self._los_velocity = None
self._Omega_m = simname2Omega_m(simname)
# Normalize the CSiBORG & CLONES density by the mean matter density
if "csiborg" in simname or simname == "CLONES":
cosmo = FlatLambdaCDM(H0=H0, Om0=self._Omega_m)
mean_rho_matter = cosmo.critical_density0.to("Msun/kpc^3").value
mean_rho_matter *= self._Omega_m
self._los_density /= mean_rho_matter
# Since Carrick+2015 and CF4 provide `rho / <rho> - 1`
if simname in ["Carrick2015", "CF4", "CF4gp"]:
self._los_density += 1
# But some CF4 delta values are < -1. Check that CF4 really reports
# this.
if simname in ["CF4", "CF4gp"]:
self._los_density = np.clip(self._los_density, 1e-2, None,)
# Outside of its data range Lilow+2024 is NaN; replace it with finite
# values. This is fine because the PV tracers do not extend that far.
if simname == "Lilow2024":
self._los_density[np.isnan(self._los_density)] = 1.
self._los_radial_velocity[np.isnan(self._los_radial_velocity)] = 0.
self._mask = np.ones(len(self._cat), dtype=bool)
self._catname = catalogue
@property
def cat(self):
"""The distance indicators catalogue (structured array)."""
return self._cat[self._mask]
@property
def catname(self):
"""Catalogue name."""
return self._catname
@property
def rdist(self):
"""Radial distances at which the field was interpolated."""
return self._field_rdist
@property
def los_density(self):
"""
Density field along the line of sight `(n_sims, n_objects, n_steps)`
"""
return self._los_density[:, self._mask, ...]
@property
def los_velocity(self):
"""
Velocity field along the line of sight `(n_sims, 3, n_objects,
n_steps)`.
"""
if self._los_velocity is None:
raise ValueError("The 3D velocities were not stored.")
return self._los_velocity[:, :, self._mask, ...]
@property
def los_radial_velocity(self):
"""
Radial velocity along the line of sight `(n_sims, n_objects, n_steps)`.
"""
return self._los_radial_velocity[:, self._mask, ...]
def _read_field(self, simname, ksims, catalogue, ksmooth, paths):
nsims = paths.get_ics(simname)
if isinstance(ksims, int):
ksims = [ksims]
# For no-field read in Carrick+2015 but then zero it.
if simname == "no_field":
simname = "Carrick2015"
to_wipe = simname == "no_field"
if not all(0 <= ksim < len(nsims) for ksim in ksims):
raise ValueError(f"Invalid simulation index: `{ksims}`")
if "Pantheon+" in catalogue:
fpath = paths.field_los(simname, "Pantheon+")
elif "CF4_TFR" in catalogue:
fpath = paths.field_los(simname, "CF4_TFR")
else:
fpath = paths.field_los(simname, catalogue)
los_density = [None] * len(ksims)
los_velocity = [None] * len(ksims)
for n, ksim in enumerate(ksims):
nsim = nsims[ksim]
with File(fpath, 'r') as f:
has_smoothed = f[f"density_{nsim}"].ndim > 2
if has_smoothed and (ksmooth is None or not isinstance(ksmooth, int)): # noqa
raise ValueError("The output contains smoothed field but "
"`ksmooth` is None. It must be provided.")
indx = (..., ksmooth) if has_smoothed else (...)
los_density[n] = f[f"density_{nsim}"][indx]
los_velocity[n] = f[f"velocity_{nsim}"][indx]
rdist = f[f"rdist_{nsim}"][...]
los_density = np.stack(los_density)
los_velocity = np.stack(los_velocity)
if to_wipe:
los_density = np.ones_like(los_density)
los_velocity = np.zeros_like(los_velocity)
return rdist, los_density, los_velocity
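
For reference, a minimal sketch (with a hypothetical file name and made-up sizes) of the per-simulation HDF5 layout that `_read_field` expects: `rdist_<nsim>`, `density_<nsim>` and `velocity_<nsim>` datasets, with an extra trailing smoothing axis only when `ksmooth` applies.

import numpy as np
from h5py import File

nsim = 7444                      # hypothetical IC index
n_objects, n_steps = 100, 400    # catalogue size and number of radial steps

with File("los_example.hdf5", "w") as f:
    # Unsmoothed case: density is 2D, velocity is 3D (components first).
    f.create_dataset(f"rdist_{nsim}", data=np.arange(0, 200, 0.5))
    f.create_dataset(f"density_{nsim}",
                     data=np.ones((n_objects, n_steps), dtype=np.float32))
    f.create_dataset(f"velocity_{nsim}",
                     data=np.zeros((3, n_objects, n_steps), dtype=np.float32))

with File("los_example.hdf5", "r") as f:
    has_smoothed = f[f"density_{nsim}"].ndim > 2
    rdist = f[f"rdist_{nsim}"][...]
    los_density = f[f"density_{nsim}"][...]       # (n_objects, n_steps)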
def _read_catalogue(self, catalogue, catalogue_fpath):
if catalogue == "A2":
with File(catalogue_fpath, 'r') as f:
dtype = [(key, np.float32) for key in f.keys()]
arr = np.empty(len(f["RA"]), dtype=dtype)
for key in f.keys():
arr[key] = f[key][:]
elif catalogue in ["LOSS", "Foundation", "SFI_gals", "2MTF",
"Pantheon+", "SFI_gals_masked", "SFI_groups",
"Pantheon+_groups", "Pantheon+_groups_zSN",
"Pantheon+_zSN"]:
with File(catalogue_fpath, 'r') as f:
if "Pantheon+" in catalogue:
grp = f["Pantheon+"]
else:
grp = f[catalogue]
dtype = [(key, np.float32) for key in grp.keys()]
arr = np.empty(len(grp["RA"]), dtype=dtype)
for key in grp.keys():
arr[key] = grp[key][:]
elif "CB2_" in catalogue:
with File(catalogue_fpath, 'r') as f:
dtype = [(key, np.float32) for key in f.keys()]
arr = np.empty(len(f["RA"]), dtype=dtype)
for key in f.keys():
arr[key] = f[key][:]
elif "UPGLADE" in catalogue:
with File(catalogue_fpath, 'r') as f:
dtype = [(key, np.float32) for key in f.keys()]
arr = np.empty(len(f["RA"]), dtype=dtype)
for key in f.keys():
if key == "mask":
continue
arr[key] = f[key][:]
elif catalogue in ["CF4_GroupAll"] or "CF4_TFR" in catalogue:
with File(catalogue_fpath, 'r') as f:
dtype = [(key, np.float32) for key in f.keys()]
dtype += [("DEC", np.float32)]
arr = np.empty(len(f["RA"]), dtype=dtype)
for key in f.keys():
arr[key] = f[key][:]
arr["DEC"] = arr["DE"]
if "CF4_TFR" in catalogue:
arr["RA"] *= 360 / 24
else:
raise ValueError(f"Unknown catalogue: `{catalogue}`.")
return arr
###############################################################################
# Supplementary flow functions #
###############################################################################
def radial_velocity_los(los_velocity, ra, dec):
"""
Calculate the radial velocity along the LOS from the 3D velocity
along the LOS `(3, n_steps)`.
"""
types = (float, np.float32, np.float64)
if not isinstance(ra, types) or not isinstance(dec, types):
raise ValueError("RA and dec must be floats.")
if los_velocity.ndim != 2 or los_velocity.shape[0] != 3:
raise ValueError("The shape of `los_velocity` must be (3, n_steps).")
ra_rad, dec_rad = np.deg2rad(ra), np.deg2rad(dec)
vx, vy, vz = los_velocity
return (vx * np.cos(ra_rad) * np.cos(dec_rad)
+ vy * np.sin(ra_rad) * np.cos(dec_rad)
+ vz * np.sin(dec_rad))
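
A quick usage sketch for `radial_velocity_los` above; the velocity values are made up and only illustrate the expected `(3, n_steps)` layout.

import numpy as np

n_steps = 5
los_velocity = np.vstack([np.full(n_steps, 100.0),   # v_x
                          np.zeros(n_steps),          # v_y
                          np.zeros(n_steps)])         # v_z
# A sightline at RA = 0 deg, dec = 0 deg points along the x-axis, so the
# radial component simply recovers v_x.
vrad = radial_velocity_los(los_velocity, 0.0, 0.0)
print(vrad)   # -> [100. 100. 100. 100. 100.]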
##############################################################################
# Shortcut to create a model #
###############################################################################
def read_absolute_calibration(kind, data_length, calibration_fpath):
"""
Read the absolute calibration for the CF4 TFR sample from LEDA but
preprocessed by me.
Parameters
----------
kind : str
Calibration kind: `Cepheids`, `TRGB`, `SBF`, ...
data_length : int
Number of samples in CF4 TFR (should be 9,788).
calibration_fpath : str
Path to the preprocessed calibration file.
Returns
-------
data : 3-dimensional array of shape (data_length, max_calib, 2)
Absolute calibration data.
with_calibration : 1-dimensional array of shape (data_length)
Whether the sample has a calibration.
length_calibration : 1-dimensional array of shape (data_length)
Number of calibration points per sample.
"""
data = {}
with File(calibration_fpath, 'r') as f:
for key in f[kind].keys():
x = f[kind][key][:]
# Get rid of points without uncertainties
x = x[~np.isnan(x[:, 1])]
data[key] = x
max_calib = max(len(val) for val in data.values())
out = np.full((data_length, max_calib, 2), np.nan)
with_calibration = np.full(data_length, False)
length_calibration = np.full(data_length, 0)
for i in data.keys():
out[int(i), :len(data[i]), :] = data[i]
with_calibration[int(i)] = True
length_calibration[int(i)] = len(data[i])
return out, with_calibration, length_calibration
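
A sketch (hypothetical file name and values) of the calibration-file layout `read_absolute_calibration` expects: one group per calibration kind, one dataset per CF4 row index, with columns `(distance modulus, uncertainty)`.

import numpy as np
from h5py import File

fpath = "absolute_calibration_example.hdf5"
with File(fpath, "w") as f:
    grp = f.create_group("Cepheids")
    grp.create_dataset("3", data=np.array([[31.20, 0.05], [31.30, np.nan]]))
    grp.create_dataset("11", data=np.array([[33.10, 0.08]]))

out, with_calibration, length_calibration = read_absolute_calibration(
    "Cepheids", 20, fpath)
print(with_calibration.sum())    # -> 2, rows 3 and 11 have calibrators
print(length_calibration[3])     # -> 1, the NaN-uncertainty point is dropped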
def get_model(loader, zcmb_min=None, zcmb_max=None, mag_selection=None,
absolute_calibration=None, calibration_fpath=None):
"""
Get a model and extract the relevant data from the loader.
Parameters
----------
loader : DataLoader
DataLoader instance.
zcmb_min : float, optional
Minimum observed redshift in the CMB frame to include.
zcmb_max : float, optional
Maximum observed redshift in the CMB frame to include.
mag_selection : dict, optional
Magnitude selection parameters.
absolute_calibration : bool, optional
    Whether to add an absolute calibration for CF4 TFRs.
calibration_fpath : str, optional
    Path to the preprocessed absolute calibration file.
Returns
-------
model : NumPyro model
"""
zcmb_min = 0.0 if zcmb_min is None else zcmb_min
zcmb_max = np.infty if zcmb_max is None else zcmb_max
los_overdensity = loader.los_density
los_velocity = loader.los_radial_velocity
kind = loader._catname
if absolute_calibration is not None and "CF4_TFR_" not in kind:
raise ValueError("Absolute calibration supported only for the CF4 TFR sample.") # noqa
if kind in ["LOSS", "Foundation"]:
keys = ["RA", "DEC", "z_CMB", "mB", "x1", "c", "e_mB", "e_x1", "e_c"]
RA, dec, zCMB, mag, x1, c, e_mag, e_x1, e_c = (
loader.cat[k] for k in keys)
e_zCMB = None
mask = (zCMB < zcmb_max) & (zCMB > zcmb_min)
calibration_params = {"mag": mag[mask], "x1": x1[mask], "c": c[mask],
"e_mag": e_mag[mask], "e_x1": e_x1[mask],
"e_c": e_c[mask]}
model = PV_LogLikelihood(
los_overdensity[:, mask], los_velocity[:, mask],
RA[mask], dec[mask], zCMB[mask], e_zCMB, calibration_params,
None, mag_selection, loader.rdist, loader._Omega_m, "SN",
name=kind)
elif "Pantheon+" in kind:
keys = ["RA", "DEC", "zCMB", "mB", "x1", "c", "biasCor_m_b", "mBERR",
"x1ERR", "cERR", "biasCorErr_m_b", "zCMB_SN", "zCMB_Group",
"zCMBERR"]
RA, dec, zCMB, mB, x1, c, bias_corr_mB, e_mB, e_x1, e_c, e_bias_corr_mB, zCMB_SN, zCMB_Group, e_zCMB = (loader.cat[k] for k in keys) # noqa
mB -= bias_corr_mB
e_mB = np.sqrt(e_mB**2 + e_bias_corr_mB**2)
mask = (zCMB < zcmb_max) & (zCMB > zcmb_min)
if kind == "Pantheon+_groups":
mask &= np.isfinite(zCMB_Group)
if kind == "Pantheon+_groups_zSN":
mask &= np.isfinite(zCMB_Group)
zCMB = zCMB_SN
if kind == "Pantheon+_zSN":
zCMB = zCMB_SN
calibration_params = {"mag": mB[mask], "x1": x1[mask], "c": c[mask],
"e_mag": e_mB[mask], "e_x1": e_x1[mask],
"e_c": e_c[mask]}
model = PV_LogLikelihood(
los_overdensity[:, mask], los_velocity[:, mask],
RA[mask], dec[mask], zCMB[mask], e_zCMB[mask], calibration_params,
None, mag_selection, loader.rdist, loader._Omega_m, "SN",
name=kind)
elif kind in ["SFI_gals", "2MTF", "SFI_gals_masked"]:
keys = ["RA", "DEC", "z_CMB", "mag", "eta", "e_mag", "e_eta"]
RA, dec, zCMB, mag, eta, e_mag, e_eta = (loader.cat[k] for k in keys)
mask = (zCMB < zcmb_max) & (zCMB > zcmb_min)
calibration_params = {"mag": mag[mask], "eta": eta[mask],
"e_mag": e_mag[mask], "e_eta": e_eta[mask]}
model = PV_LogLikelihood(
los_overdensity[:, mask], los_velocity[:, mask],
RA[mask], dec[mask], zCMB[mask], None, calibration_params, None,
mag_selection, loader.rdist, loader._Omega_m, "TFR", name=kind)
elif "CF4_TFR_" in kind:
# The full name can be e.g. "CF4_TFR_not2MTForSFI_i" or "CF4_TFR_i".
band = kind.split("_")[-1]
if band not in ['g', 'r', 'i', 'z', 'w1', 'w2']:
raise ValueError(f"Band `{band}` not recognized.")
keys = ["RA", "DEC", "Vcmb", f"{band}", "lgWmxi", "elgWi",
"not_matched_to_2MTF_or_SFI", "Qs", "Qw"]
RA, dec, z_obs, mag, eta, e_eta, not_matched_to_2MTF_or_SFI, Qs, Qw = (
loader.cat[k] for k in keys)
l, b = radec_to_galactic(RA, dec)
not_matched_to_2MTF_or_SFI = not_matched_to_2MTF_or_SFI.astype(bool)
# NOTE: fiducial uncertainty until we can get the actual values.
e_mag = 0.05 * np.ones_like(mag)
z_obs /= SPEED_OF_LIGHT
eta -= 2.5
fprint("selecting only galaxies with mag > 5 and eta > -0.3.")
mask = (mag > 5) & (eta > -0.3)
fprint("selecting only galaxies with |b| > 7.5.")
mask &= np.abs(b) > 7.5
mask &= (z_obs < zcmb_max) & (z_obs > zcmb_min)
if "not2MTForSFI" in kind:
mask &= not_matched_to_2MTF_or_SFI
elif "2MTForSFI" in kind:
mask &= ~not_matched_to_2MTF_or_SFI
fprint("employing a quality cut on the galaxies.")
if "w" in band:
mask &= Qw == 5
else:
mask &= Qs == 5
# Read the absolute calibration
if absolute_calibration is not None:
CF4_length = len(RA)
distmod, with_calibration, length_calibration = read_absolute_calibration( # noqa
"Cepheids", CF4_length, calibration_fpath)
distmod = distmod[mask]
with_calibration = with_calibration[mask]
length_calibration = length_calibration[mask]
fprint(f"found {np.sum(with_calibration)} galaxies with absolute calibration.") # noqa
distmod = distmod[with_calibration]
length_calibration = length_calibration[with_calibration]
abs_calibration_params = {
"calibration_distmod": distmod,
"data_with_calibration": with_calibration,
"length_calibration": length_calibration}
else:
abs_calibration_params = None
calibration_params = {"mag": mag[mask], "eta": eta[mask],
"e_mag": e_mag[mask], "e_eta": e_eta[mask]}
model = PV_LogLikelihood(
los_overdensity[:, mask], los_velocity[:, mask],
RA[mask], dec[mask], z_obs[mask], None, calibration_params,
abs_calibration_params, mag_selection, loader.rdist,
loader._Omega_m, "TFR", name=kind)
elif kind in ["CF4_GroupAll"]:
# Note, this for some reason works terribly.
keys = ["RA", "DE", "Vcmb", "DMzp", "eDM"]
RA, dec, zCMB, mu, e_mu = (loader.cat[k] for k in keys)
zCMB /= SPEED_OF_LIGHT
mask = (zCMB < zcmb_max) & (zCMB > zcmb_min) & np.isfinite(mu)
# The distance moduli in CF4 are most likely given assuming h = 0.75
mu += 5 * np.log10(0.75)
calibration_params = {"mu": mu[mask], "e_mu": e_mu[mask]}
model = PV_LogLikelihood(
los_overdensity[:, mask], los_velocity[:, mask],
RA[mask], dec[mask], zCMB[mask], None, calibration_params, None,
mag_selection, loader.rdist, loader._Omega_m, "simple",
name=kind)
else:
raise ValueError(f"Catalogue `{kind}` not recognized.")
fprint(f"selected {np.sum(mask)}/{len(mask)} galaxies in catalogue `{kind}`") # noqa
return model
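
End-to-end sketch of the new split: `DataLoader` (I/O) feeds `get_model` (likelihood construction). It assumes a configured `csiborgtools.read.Paths` instance named `paths` and existing LOS and catalogue files; the catalogue path below is a placeholder.

loader = DataLoader("Carrick2015", ksim=0, catalogue="LOSS",
                    catalogue_fpath="/path/to/catalogue.hdf5",
                    paths=paths, ksmooth=0)
model = get_model(loader, zcmb_min=0.0, zcmb_max=0.05)
# `model` is a PV_LogLikelihood instance, to be passed (in a list, together
# with its distance-modulus hyperparameters) to `PV_validation_model`.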

View File

@@ -0,0 +1,130 @@
# Copyright (C) 2024 Richard Stiskalek
# This program is free software; you can redistribute it and/or modify it
# under the terms of the GNU General Public License as published by the
# Free Software Foundation; either version 3 of the License, or (at your
# option) any later version.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General
# Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
"""Function to work with the void data from Sergij & Indranil's files."""
from glob import glob
from os.path import join
from re import search
import numpy as np
from jax import numpy as jnp
from jax import vmap
from jax.scipy.ndimage import map_coordinates
from tqdm import tqdm
###############################################################################
# I/O of the void data #
###############################################################################
def load_void_data(kind):
"""
Load the void velocities from Sergij & Indranil's files for a given kind
of void profile per observer.
Parameters
----------
kind : str
The kind of void profile to load. One of "exp", "gauss", "mb".
Returns
-------
rLG : 1-dimensional array
    Sorted observer distances from the void centre, parsed from the file names.
velocities : 3-dimensional array of shape (nLG, nrad, nphi)
"""
if kind not in ["exp", "gauss", "mb"]:
raise ValueError("kind must be one of 'exp', 'gauss', 'mb'")
fdir = "/mnt/extraspace/rstiskalek/catalogs/IndranilVoid"
kind = kind.upper()
fdir = join(fdir, f"{kind}profile")
files = glob(join(fdir, "*.dat"))
rLG = [int(search(rf'v_pec_{kind}profile_rLG_(\d+)', f).group(1))
for f in files]
rLG = np.sort(rLG)
for i, ri in enumerate(tqdm(rLG, desc="Loading observer data")):
f = join(fdir, f"v_pec_{kind}profile_rLG_{ri}.dat")
data_i = np.genfromtxt(f).T
if i == 0:
data = np.full((len(rLG), *data_i.shape), np.nan, dtype=np.float32)
data[i] = data_i
if np.any(np.isnan(data)):
raise ValueError("Found NaNs in loaded data.")
return rLG, data
###############################################################################
# Interpolation of void velocities #
###############################################################################
def interpolate_void(rLG, r, phi, data, rgrid_min, rgrid_max, rLG_min, rLG_max,
order=1):
"""
Interpolate the void velocities from Sergij & Indranil's files for a given
observer over a set of radial distances and at angles specifying the
galaxies.
Parameters
----------
rLG : float
The observer's distance from the center of the void.
r : 1-dimensional array
The radial distances at which to interpolate the velocities.
phi : 1-dimensional array
The angles at which to interpolate the velocities, in degrees,
defining the galaxy position.
data : 3-dimensional array of shape (nLG, nrad, nphi)
The void velocities for different observers, radial distances, and
angles.
rgrid_min, rgrid_max : float
The minimum and maximum radial distances in the data.
rLG_min, rLG_max : float
The minimum and maximum observer distances in the data.
order : int, optional
The order of the interpolation. Default is 1, can be 0.
Returns
-------
vel : 2-dimensional array of shape (len(phi), len(r))
"""
nLG, nrad, nphi = data.shape
# Normalize rLG to the grid scale
rLG_normalized = (rLG - rLG_min) / (rLG_max - rLG_min) * (nLG - 1)
rLG_normalized = jnp.repeat(rLG_normalized, r.size)
r_normalized = (r - rgrid_min) / (rgrid_max - rgrid_min) * (nrad - 1)
# Function to perform interpolation for a single phi
def interpolate_single_phi(phi_val):
# Normalize phi to match the grid
phi_normalized = phi_val / 180 * (nphi - 1)
# Create the grid for this specific phi
X = jnp.vstack([rLG_normalized,
r_normalized,
jnp.repeat(phi_normalized, r.size)])
# Interpolate over the data using map_coordinates. The mode is nearest
# to avoid extrapolation. But values outside of the grid should never
# occur.
return map_coordinates(data, X, order=order, mode='nearest')
return vmap(interpolate_single_phi)(phi)
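
A synthetic-data sketch of `interpolate_void`; the grid limits and the constant velocity value are made up, only the array shapes follow the function above.

from jax import numpy as jnp

nLG, nrad, nphi = 11, 50, 19                # observer distances, radii, angles
data = jnp.full((nLG, nrad, nphi), 100.0)   # constant 100 km/s toy profile

r = jnp.linspace(0.0, 200.0, 32)            # radial steps along the sightlines
phi = jnp.array([0.0, 45.0, 90.0])          # angles of three galaxies [deg]

vel = interpolate_void(rLG=5.0, r=r, phi=phi, data=data,
                       rgrid_min=0.0, rgrid_max=200.0,
                       rLG_min=0.0, rLG_max=10.0, order=1)
print(vel.shape)   # -> (3, 32); constant 100 km/s everywhere for this toy input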

View File

@@ -16,11 +16,11 @@ from .catalogue import (CSiBORG1Catalogue, CSiBORG2Catalogue,
CSiBORG2SUBFINDCatalogue, # noqa
CSiBORG2MergerTreeReader, QuijoteCatalogue, # noqa
MDPL2Catalogue, fiducial_observers) # noqa
from .snapshot import (CSiBORG1Snapshot, CSiBORG2Snapshot, QuijoteSnapshot, # noqa
CSiBORG1Field, CSiBORG2Field, CSiBORG2XField, # noqa
QuijoteField, BORG2Field, BORG1Field, TNG300_1Field, # noqa
Carrick2015Field, Lilow2024Field, CLONESField, # noqa
CF4Field) # noqa
from .snapshot import (CSiBORG1Snapshot, CSiBORG2Snapshot, CSiBORG2XSnapshot, # noqa
QuijoteSnapshot, CSiBORG1Field, CSiBORG2Field, # noqa
CSiBORG2XField, QuijoteField, BORG2Field, BORG1Field, # noqa
TNG300_1Field, Carrick2015Field, Lilow2024Field, # noqa
CLONESField, CF4Field) # noqa
from .obs import (SDSS, MCXCClusters, PlanckClusters, TwoMPPGalaxies, # noqa
TwoMPPGroups, ObservedCluster, match_array_to_no_masking, # noqa
cols_to_structured, read_pantheonplus_data) # noqa

View File

@@ -118,6 +118,8 @@ class Paths:
fdir = join(fdir, "2MPP_N128_DES_V1", "resimulations", "R512")
files = glob(join(fdir, "mcmc_*"))
files = [int(search(r'mcmc_(\d+)', f).group(1)) for f in files]
else:
raise ValueError(f"Unknown MANTICORE simulation `{simname}`.")
elif simname == "csiborg2X":
# NOTE this too is preliminary
fname = "/mnt/extraspace/rstiskalek/MANTICORE/resimulations/fields/2MPP_N128_DES_PROD/R512" # noqa
@@ -134,7 +136,7 @@ class Paths:
for file in files if search(r'realization(\d+)_delta\.fits', file)] # noqa
files = [int(file) for file in files]
# Downsample to only 20 realisations
files = files[::5]
# files = files[::5]
elif simname in ["Carrick2015", "Lilow2024", "no_field", "CLONES"]:
files = [0]
elif "IndranilVoid" in simname:
@@ -214,9 +216,21 @@ class Paths:
f"chain_16417_{str(nsim).zfill(3)}", "output",
f"snapshot_{str(nsnap).zfill(3)}.hdf5")
elif simname == "csiborg2X":
raise ValueError("Snapshots not available for CSiBORG2X.")
raise ValueError("Snapshots not available for CSiBORG2X based on "
"Stephen's ICs.")
elif "manticore" in simname:
raise ValueError("Snapshots not available for MANTICORE.")
fdir = self.manticore_dir
if simname == "manticore_2MPP_N128_DES_V1":
if nsnap != 1:
raise ValueError("Only snapshot 1 is available for "
"MANTICORE 2MPP_N128_DES_V1.")
fdir = join(fdir, "2MPP_N128_DES_V1", "resimulations", "R512")
nsnap_str = str(nsnap).zfill(4)
fpath = join(fdir, f"mcmc_{nsim}", "swift_monofonic",
f"snap_{nsnap_str}", f"snap_{nsnap_str}.hdf5")
else:
raise ValueError(f"Unknown MANTICORE simulation `{simname}`.")
elif simname == "quijote":
fpath = join(self.quijote_dir, "fiducial_processed",
f"chain_{nsim}",
@@ -407,6 +421,11 @@ class Paths:
else:
raise ValueError(f"Unsupported CSiBORG2X field: `{kind}`.")
if simname == "manticore_2MPP_N128_DES_V1":
basedir = join(self.manticore_dir, "2MPP_N128_DES_V1",
"fields", "R512")
return join(basedir, f"SPH_{nsim}.hdf5")
if simname == "Carrick2015":
basedir = "/mnt/extraspace/rstiskalek/catalogs"
if kind == "overdensity":

View File

@@ -517,6 +517,91 @@ class CSiBORG2Snapshot(BaseSnapshot):
}
###############################################################################
# CSiBORG2x snapshot class #
###############################################################################
class CSiBORG2XSnapshot(BaseSnapshot):
"""
CSiBORG2X snapshot from the SWIFT N-body simulations provided by Stuart
based on the Manticore ICs. The snapshots are at `z = 0` and correspond to
`manticore_2MPP_N128_DES_V1`.
Parameters
----------
nsim : int
    Simulation index.
paths : Paths, optional
    Paths object.
keep_snapshot_open : bool, optional
    Whether to keep the snapshot file open when reading halo particles.
    This is useful for repeated access to the snapshot.
"""
def __init__(self, nsim, paths=None, keep_snapshot_open=False):
nsnap = 1
flip_xz = False
super().__init__(nsim, nsnap, paths, keep_snapshot_open, flip_xz)
fpath = self.paths.snapshot(self.nsnap, self.nsim,
"manticore_2MPP_N128_DES_V1")
self._snapshot_path = fpath
self._simname = "manticore_2MPP_N128_DES_V1"
def _get_particles(self, kind):
with File(self._snapshot_path, "r") as f:
h = f["Cosmology"].attrs["h"][0]
if kind == "Masses":
npart = f["Header"].attrs["NumPart_Total"][1]
mpart = f["Header"].attrs["InitialMassTable"][1] * 1e10 * h
x = np.ones(npart, dtype=np.float32) * mpart
else:
x = f[f"DMParticles/{kind}"][...]
# Convert coordinates to Mpc / h
if kind == "Coordinates":
x *= h
return x
def coordinates(self):
return self._get_particles("Coordinates")
def velocities(self):
return self._get_particles("Velocities")
def masses(self):
return self._get_particles("Masses")
def particle_ids(self):
raise NotImplementedError("Recovering particle IDs is not implemented "
"for CSiBORG2X.")
def _get_halo_particles(self, halo_id, kind, is_group):
raise NotImplementedError("Recovering halo particles is not "
"implemented for CSiBORG2X.")
def halo_coordinates(self, halo_id, is_group=True):
return self._get_halo_particles(halo_id, "Coordinates", is_group)
def halo_velocities(self, halo_id, is_group=True):
return self._get_halo_particles(halo_id, "Velocities", is_group)
def halo_masses(self, halo_id, is_group=True):
return self._get_halo_particles(halo_id, "Masses", is_group) * 1e10
def _make_hid2offset(self):
raise NotImplementedError("Recovering halo offsets is not implemented"
"for CSiBORG2X.")
###############################################################################
# CSiBORG2 snapshot class #
###############################################################################
@@ -771,42 +856,90 @@ class CSiBORG2Field(BaseField):
class CSiBORG2XField(BaseField):
"""
CSiBORG2X `z = 0` field class.
CSiBORG2X `z = 0` field class based on the Manticore ICs.
Parameters
----------
nsim : int
Simulation index.
version : str
Manticore version index.
paths : Paths, optional
Paths object. By default, the paths are set to the `glamdring` paths.
"""
def __init__(self, nsim, paths=None):
def __init__(self, nsim, version, paths=None):
self.version = version
if version == 0:
self.nametag = "csiborg2X"
elif version == 1:
self.nametag = "manticore_2MPP_N128_DES_V1"
else:
raise ValueError("Invalid Manticore version.")
super().__init__(nsim, paths, False)
def overdensity_field(self, **kwargs):
fpath = self.paths.field(
"overdensity", None, None, self.nsim, "csiborg2X")
with File(fpath, "r") as f:
field = f["delta_cic"][...].astype(np.float32)
if self.version == 0:
fpath = self.paths.field(
"overdensity", None, None, self.nsim, self.nametag)
with File(fpath, "r") as f:
field = f["delta_cic"][...].astype(np.float32)
else:
raise ValueError("Invalid Manticore version to read the "
"overdensity field.")
return field
def density_field(self, **kwargs):
field = self.overdensity_field()
omega0 = simname2Omega_m("csiborg2X")
rho_mean = omega0 * 277.53662724583074 # Msun / kpc^3
field += 1
field *= rho_mean
if self.version == 0:
field = self.overdensity_field()
omega0 = simname2Omega_m(self.nametag)
rho_mean = omega0 * 277.53662724583074 # Msun / kpc^3
field += 1
field *= rho_mean
elif self.version == 1:
MAS = kwargs["MAS"]
grid = kwargs["grid"]
fpath = self.paths.field(
"density", MAS, grid, self.nsim, self.nametag)
if MAS == "SPH":
with File(fpath, "r") as f:
field = f["density"][:]
field /= (681.1 * 1e3 / grid)**3 # Convert to h^2 Msun / kpc^3
else:
field = np.load(fpath)
else:
raise ValueError("Invalid Manticore version to read the "
"density field.")
return field
def velocity_field(self, **kwargs):
fpath = self.paths.field(
"velocity", None, None, self.nsim, "csiborg2X")
with File(fpath, "r") as f:
v0 = f["v_0"][...]
v1 = f["v_1"][...]
v2 = f["v_2"][...]
field = np.array([v0, v1, v2])
if self.version == 0:
fpath = self.paths.field(
"velocity", None, None, self.nsim, "csiborg2X")
with File(fpath, "r") as f:
v0 = f["v_0"][...]
v1 = f["v_1"][...]
v2 = f["v_2"][...]
field = np.array([v0, v1, v2])
elif self.version == 1:
MAS = kwargs["MAS"]
grid = kwargs["grid"]
fpath = self.paths.field(
"velocity", MAS, grid, self.nsim, self.nametag)
if MAS == "SPH":
with File(fpath, "r") as f:
density = f["density"][:]
v0 = f["p0"][:] / density
v1 = f["p1"][:] / density
v2 = f["p2"][:] / density
field = np.array([v0, v1, v2])
else:
field = np.load(fpath)
return field
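
A sketch of reading both Manticore-based field flavours, assuming the corresponding field files exist; the grid value is illustrative.

import csiborgtools

# Version 0: the original csiborg2X fields (overdensity + velocity on disk).
field_v0 = csiborgtools.read.CSiBORG2XField(0, version=0)
delta = field_v0.overdensity_field()

# Version 1: fields built from the `manticore_2MPP_N128_DES_V1` SWIFT
# snapshots; density and velocity now require `MAS` and `grid` kwargs.
field_v1 = csiborgtools.read.CSiBORG2XField(0, version=1)
rho = field_v1.density_field(MAS="SPH", grid=1024)
vel = field_v1.velocity_field(MAS="SPH", grid=1024)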

File diff suppressed because one or more lines are too long

View File

@@ -153,7 +153,8 @@ def simname_to_pretty(simname):
"Lilow2024": "Lilow+24",
"csiborg1": "CB1",
"csiborg2_main": "CB2",
"csiborg2X": "Manticore",
"csiborg2X": "Manticore V0",
"manticore_2MPP_N128_DES_V1": "Manticore V1",
"CF4": "Courtois+23",
"CF4gp": "CF4group",
"CLONES": "Sorce+2018",

View File

@@ -54,6 +54,8 @@ def get_reader(simname, paths, nsim):
kind = simname.split("_")[-1]
reader = csiborgtools.read.CSiBORG2Snapshot(nsim, 99, kind, paths,
flip_xz=True)
elif simname == "manticore_2MPP_N128_DES_V1":
reader = csiborgtools.read.CSiBORG2XSnapshot(nsim)
else:
raise ValueError(f"Unknown simname: `{simname}`.")
@@ -72,7 +74,7 @@ def get_particles(reader, boxsize, get_velocity=True, verbose=True):
if get_velocity:
fprint("reading velocities.", verbose)
vel = reader.velocities().astype(dtype)
vrad = np.sum(pos, vel, axis=1) / dist
vrad = np.sum(pos * vel, axis=1) / dist
del pos
collect()
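
A quick numpy check of the corrected projection above, with made-up values: for a particle moving purely radially, `np.sum(pos * vel, axis=1) / dist` recovers its full speed.

import numpy as np

pos = np.array([[3.0, 4.0, 0.0]])     # one particle, 5 Mpc / h from the origin
vel = np.array([[30.0, 40.0, 0.0]])   # moving radially outwards at 50 km/s
dist = np.linalg.norm(pos, axis=1)
vrad = np.sum(pos * vel, axis=1) / dist
print(vrad)                           # -> [50.]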
@@ -282,14 +284,14 @@ if __name__ == "__main__":
parser.add_argument("--simname", type=str, help="Simulation name.",
choices=["csiborg1", "csiborg2_main", "csiborg2_varysmall", "csiborg2_random", # noqa
"borg1", "borg2", "borg2_all", "csiborg2X", "Carrick2015", # noqa
"Lilow2024", "CLONES", "CF4"]) # noqa
"Lilow2024", "CLONES", "CF4", "manticore_2MPP_N128_DES_V1"]) # noqa
args = parser.parse_args()
folder = "/mnt/extraspace/rstiskalek/csiborg_postprocessing/field_shells"
if args.simname in ["csiborg2X", "Carrick2015", "Lilow2024",
"CLONES", "CF4"]:
main_from_field(args, folder)
elif "csiborg" in args.simname:
elif "csiborg" in args.simname or args.simname == "manticore_2MPP_N128_DES_V1": # noqa
main_csiborg(args, folder)
elif "borg" in args.simname:
main_borg(args, folder)

View File

@@ -179,7 +179,9 @@ def get_field(simname, nsim, kind, MAS, grid):
simkind = simname.split("_")[-1]
field_reader = csiborgtools.read.CSiBORG2Field(nsim, simkind)
elif simname == "csiborg2X":
field_reader = csiborgtools.read.CSiBORG2XField(nsim)
field_reader = csiborgtools.read.CSiBORG2XField(nsim, version=0)
elif simname == "manticore_2MPP_N128_DES_V1":
field_reader = csiborgtools.read.CSiBORG2XField(nsim, version=1)
elif simname == "CLONES":
field_reader = csiborgtools.read.CLONESField(nsim)
elif simname == "Carrick2015":
@@ -429,10 +431,10 @@ if __name__ == "__main__":
Om0 = csiborgtools.simname2Omega_m(args.simname)
# r = make_spacing(200, 0.75, 23.25, 34, 0.01, Om0)
r = np.arange(0, 200, 0.75)
r = np.arange(0, 200, 0.5)
# smooth_scales = [0, 2, 4, 6, 8]
smooth_scales = [0]
smooth_scales = [0, 8]
print(f"Running catalogue {args.catalogue} for simulation {args.simname} "
f"with {len(r)} radial points.")

View File

@@ -1,5 +1,5 @@
nthreads=1
memory=48
memory=32
on_login=${1}
queue="berg"
env="/mnt/users/rstiskalek/csiborgtools/venv_csiborg/bin/python"
@@ -18,11 +18,8 @@ then
fi
# for simname in "csiborg1" "csiborg2_main" "csiborg2X" "Lilow2024" "Carrick2015" "CF4"; do
for simname in "Carrick2015"; do
# for catalogue in "2MTF" "SFI_gals" "CF4_TFR"; do
# for catalogue in "Foundation" "2MTF" "SFI_gals" "CF4_TFR"; do
for catalogue in "CF4_TFR"; do
for simname in "csiborg1" "csiborg2_main" "csiborg2X" "Lilow2024" "Carrick2015" "CF4" "manticore_2MPP_N128_DES_V1"; do
for catalogue in "LOSS" "Foundation" "2MTF" "SFI_gals" "CF4_TFR"; do
pythoncm="$env $file --catalogue $catalogue --nsims $nsims --simname $simname --MAS $MAS --grid $grid"
if [ $on_login -eq 1 ]; then
echo $pythoncm

View File

@@ -37,15 +37,17 @@ else
fi
for simname in "IndranilVoid_exp" "IndranilVoid_gauss" "IndranilVoid_mb"; do
# for simname in "IndranilVoid_exp" "IndranilVoid_gauss" "IndranilVoid_mb"; do
for simname in "Carrick2015"; do
# for simname in "no_field"; do
# for catalogue in "2MTF" "SFI_gals" "CF4_TFR_i" "CF4_TFR_w1"; do
# for catalogue in "LOSS" "Foundation" "2MTF" "SFI_gals" "CF4_TFR_i" "CF4_TFR_w1"; do
for catalogue in "LOSS"; do
# for catalogue in "CF4_TFR_i" "CF4_TFR_w1"; do
for catalogue in "2MTF" "SFI_gals" "CF4_TFR_i" "CF4_TFR_w1"; do
# for ksim in "none"; do
# for catalogue in "2MTF" "SFI_gals" "CF4_TFR_i" "CF4_TFR_w1"; do
for ksim in "none"; do
# for ksim in 0; do
# for ksim in $(seq 0 5 500); do
for ksim in "0_100_5" "100_200_5" "200_300_5" "300_400_5" "400_500_5"; do
# for ksim in "0_100_5" "100_200_5" "200_300_5" "300_400_5" "400_500_5"; do
# for ksim in {0..500}; do
for ksmooth in 0; do
pythoncm="$env $file --catalogue $catalogue --simname $simname --ksim $ksim --ksmooth $ksmooth --ndevice $ndevice --device $device"