Mirror of https://github.com/Richard-Sti/csiborgtools.git, synced 2024-12-22 12:28:03 +00:00.
Update prior in VF (#135)
* Update priors
* Update submit
* Update README
* Relax width of calibration priors
* Relax width of calibration priors
* Update smooth scales
* Update settings
* Update README
* Update submit
* Add names
* Update nb
* Fix bug
* Update script
* Move files
* Small bug fix
* Add script
* Add script
* Add script
* Add more power
* Update script
* Quick fix
* Rm blank line
* Update nb
* Update nb
* Update nb
This commit is contained in: parent 6e3f354e69, commit 43ebf660ee.

18 changed files with 999 additions and 179 deletions.
README.md (14 changed lines)
@@ -4,5 +4,15 @@ Tools for analysing constrained cosmological simulations.

 ## TODO
-- [ ] Remove Gaussian priors and switch to uniform. Can the beta preference be a volume effect?
-- [ ] Verify the effects of resolution (compare how Carrick worsens) and what happens to the LG velocity.
+- [x] Remove Gaussian priors and switch to uniform. Can the beta preference be a volume effect?
+- [x] Relax priors on the calibration parameters as well.
+- [x] Verify the effects of resolution (compare how Carrick worsens).
+
+## To present in the next meeting
+- Resolution effects on the goodness-of-fit and the LG velocity.
+- CF4 peculiar velocity samples.
+
+## To discuss in the next meeting
+- Get Helene Courtois' advice on the CF4 TFR samples.
@@ -34,7 +34,7 @@ from jax import numpy as jnp
 from jax import vmap
 from jax.scipy.special import logsumexp
 from numpyro import sample
-from numpyro.distributions import LogNormal, Normal
+from numpyro.distributions import Normal, Uniform
 from quadax import simpson
 from scipy.interpolate import interp1d
 from sklearn.model_selection import KFold
@@ -556,10 +556,10 @@ def e2_distmod_SN(e2_mB, e2_x1, e2_c, alpha_cal, beta_cal, e_mu_intrinsic):
             + e_mu_intrinsic**2)


-def sample_SN(e_mu_mean, e_mu_std, mag_cal_mean, mag_cal_std, alpha_cal_mean,
+def sample_SN(e_mu_min, e_mu_max, mag_cal_mean, mag_cal_std, alpha_cal_mean,
               alpha_cal_std, beta_cal_mean, beta_cal_std):
     """Sample SNIe Tripp parameters."""
-    e_mu = sample("e_mu", LogNormal(*lognorm_mean_std_to_loc_scale(e_mu_mean, e_mu_std)))  # noqa
+    e_mu = sample("e_mu", Uniform(e_mu_min, e_mu_max))
     mag_cal = sample("mag_cal", Normal(mag_cal_mean, mag_cal_std))
     alpha_cal = sample("alpha_cal", Normal(alpha_cal_mean, alpha_cal_std))
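For reference, here is a minimal sketch (not part of the diff) contrasting the two priors being swapped on the intrinsic scatter `e_mu`: the old moment-matched LogNormal against the new flat Uniform. The body of `lognorm_mean_std_to_loc_scale` is an assumption, reconstructed as standard lognormal moment matching; the default bounds and widths mirror the hyperparameters updated later in this commit.

```python
# Hedged sketch only; `lognorm_mean_std_to_loc_scale` is assumed (not checked
# against the repository) to moment-match a LogNormal to a given mean and std.
import numpy as np
from numpyro import sample
from numpyro.distributions import LogNormal, Uniform


def lognorm_mean_std_to_loc_scale(mean, std):
    # If X ~ LogNormal(loc, scale): E[X] = exp(loc + scale^2 / 2) and
    # Var[X] = (exp(scale^2) - 1) * E[X]^2, hence:
    scale2 = np.log(1 + (std / mean)**2)
    return np.log(mean) - 0.5 * scale2, np.sqrt(scale2)


def e_mu_prior_old(e_mu_mean=0.1, e_mu_std=0.05):
    """Old prior: LogNormal matched to the given mean and std (call inside a model)."""
    return sample("e_mu", LogNormal(*lognorm_mean_std_to_loc_scale(e_mu_mean, e_mu_std)))


def e_mu_prior_new(e_mu_min=0.001, e_mu_max=1.0):
    """New prior: flat between the given bounds (call inside a model)."""
    return sample("e_mu", Uniform(e_mu_min, e_mu_max))
```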
@@ -582,9 +582,9 @@ def e2_distmod_TFR(e2_mag, e2_eta, b, e_mu_intrinsic):
     return e2_mag + b**2 * e2_eta + e_mu_intrinsic**2


-def sample_TFR(e_mu_mean, e_mu_std, a_mean, a_std, b_mean, b_std):
+def sample_TFR(e_mu_min, e_mu_max, a_mean, a_std, b_mean, b_std):
     """Sample Tully-Fisher calibration parameters."""
-    e_mu = sample("e_mu", LogNormal(*lognorm_mean_std_to_loc_scale(e_mu_mean, e_mu_std)))  # noqa
+    e_mu = sample("e_mu", Uniform(e_mu_min, e_mu_max))
     a = sample("a", Normal(a_mean, a_std))
     b = sample("b", Normal(b_mean, b_std))
@@ -596,19 +596,19 @@ def sample_TFR(e_mu_mean, e_mu_std, a_mean, a_std, b_mean, b_std):
 ###############################################################################


-def sample_calibration(Vext_std, alpha_mean, alpha_std, beta_mean, beta_std,
-                       sigma_v_mean, sigma_v_std, sample_alpha, sample_beta):
+def sample_calibration(Vext_std, alpha_min, alpha_max, beta_min, beta_max,
+                       sigma_v_min, sigma_v_max, sample_alpha, sample_beta):
     """Sample the flow calibration."""
     Vext = sample("Vext", Normal(0, Vext_std).expand([3]))
-    sigma_v = sample("sigma_v", LogNormal(*lognorm_mean_std_to_loc_scale(sigma_v_mean, sigma_v_std)))  # noqa
+    sigma_v = sample("sigma_v", Uniform(sigma_v_min, sigma_v_max))

     if sample_alpha:
-        alpha = sample("alpha", Normal(alpha_mean, alpha_std))
+        alpha = sample("alpha", Uniform(alpha_min, alpha_max))
     else:
         alpha = 1.0

     if sample_beta:
-        beta = sample("beta", Normal(beta_mean, beta_std))
+        beta = sample("beta", Uniform(beta_min, beta_max))
     else:
         beta = 1.0
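To see the relaxed calibration priors in action, a hedged, self-contained toy model follows (not from the repository): a Gaussian likelihood for observed radial velocities with `Vext`, `sigma_v` and `beta` drawn from the new uniform priors and sampled with NUTS. The data and the likelihood are placeholders for the actual velocity-field comparison.

```python
# Toy sketch of the relaxed priors; the likelihood and synthetic data below are
# placeholders, not the csiborgtools flow-validation model.
import jax.numpy as jnp
from jax import random
from numpyro import sample
from numpyro.distributions import Normal, Uniform
from numpyro.infer import MCMC, NUTS


def toy_flow_model(vrad_obs, vrad_pred, rhat):
    Vext = sample("Vext", Normal(0, 500.0).expand([3]))
    sigma_v = sample("sigma_v", Uniform(5.0, 750.0))
    beta = sample("beta", Uniform(-1.0, 3.0))

    # Predicted LOS velocity: rescaled field plus the external flow projected
    # onto each unit line-of-sight vector.
    mu = beta * vrad_pred + jnp.sum(Vext * rhat, axis=-1)
    sample("obs", Normal(mu, sigma_v), obs=vrad_obs)


rhat = random.normal(random.PRNGKey(0), (50, 3))
rhat /= jnp.linalg.norm(rhat, axis=-1, keepdims=True)
vrad_pred = 300.0 * random.normal(random.PRNGKey(1), (50,))
vrad_obs = vrad_pred + 150.0 * random.normal(random.PRNGKey(2), (50,))

mcmc = MCMC(NUTS(toy_flow_model), num_warmup=500, num_samples=500)
mcmc.run(random.PRNGKey(3), vrad_obs, vrad_pred, rhat)
print(mcmc.get_samples()["beta"].mean())
```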
@@ -21,7 +21,6 @@ from abc import ABC, abstractmethod
 from collections import OrderedDict
 from functools import lru_cache
 from gc import collect
-from itertools import product
 from math import floor

 from astropy.cosmology import FlatLambdaCDM
@@ -1342,5 +1341,14 @@ def fiducial_observers(boxwidth, radius):
         Positions of the observers, with each position as a len-3 list.
     """
     nobs = floor(boxwidth / (2 * radius))
-    return [[val * radius for val in position]
-            for position in product([1, 3, 5], repeat=nobs)]
+
+    obs = []
+    for i in range(nobs):
+        x = (2 * i + 1) * radius
+        for j in range(nobs):
+            y = (2 * j + 1) * radius
+            for k in range(nobs):
+                z = (2 * k + 1) * radius
+                obs.append([x, y, z])
+
+    return obs
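As a quick illustration (not in the commit), the rewritten grid places observers at odd multiples of `radius` along each axis. The old one-liner hard-coded `product([1, 3, 5], repeat=nobs)`, which only yields valid length-3 positions when `nobs` happens to be 3; the explicit loops work for any `nobs`. For a Quijote-like 1000 Mpc/h box with `radius = 200` this gives floor(1000 / 400) = 2 steps per axis, i.e. 8 observers:

```python
# Sketch reproducing the new observer grid; equivalent to the nested loops in
# the diff, written as a comprehension.
from math import floor


def fiducial_observers(boxwidth, radius):
    nobs = floor(boxwidth / (2 * radius))
    return [[(2 * i + 1) * radius, (2 * j + 1) * radius, (2 * k + 1) * radius]
            for i in range(nobs) for j in range(nobs) for k in range(nobs)]


print(fiducial_observers(1000, 200))
# [[200, 200, 200], [200, 200, 600], [200, 600, 200], ..., [600, 600, 600]]
```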
@@ -637,7 +637,8 @@ class CSiBORG1Field(BaseField):
         self._simname = "csiborg1"

     def density_field(self, MAS, grid):
-        fpath = self.paths.field("density", MAS, grid, self.nsim, "csiborg1")
+        fpath = self.paths.field(
+            "density", MAS, grid, self.nsim, self._simname)

         if MAS == "SPH":
             with File(fpath, "r") as f:
@@ -652,7 +653,8 @@ class CSiBORG1Field(BaseField):
         return field

     def velocity_field(self, MAS, grid):
-        fpath = self.paths.field("velocity", MAS, grid, self.nsim, "csiborg1")
+        fpath = self.paths.field(
+            "velocity", MAS, grid, self.nsim, self._simname)

         if MAS == "SPH":
             with File(fpath, "r") as f:
@@ -677,7 +679,7 @@ class CSiBORG1Field(BaseField):
             raise ValueError("The radial velocity field is only implemented "
                              "for the flipped x- and z-axes.")

-        fpath = self.paths.field("radvel", MAS, grid, self.nsim, "csiborg1")
+        fpath = self.paths.field("radvel", MAS, grid, self.nsim, self._simname)
         return np.load(fpath)
notebooks/flow/quijote_Cell.ipynb (new file, 316 lines)
File diff suppressed because one or more lines are too long.
@@ -108,6 +108,8 @@ def simname_to_pretty(simname):
         "csiborg1": "CB1",
         "csiborg2_main": "CB2",
         "csiborg2X": "Manticore",
+        "CF4": "CF4",
+        "CF4gp": "CF4group",
     }

     if isinstance(simname, list):
@@ -340,8 +340,8 @@ if __name__ == "__main__":
     args = parser.parse_args()

     rmax = 200
-    dr = 0.5
-    smooth_scales = [0, 2, 4]
+    dr = 0.25
+    smooth_scales = [0]

     comm = MPI.COMM_WORLD
     paths = csiborgtools.read.Paths(**csiborgtools.paths_glamdring)
@@ -11,7 +11,8 @@ MAS="SPH"
 grid=1024


-for catalogue in "LOSS" "Foundation" "Pantheon+" "2MTF" "SFI_gals"; do
+# for catalogue in "LOSS" "Foundation" "Pantheon+" "2MTF" "SFI_gals"; do
+for catalogue in "LOSS"; do
 # for catalogue in "Foundation"; do
     pythoncm="$env $file --catalogue $catalogue --nsims $nsims --simname $simname --MAS $MAS --grid $grid"
     if [ $on_login -eq 1 ]; then
@@ -1,14 +1,14 @@
-nthreads=1
-memory=64
+nthreads=10
+memory=12
 on_login=${1}
 queue="berg"
 env="/mnt/zfsusers/rstiskalek/csiborgtools/venv_csiborg/bin/python"
 file="field_prop.py"
-kind="density"
-simname="csiborg1"
-nsims="9844"
+kind="velocity"
+simname="quijote"
+nsims="-1"
 MAS="PCS"
-grid=1024
+grid=256


 pythoncm="$env $file --nsims $nsims --simname $simname --kind $kind --MAS $MAS --grid $grid"
@@ -243,10 +243,10 @@ if __name__ == "__main__":
                    "num_epochs": num_epochs}
     print_variables(main_params.keys(), main_params.values())

-    calibration_hyperparams = {"Vext_std": 250,
-                               "alpha_mean": 1.0, "alpha_std": 0.5,
-                               "beta_mean": 1.0, "beta_std": 0.5,
-                               "sigma_v_mean": 200., "sigma_v_std": 100.,
+    calibration_hyperparams = {"Vext_std": 500,
+                               "alpha_min": -1.0, "alpha_max": 3.0,
+                               "beta_min": -1.0, "beta_max": 3.0,
+                               "sigma_v_min": 5.0, "sigma_v_max": 750.,
                                "sample_alpha": sample_alpha,
                                "sample_beta": sample_beta,
                                }
@@ -254,15 +254,15 @@ if __name__ == "__main__":
         calibration_hyperparams.keys(), calibration_hyperparams.values())

     if ARGS.catalogue in ["LOSS", "Foundation", "Pantheon+", "Pantheon+_groups", "Pantheon+_zSN"]:  # noqa
-        distmod_hyperparams = {"e_mu_mean": 0.1, "e_mu_std": 0.05,
-                               "mag_cal_mean": -18.25, "mag_cal_std": 0.5,
-                               "alpha_cal_mean": 0.148, "alpha_cal_std": 0.05,
-                               "beta_cal_mean": 3.112, "beta_cal_std": 1.0,
+        distmod_hyperparams = {"e_mu_min": 0.001, "e_mu_max": 1.0,
+                               "mag_cal_mean": -18.25, "mag_cal_std": 2.0,
+                               "alpha_cal_mean": 0.148, "alpha_cal_std": 1.0,
+                               "beta_cal_mean": 3.112, "beta_cal_std": 2.0,
                                }
     elif ARGS.catalogue in ["SFI_gals", "2MTF"]:
-        distmod_hyperparams = {"e_mu_mean": 0.3, "e_mu_std": 0.15,
-                               "a_mean": -21., "a_std": 0.5,
-                               "b_mean": -5.95, "b_std": 0.25,
+        distmod_hyperparams = {"e_mu_min": 0.001, "e_mu_max": 1.0,
+                               "a_mean": -21., "a_std": 5.0,
+                               "b_mean": -5.95, "b_std": 3.0,
                                }
     else:
         raise ValueError(f"Unsupported catalogue: `{ARGS.catalogue}`.")
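For orientation, the calibration parameters whose priors are widened above enter the distance modulus through the standard Tripp (supernova) and Tully-Fisher forms. The sketch below assumes the usual sign conventions, which this diff does not spell out; only the error models (`e2_distmod_SN`, `e2_distmod_TFR`) appear explicitly in the changed code.

```python
# Hedged sketch of the standard relations behind the hyperparameters above;
# the exact conventions used in csiborgtools are assumed, not verified.
def distmod_SN(mB, x1, c, mag_cal, alpha_cal, beta_cal):
    # Tripp relation: distance modulus from the SALT2 light-curve parameters.
    return mB - mag_cal + alpha_cal * x1 - beta_cal * c


def distmod_TFR(mag, eta, a, b):
    # Tully-Fisher relation: absolute magnitude a + b * eta (eta the log
    # linewidth), subtracted from the apparent magnitude.
    return mag - (a + b * eta)


# Example with the prior means quoted above:
print(distmod_SN(mB=14.0, x1=0.5, c=-0.05,
                 mag_cal=-18.25, alpha_cal=0.148, beta_cal=3.112))  # ~32.5
```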
@@ -17,39 +17,16 @@ if [ "$on_login" != "1" ] && [ "$on_login" != "0" ]; then
 fi


-: << 'COMMENT'
-Finished running:
-- Carrick2015 with all catalogues
-
-- Lilow2024 wbeta with all catalogues
-- Lilow2024 wobeta with all catalogues
-
-- CF4 wbeta with all catalogues
-- CF4 wobeta with all catalogues
-- CF4gp wbeta with all catalogues
-- CF4gp wobeta with all catalogues
-
-- csiborg1 wbeta with all catalogues
-- csiborg1 wobeta with all catalogues
-- csiborg2_main wbeta with all catalogues
-- csiborg2_main wobeta with all catalogues
-- csiborg2X wbeta with all catalogues
-- csiborg2X wobeta with all catalogues
-
-- csiborg2_main/csiborg2X 2MTF & Pantheon+ boxes individually.
-
-Remaining to do:
-- Lilow, CF4, and csiborgs with beta fixed.
-COMMENT
-
 # Submit a job for each combination of simname, catalogue, ksim
-for simname in "Lilow2024" "CF4" "CF4gp" "csiborg1" "csiborg2_main" "csiborg2X"; do
+# for simname in "Lilow2024" "CF4" "CF4gp" "csiborg1" "csiborg2_main" "csiborg2X"; do
+for ksmooth in 0 1 2 3 4; do
+for simname in "Carrick2015"; do
+# for simname in "csiborg1" "csiborg2_main" "csiborg2X"; do
-for catalogue in "LOSS" "Foundation" "2MTF" "Pantheon+" "SFI_gals"; do
+for catalogue in "Pantheon+"; do
+# for catalogue in "2MTF"; do
-# for ksim in 0 1 2; do
-# for ksim in 0 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20; do
-for ksim in 0; do
+for ksim in "none"; do
     pythoncm="$env $file --catalogue $catalogue --simname $simname --ksim $ksim --ksmooth $ksmooth --ndevice $ndevice --device $device"

     if [ $on_login -eq 1 ]; then
@@ -66,3 +43,5 @@ for simname in "Lilow2024" "CF4" "CF4gp" "csiborg1" "csiborg2_main" "csiborg2X";
 done
 done
 done
+
+done
scripts/output/quijote_catalogue_from_field.py (new file, 173 lines)
@@ -0,0 +1,173 @@
# Copyright (C) 2023 Richard Stiskalek
# This program is free software; you can redistribute it and/or modify it
# under the terms of the GNU General Public License as published by the
# Free Software Foundation; either version 3 of the License, or (at your
# option) any later version.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General
# Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
"""Quick script to write halo catalogues as a HDF5 file."""
from os import remove
from os.path import exists, join

import numpy as np
from h5py import File

import csiborgtools
from csiborgtools import fprint

###############################################################################
#                   Evaluating fields at a fixed radius.                      #
###############################################################################


def load_fields(nsim, MAS, grid, paths):
    # reader = csiborgtools.read.QuijoteField(nsim, paths)
    # velocity_field = reader.velocity_field(MAS, grid)

    # reader = csiborgtools.read.CSiBORG2Field(nsim, "random", paths)
    # velocity_field = reader.velocity_field(MAS, grid)

    folder = "/mnt/extraspace/rstiskalek/catalogs"
    fpath = join(folder, "twompp_velocity_carrick2015.npy")
    field = np.load(fpath).astype(np.float32)
    field[0] -= 89
    field[1] -= -131
    field[2] -= 17
    # field /= 0.43
    return field

    # return velocity_field


def uniform_points_at_radius(R, npoints, seed):
    gen = np.random.RandomState(seed)
    phi = gen.uniform(0, 2*np.pi, npoints)
    theta = np.arccos(gen.uniform(-1, 1, npoints))

    return R * np.vstack([
        np.sin(theta) * np.cos(phi),
        np.sin(theta) * np.sin(phi),
        np.cos(theta)]).T
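A short aside on `uniform_points_at_radius` (not part of the commit): drawing `cos(theta)` uniformly in [-1, 1], rather than `theta` itself, is what makes the points isotropic, since the spherical area element is sin(theta) dtheta dphi. A quick numerical check:

```python
# Check that uniform cos(theta) sampling is isotropic: the mean of every
# Cartesian component should vanish to within sampling noise.
import numpy as np

gen = np.random.RandomState(42)
npoints = 100_000
phi = gen.uniform(0, 2 * np.pi, npoints)
theta = np.arccos(gen.uniform(-1, 1, npoints))
xyz = np.vstack([np.sin(theta) * np.cos(phi),
                 np.sin(theta) * np.sin(phi),
                 np.cos(theta)]).T

print(np.abs(xyz.mean(axis=0)).max())  # of order 1e-3
```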
def main_field(velocity_field, radius, observers, npoints, fname, boxsize):
    if exists(fname):
        print(f"Removing existing file `{fname}`.", flush=True)
        remove(fname)

    points = uniform_points_at_radius(radius, npoints, 42)
    for i, observer in enumerate(observers):
        current_points = (points + observer)
        current_points_box_units = current_points / boxsize
        vel = np.vstack([csiborgtools.field.evaluate_cartesian_cic(
            velocity_field[i], pos=current_points_box_units,
            smooth_scales=None,) for i in range(3)]).T

        vel_observer = np.vstack([csiborgtools.field.evaluate_cartesian_cic(
            velocity_field[i],
            pos=np.asarray(observer).reshape(1, 3) / boxsize,
            smooth_scales=None) for i in range(3)]).T[0]

        with File(fname, 'a') as f:
            grp = f.create_group(f"obs_{i}")
            grp.create_dataset("pos", data=current_points)
            grp.create_dataset("vel", data=vel)
            grp.create_dataset("observer", data=observer)
            grp.create_dataset("vel_observer", data=vel_observer)

    print(f"Written to `{fname}`.", flush=True)


###############################################################################
#                Selecting particles in a thin radial shell.                  #
###############################################################################


def load_particles(nsim, paths):
    reader = csiborgtools.read.QuijoteSnapshot(nsim, 4, paths)
    fprint("reading particles positions...")
    pos = reader.coordinates()
    fprint("reading particles velocities...")
    vel = reader.velocities()
    fprint("finished reading the snapshot.")
    return pos, vel


def main_particles(pos, vel, rmin, rmax, observers, fname):
    if exists(fname):
        print(f"Removing existing file `{fname}`.", flush=True)
        remove(fname)

    for i, observer in enumerate(observers):
        r = np.linalg.norm(pos - observer, axis=1)
        mask = (r > rmin) & (r < rmax)
        print(f"Observer {i}: {mask.sum()} particles in the shell.")
        with File(fname, 'a') as f:
            grp = f.create_group(f"obs_{i}")
            grp.create_dataset("pos", data=pos[mask])
            grp.create_dataset("vel", data=vel[mask])
            grp.create_dataset("observer", data=observer)

    print(f"Written to `{fname}`.", flush=True)


###############################################################################
#                                 Interface                                   #
###############################################################################


if __name__ == "__main__":
    paths = csiborgtools.read.Paths(**csiborgtools.paths_glamdring)
    fdir = "/mnt/extraspace/rstiskalek/BBF/Quijote_field_points"

    # grid = 512
    # MAS = "PCS"
    # boxsize = csiborgtools.simname2boxsize("quijote")
    # rmax = 200
    # radius_evaluate = 150
    # npoints = 1000

    # fiducial_observers = csiborgtools.read.fiducial_observers(boxsize, rmax)
    # print(f"There are {len(fiducial_observers)} observers.")
    # # fiducial_observers = [[boxsize/2, boxsize/2, boxsize/2]]
    # nsims = [0]

    # for nsim in nsims:
    #     fname = join(fdir, f"field_points_{nsim}.h5")
    #     velocity_field = load_fields(nsim, MAS, grid, paths)
    #     main_field(velocity_field, radius_evaluate, fiducial_observers,
    #                npoints, fname, boxsize)

    rmin = 100
    rmax = 100.5
    boxsize = csiborgtools.simname2boxsize("quijote")
    fiducial_observers = csiborgtools.read.fiducial_observers(boxsize, 200)
    print(f"There are {len(fiducial_observers)} observers.")
    nsims = [0]

    for nsim in nsims:
        fname = join(fdir, f"particles_points_{nsim}.h5")
        pos, vel = load_particles(nsim, paths)
        main_particles(pos, vel, rmin, rmax, fiducial_observers, fname)

    # grid = None
    # MAS = None
    # boxsize = csiborgtools.simname2boxsize("Carrick2015")
    # radius_evaluate = 75
    # npoints = 1000

    # fiducial_observers = [[boxsize/2, boxsize/2, boxsize/2]]
    # nsims = [0]

    # for nsim in nsims:
    #     fname = join(fdir, f"Carrick2015_field_points_{nsim}.h5")
    #     velocity_field = load_fields(nsim, MAS, grid, paths)
    #     main_field(velocity_field, radius_evaluate, fiducial_observers,
    #                npoints, fname, boxsize)
@@ -1,112 +0,0 @@
# Copyright (C) 2023 Richard Stiskalek
# This program is free software; you can redistribute it and/or modify it
# under the terms of the GNU General Public License as published by the
# Free Software Foundation; either version 3 of the License, or (at your
# option) any later version.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General
# Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
"""
Quick script to output either halo positions and masses or positions of
galaxies in a survey as an ASCII file.
"""
from os.path import join

import csiborgtools
import numpy as np
from tqdm import tqdm

DIR_OUT = "/mnt/extraspace/rstiskalek/csiborg_postprocessing/ascii_positions"


def write_simulation(simname, high_resolution_only=True):
    """"
    Write the positions, velocities and IDs of the particles in a simulation
    to an ASCII file. The output is `X Y Z VX VY VZ ID`.
    """
    if not high_resolution_only:
        raise RuntimeError("Writing low-res particles is not implemented.")

    paths = csiborgtools.read.Paths(**csiborgtools.paths_glamdring)

    if "csiborg2" in simname:
        nsims = paths.get_ics(simname)
        kind = simname.split("_")[-1]

        for nsim in tqdm(nsims, desc="Simulations"):
            reader = csiborgtools.read.CSiBORG2Snapshot(nsim, 99, kind, paths,
                                                        flip_xz=False)
            x = np.hstack(
                [reader.coordinates(high_resolution_only=True),
                 reader.velocities(high_resolution_only=True),
                 reader.particle_ids(high_resolution_only=True).reshape(-1, 1)]
                )
            # Store positions and velocities with 6 decimal places and IDs as
            # integers.
            fmt_string = "%1.6f %1.6f %1.6f %1.6f %1.6f %1.6f %d"
            fname = join(DIR_OUT, f"high_res_particles_{simname}_{nsim}.txt")
            np.savetxt(fname, x, fmt=fmt_string)
    else:
        raise RuntimeError("Simulation not implemented..")


def write_halos(simname):
    """
    Watch out about the distinction between real and redshift space. The output
    is `X Y Z MASS`.
    """
    paths = csiborgtools.read.Paths(**csiborgtools.paths_glamdring)
    if "csiborg2" in simname:
        nsims = paths.get_ics(simname)
        kind = simname.split("_")[-1]

        for nsim in tqdm(nsims, desc="Looping over simulations"):
            cat = csiborgtools.read.CSiBORG2Catalogue(nsim, 99, kind, paths)
            pos = cat["cartesian_pos"]
            mass = cat["totmass"]
            # Stack positions and masses
            x = np.hstack([pos, mass.reshape(-1, 1)])

            # Save to a file
            fname = join(DIR_OUT, f"halos_real_{simname}_{nsim}.txt")
            np.savetxt(fname, x)
    else:
        raise RuntimeError("Simulation not implemented..")


def write_survey(survey_name, boxsize):
    """Watch out about the distance definition."""
    if survey_name == "SDSS":
        survey = csiborgtools.SDSS()()
        dist, ra, dec = survey["DIST"], survey["RA"], survey["DEC"]
    elif survey_name == "SDSSxALFALFA":
        survey = csiborgtools.SDSSxALFALFA()()
        dist, ra, dec = survey["DIST"], survey["RA_1"], survey["DEC_1"]
    else:
        raise RuntimeError("Survey not implemented..")

    # Convert to Cartesian coordinates
    X = np.vstack([dist, ra, dec]).T
    X = csiborgtools.radec_to_cartesian(X)

    # Center the coordinates in the box
    X += boxsize / 2

    fname = join(DIR_OUT, f"survey_{survey_name}.txt")
    np.savetxt(fname, X)


if __name__ == "__main__":
    write_simulation("csiborg2_main")

    # write_halos("csiborg2_main")

    # boxsize = 676.6
    # for survey in ["SDSS", "SDSSxALFALFA"]:
    #     write_survey(survey, boxsize)
scripts/quijote_pecvel_covmat.py (new file, 126 lines)
@@ -0,0 +1,126 @@
# Copyright (C) 2024 Richard Stiskalek
# This program is free software; you can redistribute it and/or modify it
# under the terms of the GNU General Public License as published by the
# Free Software Foundation; either version 3 of the License, or (at your
# option) any later version.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General
# Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
"""
A script to calculate the bulk flow in Quijote simulations from either
particles or FoF haloes and to also save the resulting smaller halo catalogues.
"""
import csiborgtools
import healpy as hp
import numpy as np
from csiborgtools.field import evaluate_cartesian_cic
from h5py import File
from tqdm import tqdm


def load_field(nsim, MAS, grid, paths):
    """Load the precomputed velocity field from the Quijote simulations."""
    reader = csiborgtools.read.QuijoteField(nsim, paths)
    return reader.velocity_field(MAS, grid)


def skymap_coordinates(nside, R):
    """Generate 3D pixel positions at a given radius."""
    theta, phi = hp.pix2ang(nside, np.arange(hp.nside2npix(nside)))
    pos = R * np.vstack([np.sin(theta) * np.cos(phi),
                         np.sin(theta) * np.sin(phi),
                         np.cos(theta)]).T

    # Quijote expects float32, otherwise it will crash
    return pos.astype(np.float32)
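Side note (not in the commit): healpy exposes the same pixel-centre unit vectors directly through `hp.pix2vec`, which gives a cheap cross-check of `skymap_coordinates`:

```python
# Cross-check of the manual trigonometry against healpy's pix2vec.
import healpy as hp
import numpy as np

nside, R = 16, 150.0
ipix = np.arange(hp.nside2npix(nside))

theta, phi = hp.pix2ang(nside, ipix)
pos_manual = R * np.vstack([np.sin(theta) * np.cos(phi),
                            np.sin(theta) * np.sin(phi),
                            np.cos(theta)]).T
pos_healpy = R * np.vstack(hp.pix2vec(nside, ipix)).T

print(np.allclose(pos_manual, pos_healpy))  # True
```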
def make_radvel_skymap(velocity_field, pos, observer, boxsize):
    """
    Make a skymap of the radial velocity field at the given 3D positions which
    correspond to the pixels.
    """
    # Velocities on the shell
    Vx, Vy, Vz = [evaluate_cartesian_cic(velocity_field[i], pos=pos / boxsize,
                                         smooth_scales=None) for i in range(3)]

    # Observer velocity
    obs = np.asarray(observer).reshape(1, 3) / boxsize
    Vx_obs, Vy_obs, Vz_obs = [evaluate_cartesian_cic(
        velocity_field[i], pos=obs, smooth_scales=None)[0] for i in range(3)]

    # Subtract observer velocity
    Vx -= Vx_obs
    Vy -= Vy_obs
    Vz -= Vz_obs

    # Radial velocity
    norm_pos = pos - observer
    norm_pos /= np.linalg.norm(norm_pos, axis=1).reshape(-1, 1)
    Vrad = Vx * norm_pos[:, 0] + Vy * norm_pos[:, 1] + Vz * norm_pos[:, 2]

    return Vrad
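To build intuition for the `C_ell` computed below (again, not part of the commit): the radial-velocity pattern of a pure bulk flow, v_r(nhat) = nhat . V_bulk, is a dipole, so `hp.anafast` applied to such a map should concentrate essentially all the power at ell = 1. A synthetic check:

```python
# Synthetic check: a pure bulk flow produces a dipole radial-velocity map.
import healpy as hp
import numpy as np

nside, ell_max = 64, 8
V_bulk = np.array([300.0, -150.0, 100.0])  # km/s, arbitrary example

nhat = np.vstack(hp.pix2vec(nside, np.arange(hp.nside2npix(nside)))).T
vrad_map = nhat @ V_bulk

C_ell = hp.anafast(vrad_map, lmax=ell_max)
print(np.argmax(C_ell))  # 1 -- the dipole dominates
```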
def main(nsims, observers, nside, ell_max, radii, boxsize, MAS, grid, fname):
    """Calculate the sky maps and C_ell."""
    # 3D pixel positions at each radius in box units
    map_pos = [skymap_coordinates(nside, R) for R in radii]

    print(f"Writing to `{fname}`...")
    f = File(fname, 'w')
    f.create_dataset("ell", data=np.arange(ell_max + 1))
    f.create_dataset("radii", data=radii)
    f.attrs["num_simulations"] = len(nsims)
    f.attrs["num_observers"] = len(observers)
    f.attrs["num_radii"] = len(radii)
    f.attrs["npix_per_map"] = hp.nside2npix(nside)

    for nsim in tqdm(nsims, desc="Simulations"):
        grp_sim = f.create_group(f"nsim_{nsim}")
        velocity_field = load_field(nsim, MAS, grid, paths)

        for n in range(len(observers)):
            grp_observer = grp_sim.create_group(f"observer_{n}")

            for i in range(len(radii)):
                pos = map_pos[i] + observers[n]

                skymap = make_radvel_skymap(velocity_field, pos, observers[n],
                                            boxsize)
                C_ell = hp.sphtfunc.anafast(skymap, lmax=ell_max)

                grp_observer.create_dataset(f"skymap_{i}", data=skymap)
                grp_observer.create_dataset(f"C_ell_{i}", data=C_ell)

    print(f"Closing `{fname}`.")
    f.close()


if __name__ == "__main__":
    paths = csiborgtools.read.Paths(**csiborgtools.paths_glamdring)

    MAS = "PCS"
    grid = 512
    nside = 256
    ell_max = 16
    boxsize = 1000
    Rmax = 200
    radii = np.linspace(100, 150, 5)
    fname = "/mnt/extraspace/rstiskalek/BBF/Quijote_Cell/C_ell_fiducial.h5"
    nsims = list(range(50))
    observers = csiborgtools.read.fiducial_observers(boxsize, Rmax)

    main(nsims, observers, nside, ell_max, radii, boxsize, MAS, grid, fname)
scripts/quijote_pecvel_covmat.sh (new executable file, 25 lines)
@@ -0,0 +1,25 @@
#!/bin/bash
nthreads=1
memory=16
on_login=${1}
queue="berg"
env="/mnt/zfsusers/rstiskalek/csiborgtools/venv_csiborg/bin/python"
file="quijote_pecvel_covmat.py"


if [ "$on_login" != "1" ] && [ "$on_login" != "0" ]; then
    echo "Invalid input: 'on_login' (1). Please provide 1 or 0."
    exit 1
fi

pythoncm="$env $file"
if [ $on_login -eq 1 ]; then
    echo $pythoncm
    $pythoncm
else
    cm="addqueue -q $queue -n $nthreads -m $memory $pythoncm"
    echo "Submitting:"
    echo $cm
    echo
    eval $cm
fi
scripts/vrad_covariance.py (new file, 158 lines)
@@ -0,0 +1,158 @@
# Copyright (C) 2024 Richard Stiskalek
# This program is free software; you can redistribute it and/or modify it
# under the terms of the GNU General Public License as published by the
# Free Software Foundation; either version 3 of the License, or (at your
# option) any later version.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General
# Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
"""
NOTE: The script is far from finished or written well.
"""
from os.path import join

import numpy as np

import csiborgtools
from h5py import File

from sklearn.neighbors import NearestNeighbors
from numba import jit
from scipy.stats import binned_statistic


###

def find_indxs(rtree, r, radius):
    """
    Find the indices of points that are within a given radius of a given
    point `r`.
    """
    if isinstance(r, (int, float)):
        r = np.array(r)

    return rtree.radius_neighbors(
        r.reshape(-1, 1), radius=radius, return_distance=False)[0]


@jit(nopython=True)
def dot_product_norm(x1_norm, x2_norm):
    """Dot product of two normalised 1D vectors."""
    return x1_norm[0] * x2_norm[0] + x1_norm[1] * x2_norm[1] + x1_norm[2] * x2_norm[2]  # noqa


@jit(nopython=True)
def get_angdist_vrad_product(i_comb, j_comb, pos_norm, vrad, r, rbin_i, rbin_j):
    # TODO: Add itself?
    len_i = len(i_comb)
    len_j = len(j_comb)

    cos_angdist_values = np.full(len_i * len_j, np.nan)
    vrad_product = np.full(len_i * len_j, np.nan)
    weights = np.full(len_i * len_j, np.nan)

    k = 0
    for i in range(len_i):
        pos_norm_i = pos_norm[i_comb[i]]
        vrad_i = vrad[i_comb[i]]
        w_i = (rbin_i / r[i_comb[i]])**2
        for j in range(len_j):
            cos_angdist_values[k] = dot_product_norm(pos_norm_i, pos_norm[j_comb[j]])

            # Product of the peculiar velocities
            vrad_product[k] = vrad_i * vrad[j_comb[j]]
            # Weight the product
            w = w_i * (rbin_j / r[j_comb[j]])**2
            vrad_product[k] *= w

            weights[k] = w

            k += 1

    return cos_angdist_values, vrad_product, weights
def main(out_summed_product, out_weights, ri, rj, angular_bins, pos, vel, observer, rmax):
    # Centre the positions at the observer
    pos = np.copy(pos) - observer
    r = np.linalg.norm(pos, axis=1)
    mask = r < rmax

    # Select only the haloes within the radial range
    pos = pos[mask]
    vel = vel[mask]
    r = r[mask]

    # Create a KDTree for the radial positions
    rtree = NearestNeighbors().fit(r.reshape(-1, 1))

    # Calculate the radial velocity and the normalised position vector
    pos_norm = pos / r[:, None]
    vrad = np.sum(vel * pos_norm, axis=1)

    # TODO: eventually here loop over the radii
    # for ....
    dr = 2.5
    i_indxs = find_indxs(rtree, ri, radius=dr)
    j_indxs = find_indxs(rtree, rj, radius=dr)

    # Calculate the cosine of the angular distance and the product of the
    # radial velocities for each pair of points.
    cos_angdist, vrad_product, weights = get_angdist_vrad_product(
        i_indxs, j_indxs, pos_norm, vrad, r, ri, rj)

    out_summed_product += binned_statistic(
        cos_angdist, vrad_product, bins=angular_bins, statistic="sum")[0]

    out_weights += binned_statistic(
        cos_angdist, weights, bins=angular_bins, statistic="sum")[0]

    return out_summed_product, out_weights
if __name__ == "__main__":
    fdir = "/mnt/extraspace/rstiskalek/BBF/Quijote_C_ij"

    rmax = 150
    nradial = 20
    nangular = 40
    ncatalogue = 1

    # NOTE check this
    radial_bins = np.linspace(0, rmax, nradial + 1)
    angular_bins = np.linspace(-1, 1, nangular + 1)

    summed_product = np.zeros(nangular)
    weights = np.zeros(nangular)

    fiducial_observers = csiborgtools.read.fiducial_observers(1000, rmax)

    ri = 100
    rj = 120

    # for i in trange(ncatalogue, desc="Catalogues"):
    for i in [30]:
        cat = csiborgtools.read.QuijoteCatalogue(i)

        for j in range(len(fiducial_observers)):
            # Loop over all the fiducial observers in this simulation
            observer = fiducial_observers[j]
            summed_product, weights = main(
                summed_product, weights, ri, rj, angular_bins,
                cat["cartesian_pos"], cat["cartesian_vel"], observer, rmax)

    # TODO: Save
    fname = join(fdir, "test.h5")
    print(f"Saving to `{fname}`.")
    with File(fname, 'w') as f:
        f.create_dataset("summed_product", data=summed_product)
        f.create_dataset("weights", data=weights)
        f.create_dataset("angular_bins", data=angular_bins)