Mirror of https://github.com/Richard-Sti/csiborgtools_public.git (synced 2025-05-13 22:21:12 +00:00)
Calculate upglade redshifts (#128)
* Update redshift reading
* Add helio to CMB redshift
* Update imports
* Update nb
* Run for Quijote
* Add script
* Update
* Update .gitignore
* Update imports
* Add Peery estimator
* Add bulk flow scripts
* Update types
* Add comment
* Add blank space
* Update submission script
* Update description
* Add barriers
* Update nb
* Update nb
* Rename script
* Move to old
* Update imports
* Add nb
* Update script
* Fix catalogue key
* Update script
* Update submit
* Update comment
* Update .gitignore
* Update nb
* Update for stationary observers
* Update submission
* Add nb
* Add better verbose control
* Update nb
* Update submit
* Update nb
* Add SN errors
* Add draft of the script
* Update verbosity flags
* Add submission script
* Debug script
* Quickfix
* Remove comment
* Update nb
* Update submission
* Update nb
* Processed UPGLADE
Parent: c447d2e7b0
Commit: 779f2e76ac
24 changed files with 2066 additions and 300 deletions
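The commit message above mentions adding a heliocentric-to-CMB redshift conversion. For reference, the standard conversion boosts the observed redshift by the projection of the Sun's motion with respect to the CMB (roughly 370 km/s towards the dipole apex). Below is a minimal sketch of that relation; the helper name, the dipole constants and the exact convention used in csiborgtools are assumptions, not taken from this diff.

import numpy as np
import astropy.units as u
from astropy.coordinates import SkyCoord

SPEED_OF_LIGHT = 299792.458  # km / s
V_SUN_CMB = 369.0            # km / s, assumed Planck-like solar velocity w.r.t. the CMB
CMB_APEX = SkyCoord(l=264.0 * u.deg, b=48.25 * u.deg, frame="galactic")  # assumed apex


def heliocentric_to_cmb(z_hel, RA, dec):
    """Hypothetical helper: convert heliocentric redshifts to the CMB frame."""
    coords = SkyCoord(ra=RA * u.deg, dec=dec * u.deg, frame="icrs")
    # Cosine of the angle between each object and the CMB dipole apex.
    costheta = np.cos(coords.separation(CMB_APEX).radian)
    # (1 + z_CMB) = (1 + z_hel) * (1 + v_sun * cos(theta) / c)
    return (1 + np.asarray(z_hel)) * (1 + V_SUN_CMB * costheta / SPEED_OF_LIGHT) - 1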
@@ -77,9 +77,17 @@ def get_los(catalogue_name, simname, comm):
        with File(fname, 'r') as f:
            RA = f["RA"][:]
            dec = f["DEC"][:]
    elif catalogue_name == "UPGLADE":
        fname = "/mnt/users/rstiskalek/csiborgtools/data/upglade_z_0p05_all_PROCESSED.h5"  # noqa
        with File(fname, 'r') as f:
            RA = f["RA"][:]
            dec = f["DEC"][:]
    else:
        raise ValueError(f"Unknown field name: `{catalogue_name}`.")

    if comm.Get_rank() == 0:
        print(f"The dataset contains {len(RA)} objects.")

    # The Carrick+2015 is in galactic coordinates, so we need to convert
    # the RA/dec to galactic coordinates.
    if simname == "Carrick2015":

@@ -210,7 +218,7 @@ def combine_from_simulations(catalogue_name, simname, nsims, outfolder,


def interpolate_field(pos, simname, nsim, MAS, grid, dump_folder, rmax,
                      dr, smooth_scales):
                      dr, smooth_scales, verbose=False):
    """
    Interpolate the density and velocity fields along the line of sight.

@@ -243,11 +251,13 @@ def interpolate_field(pos, simname, nsim, MAS, grid, dump_folder, rmax,
    fname_out = join(dump_folder, f"los_{simname}_{nsim}.hdf5")

    # First do the density field.
    if verbose:
        print(f"Interpolating density field for IC realisation `{nsim}`.",
              flush=True)
    density = get_field(simname, nsim, "density", MAS, grid)

    rdist, finterp = csiborgtools.field.evaluate_los(
        density, sky_pos=pos, boxsize=boxsize, rmax=rmax, dr=dr,
        smooth_scales=smooth_scales, verbose=False)
        smooth_scales=smooth_scales, verbose=verbose)

    print(f"Writing temporary file `{fname_out}`.")
    with File(fname_out, 'w') as f:

@@ -257,11 +267,14 @@ def interpolate_field(pos, simname, nsim, MAS, grid, dump_folder, rmax,
    del density, rdist, finterp
    collect()

    if verbose:
        print(f"Interpolating velocity field for IC realisation `{nsim}`.",
              flush=True)
    velocity = get_field(simname, nsim, "velocity", MAS, grid)
    rdist, finterp = csiborgtools.field.evaluate_los(
        velocity[0], velocity[1], velocity[2],
        sky_pos=pos, boxsize=boxsize, rmax=rmax, dr=dr,
        smooth_scales=smooth_scales, verbose=False)
        smooth_scales=smooth_scales, verbose=verbose)

    with File(fname_out, 'a') as f:
        f.create_dataset("velocity", data=finterp)

@@ -308,7 +321,8 @@ if __name__ == "__main__":

    def main(nsim):
        interpolate_field(pos, args.simname, nsim, args.MAS, args.grid,
                          dump_folder, rmax, dr, smooth_scales)
                          dump_folder, rmax, dr, smooth_scales,
                          verbose=comm.Get_size() == 1)

    work_delegation(main, nsims, comm, master_verbose=True)
    comm.Barrier()
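The first hunk above notes that the Carrick+2015 reconstruction is defined in galactic coordinates, so the catalogue RA/dec must be rotated into (l, b) before querying the field. The repository presumably has its own helper for this; a minimal sketch of the standard astropy route (variable names are only illustrative) would be:

import astropy.units as u
from astropy.coordinates import SkyCoord

# RA/dec in degrees (ICRS) -> galactic longitude and latitude in degrees.
coords = SkyCoord(ra=RA * u.deg, dec=dec * u.deg, frame="icrs").galactic
l, b = coords.l.degree, coords.b.degree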
@@ -1,5 +1,5 @@
nthreads=6
memory=42
nthreads=1
memory=32
on_login=0
queue="berg"
env="/mnt/users/rstiskalek/csiborgtools/venv_csiborg/bin/python"

@@ -7,8 +7,9 @@ queue="berg"
env="/mnt/users/rstiskalek/csiborgtools/venv_csiborg/bin/python"
file="flow_validation.py"

#"Pantheon+_zSN"
catalogue="Pantheon+_groups"
simname="csiborg2_varysmall"
simname="Carrick2015"


pythoncm="$env $file --catalogue $catalogue --simname $simname --ksmooth $ksmooth"
scripts/old/field_bulk_estimator.py (Normal file, 165 lines)

@@ -0,0 +1,165 @@
# Copyright (C) 2023 Richard Stiskalek
# This program is free software; you can redistribute it and/or modify it
# under the terms of the GNU General Public License as published by the
# Free Software Foundation; either version 3 of the License, or (at your
# option) any later version.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General
# Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
"""
A script to calculate the bulk flow in Quijote to compare the volume average
definition to various estimators that rely on radial velocities (e.g. Nusser
2014 and Peery+2018).
"""
from argparse import ArgumentParser
from datetime import datetime
from os import remove
from os.path import join

import csiborgtools
import numpy as np
from mpi4py import MPI
from taskmaster import work_delegation  # noqa


###############################################################################
#                 Read in information about the simulation                    #
###############################################################################


def t():
    return datetime.now()


def get_reader(simname, paths, nsim):
    """Get the appropriate snapshot reader for the simulation."""
    # We only want Quijote because it has all particles of the same mass.
    if simname == "quijote":
        # We want the z = 0 snapshots
        reader = csiborgtools.read.QuijoteSnapshot(nsim, 4, paths)
    else:
        raise ValueError(f"Unknown simname: `{simname}`.")

    return reader


def get_particles(reader, verbose=True):
    """
    Get the distance of particles from the center of the box and their masses.
    """
    if verbose:
        print(f"{t()}: reading coordinates and calculating radial distance.")
    pos = reader.coordinates().astype(np.float64)
    vel = reader.velocities().astype(np.float64)
    return pos, vel


###############################################################################
#                     Main & command line interface                           #
###############################################################################


def main(simname, nsim, folder, Rmax):
    observers = csiborgtools.read.fiducial_observers(boxsize, Rmax)
    distances = np.linspace(0, Rmax, 101)[1:]

    reader = get_reader(simname, paths, nsim)
    pos, vel = get_particles(reader, verbose=False)
    mass = np.ones(len(pos))  # Quijote has equal masses

    bf_volume = np.full((len(observers), len(distances), 3), np.nan)
    bf_peery = np.full_like(bf_volume, np.nan)
    bf_const = np.full_like(bf_volume, np.nan)

    for i in range(len(observers)):
        print(f"{t()}: Calculating bulk flow for observer {i + 1} of simulation {nsim}.")  # noqa

        # Subtract the observer position.
        pos_current = pos - observers[i]
        # Get the distance of each particle from the observer and sort it.
        rdist = np.linalg.norm(pos_current, axis=1)
        indxs = np.argsort(rdist)

        pos_current = pos_current[indxs]
        vel_current = vel[indxs]
        rdist = rdist[indxs]

        # Volume average
        bf_volume[i, ...] = csiborgtools.field.particles_enclosed_momentum(
            rdist, mass, vel_current, distances)
        bf_volume[i, ...] /= csiborgtools.field.particles_enclosed_mass(
            rdist, mass, distances)[:, np.newaxis]

        # Peery 2018 1 / r^2 weighted
        bf_peery[i, ...] = csiborgtools.field.bulkflow_peery2018(
            rdist, mass, pos_current, vel_current, distances, "1/r^2",
            verbose=False)

        # Constant weight
        bf_const[i, ...] = csiborgtools.field.bulkflow_peery2018(
            rdist, mass, pos_current, vel_current, distances, "constant",
            verbose=False)

    # Finally save the output
    fname = join(folder, f"bf_estimators_addconstant_{simname}_{nsim}.npz")
    print(f"Saving to `{fname}`.")
    np.savez(fname, bf_volume=bf_volume, bf_peery=bf_peery, bf_const=bf_const,
             distances=distances)


if __name__ == "__main__":
    parser = ArgumentParser()
    parser.add_argument("--simname", type=str, help="Simulation name.",
                        choices=["quijote"])  # noqa
    args = parser.parse_args()
    Rmax = 150
    folder = "/mnt/extraspace/rstiskalek/csiborg_postprocessing/field_shells"

    comm = MPI.COMM_WORLD
    rank = comm.Get_rank()

    paths = csiborgtools.read.Paths(**csiborgtools.paths_glamdring)
    boxsize = csiborgtools.simname2boxsize(args.simname)

    def main_wrapper(nsim):
        main(args.simname, nsim, folder, Rmax)

    nsims = list(paths.get_ics(args.simname))
    if rank == 0:
        print(f"Running with {len(nsims)} Quijote simulations.")

    comm.Barrier()
    work_delegation(main_wrapper, nsims, comm, master_verbose=True)
    comm.Barrier()

    # Collect the results
    if rank == 0:
        for i, nsim in enumerate(nsims):
            fname = join(folder, f"bf_estimators_{args.simname}_{nsim}.npz")
            data = np.load(fname)

            if i == 0:
                bf_volume = np.empty((len(nsims), *data["bf_volume"].shape))
                bf_peery = np.empty_like(bf_volume)
                bf_const = np.empty_like(bf_volume)

                distances = data["distances"]

            bf_volume[i, ...] = data["bf_volume"]
            bf_peery[i, ...] = data["bf_peery"]
            bf_const[i, ...] = data["bf_const"]

            # Remove file from this rank
            remove(fname)

        # Save the results
        fname = join(folder, f"bf_estimators_{args.simname}.npz")
        print(f"Saving final results to `{fname}`.")
        np.savez(fname, bf_volume=bf_volume, bf_peery=bf_peery,
                 bf_const=bf_const, distances=distances)
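For context on what the estimators in this script compare: the volume-average bulk flow within radius R is the mass-weighted mean velocity of everything enclosed, while the Peery+2018-style estimators reconstruct the same quantity from radial velocities alone with a choice of weights ("1/r^2" or "constant" above). Schematically, and glossing over the details of the csiborgtools implementation,

$$ \mathbf{B}(<R) = \frac{\sum_{r_n<R} m_n \mathbf{v}_n}{\sum_{r_n<R} m_n}, \qquad B_i \simeq \sum_j (A^{-1})_{ij} \sum_{r_n<R} w_n\,(\mathbf{v}_n\cdot\hat{\mathbf{r}}_n)\,\hat{r}_{n,j}, \qquad A_{jk} = \sum_{r_n<R} w_n\,\hat{r}_{n,j}\,\hat{r}_{n,k}, $$

where $w_n \propto 1/r_n^2$ is intended to mimic the volume weighting and $w_n = \mathrm{const}$ gives the naive radial estimator. Take this only as a sketch of the idea, not as the exact expressions coded in `csiborgtools.field.bulkflow_peery2018`.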
scripts/old/field_bulk_estimator.sh (Executable file, 21 lines)

@@ -0,0 +1,21 @@
nthreads=20
memory=24
on_login=0
queue="berg"
env="/mnt/zfsusers/rstiskalek/csiborgtools/venv_csiborg/bin/python"
file="field_bulk_estimator.py"

simname="quijote"


pythoncm="$env $file --simname $simname"
if [ $on_login -eq 1 ]; then
    echo $pythoncm
    $pythoncm
else
    cm="addqueue -q $queue -n $nthreads -m $memory $pythoncm"
    echo "Submitting:"
    echo $cm
    echo
    eval $cm
fi
scripts/post_upglade.py (Normal file, 162 lines)

@@ -0,0 +1,162 @@
# Copyright (C) 2024 Richard Stiskalek
# This program is free software; you can redistribute it and/or modify it
# under the terms of the GNU General Public License as published by the
# Free Software Foundation; either version 3 of the License, or (at your
# option) any later version.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General
# Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
"""
Script to calculate cosmological redshifts from observed redshifts assuming
the Carrick+2015 peculiar velocity model. In the future this may be extended
to include other peculiar velocity models.
"""
from datetime import datetime
from os import remove
from os.path import join

import csiborgtools
import numpy as np
from h5py import File
from mpi4py import MPI
from taskmaster import work_delegation  # noqa
from tqdm import tqdm

SPEED_OF_LIGHT = 299792.458  # km / s


def t():
    return datetime.now().strftime("%H:%M:%S")


def load_calibration(catalogue, simname, nsim, ksmooth, verbose=False):
    """Load the pre-computed calibration samples."""
    fname = f"/mnt/extraspace/rstiskalek/csiborg_postprocessing/peculiar_velocity/flow_samples_{catalogue}_{simname}_smooth_{ksmooth}.hdf5"  # noqa
    keys = ["Vext_x", "Vext_y", "Vext_z", "alpha", "beta", "sigma_v"]

    calibration_samples = {}
    with File(fname, 'r') as f:
        for key in keys:
            # NOTE: here the posterior samples are down-sampled
            calibration_samples[key] = f[f"sim_{nsim}/{key}"][:][::10]

    if verbose:
        k = list(calibration_samples.keys())[0]
        nsamples = len(calibration_samples[k])
        print(f"{t()}: found {nsamples} calibration posterior samples.",
              flush=True)

    return calibration_samples


def main(loader, model, indxs, fdir, fname, num_split, verbose):
    out = np.full(
        len(indxs), np.nan,
        dtype=[("mean_zcosmo", float), ("std_zcosmo", float)])

    # Process each galaxy in this split
    for i, n in enumerate(tqdm(indxs, desc=f"Split {num_split}",
                               disable=not verbose)):
        x, y = model.posterior_zcosmo(
            loader.cat["zcmb"][n], loader.cat["RA"][n], loader.cat["DEC"][n],
            loader.los_density[n], loader.los_radial_velocity[n],
            extra_sigma_v=loader.cat["e_zcmb"][n] * SPEED_OF_LIGHT,
            verbose=False)

        mu, std = model.posterior_mean_std(x, y)
        out["mean_zcosmo"][i], out["std_zcosmo"][i] = mu, std

    # Save the results of this rank
    fname = join(fdir, f"{fname}_{num_split}.hdf5")
    with File(fname, 'w') as f:
        f.create_dataset("mean_zcosmo", data=out["mean_zcosmo"])
        f.create_dataset("std_zcosmo", data=out["std_zcosmo"])
        f.create_dataset("indxs", data=indxs)


###############################################################################
#                        Command line interface                               #
###############################################################################


if __name__ == "__main__":
    comm = MPI.COMM_WORLD
    rank, size = comm.Get_rank(), comm.Get_size()
    paths = csiborgtools.read.Paths(**csiborgtools.paths_glamdring)

    # Calibration parameters
    simname = "Carrick2015"
    ksmooth = 0
    nsim = 0
    catalogue_calibration = "Pantheon+_zSN"

    # Galaxy sample parameters
    catalogue = "UPGLADE"
    fpath_data = "/mnt/users/rstiskalek/csiborgtools/data/upglade_z_0p05_all_PROCESSED.h5"  # noqa

    # Number of splits for MPI
    nsplits = 1000

    # Folder to save the results
    fdir = "/mnt/extraspace/rstiskalek/csiborg_postprocessing/peculiar_velocity/UPGLADE"  # noqa
    fname = f"zcosmo_{catalogue}"

    # Load in the data, calibration samples and the model
    loader = csiborgtools.flow.DataLoader(
        simname, nsim, catalogue, fpath_data, paths, ksmooth=ksmooth,
        verbose=rank == 0)
    calibration_samples = load_calibration(
        catalogue_calibration, simname, nsim, ksmooth, verbose=rank == 0)
    model = csiborgtools.flow.Observed2CosmologicalRedshift(
        calibration_samples, loader.rdist, loader._Omega_m)
    if rank == 0:
        print(f"{t()}: the catalogue size is {loader.cat['zcmb'].size}.")
        print(f"{t()}: loaded calibration samples and model.", flush=True)

    # Decide how to split up the job
    if rank == 0:
        indxs = np.arange(loader.cat["zcmb"].size)
        split_indxs = np.array_split(indxs, nsplits)
    else:
        indxs = None
        split_indxs = None
    indxs = comm.bcast(indxs, root=0)
    split_indxs = comm.bcast(split_indxs, root=0)

    # Process all splits with MPI, the rank 0 delegates the jobs.
    def main_wrapper(n):
        main(loader, model, split_indxs[n], fdir, fname, n, verbose=size == 1)

    comm.Barrier()
    work_delegation(
        main_wrapper, list(range(nsplits)), comm, master_verbose=True)
    comm.Barrier()

    # Combine the results to a single file
    if rank == 0:
        print("Combining results from all ranks.", flush=True)
        mean_zcosmo = np.full(loader.cat["zcmb"].size, np.nan)
        std_zcosmo = np.full_like(mean_zcosmo, np.nan)

        for n in range(nsplits):
            fname_current = join(fdir, f"{fname}_{n}.hdf5")
            with File(fname_current, 'r') as f:
                mask = f["indxs"][:]
                mean_zcosmo[mask] = f["mean_zcosmo"][:]
                std_zcosmo[mask] = f["std_zcosmo"][:]

            remove(fname_current)

        # Save the results
        fname = join(fdir, f"{fname}.hdf5")
        print(f"Saving results to `{fname}`.")
        with File(fname, 'w') as f:
            f.create_dataset("mean_zcosmo", data=mean_zcosmo)
            f.create_dataset("std_zcosmo", data=std_zcosmo)
            f.create_dataset("indxs", data=indxs)
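The model inverted by this script is the usual composition of cosmological expansion and the line-of-sight peculiar velocity. Writing V_ext for the external bulk flow and beta for the scaling of the Carrick+2015 velocities, the observed CMB-frame redshift of a galaxy at cosmological redshift z_cosmo satisfies, schematically,

$$ 1 + z_{\rm obs} = (1 + z_{\rm cosmo}) \left(1 + \frac{\mathbf{V}_{\rm pec}\cdot\hat{\mathbf{r}}}{c}\right), \qquad \mathbf{V}_{\rm pec} = \beta\, \mathbf{v}_{\rm Carrick}(\mathbf{r}) + \mathbf{V}_{\rm ext}, $$

and `posterior_zcosmo` marginalises this relation over the calibration samples (alpha, beta, V_ext, sigma_v) loaded above. The exact parameterisation lives in the csiborgtools flow model, so treat this equation only as a sketch of the idea.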
scripts/post_upglade.sh (Executable file, 31 lines)

@@ -0,0 +1,31 @@
nthreads=${1}
on_login=${2}
memory=4
queue="redwood"
env="/mnt/zfsusers/rstiskalek/csiborgtools/venv_csiborg/bin/python"
file="post_upglade.py"


if [[ "$on_login" != "0" && "$on_login" != "1" ]]
then
    echo "Error: on_login must be either 0 or 1."
    exit 1
fi

if ! [[ "$nthreads" =~ ^[0-9]+$ ]] || [ "$nthreads" -le 0 ]; then
    echo "Error: nthreads must be an integer larger than 0."
    exit 1
fi


pythoncm="$env $file"
if [ $on_login -eq 1 ]; then
    echo $pythoncm
    $pythoncm
else
    cm="addqueue -q $queue -n $nthreads -m $memory $pythoncm"
    echo "Submitting:"
    echo $cm
    echo
    eval $cm
fi
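Usage of the submission script above: the first positional argument is the number of threads and the second toggles a local run, so (assuming the Glamdring `addqueue` setup this targets) something like `bash post_upglade.sh 100 0` would submit a 100-core job to the redwood queue, while `bash post_upglade.sh 1 1` would run the Python script directly on the login node.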
scripts/quijote_bulkflow.py (Normal file, 199 lines)

@@ -0,0 +1,199 @@
# Copyright (C) 2023 Richard Stiskalek
# This program is free software; you can redistribute it and/or modify it
# under the terms of the GNU General Public License as published by the
# Free Software Foundation; either version 3 of the License, or (at your
# option) any later version.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General
# Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
"""
A script to calculate the bulk flow in Quijote simulations from either
particles or FoF haloes and to also save the resulting smaller halo catalogues.
"""
from datetime import datetime
from os.path import join

import csiborgtools
import numpy as np
from mpi4py import MPI
from taskmaster import work_delegation  # noqa
from warnings import catch_warnings, simplefilter
from h5py import File
from sklearn.neighbors import NearestNeighbors


###############################################################################
#                 Read in information about the simulation                    #
###############################################################################


def t():
    return datetime.now()


def get_data(nsim, verbose=True):
    if verbose:
        print(f"{t()}: reading particles of simulation `{nsim}`.")
    reader = csiborgtools.read.QuijoteSnapshot(nsim, 4, paths)
    part_pos = reader.coordinates().astype(np.float64)
    part_vel = reader.velocities().astype(np.float64)

    if verbose:
        print(f"{t()}: reading haloes of simulation `{nsim}`.")
    reader = csiborgtools.read.QuijoteCatalogue(nsim)
    halo_pos = reader.coordinates
    halo_vel = reader.velocities
    halo_mass = reader.totmass

    return part_pos, part_vel, halo_pos, halo_vel, halo_mass


def volume_bulk_flow(rdist, mass, vel, distances):
    out = csiborgtools.field.particles_enclosed_momentum(
        rdist, mass, vel, distances)
    with catch_warnings():
        simplefilter("ignore", category=RuntimeWarning)
        out /= csiborgtools.field.particles_enclosed_mass(
            rdist, mass, distances)[:, np.newaxis]

    return out


###############################################################################
#                     Main & command line interface                           #
###############################################################################


def main(nsim, folder, fname_basis, Rmax, subtract_observer_velocity,
         verbose=True):
    boxsize = csiborgtools.simname2boxsize("quijote")
    observers = csiborgtools.read.fiducial_observers(boxsize, Rmax)
    distances = np.linspace(0, Rmax, 101)[1:]
    part_pos, part_vel, halo_pos, halo_vel, halo_mass = get_data(nsim, verbose)

    if verbose:
        print(f"{t()}: Fitting the particle and halo trees of simulation `{nsim}`.")  # noqa
    part_tree = NearestNeighbors().fit(part_pos)
    halo_tree = NearestNeighbors().fit(halo_pos)

    samples = {}
    bf_volume_part = np.full((len(observers), len(distances), 3), np.nan)
    bf_volume_halo = np.full_like(bf_volume_part, np.nan)
    bf_volume_halo_uniform = np.full_like(bf_volume_part, np.nan)
    bf_vrad_weighted_part = np.full_like(bf_volume_part, np.nan)
    bf_vrad_weighted_halo_uniform = np.full_like(bf_volume_part, np.nan)
    bf_vrad_weighted_halo = np.full_like(bf_volume_part, np.nan)

    for i in range(len(observers)):
        print(f"{t()}: Calculating bulk flow for observer {i + 1} of simulation {nsim}.")  # noqa

        # Select particles within Rmax of the observer
        rdist_part, indxs = part_tree.radius_neighbors(
            np.asarray(observers[i]).reshape(1, -1), Rmax,
            return_distance=True, sort_results=True)
        rdist_part, indxs = rdist_part[0], indxs[0]

        part_pos_current = part_pos[indxs] - observers[i]
        part_vel_current = part_vel[indxs]
        # Quijote particle masses are all equal
        part_mass = np.ones_like(rdist_part)

        # Select haloes within Rmax of the observer
        rdist_halo, indxs = halo_tree.radius_neighbors(
            np.asarray(observers[i]).reshape(1, -1), Rmax,
            return_distance=True, sort_results=True)
        rdist_halo, indxs = rdist_halo[0], indxs[0]

        halo_pos_current = halo_pos[indxs] - observers[i]
        halo_vel_current = halo_vel[indxs]
        halo_mass_current = halo_mass[indxs]

        # Subtract the observer velocity
        if subtract_observer_velocity:
            rscale = 0.5  # Mpc / h
            weights = np.exp(-0.5 * (rdist_part / rscale)**2)
            obs_vel_x = np.average(part_vel_current[:, 0], weights=weights)
            obs_vel_y = np.average(part_vel_current[:, 1], weights=weights)
            obs_vel_z = np.average(part_vel_current[:, 2], weights=weights)

            part_vel_current[:, 0] -= obs_vel_x
            part_vel_current[:, 1] -= obs_vel_y
            part_vel_current[:, 2] -= obs_vel_z

            halo_vel_current[:, 0] -= obs_vel_x
            halo_vel_current[:, 1] -= obs_vel_y
            halo_vel_current[:, 2] -= obs_vel_z

        # Calculate the volume average bulk flows
        bf_volume_part[i, ...] = volume_bulk_flow(
            rdist_part, part_mass, part_vel_current, distances)
        bf_volume_halo[i, ...] = volume_bulk_flow(
            rdist_halo, halo_mass_current, halo_vel_current, distances)
        bf_volume_halo_uniform[i, ...] = volume_bulk_flow(
            rdist_halo, np.ones_like(halo_mass_current), halo_vel_current,
            distances)
        bf_vrad_weighted_part[i, ...] = csiborgtools.field.bulkflow_peery2018(
            rdist_part, part_mass, part_pos_current, part_vel_current,
            distances, weights="1/r^2", verbose=False)

        # Calculate the bulk flow from projected velocities w. 1/r^2 weights
        bf_vrad_weighted_halo_uniform[i, ...] = csiborgtools.field.bulkflow_peery2018(  # noqa
            rdist_halo, np.ones_like(halo_mass_current), halo_pos_current,
            halo_vel_current, distances, weights="1/r^2", verbose=False)
        bf_vrad_weighted_halo[i, ...] = csiborgtools.field.bulkflow_peery2018(
            rdist_halo, halo_mass_current, halo_pos_current,
            halo_vel_current, distances, weights="1/r^2", verbose=False)

        # Store the haloes around this observer
        samples[i] = {
            "halo_pos": halo_pos_current,
            "halo_vel": halo_vel_current,
            "halo_mass": halo_mass_current}

    # Finally save the output
    fname = join(folder, f"{fname_basis}_{nsim}.hdf5")
    if verbose:
        print(f"Saving to `{fname}`.")
    with File(fname, 'w') as f:
        f["distances"] = distances
        f["bf_volume_part"] = bf_volume_part
        f["bf_volume_halo"] = bf_volume_halo
        f["bf_vrad_weighted_part"] = bf_vrad_weighted_part
        f["bf_volume_halo_uniform"] = bf_volume_halo_uniform
        f["bf_vrad_weighted_halo_uniform"] = bf_vrad_weighted_halo_uniform
        f["bf_vrad_weighted_halo"] = bf_vrad_weighted_halo

        for i in range(len(observers)):
            g = f.create_group(f"obs_{str(i)}")
            g["halo_pos"] = samples[i]["halo_pos"]
            g["halo_vel"] = samples[i]["halo_vel"]
            g["halo_mass"] = samples[i]["halo_mass"]


if __name__ == "__main__":
    Rmax = 150
    subtract_observer_velocity = True
    folder = "/mnt/extraspace/rstiskalek/quijote/BulkFlow_fiducial"
    fname_basis = "sBF_nsim" if subtract_observer_velocity else "BF_nsim"

    comm = MPI.COMM_WORLD
    rank = comm.Get_rank()

    paths = csiborgtools.read.Paths(**csiborgtools.paths_glamdring)
    nsims = list(paths.get_ics("quijote"))

    def main_wrapper(nsim):
        main(nsim, folder, fname_basis, Rmax, subtract_observer_velocity,
             verbose=rank == 0)

    if rank == 0:
        print(f"Running with {len(nsims)} Quijote simulations.")

    comm.Barrier()
    work_delegation(main_wrapper, nsims, comm, master_verbose=True)
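One detail worth spelling out from the script above: when `subtract_observer_velocity` is set, the local observer velocity is estimated as a Gaussian-weighted mean of the particle velocities around the observer with a smoothing scale of $R_s = 0.5\,\mathrm{Mpc}/h$,

$$ \mathbf{v}_{\rm obs} = \frac{\sum_n w_n\, \mathbf{v}_n}{\sum_n w_n}, \qquad w_n = \exp\left(-\frac{r_n^2}{2 R_s^2}\right), $$

and this velocity is then subtracted from both the particle and the halo velocities before any bulk flow is computed, so the measured flows are expressed in the rest frame of a local observer rather than in the simulation box frame.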
scripts/quijote_bulkflow.sh (Executable file, 19 lines)

@@ -0,0 +1,19 @@
nthreads=20
memory=24
on_login=0
queue="berg"
env="/mnt/zfsusers/rstiskalek/csiborgtools/venv_csiborg/bin/python"
file="quijote_bulkflow.py"


pythoncm="$env $file"
if [ $on_login -eq 1 ]; then
    echo $pythoncm
    $pythoncm
else
    cm="addqueue -q $queue -n $nthreads -m $memory $pythoncm"
    echo "Submitting:"
    echo $cm
    echo
    eval $cm
fi