mirror of
https://github.com/Richard-Sti/csiborgtools_public.git
synced 2025-05-12 13:41:13 +00:00
Add density field plot and start preparing CSiBORG2 (#94)
* Add RAMSES2HDF5 conversion * Upload changes * Clean up * More clean up * updates * Little change * pep9 * Add basic SPH calculation for a snapshot * Add submit script * Remove echo * Little changes * Send off changes * Little formatting * Little updates * Add nthreads argument * Upload chagnes * Add nthreads arguemnts * Some local changes.. * Update scripts * Add submission script * Update script * Update params * Rename CSiBORGBox to CSiBORG1box * Rename CSiBORG1 reader * Move files * Rename folder again * Add basic plotting here * Add new skeletons * Move def * Update nbs * Edit directories * Rename files * Add units to converted snapshots * Fix empty dataset bug * Delete file * Edits to submission scripts * Edit paths * Update .gitignore * Fix attrs * Update weighting * Fix RA/dec bug * Add FORNAX cluster * Little edit * Remove boxes since will no longer need * Move func back * Edit to include sort by membership * Edit paths * Purge basic things * Start removing * Bring file back * Scratch * Update the rest * Improve the entire file * Remove old things * Remove old * Delete old things * Fully updates * Rename file * Edit submit script * Little things * Add print statement * Add here cols_to_structured * Edit halo cat * Remove old import * Add comment * Update paths manager * Move file * Remove file * Add chains
This commit is contained in:
parent
6042a87111
commit
aaa14fc880
30 changed files with 1682 additions and 1728 deletions
108
scripts_independent/field_obs_vp.py
Normal file
108
scripts_independent/field_obs_vp.py
Normal file
|
@ -0,0 +1,108 @@
|
|||
# Copyright (C) 2022 Richard Stiskalek
|
||||
# This program is free software; you can redistribute it and/or modify it
|
||||
# under the terms of the GNU General Public License as published by the
|
||||
# Free Software Foundation; either version 3 of the License, or (at your
|
||||
# option) any later version.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful, but
|
||||
# WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General
|
||||
# Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License along
|
||||
# with this program; if not, write to the Free Software Foundation, Inc.,
|
||||
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
|
||||
"""
|
||||
Script to calculate the peculiar velocity of an observer in the centre of the
|
||||
CSiBORG box.
|
||||
"""
|
||||
from argparse import ArgumentParser
|
||||
from distutils.util import strtobool
|
||||
|
||||
import numpy
|
||||
from mpi4py import MPI
|
||||
|
||||
from taskmaster import work_delegation
|
||||
from tqdm import tqdm
|
||||
from utils import get_nsims
|
||||
|
||||
try:
|
||||
import csiborgtools
|
||||
except ModuleNotFoundError:
|
||||
import sys
|
||||
sys.path.append("../")
|
||||
import csiborgtools
|
||||
|
||||
|
||||
def observer_peculiar_velocity(nsim, parser_args):
    """
    Calculate the peculiar velocity of an observer sitting at the centre of
    the simulation box, for a range of field smoothing scales.

    Parameters
    ----------
    nsim : int
        IC realisation index.
    parser_args : Namespace
        Parsed command-line arguments (uses `MAS`, `grid`, `verbose`).

    Returns
    -------
    observer_vp : 2-dimensional array of shape `(len(smooth_scales), 3)`
    """
    # Observer sits at the centre of the unit cube (box coordinates).
    pos = numpy.array([0.5, 0.5, 0.5]).reshape(-1, 3)
    boxsize = 677.7
    smooth_scales = [0, 0.5, 1.0, 1.5, 2.0, 2.5, 3.0, 3.5, 4.0]

    observer_vp = numpy.full((len(smooth_scales), 3), numpy.nan,
                             dtype=numpy.float32)

    paths = csiborgtools.read.Paths(**csiborgtools.paths_glamdring)
    field_path = paths.field("velocity", parser_args.MAS, parser_args.grid,
                             nsim, in_rsp=False)
    field0 = numpy.load(field_path)

    iterator = tqdm(smooth_scales, desc="Smoothing the fields",
                    disable=not parser_args.verbose)
    for i, smooth_scale in enumerate(iterator):
        if smooth_scale > 0:
            # Smooth each Cartesian velocity component separately.
            field = [csiborgtools.field.smoothen_field(field0[k],
                                                       smooth_scale, boxsize)
                     for k in range(3)]
        else:
            field = field0

        v = csiborgtools.field.evaluate_cartesian(
            field[0], field[1], field[2], pos=pos)
        for k in range(3):
            observer_vp[i, k] = v[k][0]

    fout = paths.observer_peculiar_velocity(parser_args.MAS, parser_args.grid,
                                            nsim)
    if parser_args.verbose:
        print(f"Saving to ... `{fout}`")
    numpy.savez(fout, smooth_scales=smooth_scales, observer_vp=observer_vp)
    return observer_vp
|
||||
|
||||
|
||||
###############################################################################
|
||||
# Command line interface #
|
||||
###############################################################################
|
||||
|
||||
|
||||
if __name__ == "__main__":
    parser = ArgumentParser()
    parser.add_argument("--nsims", type=int, nargs="+", default=None,
                        help="IC realisations. `-1` for all simulations.")
    # NOTE(review): `--kind` is accepted but never read in this script; kept
    # for command-line compatibility with the other field scripts.
    parser.add_argument("--kind", type=str,
                        choices=["density", "rspdensity", "velocity", "radvel",
                                 "potential", "environment"],
                        help="What derived field to calculate?")
    parser.add_argument("--MAS", type=str,
                        choices=["NGP", "CIC", "TSC", "PCS"])
    parser.add_argument("--grid", type=int, help="Grid resolution.")
    parser.add_argument("--verbose", type=lambda x: bool(strtobool(x)),
                        help="Verbosity flag for reading in particles.")
    # Fixed: the help text used to be a copy-paste of the `--verbose` help.
    parser.add_argument("--simname", type=str, default="csiborg",
                        help="Simulation name.")
    parser_args = parser.parse_args()

    comm = MPI.COMM_WORLD
    paths = csiborgtools.read.Paths(**csiborgtools.paths_glamdring)
    nsims = get_nsims(parser_args, paths)

    def main(nsim):
        # Delegated unit of work: one IC realisation per MPI task.
        return observer_peculiar_velocity(nsim, parser_args)

    work_delegation(main, nsims, comm, master_verbose=True)
|
201
scripts_independent/field_sph.py
Normal file
201
scripts_independent/field_sph.py
Normal file
|
@ -0,0 +1,201 @@
|
|||
# Copyright (C) 2023 Richard Stiskalek
|
||||
# This program is free software; you can redistribute it and/or modify it
|
||||
# under the terms of the GNU General Public License as published by the
|
||||
# Free Software Foundation; either version 3 of the License, or (at your
|
||||
# option) any later version.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful, but
|
||||
# WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General
|
||||
# Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License along
|
||||
# with this program; if not, write to the Free Software Foundation, Inc.,
|
||||
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
|
||||
"""
|
||||
Script to construct the density and velocity fields for a simulation snapshot.
|
||||
The SPH filter is implemented in the cosmotool package.
|
||||
"""
|
||||
from argparse import ArgumentParser
|
||||
from os import environ, remove
|
||||
from os.path import join, exists
|
||||
import subprocess
|
||||
from datetime import datetime
|
||||
|
||||
import hdf5plugin # noqa
|
||||
import numpy as np
|
||||
from h5py import File
|
||||
|
||||
|
||||
def now():
    """Return the current local wall-clock time, used for log timestamps."""
    current = datetime.now()
    return current
|
||||
|
||||
|
||||
def generate_unique_id(file_path):
    """
    Turn a file path into a filesystem-safe identifier by mapping both the
    path separator `/` and `:` to underscores.
    """
    return file_path.translate(str.maketrans("/:", "__"))
|
||||
|
||||
|
||||
def prepare_random(temporary_output_path, npart=100, dtype=np.float32):
    """
    Prepare a small random particle dataset for exercising the SPH filter.

    Writes an `(npart, 7)` array of uniform randoms -- with column 6 (the
    mass column) set to unity -- to `temporary_output_path` and returns the
    box size, which is 1 for this unit-cube dataset.

    NOTE(review): an earlier draft also built uniform positions and Gaussian
    velocities in a separate array, but that array was never written out;
    the dead code has been removed without changing what is saved.

    Parameters
    ----------
    temporary_output_path : str
        Path of the HDF5 file to write.
    npart : int, optional
        Number of random particles.
    dtype : numpy dtype, optional
        Floating-point dtype of the stored array.

    Returns
    -------
    boxsize : float
    """
    print("Preparing random dataset.", flush=True)

    dset = np.random.random((npart, 7)).astype(dtype)
    dset[:, 6] = np.ones(npart, dtype=dtype)

    with File(temporary_output_path, 'w') as target:
        target.create_dataset("particles", data=dset, dtype=dtype)

    return 1.
|
||||
|
||||
|
||||
def prepare_gadget(snapshot_path, temporary_output_path):
    """
    Prepare a GADGET snapshot for the SPH filter. Assumes there is only a
    single file per snapshot.

    Copies all particles into one `(npart, 7)` float32 dataset named
    `particles`: columns 0-2 positions, 3-5 velocities, 6 masses.

    Parameters
    ----------
    snapshot_path : str
        Path to the GADGET HDF5 snapshot.
    temporary_output_path : str
        Path of the HDF5 file to write for the SPH executable.

    Returns
    -------
    boxsize
        The `BoxSize` header attribute of the snapshot.
    """
    with File(snapshot_path, 'r') as source, File(temporary_output_path, 'w') as target:  # noqa
        boxsize = source["Header"].attrs["BoxSize"]

        # Total particle count over all types; index 1 is taken to be the
        # high-resolution species, index 5 the low-resolution one.
        npart = sum(source["Header"].attrs["NumPart_Total"])
        nhighres = source["Header"].attrs["NumPart_Total"][1]

        dset = target.create_dataset("particles", (npart, 7), dtype=np.float32)

        # Copy to this dataset the high-resolution particles. Their mass is
        # uniform and read from the header's `MassTable`.
        dset[:nhighres, :3] = source["PartType1/Coordinates"][:]
        dset[:nhighres, 3:6] = source["PartType1/Velocities"][:]
        dset[:nhighres, 6] = np.ones(nhighres, dtype=np.float32) * source["Header"].attrs["MassTable"][1]  # noqa

        # Now copy the low-resolution particles, whose masses vary and are
        # stored per-particle.
        dset[nhighres:, :3] = source["PartType5/Coordinates"][:]
        dset[nhighres:, 3:6] = source["PartType5/Velocities"][:]
        dset[nhighres:, 6] = source["PartType5/Masses"][:]

    return boxsize
|
||||
|
||||
|
||||
def run_sph_filter(particles_path, output_path, boxsize, resolution,
                   SPH_executable):
    """
    Run the `simple3DFilter` SPH executable on a prepared particle file,
    streaming its stdout/stderr to this process's stdout as it runs.

    Parameters
    ----------
    particles_path : str
        Path to the HDF5 file containing the `(npart, 7)` particle array.
    output_path : str
        Path where the executable writes its output.
    boxsize : int or float
        Box size in the snapshot's length units.
    resolution : int
        Grid resolution of the output field.
    SPH_executable : str
        Path to the `simple3DFilter` binary.

    Raises
    ------
    RuntimeError
        If an input file is missing or the executable exits non-zero.
    TypeError
        If `boxsize` or `resolution` has the wrong type.
    """
    if not exists(particles_path):
        raise RuntimeError(f"Particles file `{particles_path}` does not exist.")  # noqa
    if not isinstance(boxsize, (int, float)):
        raise TypeError("`boxsize` must be a number.")
    if not isinstance(resolution, int):
        raise TypeError("`resolution` must be an integer.")
    if not exists(SPH_executable):
        raise RuntimeError(f"SPH executable `{SPH_executable}` does not exist.")  # noqa

    # Positional argument layout expected by simple3DFilter -- TODO confirm
    # the meaning of `1e14` and the three zeros against the cosmotool sample.
    command = [SPH_executable, particles_path, str(1e14), str(boxsize),
               str(resolution), str(0), str(0), str(0), output_path, "1"]
    print(f"{now()}: executing `simple3DFilter`.", flush=True)
    start_time = now()
    # Merge stderr into stdout and relay line-by-line so progress is visible
    # in the batch log while the filter runs.
    process = subprocess.Popen(
        command, stdout=subprocess.PIPE, stderr=subprocess.STDOUT,
        universal_newlines=True)

    for line in iter(process.stdout.readline, ""):
        print(line, end="", flush=True)
    process.wait()

    if process.returncode != 0:
        # Fixed: messages used to read "`simple3DFilter`failed." /
        # "`simple3DFilter`completed" with a missing space.
        raise RuntimeError("`simple3DFilter` failed.")
    else:
        dt = now() - start_time
        print(f"{now()}: `simple3DFilter` completed successfully in {dt}.",
              flush=True)
|
||||
|
||||
|
||||
def main(snapshot_path, output_path, resolution, scratch_space, SPH_executable,
         snapshot_kind):
    """
    Construct the density and velocity fields for a simulation snapshot using
    `cosmotool` [1].

    Parameters
    ----------
    snapshot_path : str
        Path to the simulation snapshot.
    output_path : str
        Path to the output HDF5 file.
    resolution : int
        Resolution of the density field.
    scratch_space : str
        Path to a folder where temporary files can be stored.
    SPH_executable : str
        Path to the `simple3DFilter` executable [1].
    snapshot_kind : str
        Kind of the simulation snapshot. Currently only `gadget4` is supported.

    Returns
    -------
    None

    References
    ----------
    [1] https://bitbucket.org/glavaux/cosmotool/src/master/sample/simple3DFilter.cpp # noqa
    """
    if snapshot_kind != "gadget4":
        raise NotImplementedError("Only GADGET HDF5 snapshots are supported.")

    # Echo the full configuration so it is recorded in the batch job log.
    print("---------- SPH Density & Velocity Field Job Information ----------")
    print(f"Snapshot path: {snapshot_path}")
    print(f"Output path: {output_path}")
    print(f"Resolution: {resolution}")
    print(f"Scratch space: {scratch_space}")
    print(f"SPH executable: {SPH_executable}")
    print(f"Snapshot kind: {snapshot_kind}")
    print("------------------------------------------------------------------")
    print(flush=True)

    # Derive the temporary file name from the snapshot path so that
    # concurrent jobs on different snapshots cannot collide.
    temporary_output_path = join(
        scratch_space, generate_unique_id(snapshot_path))

    if not temporary_output_path.endswith(".hdf5"):
        raise RuntimeError("Temporary output path must end with `.hdf5`.")

    print(f"{now()}: preparing snapshot...", flush=True)
    boxsize = prepare_gadget(snapshot_path, temporary_output_path)
    print(f"{now()}: wrote temporary data to {temporary_output_path}.",
          flush=True)

    run_sph_filter(temporary_output_path, output_path, boxsize, resolution,
                   SPH_executable)
    print(f"{now()}: removing the temporary snapshot file.", flush=True)
    try:
        remove(temporary_output_path)
    except FileNotFoundError:
        # Already gone -- nothing to clean up.
        pass
|
||||
|
||||
|
||||
if __name__ == "__main__":
    # Command-line entry point; every argument is required.
    parser = ArgumentParser(description="Generate SPH density and velocity field.")  # noqa
    parser.add_argument("--snapshot_path", type=str, required=True,
                        help="Path to the simulation snapshot.")
    parser.add_argument("--output_path", type=str, required=True,
                        help="Path to the output HDF5 file.")
    parser.add_argument("--resolution", type=int, required=True,
                        help="Resolution of the density and velocity field.")
    parser.add_argument("--scratch_space", type=str, required=True,
                        help="Path to a folder where temporary files can be stored.")  # noqa
    parser.add_argument("--SPH_executable", type=str, required=True,
                        help="Path to the `simple3DFilter` executable.")
    parser.add_argument("--snapshot_kind", type=str, required=True,
                        choices=["gadget4"],
                        help="Kind of the simulation snapshot.")
    args = parser.parse_args()

    main(args.snapshot_path, args.output_path, args.resolution,
         args.scratch_space, args.SPH_executable, args.snapshot_kind)
|
40
scripts_independent/field_sph.sh
Executable file
40
scripts_independent/field_sph.sh
Executable file
|
@ -0,0 +1,40 @@
|
|||
#!/bin/sh
# SLURM job: build the SPH density & velocity field for one CSiBORG2 chain
# by driving `field_sph.py` (which wraps cosmotool's simple3DFilter).
#SBATCH --ntasks-per-node=1
#SBATCH --nodes=1
#SBATCH --cpus-per-task=16
#SBATCH --mem-per-cpu=7000
#SBATCH -J SPH
#SBATCH -o output_%J.out
#SBATCH -e error_%J.err
#SBATCH -p cosma8-serial
#SBATCH -A dp016
#SBATCH -t 04:00:00
#SBATCH --mail-type=BEGIN,END,FAIL
#SBATCH --mail-user=richard.stiskalek@physics.ox.ac.uk


module purge
module load intel_comp/2019
module load intel_mpi/2019
module load hdf5
module load fftw
module load gsl
module load cmake
module load python/3.10.12
module list

source /cosma/home/dp016/dc-stis1/csiborgtools/venv_csiborgtools/bin/activate
# Match the OpenMP thread count to --cpus-per-task above.
export OMP_NUM_THREADS=16
export OMP_NESTED=true

# ADD CHAINS HERE
snapshot_path="/cosma8/data/dp016/dc-stis1/csiborg2_main/chain_15517/output/snapshot_099_full.hdf5"
output_path="/cosma8/data/dp016/dc-stis1/csiborg2_main/field/chain_15517.hdf5"
resolution=256
scratch_space="/cosma8/data/dp016/dc-stis1/csiborg2_main/field"
SPH_executable="/cosma8/data/dp016/dc-stis1/cosmotool/bld2/sample/simple3DFilter"
snapshot_kind="gadget4"


# Fixed: expansions are now quoted so paths containing spaces cannot
# word-split or glob.
python3 field_sph.py --snapshot_path "$snapshot_path" --output_path "$output_path" --resolution "$resolution" --scratch_space "$scratch_space" --SPH_executable "$SPH_executable" --snapshot_kind "$snapshot_kind"
|
737
scripts_independent/process_snapshot.py
Normal file
737
scripts_independent/process_snapshot.py
Normal file
|
@ -0,0 +1,737 @@
|
|||
# Copyright (C) 2023 Richard Stiskalek
|
||||
# This program is free software; you can redistribute it and/or modify it
|
||||
# under the terms of the GNU General Public License as published by the
|
||||
# Free Software Foundation; either version 3 of the License, or (at your
|
||||
# option) any later version.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful, but
|
||||
# WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General
|
||||
# Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License along
|
||||
# with this program; if not, write to the Free Software Foundation, Inc.,
|
||||
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
|
||||
"""
|
||||
Script to process simulation snapshots to sorted HDF5 files. Be careful
|
||||
because reading the HDF5 file may require `hdf5plugin` package to be installed.
|
||||
The snapshot particles are sorted by their halo ID, so that particles of a halo
|
||||
can be accessed by slicing the array.
|
||||
|
||||
CSiBORG1 reader will complain unless it can find the halomaker FOF files
|
||||
where it expects them:
|
||||
fdir = f"/mnt/extraspace/rstiskalek/csiborg1/chain_{self.nsim}/FOF"
|
||||
"""
|
||||
from abc import ABC, abstractmethod
|
||||
from argparse import ArgumentParser
|
||||
from datetime import datetime
|
||||
from gc import collect
|
||||
from glob import glob, iglob
|
||||
from os import makedirs
|
||||
from os.path import basename, dirname, exists, join
|
||||
from warnings import catch_warnings, filterwarnings, warn
|
||||
|
||||
import hdf5plugin
|
||||
import numpy
|
||||
import pynbody
|
||||
import readgadget
|
||||
from astropy import constants, units
|
||||
from h5py import File
|
||||
from numba import jit
|
||||
from readfof import FoF_catalog
|
||||
from tqdm import tqdm, trange
|
||||
|
||||
MSUNCGS = constants.M_sun.cgs.value
|
||||
BLOSC_KWARGS = {"cname": "blosclz",
|
||||
"clevel": 9,
|
||||
"shuffle": hdf5plugin.Blosc.SHUFFLE,
|
||||
}
|
||||
|
||||
|
||||
###############################################################################
|
||||
# Utility functions #
|
||||
###############################################################################
|
||||
|
||||
|
||||
def now():
    """Return the current local time as a :class:`datetime.datetime`."""
    current = datetime.now()
    return current
|
||||
|
||||
|
||||
def flip_cols(arr, col1, col2):
    """
    Swap, in place, the contents of columns `col1` and `col2` of the
    structured array `arr`.
    """
    fields = arr.dtype.names
    if not (col1 in fields and col2 in fields):
        raise ValueError(f"Both `{col1}` and `{col2}` must exist in `arr`.")

    # Copy one column out first so the in-place field assignments do not
    # clobber each other.
    stash = numpy.copy(arr[col1])
    arr[col1] = arr[col2]
    arr[col2] = stash
|
||||
|
||||
|
||||
def convert_str_to_num(s):
    """
    Convert a string representation of a number to its appropriate numeric
    type, trying `int` first and `float` second. If neither conversion
    succeeds, a `UserWarning` is emitted and the string is returned as-is.

    Parameters
    ----------
    s : str
        The string representation of the number.

    Returns
    -------
    num : int, float or str
    """
    for caster in (int, float):
        try:
            return caster(s)
        except ValueError:
            pass

    warn(f"Cannot convert string '{s}' to number", UserWarning)
    return s
|
||||
|
||||
|
||||
def cols_to_structured(N, cols):
    """
    Allocate a length-`N` structured array whose fields are given by `cols`,
    a list of `(name, dtype)` tuples. All fields are initialised to NaN.
    """
    well_formed = isinstance(cols, list) and all(
        isinstance(entry, tuple) and len(entry) == 2 for entry in cols)
    if not well_formed:
        raise TypeError("`cols` must be a list of (name, dtype) tuples.")

    dtype = {"names": [name for name, _ in cols],
             "formats": [fmt for _, fmt in cols]}

    return numpy.full(N, numpy.nan, dtype=dtype)
|
||||
|
||||
|
||||
###############################################################################
|
||||
# Base reader of snapshots #
|
||||
###############################################################################
|
||||
|
||||
|
||||
class BaseReader(ABC):
    """Abstract interface that every snapshot reader must implement."""

    @abstractmethod
    def read_info(self):
        """Return a dictionary of snapshot header information."""

    @abstractmethod
    def read_snapshot(self, kind):
        """Read a particle property selected by `kind`."""

    @abstractmethod
    def read_halo_id(self, pids):
        """Return the halo ID of each particle in `pids`."""

    @abstractmethod
    def read_halos(self):
        """Return the halo catalogue."""
|
||||
|
||||
|
||||
###############################################################################
|
||||
# CSiBORG particle reader #
|
||||
###############################################################################
|
||||
|
||||
|
||||
class CSiBORG1Reader:
    """
    Object to read in CSiBORG snapshots from the binary files and halo
    catalogues.

    Parameters
    ----------
    nsim : int
        IC realisation index.
    which_snapshot : str
        Which snapshot to read. Options are `initial` or `final`.
    """
    def __init__(self, nsim, which_snapshot):
        self.nsim = nsim
        # Hard-coded cluster location of the raw RAMSES outputs.
        base_dir = "/mnt/extraspace/hdesmond/"

        if which_snapshot == "initial":
            self.nsnap = 1
            raise RuntimeError("TODO not implemented")
            self.source_dir = None  # NOTE(review): unreachable after raise
        elif which_snapshot == "final":
            sourcedir = join(base_dir, f"ramses_out_{nsim}")
            # The final snapshot is the `output_*` folder with the highest
            # index.
            self.nsnap = max([int(basename(f).replace("output_", ""))
                              for f in glob(join(sourcedir, "output_*"))])
            self.source_dir = join(sourcedir,
                                   f"output_{str(self.nsnap).zfill(5)}")
        else:
            raise ValueError(f"Unknown snapshot option `{which_snapshot}`.")

        self.output_dir = f"/mnt/extraspace/rstiskalek/csiborg1/chain_{self.nsim}"  # noqa
        self.output_snap = join(self.output_dir,
                                f"snapshot_{str(self.nsnap).zfill(5)}.hdf5")
        self.output_cat = join(self.output_dir,
                               f"fof_{str(self.nsnap).zfill(5)}.hdf5")
        # Halomaker FOF files are expected here (see module docstring).
        self.halomaker_dir = join(self.output_dir, "FOF")

    def read_info(self):
        """
        Parse the RAMSES `info_*` file of the snapshot into a dictionary
        mapping header keys to numeric values.
        """
        filename = glob(join(self.source_dir, "info_*"))
        if len(filename) > 1:
            raise ValueError("Found too many `info` files.")
        # NOTE(review): an empty glob raises a bare IndexError below rather
        # than a descriptive error -- confirm the file always exists.
        filename = filename[0]

        with open(filename, "r") as f:
            info = f.read().split()
        # Throw anything below the `ordering` line out.
        info = numpy.asarray(info[:info.index("ordering")])
        # Get indices of `=` tokens; the tokens immediately before/after
        # each one are the key/value pairs.
        eqs = numpy.asarray([i for i in range(info.size) if info[i] == '='])

        keys = info[eqs - 1]
        vals = info[eqs + 1]
        return {key: convert_str_to_num(val) for key, val in zip(keys, vals)}

    def read_snapshot(self, kind):
        """
        Read a particle property `kind` (`pid`, `pos`, `vel` or `mass`)
        with pynbody, converting box units to Mpc / h, km / s or Msun / h
        respectively.
        """
        with catch_warnings():
            # pynbody warns while loading RAMSES outputs; silence it.
            filterwarnings("ignore", category=UserWarning)
            sim = pynbody.load(self.source_dir)

        info = self.read_info()

        if kind == "pid":
            x = numpy.array(sim["iord"], dtype=numpy.uint32)
        elif kind == "pos":
            x = numpy.array(sim[kind], dtype=numpy.float32)
            # Convert box units to Mpc / h
            box2mpc = (info["unit_l"] / units.kpc.to(units.cm) / info["aexp"]
                       * 1e-3 * info["H0"] / 100)
            x *= box2mpc
        elif kind == "mass":
            x = numpy.array(sim[kind], dtype=numpy.float32)
            # Convert box units to Msun / h
            box2msun = (info["unit_d"] * info["unit_l"]**3 / MSUNCGS
                        * info["H0"] / 100)
            x *= box2msun
        elif kind == "vel":
            # float16 keeps the velocity array's memory footprint down.
            x = numpy.array(sim[kind], dtype=numpy.float16)
            # Convert box units to km / s
            box2kms = (1e-2 * info["unit_l"] / info["unit_t"] / info["aexp"]
                       * 1e-3)
            x *= box2kms
        else:
            raise ValueError(f"Unknown kind `{kind}`. "
                             "Options are: `pid`, `pos`, `vel` or `mass`.")

        # Because of a RAMSES bug x and z are flipped.
        if kind in ["pos", "vel"]:
            print(f"For kind `{kind}` flipping x and z.")
            x[:, [0, 2]] = x[:, [2, 0]]

        del sim
        collect()

        return x

    def read_halo_id(self, pids):
        """
        Return the halo ID of each particle in `pids`, read from the
        Halomaker particle-membership file. Particles not listed in the
        file keep halo ID 0.
        """
        fpath = join(self.halomaker_dir, "*particle_membership*")
        fpath = next(iglob(fpath, recursive=True), None)
        if fpath is None:
            raise FileNotFoundError(f"Found no Halomaker files in `{self.halomaker_dir}`.")  # noqa

        print(f"{now()}: mapping particle IDs to their indices.")
        pids_idx = {pid: i for i, pid in enumerate(pids)}

        # Unassigned particle IDs are assigned a halo ID of 0.
        print(f"{now()}: mapping HIDs to their array indices.")
        hids = numpy.zeros(pids.size, dtype=numpy.int32)

        # Read line-by-line to avoid loading the whole file into memory.
        with open(fpath, 'r') as file:
            for line in tqdm(file, desc="Reading membership"):
                hid, pid = map(int, line.split())
                hids[pids_idx[pid]] = hid

        del pids_idx
        collect()

        return hids

    def read_halos(self):
        """
        Read the Halomaker `fort.132` catalogue into a structured array
        with box-centred positions and masses scaled to Msun / h.
        """
        info = self.read_info()
        h = info["H0"] / 100

        fpath = join(self.halomaker_dir, "fort.132")
        hid = numpy.genfromtxt(fpath, usecols=0, dtype=numpy.int32)
        pos = numpy.genfromtxt(fpath, usecols=(1, 2, 3), dtype=numpy.float32)
        totmass = numpy.genfromtxt(fpath, usecols=4, dtype=numpy.float32)
        m200c = numpy.genfromtxt(fpath, usecols=5, dtype=numpy.float32)

        dtype = {"names": ["index", "x", "y", "z", "totpartmass", "m200c"],
                 "formats": [numpy.int32] + [numpy.float32] * 5}
        out = numpy.full(hid.size, numpy.nan, dtype=dtype)
        out["index"] = hid
        # Scale by h and shift to the centre of a 677.7 Mpc / h box --
        # presumably fort.132 positions are box-centred in Mpc; confirm.
        out["x"] = pos[:, 0] * h + 677.7 / 2
        out["y"] = pos[:, 1] * h + 677.7 / 2
        out["z"] = pos[:, 2] * h + 677.7 / 2

        # Because of a RAMSES bug x and z are flipped.
        flip_cols(out, "x", "z")
        out["totpartmass"] = totmass * 1e11 * h
        out["m200c"] = m200c * 1e11 * h

        return out
|
||||
|
||||
|
||||
###############################################################################
|
||||
# CSiBORG2 particle reader #
|
||||
###############################################################################
|
||||
|
||||
|
||||
class CSiBORG2Reader(BaseReader):
    """
    Object to read in CSiBORG2 snapshots. Because this is Gadget4 the final
    snapshot is already sorted, however we still have to sort the initial
    snapshot.

    Parameters
    ----------
    nsim : int
        IC realisation index.
    which_snapshot : str
        Which snapshot to read. Options are `initial` or `final`.
    kind : str
        CSiBORG2 suite. Options are `main`, `random` or `varysmall`.
    """
    def __init__(self, nsim, which_snapshot, kind):
        self.nsim = nsim
        if kind not in ["main", "random", "varysmall"]:
            raise ValueError(f"Unknown kind `{kind}`.")
        base_dir = f"/mnt/extraspace/rstiskalek/csiborg2_{kind}"

        # Snapshot 0 is the ICs; snapshot 99 is z = 0.
        if which_snapshot == "initial":
            self.nsnap = 0
        elif which_snapshot == "final":
            self.nsnap = 99
        else:
            raise ValueError(f"Unknown snapshot option `{which_snapshot}`.")

        self.source_dir = join(
            base_dir, f"chain_{nsim}", "output",
            f"snapshot_{str(self.nsnap).zfill(3)}_full.hdf5")

        self.output_dir = join(base_dir, f"chain_{nsim}", "output")
        self.output_snap = join(
            self.output_dir,
            f"snapshot_{str(self.nsnap).zfill(3)}_sorted.hdf5")
        # No separate catalogue is produced for CSiBORG2.
        self.output_cat = None

    def read_info(self):
        """
        Read header and cosmology attributes into a dictionary.

        NOTE(review): the attributes are always read from the final
        (`snapshot_99`) file, even when `which_snapshot="initial"` --
        presumably because both snapshots share the same parameters;
        confirm before relying on redshift-dependent entries.
        """
        fpath = join(dirname(self.source_dir), "snapshot_99_full.hdf5")

        with File(fpath, 'r') as f:
            header = f["Header"]
            params = f["Parameters"]

            out = {"BoxSize": header.attrs["BoxSize"],
                   "MassTable": header.attrs["MassTable"],
                   "NumPart_Total": header.attrs["NumPart_Total"],
                   "Omega_m": params.attrs["Omega0"],
                   "Omega_l": params.attrs["OmegaLambda"],
                   "Omega_b": params.attrs["OmegaBaryon"],
                   "h": params.attrs["HubbleParam"],
                   "redshift": header.attrs["Redshift"],
                   }
        return out

    def read_snapshot(self, kind):
        # Not needed yet for Gadget4 outputs (already sorted).
        raise RuntimeError("TODO Not implemented.")

    def read_halo_id(self, pids):
        raise RuntimeError("TODO Not implemented.")

    def read_halos(self):
        raise RuntimeError("TODO Not implemented.")
|
||||
|
||||
|
||||
###############################################################################
|
||||
# Quijote particle reader #
|
||||
###############################################################################
|
||||
|
||||
|
||||
class QuijoteReader:
|
||||
"""
|
||||
Object to read in Quijote snapshots from the binary files.
|
||||
|
||||
Parameters
|
||||
----------
|
||||
nsim : int
|
||||
IC realisation index.
|
||||
which_snapshot : str
|
||||
Which snapshot to read. Options are `initial` or `final`.
|
||||
"""
|
||||
def __init__(self, nsim, which_snapshot):
|
||||
self.nsim = nsim
|
||||
quijote_dir = "/mnt/extraspace/rstiskalek/quijote"
|
||||
|
||||
if which_snapshot == "initial":
|
||||
self.nsnap = -1
|
||||
snap_str = "ICs"
|
||||
self.source_dir = join(quijote_dir, "Snapshots_fiducial",
|
||||
str(nsim), "ICs", "ics")
|
||||
elif which_snapshot == "final":
|
||||
self.nsnap = 4
|
||||
snap_str = str(self.nsnap).zfill(3)
|
||||
self.source_dir = join(
|
||||
quijote_dir, "Snapshots_fiducial",
|
||||
str(nsim), f"snapdir_{snap_str}", f"snap_{snap_str}")
|
||||
else:
|
||||
raise ValueError(f"Unknown snapshot option `{which_snapshot}`.")
|
||||
|
||||
self.fof_dir = join(quijote_dir, "Halos_fiducial", str(nsim))
|
||||
self.output_dir = f"/mnt/extraspace/rstiskalek/quijote/fiducial_processed/chain_{self.nsim}" # noqa
|
||||
self.output_snap = join(self.output_dir, f"snapshot_{snap_str}.hdf5")
|
||||
self.output_cat = join(self.output_dir, f"fof_{snap_str}.hdf5")
|
||||
|
||||
def read_info(self):
|
||||
header = readgadget.header(self.source_dir)
|
||||
out = {"BoxSize": header.boxsize / 1e3, # Mpc/h
|
||||
"Nall": header.nall[1], # Tot num of particles
|
||||
"PartMass": header.massarr[1] * 1e10, # Part mass in Msun/h
|
||||
"Omega_m": header.omega_m,
|
||||
"Omega_l": header.omega_l,
|
||||
"h": header.hubble,
|
||||
"redshift": header.redshift,
|
||||
}
|
||||
out["TotMass"] = out["Nall"] * out["PartMass"]
|
||||
out["Hubble"] = (100.0 * numpy.sqrt(
|
||||
header.omega_m * (1.0 + header.redshift)**3 + header.omega_l))
|
||||
return out
|
||||
|
||||
def read_snapshot(self, kind):
|
||||
info = self.read_info()
|
||||
ptype = [1] # DM
|
||||
|
||||
if kind == "pid":
|
||||
return readgadget.read_block(self.source_dir, "ID ", ptype)
|
||||
elif kind == "pos":
|
||||
pos = readgadget.read_block(self.source_dir, "POS ", ptype) / 1e3
|
||||
return pos.astype(numpy.float32)
|
||||
elif kind == "vel":
|
||||
vel = readgadget.read_block(self.source_dir, "VEL ", ptype)
|
||||
vel = vel.astype(numpy.float16)
|
||||
vel *= (1 + info["redshift"]) # km / s
|
||||
return vel
|
||||
elif kind == "mass":
|
||||
return numpy.full(info["Nall"], info["PartMass"],
|
||||
dtype=numpy.float32)
|
||||
else:
|
||||
raise ValueError(f"Unknown kind `{kind}`. "
|
||||
"Options are: `pid`, `pos`, `vel` or `mass`.")
|
||||
|
||||
def read_halo_id(self, pids):
|
||||
cat = FoF_catalog(self.fof_dir, self.nsnap)
|
||||
|
||||
group_pids = cat.GroupIDs
|
||||
group_len = cat.GroupLen
|
||||
|
||||
# Create a mapping from particle ID to FoF group ID.
|
||||
print(f"{now()}: mapping particle IDs to their indices.")
|
||||
ks = numpy.insert(numpy.cumsum(group_len), 0, 0)
|
||||
with catch_warnings():
|
||||
# Ignore because we are casting NaN as integer.
|
||||
filterwarnings("ignore", category=RuntimeWarning)
|
||||
pid2hid = numpy.full((group_pids.size, 2), numpy.nan,
|
||||
dtype=numpy.uint64)
|
||||
for i, (k0, kf) in enumerate(zip(ks[:-1], ks[1:])):
|
||||
pid2hid[k0:kf, 0] = i + 1
|
||||
pid2hid[k0:kf, 1] = group_pids[k0:kf]
|
||||
pid2hid = {pid: hid for hid, pid in pid2hid}
|
||||
|
||||
# Create the final array of hids matchign the snapshot array.
|
||||
# Unassigned particles have hid 0.
|
||||
print(f"{now()}: mapping HIDs to their array indices.")
|
||||
hids = numpy.full(pids.size, 0, dtype=numpy.uint32)
|
||||
for i in trange(pids.size):
|
||||
hids[i] = pid2hid.get(pids[i], 0)
|
||||
|
||||
return hids
|
||||
|
||||
def read_halos(self):
    """
    Read the FoF halo catalogue into a structured array with positions,
    velocities, masses, particle counts and 1-based halo indices.
    """
    fof = FoF_catalog(self.fof_dir, self.nsnap, long_ids=False, swap=False,
                      SFR=False, read_IDs=False)

    cols = [("x", numpy.float32), ("y", numpy.float32),
            ("z", numpy.float32), ("vx", numpy.float32),
            ("vy", numpy.float32), ("vz", numpy.float32),
            ("GroupMass", numpy.float32), ("npart", numpy.int32),
            ("index", numpy.int32)]
    out = cols_to_structured(fof.GroupLen.size, cols)

    # presumably kpc/h -> Mpc/h — TODO confirm units of GroupPos
    positions = fof.GroupPos / 1e3
    velocities = fof.GroupVel * (1 + self.read_info()["redshift"])
    for j, axis in enumerate(("x", "y", "z")):
        out[axis] = positions[:, j]
        out[f"v{axis}"] = velocities[:, j]

    out["GroupMass"] = fof.GroupMass * 1e10
    out["npart"] = fof.GroupLen
    # We want to start indexing from 1. Index 0 is reserved for
    # particles unassigned to any FoF group.
    out["index"] = 1 + numpy.arange(out.size, dtype=numpy.uint32)
    return out
|
||||
|
||||
|
||||
###############################################################################
|
||||
# Group Offsets #
|
||||
###############################################################################
|
||||
|
||||
|
||||
@jit(nopython=True, boundscheck=False)
def minmax_halo(hid, halo_ids, start_loop=0):
    """
    Find the start and end index of a halo in a sorted array of halo IDs.
    This is much faster than using `numpy.where` and then `numpy.min` and
    `numpy.max`. Both returned indices are positions of matching elements
    (the end index is the last match); returns (None, None) if `hid` is
    absent. `start_loop` lets the caller skip the already-scanned prefix.
    """
    first = None
    last = None

    for j in range(start_loop, halo_ids.size):
        current = halo_ids[j]
        if current == hid:
            if first is None:
                first = j
            last = j
        elif current > hid:
            # `halo_ids` is sorted, so nothing past this point can match.
            break

    return first, last
|
||||
|
||||
|
||||
def make_offset_map(part_hids):
    """
    Make group offsets for a list of particles' halo IDs. This is a
    2-dimensional array, where the first column is the halo ID, the second
    column is the start index of the halo in the particle list, and the third
    column is the end index of the halo in the particle list.

    NOTE(review): both indices come straight from `minmax_halo`, which
    returns the positions of the first and *last* matching element, i.e.
    the end index looks inclusive — confirm against downstream slicing.

    Parameters
    ----------
    part_hids : numpy.ndarray
        Sorted halo IDs of the particles; 0 marks unassigned particles
        and is excluded from the map.

    Returns
    -------
    halo_map : numpy.ndarray of shape `(nhalos, 3)`
    unique_halo_ids : numpy.ndarray
    """
    unique_halo_ids = numpy.unique(part_hids)
    unique_halo_ids = unique_halo_ids[unique_halo_ids != 0]
    # Every row is filled in below, so initialise with zeros instead of
    # casting NaN into an unsigned integer (undefined behaviour that
    # previously required suppressing RuntimeWarnings).
    halo_map = numpy.zeros((unique_halo_ids.size, 3), dtype=numpy.uint32)
    start_loop = 0
    for i in trange(unique_halo_ids.size):
        hid = unique_halo_ids[i]
        # `part_hids` is sorted, so each search can resume where the
        # previous halo ended.
        k0, kf = minmax_halo(hid, part_hids, start_loop=start_loop)
        halo_map[i, :] = hid, k0, kf
        start_loop = kf

    return halo_map, unique_halo_ids
|
||||
|
||||
|
||||
###############################################################################
|
||||
# Process the final snapshot and sort it by groups #
|
||||
###############################################################################
|
||||
|
||||
|
||||
def process_final_snapshot(nsim, simname):
    """
    Read in the snapshot particles, sort them by their halo ID and dump
    into a HDF5 file. Stores the first and last index of each halo in the
    particle array for fast slicing of the array to access particles of a
    single halo.

    Parameters
    ----------
    nsim : int
        Simulation index.
    simname : str
        Simulation name; must be `csiborg1` or `quijote`.

    Raises
    ------
    RuntimeError
        If `simname` is not supported.
    """
    if simname == "csiborg1":
        reader = CSiBORG1Reader(nsim, "final")
    elif simname == "quijote":
        reader = QuijoteReader(nsim, "final")
    else:
        raise RuntimeError(f"Simulation `{simname}` is not supported.")

    if not exists(reader.output_dir):
        makedirs(reader.output_dir)

    print("---- Processing Final Snapshot Information ----")
    print(f"Simulation index: {nsim}")
    print(f"Simulation name: {simname}")
    print(f"Output snapshot: {reader.output_snap}")
    print(f"Output catalogue: {reader.output_cat}")
    print("-----------------------------------------------")
    print(flush=True)

    # First off load the particle IDs from the raw data.
    pids = reader.read_snapshot("pid")

    # Then, load the halo ids and make sure their ordering is the same as the
    # particle IDs ordering.
    print(f"{now()}: loading HIDs.")
    halo_ids = reader.read_halo_id(pids)
    print(f"{now()}: sorting HIDs.")

    # Get a mask that sorts the halo ids and then write the information to
    # the data files sorted by it.
    sort_indxs = numpy.argsort(halo_ids)
    halo_ids = halo_ids[sort_indxs]

    # Write each particle property in halo-sorted order; each is read and
    # freed one at a time to keep peak memory down.
    with File(reader.output_snap, 'w') as f:
        print(f"{now()}: creating dataset `ParticleIDs`...",
              flush=True)
        f.create_dataset("ParticleIDs", data=pids[sort_indxs],
                         **hdf5plugin.Blosc(**BLOSC_KWARGS))
        del pids
        collect()

        print(f"{now()}: creating dataset `Coordinates`...",
              flush=True)
        f.create_dataset(
            "Coordinates", data=reader.read_snapshot("pos")[sort_indxs],
            **hdf5plugin.Blosc(**BLOSC_KWARGS))

        print(f"{now()}: creating dataset `Velocities`...",
              flush=True)
        f.create_dataset(
            "Velocities", data=reader.read_snapshot("vel")[sort_indxs],
            **hdf5plugin.Blosc(**BLOSC_KWARGS))

        print(f"{now()}: creating dataset `Masses`...",
              flush=True)
        f.create_dataset(
            "Masses", data=reader.read_snapshot("mass")[sort_indxs],
            **hdf5plugin.Blosc(**BLOSC_KWARGS))

        # NOTE(review): "HubleParam" (sic) is the attribute name written to
        # disk in both branches; downstream readers likely match it, so the
        # typo is kept.
        if simname == "csiborg1":
            # CSiBORG1 cosmology is hard-coded here.
            header = f.create_dataset("Header", (0,))
            header.attrs["BoxSize"] = 677.7 # Mpc/h
            header.attrs["Omega0"] = 0.307
            header.attrs["OmegaBaryon"] = 0.0
            header.attrs["OmegaLambda"] = 0.693
            header.attrs["HubleParam"] = 0.6777
            header.attrs["Redshift"] = 0.0
        elif simname == "quijote":
            # Quijote cosmology is read from the simulation info.
            info = reader.read_info()

            header = f.create_dataset("Header", (0,))
            header.attrs["BoxSize"] = info["BoxSize"]
            header.attrs["Omega0"] = info["Omega_m"]
            header.attrs["OmegaLambda"] = info["Omega_l"]
            header.attrs["OmegaBaryon"] = 0.0
            header.attrs["HubleParam"] = info["h"]
            header.attrs["Redshift"] = info["redshift"]
        else:
            raise ValueError(f"Unknown simname `{simname}`.")

        print(f"{now()}: done with `{reader.output_snap}`.",
              flush=True)

    # Lastly, create the halo mapping and default catalogue.
    print(f"{datetime.now()}: creating `GroupOffset`...")
    halo_map, unique_halo_ids = make_offset_map(halo_ids)
    # Dump the halo mapping.
    with File(reader.output_cat, "w") as f:
        f.create_dataset("GroupOffset", data=halo_map)

    # Add the halo finder catalogue
    print(f"{now()}: adding the halo finder catalogue.")
    with File(reader.output_cat, "r+") as f:
        cat = reader.read_halos()
        # Map each halo ID to its row in the `GroupOffset` ordering so the
        # catalogue columns line up with the offsets.
        hid2pos = {hid: i for i, hid in enumerate(unique_halo_ids)}

        for key in cat.dtype.names:
            x = numpy.full(unique_halo_ids.size, numpy.nan,
                           dtype=cat[key].dtype)

            for i in range(len(cat)):
                j = hid2pos[cat["index"][i]]
                x[j] = cat[key][i]
            f.create_dataset(key, data=x)
|
||||
|
||||
|
||||
def process_initial_snapshot(nsim, simname):
    """
    Sort the initial snapshot particles according to their final snapshot and
    add them to the final snapshot's HDF5 file.

    Parameters
    ----------
    nsim : int
        Simulation index.
    simname : str
        Simulation name (`csiborg1`, `quijote`; `csiborg2_*` is not yet
        implemented).

    Raises
    ------
    RuntimeError
        If `simname` is unsupported or not yet implemented.
    """
    if simname == "csiborg1":
        reader = CSiBORG1Reader(nsim, "initial")
        output_snap_final = CSiBORG1Reader(nsim, "final").output_snap
    elif simname == "quijote":
        reader = QuijoteReader(nsim, "initial")
        output_snap_final = QuijoteReader(nsim, "final").output_snap
    elif "csiborg2" in simname:
        reader = CSiBORG2Reader(nsim, "initial", simname.split("_")[1])
        output_snap_final = CSiBORG2Reader(nsim, "final", simname.split("_")[1]).output_snap  # noqa
        raise RuntimeError("TODO Not implemented.")
    else:
        raise RuntimeError(f"Simulation `{simname}` is not supported.")

    print("---- Processing Initial Snapshot Information ----")
    print(f"Simulation index: {nsim}")
    print(f"Simulation name: {simname}")
    print(f"Output snapshot: {reader.output_snap}")
    print(f"Output catalogue: {reader.output_cat}")
    print("-----------------------------------------------")
    print(flush=True)

    print(f"{now()}: loading and sorting the initial PID.")
    sort_indxs = numpy.argsort(reader.read_snapshot("pid"))

    print(f"{now()}: loading the final particles.")
    # The `with` statement closes the file; the previous explicit
    # `f.close()` inside the block was redundant and has been removed.
    with File(output_snap_final, "r") as f:
        sort_indxs_final = f["ParticleIDs"][:]

    print(f"{now()}: sorting the particles according to the final snapshot.")
    # Double argsort yields the rank of each final particle ID, so the
    # initial particles end up in the same order as the final snapshot.
    sort_indxs_final = numpy.argsort(numpy.argsort(sort_indxs_final))
    sort_indxs = sort_indxs[sort_indxs_final]

    del sort_indxs_final
    collect()

    print(f"{now()}: loading and sorting the initial particle position.")
    pos = reader.read_snapshot("pos")[sort_indxs]

    del sort_indxs
    collect()

    # In Quijote some particles are positioned precisely at the edge of the
    # box. Move them to be just inside.
    if simname == "quijote":
        boxsize = reader.read_info()["BoxSize"]
        mask = pos >= boxsize
        if numpy.any(mask):
            # Shift by one ULP; guard against anything larger than rounding.
            spacing = numpy.spacing(pos[mask])
            assert numpy.max(spacing) <= 1e-3
            pos[mask] -= spacing

    print(f"{now()}: dumping particles `{reader.output_snap}`.")
    with File(reader.output_snap, 'w') as f:
        f.create_dataset("Coordinates", data=pos,
                         **hdf5plugin.Blosc(**BLOSC_KWARGS))
|
||||
|
||||
|
||||
###############################################################################
|
||||
# Process the initial snapshot and sort it like the final snapshot #
|
||||
###############################################################################
|
||||
|
||||
|
||||
if __name__ == "__main__":
    # Command-line entry point: convert a raw simulation snapshot into the
    # processed, halo-sorted HDF5 snapshot and catalogue.
    parser = ArgumentParser(description="Tool to manage the `raw` simulation data.") # noqa
    parser.add_argument("--nsim", type=int, required=True,
                        help="Simulation index.")
    parser.add_argument("--simname", type=str, required=True,
                        choices=["csiborg1", "quijote"],
                        help="Simulation name.")
    parser.add_argument("--mode", type=int, required=True, choices=[0, 1, 2],
                        help="0: process final snapshot, 1: process initial snapshot, 2: process both.") # noqa
    args = parser.parse_args()

    # The final snapshot must be processed before the initial one, since
    # the initial particles are sorted to match the final snapshot's file.
    if args.mode == 0:
        process_final_snapshot(args.nsim, args.simname)
    elif args.mode == 1:
        process_initial_snapshot(args.nsim, args.simname)
    else:
        process_final_snapshot(args.nsim, args.simname)
        process_initial_snapshot(args.nsim, args.simname)
|
89
scripts_independent/run_field_sph.py
Normal file
89
scripts_independent/run_field_sph.py
Normal file
|
@ -0,0 +1,89 @@
|
|||
# Copyright (C) 2023 Richard Stiskalek
|
||||
# This program is free software; you can redistribute it and/or modify it
|
||||
# under the terms of the GNU General Public License as published by the
|
||||
# Free Software Foundation; either version 3 of the License, or (at your
|
||||
# option) any later version.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful, but
|
||||
# WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General
|
||||
# Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License along
|
||||
# with this program; if not, write to the Free Software Foundation, Inc.,
|
||||
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
|
||||
"""
|
||||
Script to write the SLURM submission script and submit it to the queue to
|
||||
calculate the SPH density & velocity field.
|
||||
"""
|
||||
from os import system
|
||||
|
||||
|
||||
def write_submit(chain_index, kind, resolution, nthreads):
    """
    Write a SLURM submission script that runs `field_sph.py` for one
    CSiBORG2 chain on COSMA8 and make it executable.

    Parameters
    ----------
    chain_index : int or str
        Chain identifier substituted into paths and job names.
    kind : str
        CSiBORG2 suite; must be `main`, `random` or `varysmall`.
    resolution : int
        SPH grid resolution.
    nthreads : int
        Number of OpenMP threads (also the SLURM CPUs per task).

    Returns
    -------
    fname : str
        Name of the written submission script.
    """
    if kind not in ["main", "random", "varysmall"]:
        raise RuntimeError(f"Unknown kind `{kind}`.")

    # All cluster paths, modules and the executable are hard-coded for the
    # COSMA8 dp016 environment.
    txt = f"""#!/bin/sh

#SBATCH --ntasks-per-node=1
#SBATCH --nodes=1
#SBATCH --cpus-per-task={nthreads}
#SBATCH --mem-per-cpu=7000
#SBATCH -J SPH_{chain_index}
#SBATCH -o output_{chain_index}_%J.out
#SBATCH -e error_{chain_index}_%J.err
#SBATCH -p cosma8-serial
#SBATCH -A dp016
#SBATCH -t 16:00:00
#SBATCH --mail-type=BEGIN,END,FAIL
#SBATCH --mail-user=richard.stiskalek@physics.ox.ac.uk


module purge
module load intel_comp/2019
module load intel_mpi/2019
module load hdf5
module load fftw
module load gsl
module load cmake
module load python/3.10.12
module list

source /cosma/home/dp016/dc-stis1/csiborgtools/venv_csiborgtools/bin/activate
export OMP_NUM_THREADS={nthreads}
export OMP_NESTED=true

snapshot_path="/cosma8/data/dp016/dc-stis1/csiborg2_{kind}/chain_{chain_index}/output/snapshot_099_full.hdf5"
output_path="/cosma8/data/dp016/dc-stis1/csiborg2_{kind}/field/chain_{chain_index}_{resolution}.hdf5"
resolution={resolution}
scratch_space="/snap8/scratch/dp016/dc-stis1/"
SPH_executable="/cosma8/data/dp016/dc-stis1/cosmotool/bld2/sample/simple3DFilter"
snapshot_kind="gadget4"

python3 field_sph.py --snapshot_path $snapshot_path --output_path $output_path --resolution $resolution --scratch_space $scratch_space --SPH_executable $SPH_executable --snapshot_kind $snapshot_kind
"""
    fname = f"submit_SPH_{kind}_{chain_index}.sh"
    print(f"Writing file: `{fname}`.")
    with open(fname, "w") as txtfile:
        txtfile.write(txt)
    # Make the file executable
    system(f"chmod +x {fname}")
    return fname
|
||||
|
||||
|
||||
if __name__ == "__main__":
    # Select ONE suite below by (un)commenting; `kind` and `chains` must
    # match. Each chain gets its own submission script, submitted via sbatch.
    # kind = "main"
    # chains = [15617, 15717, 15817, 15917, 16017, 16117, 16217, 16317, 16417, 16517, 16617, 16717, 16817, 16917, 17017, 17117, 17217, 17317, 17417]

    # kind = "varysmall"
    # chains = ["16417_001", "16417_025", "16417_050", "16417_075", "16417_100", "16417_125", "16417_150", "16417_175", "16417_200", "16417_225", "16417_250", "16417_275", "16417_300", "16417_325", "16417_350", "16417_375", "16417_400", "16417_425", "16417_450", "16417_475"]

    kind = "random"
    chains = [1, 25, 50, 75, 100, 125, 150, 175, 200, 225, 250, 275, 300, 325, 350, 375, 400, 425, 450, 475]

    # SPH grid resolution and OpenMP thread count for every job.
    resolution = 1024
    nthreads = 32

    for chain_index in chains:
        fname = write_submit(chain_index, kind, resolution, nthreads)
        system(f"sbatch {fname}")
|
31
scripts_independent/run_process_snapshot.py
Normal file
31
scripts_independent/run_process_snapshot.py
Normal file
|
@ -0,0 +1,31 @@
|
|||
# Copyright (C) 2023 Richard Stiskalek
|
||||
# This program is free software; you can redistribute it and/or modify it
|
||||
# under the terms of the GNU General Public License as published by the
|
||||
# Free Software Foundation; either version 3 of the License, or (at your
|
||||
# option) any later version.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful, but
|
||||
# WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General
|
||||
# Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License along
|
||||
# with this program; if not, write to the Free Software Foundation, Inc.,
|
||||
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
|
||||
from os import system
|
||||
|
||||
if __name__ == "__main__":
    # Submit `process_snapshot.py` jobs to the `berg` queue via addqueue,
    # one per chain. Mode 2 processes both the final and initial snapshot.
    # Quijote chains
    chains = [1]
    simname = "quijote"
    mode = 2

    # Virtual-environment Python used on the worker node and the memory
    # request in GB.
    env = "/mnt/zfsusers/rstiskalek/csiborgtools/venv_csiborg/bin/python"
    memory = 64

    for chain in chains:
        out = f"output_{simname}_{chain}_%j.out"
        cmd = f"addqueue -q berg -o {out} -n 1x1 -m {memory} {env} process_snapshot.py --nsim {chain} --simname {simname} --mode {mode}" # noqa
        print(cmd)
        system(cmd)
        print()
|
77
scripts_independent/sort_ramseshdf5.py
Normal file
77
scripts_independent/sort_ramseshdf5.py
Normal file
|
@ -0,0 +1,77 @@
|
|||
# Copyright (C) 2023 Richard Stiskalek
|
||||
# This program is free software; you can redistribute it and/or modify it
|
||||
# under the terms of the GNU General Public License as published by the
|
||||
# Free Software Foundation; either version 3 of the License, or (at your
|
||||
# option) any later version.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful, but
|
||||
# WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General
|
||||
# Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License along
|
||||
# with this program; if not, write to the Free Software Foundation, Inc.,
|
||||
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
|
||||
|
||||
|
||||
def add_initial_snapshot(nsim, simname, halo_finder, verbose):
    """
    Sort the initial snapshot particles according to their final snapshot and
    add them to the final snapshot's HDF5 file.

    NOTE(review): this embedded script shows no import block — it relies on
    `csiborgtools`, `numpy`, `h5py`, `fprint` and `collect` being in scope.

    Parameters
    ----------
    nsim : int
        Simulation index.
    simname : str
        Simulation name, `csiborg` or `quijote`.
    halo_finder : str
        Halo finder name, used to locate the processed output file.
    verbose : bool
        Verbosity flag passed to `fprint`.
    """
    paths = csiborgtools.read.Paths(**csiborgtools.paths_glamdring)
    fname = paths.processed_output(nsim, simname, halo_finder)

    if simname == "csiborg":
        partreader = csiborgtools.read.CSiBORGReader(paths)
    else:
        partreader = csiborgtools.read.QuijoteReader(paths)

    fprint(f"processing simulation `{nsim}`.", verbose)
    # Snapshot index of the initial conditions differs per simulation suite.
    if simname == "csiborg":
        nsnap0 = 1
    elif simname == "quijote":
        nsnap0 = -1
    else:
        raise ValueError(f"Unknown simulation `{simname}`.")

    fprint("loading and sorting the initial PID.", verbose)
    sort_indxs = numpy.argsort(partreader.read_snapshot(nsnap0, nsim, "pid"))

    fprint("loading the final particles.", verbose)
    # The `with` statement closes the file; the previous explicit
    # `f.close()` inside the block was redundant and has been removed.
    with h5py.File(fname, "r") as f:
        sort_indxs_final = f["snapshot_final/pid"][:]

    fprint("sorting the particles according to the final snapshot.", verbose)
    # Double argsort yields the rank of each final particle ID, aligning the
    # initial particles with the final snapshot's ordering.
    sort_indxs_final = numpy.argsort(numpy.argsort(sort_indxs_final))
    sort_indxs = sort_indxs[sort_indxs_final]

    del sort_indxs_final
    collect()

    fprint("loading and sorting the initial particle position.", verbose)
    pos = partreader.read_snapshot(nsnap0, nsim, "pos")[sort_indxs]

    del sort_indxs
    collect()

    # In Quijote some particles are positioned precisely at the edge of the
    # box. Move them to be just inside.
    if simname == "quijote":
        mask = pos >= 1
        if numpy.any(mask):
            # Shift by one ULP; guard against anything larger than rounding.
            spacing = numpy.spacing(pos[mask])
            assert numpy.max(spacing) <= 1e-5
            pos[mask] -= spacing

    fprint(f"dumping particles for `{nsim}` to `{fname}`.", verbose)
    with h5py.File(fname, "r+") as f:
        # Overwrite any previous initial-snapshot group.
        if "snapshot_initial" in f.keys():
            del f["snapshot_initial"]
        group = f.create_group("snapshot_initial")
        group.attrs["header"] = "Initial snapshot data."
        dset = group.create_dataset("pos", data=pos)
        dset.attrs["header"] = "DM particle positions in box units."
|
Loading…
Add table
Add a link
Reference in a new issue