# Copyright (C) 2022 Richard Stiskalek, Harry Desmond
# This program is free software; you can redistribute it and/or modify it
# under the terms of the GNU General Public License as published by the
# Free Software Foundation; either version 3 of the License, or (at your
# option) any later version.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General
# Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
"""
I/O functions for analysing the CSiBORG realisations.
"""
from os import remove
from os.path import join

import numpy
from tqdm import trange


def dump_split(arr, nsplit, nsnap, nsim, paths):
    """
    Dump an array from a split.

    Parameters
    ----------
    arr : n-dimensional or structured array
        Array to be saved.
    nsplit : int
        Split index.
    nsnap : int
        Snapshot index.
    nsim : int
        IC realisation index.
    paths : :py:class:`csiborgtools.read.CSiBORGPaths`
        CSiBORG paths-handling object.

    Returns
    -------
    None
    """
    fname = join(paths.temp_dumpdir, "ramses_out_{}_{}_{}.npy"
                 .format(str(nsim).zfill(5), str(nsnap).zfill(5), nsplit))
    numpy.save(fname, arr)
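

# A minimal usage sketch of `dump_split` (illustrative only; the `paths`
# construction, the dtype and the snapshot/IC indices below are hypothetical,
# not taken from this module):
#
#     paths = csiborgtools.read.CSiBORGPaths()  # constructor args omitted
#     arr = numpy.zeros(10, dtype=[("index", numpy.int64),
#                                  ("npart", numpy.float64)])
#     dump_split(arr, nsplit=0, nsnap=950, nsim=7444, paths=paths)
#
# This would write `<paths.temp_dumpdir>/ramses_out_07444_00950_0.npy`.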


def combine_splits(nsplits, nsnap, nsim, part_reader, cols_add,
                   remove_splits=False, verbose=True):
    """
    Combine results of many splits saved from `dump_split`. Matches the
    clumps in each split to the clump catalogue by their index and returns an
    array that contains the original clump data along with the newly
    calculated quantities.

    Parameters
    ----------
    nsplits : int
        Total number of clump splits.
    nsnap : int
        Snapshot index.
    nsim : int
        IC realisation index.
    part_reader : :py:class:`csiborgtools.read.ParticleReader`
        CSiBORG particle reader.
    cols_add : list of `(str, dtype)`
        Columns to add. Must be formatted as, for example,
        `[("npart", numpy.float64), ("totpartmass", numpy.float64)]`.
    remove_splits : bool, optional
        Whether to remove the split files. By default `False`.
    verbose : bool, optional
        Verbosity flag. By default `True`.

    Returns
    -------
    out : structured array
        Clump array with appended results from the splits.
    """
    clumps = part_reader.read_clumps(nsnap, nsim, cols=None)
    # Get the old + new dtypes and create an empty array
    descr = clumps.dtype.descr + cols_add
    out = numpy.full(clumps.size, numpy.nan, dtype=descr)
    for par in clumps.dtype.names:  # Now put the old values into the array
        out[par] = clumps[par]

    # Filename of splits data
    froot = "ramses_out_{}_{}".format(str(nsim).zfill(5), str(nsnap).zfill(5))
    fname = join(part_reader.paths.temp_dumpdir, froot + "_{}.npy")

    # Iterate over splits and add to the output array
    cols_add_names = [col[0] for col in cols_add]
    iters = trange(nsplits) if verbose else range(nsplits)
    for n in iters:
        fnamesplit = fname.format(n)
        arr = numpy.load(fnamesplit)

        # Check that all halo indices from the split are in the clump file
        if not numpy.all(numpy.isin(arr["index"], out["index"])):
            raise KeyError("Split {} contains indices that are not in the "
                           "clump catalogue.".format(n))
        # Mask of where to put the values from the split
        mask = numpy.isin(out["index"], arr["index"])
        for par in cols_add_names:
            out[par][mask] = arr[par]

        # Now remove this split
        if remove_splits:
            remove(fnamesplit)

    return out
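

# A minimal end-to-end sketch of the split/combine workflow (illustrative
# only; the `paths` and reader construction and the column choices below are
# assumptions, not part of this module):
#
#     paths = csiborgtools.read.CSiBORGPaths()               # args omitted
#     part_reader = csiborgtools.read.ParticleReader(paths)  # schematic
#     cols_add = [("npart", numpy.float64), ("totpartmass", numpy.float64)]
#
#     # Each worker saves its chunk of results with `dump_split(...)`, after
#     # which a single process gathers them back into one structured array:
#     out = combine_splits(nsplits=100, nsnap=950, nsim=7444,
#                          part_reader=part_reader, cols_add=cols_add,
#                          remove_splits=True)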