Mirror of https://github.com/Richard-Sti/csiborgtools_public.git (synced 2025-06-08 09:51:12 +00:00)
Particle match & file system & phase space (#11)
* Create file system
* add doc
* add n_sim n_snap directly to paths
* Move things to a single particle reader for consistency
* add docstring
* add srcdir, dumpdir and mmain_path
* make boxunits work with paths
* switch to using paths
* add tempdumpdir
* rm dependence on old functions
* rm comment
* rm unused import
* go back to all imports
* fix import bug
* rm dependence on old functions
* modernize code!
* fix typo
* fix typo
* update fits to new data structuring
* change docs
* add julia repo
* add setup
* add install commands
* ignore install files
* add array flattening
* update dependence
* add positions reader
* update manifest and projects
* add func
* update gitignore
* pos matching progress
* move file
* rm comment
* add velocities getter
* fix bug
* fix name bug
* fix path bug
* fix args func
* add redshift calculation to catalogues
* add shortcut to set n_sim and n_snap
* fix if-condition bug
* add the cosine similarity
* add verbosity iterator
* add docs
* update README
* update README
* update README
Parent: c748c87e45
Commit: 161c27d995
22 changed files with 1443 additions and 4178 deletions
@@ -22,7 +22,7 @@ from os import remove
 from warnings import warn
 from os.path import join
 from tqdm import trange
-from ..read import nparts_to_start_ind
+from ..read import ParticleReader


 def clump_with_particles(particle_clumps, clumps):
@@ -44,14 +44,14 @@ def clump_with_particles(particle_clumps, clumps):
     return numpy.isin(clumps["index"], particle_clumps)


-def distribute_halos(Nsplits, clumps):
+def distribute_halos(n_splits, clumps):
     """
     Evenly distribute clump indices to smaller splits. Clumps should only be
     clumps that contain particles.

     Parameters
     ----------
-    Nsplits : int
+    n_splits : int
         Number of splits.
     clumps : structured array
         The clumps array.
@@ -59,22 +59,23 @@ def distribute_halos(Nsplits, clumps):
     Returns
     -------
     splits : 2-dimensional array
-        Array of starting and ending indices of each CPU of shape `(Njobs, 2)`.
+        Array of starting and ending indices of each CPU of shape
+        `(njobs, 2)`.
     """
     # Make sure these are unique IDs
     indxs = clumps["index"]
     if indxs.size > numpy.unique(indxs).size:
         raise ValueError("`clump_indxs` contains duplicate indices.")
     Ntotal = indxs.size
-    Njobs_per_cpu = numpy.ones(Nsplits, dtype=int) * Ntotal // Nsplits
-    # Split the remainder Ntotal % Njobs among the CPUs
-    Njobs_per_cpu[:Ntotal % Nsplits] += 1
-    start = nparts_to_start_ind(Njobs_per_cpu)
-    return numpy.vstack([start, start + Njobs_per_cpu]).T
+    njobs_per_cpu = numpy.ones(n_splits, dtype=int) * Ntotal // n_splits
+    # Split the remainder Ntotal % njobs among the CPUs
+    njobs_per_cpu[:Ntotal % n_splits] += 1
+    start = ParticleReader.nparts_to_start_ind(njobs_per_cpu)
+    return numpy.vstack([start, start + njobs_per_cpu]).T


-def dump_split_particles(particles, particle_clumps, clumps, Nsplits,
-                         dumpfolder, Nsim, Nsnap, verbose=True):
+def dump_split_particles(particles, particle_clumps, clumps, n_splits,
+                         paths, verbose=True):
     """
     Save the data needed for each split so that a process does not have to load
     everything.
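For orientation, the even-split arithmetic in `distribute_halos` works as in this standalone sketch (hypothetical numbers; `nparts_to_start_ind` is assumed to return the cumulative starting indices):

    import numpy

    n_splits, Ntotal = 3, 10
    njobs_per_cpu = numpy.ones(n_splits, dtype=int) * Ntotal // n_splits  # [3, 3, 3]
    njobs_per_cpu[:Ntotal % n_splits] += 1                                # [4, 3, 3]
    start = numpy.cumsum(njobs_per_cpu) - njobs_per_cpu                   # [0, 4, 7]
    splits = numpy.vstack([start, start + njobs_per_cpu]).T
    # splits -> [[0, 4], [4, 7], [7, 10]]: each CPU gets a contiguous slice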
@@ -87,14 +88,10 @@ def dump_split_particles(particles, particle_clumps, clumps, Nsplits,
         Array of particles' clump IDs.
     clumps : structured array
         The clumps array.
-    Nsplits : int
+    n_splits : int
         Number of times to split the clumps.
-    dumpfolder : str
-        Path to the folder where to dump the splits.
-    Nsim : int
-        CSiBORG simulation index.
-    Nsnap : int
-        Snapshot index.
+    paths : :py:class:`csiborgtools.read.CSiBORGPaths`
+        CSiBORG paths-handling object with set `n_sim` and `n_snap`.
     verbose : bool, optional
         Verbosity flag. By default `True`.

@@ -112,10 +109,10 @@ def dump_split_particles(particles, particle_clumps, clumps, Nsplits,
             .format(with_particles.sum() / with_particles.size * 100))

     # The starting clump index of each split
-    splits = distribute_halos(Nsplits, clumps)
-    fname = join(dumpfolder, "out_{}_snap_{}_{}.npz")
+    splits = distribute_halos(n_splits, clumps)
+    fname = join(paths.temp_dumpdir, "out_{}_snap_{}_{}.npz")

-    iters = trange(Nsplits) if verbose else range(Nsplits)
+    iters = trange(n_splits) if verbose else range(n_splits)
     tot = 0
     for n in iters:
         # Lower and upper array index of the clumps array
@@ -133,7 +130,7 @@ def dump_split_particles(particles, particle_clumps, clumps, Nsplits,
                 "with no particles.".format(n, indxs.size, npart_unique))
         # Dump it!
         tot += mask.sum()
-        fout = fname.format(Nsim, Nsnap, n)
+        fout = fname.format(paths.n_sim, paths.n_snap, n)
         numpy.savez(fout, particles[mask], particle_clumps[mask], clumps[i:j])

     # There are particles whose clump ID is > 1 and have no counterpart in the
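The net effect of the path refactor is that each split now lands in the temporary dump directory under a predictable name (a sketch, assuming `paths.n_sim` and `paths.n_snap` are set):

    fname = join(paths.temp_dumpdir, "out_{}_snap_{}_{}.npz")
    fout = fname.format(paths.n_sim, paths.n_snap, n)   # one .npz per split n
    numpy.savez(fout, particles[mask], particle_clumps[mask], clumps[i:j])
    # a worker later reads the same file back via load_split_particles(n, paths)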
@@ -144,15 +141,15 @@ def dump_split_particles(particles, particle_clumps, clumps, Nsplits,
             "size `{}`.".format(tot, particle_clumps.size))


-def split_jobs(Njobs, Ncpu):
+def split_jobs(njobs, ncpu):
     """
-    Split `Njobs` amongst `Ncpu`.
+    Split `njobs` amongst `ncpu`.

     Parameters
     ----------
-    Njobs : int
+    njobs : int
         Number of jobs.
-    Ncpu : int
+    ncpu : int
         Number of CPUs.

     Returns
@@ -160,29 +157,25 @@ def split_jobs(Njobs, Ncpu):
     jobs : list of lists of integers
        Outer list of each CPU and inner lists for CPU's jobs.
     """
-    njobs_per_cpu, njobs_remainder = divmod(Njobs, Ncpu)
-    jobs = numpy.arange(njobs_per_cpu * Ncpu).reshape((njobs_per_cpu, Ncpu)).T
+    njobs_per_cpu, njobs_remainder = divmod(njobs, ncpu)
+    jobs = numpy.arange(njobs_per_cpu * ncpu).reshape((njobs_per_cpu, ncpu)).T
     jobs = jobs.tolist()
     for i in range(njobs_remainder):
-        jobs[i].append(njobs_per_cpu * Ncpu + i)
+        jobs[i].append(njobs_per_cpu * ncpu + i)

     return jobs


-def load_split_particles(Nsplit, dumpfolder, Nsim, Nsnap, remove_split=False):
+def load_split_particles(n_split, paths, remove_split=False):
     """
     Load particles of a split saved by `dump_split_particles`.

     Parameters
     ----------
-    Nsplit : int
+    n_split : int
         Split index.
-    dumpfolder : str
-        Path to the folder where the splits were dumped.
-    Nsim : int
-        CSiBORG simulation index.
-    Nsnap : int
-        Snapshot index.
+    paths : :py:class:`csiborgtools.read.CSiBORGPaths`
+        CSiBORG paths-handling object with set `n_sim` and `n_snap`.
     remove_split : bool, optional
         Whether to remove the split file. By default `False`.

@@ -196,7 +189,8 @@ def load_split_particles(Nsplit, dumpfolder, Nsim, Nsnap, remove_split=False):
         Clumps belonging to this split.
     """
     fname = join(
-        dumpfolder, "out_{}_snap_{}_{}.npz".format(Nsim, Nsnap, Nsplit))
+        paths.temp_dumpdir, "out_{}_snap_{}_{}.npz".format(
+            paths.n_sim, paths.n_snap, n_split))
     file = numpy.load(fname)
     particles, clump_indxs, clumps = (file[f] for f in file.files)
     if remove_split:
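The round-robin remainder handling in `split_jobs` is easiest to see on small numbers (hypothetical input):

    njobs, ncpu = 7, 3
    njobs_per_cpu, njobs_remainder = divmod(njobs, ncpu)   # 2, 1
    jobs = numpy.arange(njobs_per_cpu * ncpu).reshape((njobs_per_cpu, ncpu)).T
    jobs = jobs.tolist()                                   # [[0, 3], [1, 4], [2, 5]]
    for i in range(njobs_remainder):
        jobs[i].append(njobs_per_cpu * ncpu + i)
    # jobs -> [[0, 3, 6], [1, 4], [2, 5]]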
@@ -122,7 +122,38 @@ class RealisationsMatcher:
         """
         return [i for i in range(self.cats.N) if i != n_sim]

-    def cross_knn_position_single(self, n_sim, nmult=5, dlogmass=2):
+    def cosine_similarity(self, x, y):
+        r"""
+        Calculate the cosine similarity between two Cartesian vectors. Defined
+        as :math:`\sum_{i} x_i y_i / (|x| |y|)`.
+
+        Parameters
+        ----------
+        x : 1-dimensional array
+            The first vector.
+        y : 1- or 2-dimensional array
+            The second vector. Can be 2-dimensional of shape `(n_samples, 3)`,
+            in which case the calculation is broadcast.
+
+        Returns
+        -------
+        out : float or 1-dimensional array
+            The cosine similarity. If `y` is 1-dimensional returns only a float.
+        """
+        # Quick check of dimensions
+        if x.ndim != 1:
+            raise ValueError("`x` must be a 1-dimensional array.")
+        y = y.reshape(-1, 3) if y.ndim == 1 else y
+
+        out = numpy.sum(x * y, axis=1)
+        out /= numpy.linalg.norm(x) * numpy.linalg.norm(y, axis=1)
+
+        if out.size == 1:
+            return out[0]
+        return out
+
+    def cross_knn_position_single(self, n_sim, nmult=5, dlogmass=2,
+                                  verbose=True):
         r"""
         Find all neighbours within :math:`n_{\rm mult} R_{200c}` of halos in
         the `nsim`th simulation. Also enforces that the neighbours'
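A quick usage sketch of the new `cosine_similarity` method (hypothetical vectors):

    x = numpy.array([1., 0., 0.])
    y = numpy.array([[1., 0., 0.],
                     [0., 1., 0.]])
    # matcher.cosine_similarity(x, y) -> array([1., 0.])
    # with a 1-dimensional y a single float is returned instead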
@@ -153,8 +184,13 @@ class RealisationsMatcher:
         pos = self.cats[n_sim].positions

         matches = [None] * (self.cats.N - 1)
+        # Verbose iterator
+        if verbose:
+            iters = enumerate(tqdm(self.search_sim_indices(n_sim)))
+        else:
+            iters = enumerate(self.search_sim_indices(n_sim))
         # Search for neighbours in the other simulations
-        for count, i in enumerate(self.search_sim_indices(n_sim)):
+        for count, i in iters:
             dist, indxs = self.cats[i].radius_neigbours(pos, r200 * nmult)
             # Get rid of neighbors whose mass is too off
             for j, indx in enumerate(indxs):
@@ -13,11 +13,7 @@
 # with this program; if not, write to the Free Software Foundation, Inc.,
 # 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.

-from .readsim import (get_csiborg_ids, get_sim_path, get_snapshots,  # noqa
-                      get_snapshot_path, get_maximum_snapshot, read_info, nparts_to_start_ind,  # noqa
-                      open_particle, open_unbinding, read_particle,  # noqa
-                      drop_zero_indx,  # noqa
-                      read_clumpid, read_clumps, read_mmain)  # noqa
+from .readsim import (CSiBORGPaths, ParticleReader, read_mmain, get_positions)  # noqa
 from .make_cat import (HaloCatalogue, CombinedHaloCatalogue)  # noqa
 from .readobs import (PlanckClusters, MCXCClusters, TwoMPPGalaxies, TwoMPPGroups)  # noqa
 from .outsim import (dump_split, combine_splits)  # noqa
@@ -19,9 +19,9 @@ Functions to read in the particle and clump files.
 import numpy
 from os.path import join
 from tqdm import trange
+from copy import deepcopy
 from sklearn.neighbors import NearestNeighbors
-from .readsim import (get_sim_path, read_mmain, get_csiborg_ids,
-                      get_maximum_snapshot)
+from .readsim import read_mmain
 from ..utils import (flip_cols, add_columns)
 from ..units import (BoxUnits, cartesian_to_radec)
@@ -32,35 +32,23 @@ class HaloCatalogue:

     Parameters
     ----------
-    n_sim : int
-        Initial condition index.
-    n_snap : int
-        Snapshot index.
+    paths : :py:class:`csiborgtools.read.CSiBORGPaths`
+        CSiBORG paths-handling object with set `n_sim` and `n_snap`.
     minimum_m500 : float, optional
         The minimum :math:`M_{\rm 500c} / M_\odot` mass. By default no
         threshold.
-    dumpdir : str, optional
-        Path to where files from `run_fit_halos` are stored. By default
-        `/mnt/extraspace/rstiskalek/csiborg/`.
-    mmain_path : str, optional
-        Path to where mmain files are stored. By default
-        `/mnt/zfsusers/hdesmond/Mmain`.
     """
     _box = None
-    _n_sim = None
-    _n_snap = None
+    _paths = None
     _data = None
     _knn = None
     _positions = None

-    def __init__(self, n_sim, n_snap, minimum_m500=None,
-                 dumpdir="/mnt/extraspace/rstiskalek/csiborg/",
-                 mmain_path="/mnt/zfsusers/hdesmond/Mmain"):
-        self._box = BoxUnits(n_snap, get_sim_path(n_sim))
+    def __init__(self, paths, minimum_m500=None):
+        self._box = BoxUnits(paths)
         minimum_m500 = 0 if minimum_m500 is None else minimum_m500
-        self._set_data(n_sim, n_snap, dumpdir, mmain_path, minimum_m500)
-        self._nsim = n_sim
-        self._nsnap = n_snap
+        self._paths = paths
+        self._set_data(minimum_m500)
         # Initialise the KNN
         knn = NearestNeighbors()
         knn.fit(self.positions)
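Construction therefore reduces to handing over a configured paths object (a sketch; the bare `CSiBORGPaths()` call and the IC/snapshot IDs below are assumed for illustration, while `set_info` appears later in this commit):

    paths = CSiBORGPaths()
    paths.set_info(7468, 951)                      # hypothetical n_sim, n_snap
    cat = HaloCatalogue(paths, minimum_m500=1e13)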
@@ -74,7 +62,6 @@ class HaloCatalogue:
         Returns
         -------
         cat : structured array
             Catalogue.
         """
         if self._data is None:
             raise ValueError("`data` is not set!")
@@ -88,7 +75,6 @@ class HaloCatalogue:
         Returns
         -------
         box : :py:class:`csiborgtools.units.BoxUnits`
             The box object.
         """
         return self._box

@@ -100,10 +86,20 @@ class HaloCatalogue:
         Returns
         -------
         cosmo : `astropy` cosmology object
             Box cosmology.
         """
         return self.box.cosmo

+    @property
+    def paths(self):
+        """
+        The paths-handling object.
+
+        Returns
+        -------
+        paths : :py:class:`csiborgtools.read.CSiBORGPaths`
+        """
+        return self._paths
+
     @property
     def n_snap(self):
         """
@@ -112,9 +108,8 @@ class HaloCatalogue:
         Returns
         -------
         n_snap : int
             Snapshot ID.
         """
-        return self._n_snap
+        return self.paths.n_snap

     @property
     def n_sim(self):
@@ -124,27 +119,37 @@ class HaloCatalogue:
         Returns
         -------
         n_sim : int
             The IC ID.
         """
-        return self._n_sim
+        return self.paths.n_sim

-    def _set_data(self, n_sim, n_snap, dumpdir, mmain_path, minimum_m500):
+    def _set_data(self, minimum_m500):
         """
         Loads the data, merges with mmain, does various coordinate transforms.
         """
         # Load the processed data
         fname = "ramses_out_{}_{}.npy".format(
-            str(n_sim).zfill(5), str(n_snap).zfill(5))
-        data = numpy.load(join(dumpdir, fname))
+            str(self.n_sim).zfill(5), str(self.n_snap).zfill(5))
+        data = numpy.load(join(self.paths.dumpdir, fname))

         # Load the mmain file and add it to the data
-        mmain = read_mmain(n_sim, mmain_path)
+        mmain = read_mmain(self.n_sim, self.paths.mmain_path)
         data = self.merge_mmain_to_clumps(data, mmain)
         flip_cols(data, "peak_x", "peak_z")

         # Cut on number of particles and finite m200
         data = data[(data["npart"] > 100) & numpy.isfinite(data["m200"])]

+        # Calculate redshift
+        pos = [data["peak_{}".format(p)] - 0.5 for p in ("x", "y", "z")]
+        vel = [data["v{}".format(p)] for p in ("x", "y", "z")]
+        zpec = self.box.box2pecredshift(*vel, *pos)
+        zobs = self.box.box2obsredshift(*vel, *pos)
+        zcosmo = self.box.box2cosmoredshift(
+            sum(pos[i]**2 for i in range(3))**0.5)
+        data = add_columns(data, [zpec, zobs, zcosmo],
+                           ["zpec", "zobs", "zcosmo"])
+
         # Unit conversion
         convert_cols = ["m200", "m500", "totpartmass", "mass_mmain",
                         "r200", "r500", "Rs", "rho0",
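The three new columns presumably encode the standard decomposition of redshifts, :math:`z_{\rm pec} \approx v_r / c` for the radial peculiar velocity and :math:`1 + z_{\rm obs} = (1 + z_{\rm cosmo})(1 + z_{\rm pec})` for their composition. This is an assumption read off the method names; the diff itself does not spell out the formulae.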
@@ -203,6 +208,18 @@ class HaloCatalogue:
         """
         return self._positions

+    @property
+    def velocities(self):
+        """
+        Cartesian velocities of halos.
+
+        Returns
+        -------
+        vel : 2-dimensional array
+            Array of shape `(n_halos, 3)`.
+        """
+        return numpy.vstack([self["v{}".format(p)] for p in ("x", "y", "z")]).T
+
     def radius_neigbours(self, X, radius):
         """
         Return sorted nearest neighbours within `radius` of `X`.
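Usage sketch for the new property alongside the existing neighbour search (hypothetical catalogue `cat`; the units of `radius` are assumed to be box units):

    vel = cat.velocities                        # shape (n_halos, 3)
    dist, indxs = cat.radius_neigbours(cat.positions[:1], 10.)
    # sorted distances and indices of haloes within the search radius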
@@ -245,15 +262,12 @@ class CombinedHaloCatalogue:

     Parameters
     ----------
+    paths : :py:class:`csiborgtools.read.CSiBORGPaths`
+        CSiBORG paths-handling object. Does not have to have `n_sim` and
+        `n_snap` set.
     minimum_m500 : float, optional
         The minimum :math:`M_{\rm 500c} / M_\odot` mass. By default no
         threshold.
-    dumpdir : str, optional
-        Path to where files from `run_fit_halos` are stored. By default
-        `/mnt/extraspace/rstiskalek/csiborg/`.
-    mmain_path : str, optional
-        Path to where mmain files are stored. By default
-        `/mnt/zfsusers/hdesmond/Mmain`.
     verbose : bool, optional
         Verbosity flag for reading the catalogues.
     """
@@ -261,19 +275,18 @@ class CombinedHaloCatalogue:
     _n_snaps = None
     _cats = None

-    def __init__(self, minimum_m500=None,
-                 dumpdir="/mnt/extraspace/rstiskalek/csiborg/",
-                 mmain_path="/mnt/zfsusers/hdesmond/Mmain", verbose=True):
+    def __init__(self, paths, minimum_m500=None, verbose=True):
         # Read simulations and their maximum snapshots
         # NOTE remove this later and take all cats
-        self._n_sims = get_csiborg_ids("/mnt/extraspace/hdesmond")[:10]
-        n_snaps = [get_maximum_snapshot(get_sim_path(i)) for i in self._n_sims]
+        self._n_sims = paths.ic_ids[:10]
+        n_snaps = [paths.get_maximum_snapshot(i) for i in self._n_sims]
         self._n_snaps = numpy.asanyarray(n_snaps)

         cats = [None] * self.N
         for i in trange(self.N) if verbose else range(self.N):
-            cats[i] = HaloCatalogue(self._n_sims[i], self._n_snaps[i],
-                                    minimum_m500, dumpdir, mmain_path)
+            paths = deepcopy(paths)
+            paths.set_info(self.n_sims[i], self.n_snaps[i])
+            cats[i] = HaloCatalogue(paths, minimum_m500)
         self._cats = cats

     @property
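Copying `paths` before `set_info` keeps each `HaloCatalogue` pinned to its own `(n_sim, n_snap)` pair instead of all catalogues sharing one mutable object; in sketch form (names as in the diff):

    for i in range(N):
        p = deepcopy(paths)                  # fresh copy per catalogue
        p.set_info(n_sims[i], n_snaps[i])
        cats[i] = HaloCatalogue(p, minimum_m500)

Copying from the original object into a fresh name, as here, also avoids re-copying an already specialised `paths` on later iterations.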
@@ -21,13 +21,12 @@ import numpy
 from os.path import join
 from os import remove
 from tqdm import trange
-from .readsim import (get_sim_path, read_clumps)

 I64 = numpy.int64
 F64 = numpy.float64


-def dump_split(arr, Nsplit, Nsim, Nsnap, outdir):
+def dump_split(arr, n_split, paths):
     """
     Dump an array from a split.

@@ -35,11 +34,13 @@ def dump_split(arr, Nsplit, Nsim, Nsnap, outdir):
     ----------
     arr : n-dimensional or structured array
         Array to be saved.
-    Nsplit : int
+    n_split : int
         The split index.
-    Nsim : int
+    paths : :py:class:`csiborgtools.read.CSiBORGPaths`
+        CSiBORG paths-handling object with set `n_sim` and `n_snap`.
+    n_sim : int
         The CSiBORG realisation index.
-    Nsnap : int
+    n_snap : int
         The index of a redshift snapshot.
     outdir : string
         Directory where to save the temporary files.
@@ -48,13 +49,14 @@ def dump_split(arr, Nsplit, Nsim, Nsnap, outdir):
     -------
     None
     """
-    Nsim = str(Nsim).zfill(5)
-    Nsnap = str(Nsnap).zfill(5)
-    fname = join(outdir, "ramses_out_{}_{}_{}.npy".format(Nsim, Nsnap, Nsplit))
+    n_sim = str(paths.n_sim).zfill(5)
+    n_snap = str(paths.n_snap).zfill(5)
+    fname = join(paths.temp_dumpdir, "ramses_out_{}_{}_{}.npy"
+                 .format(n_sim, n_snap, n_split))
    numpy.save(fname, arr)


-def combine_splits(Nsplits, Nsim, Nsnap, outdir, cols_add, remove_splits=False,
+def combine_splits(n_splits, part_reader, cols_add, remove_splits=False,
                    verbose=True):
     """
     Combine results of many splits saved from `dump_split`. Identifies to which
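A minimal usage sketch of the new `dump_split` signature (hypothetical array; `paths` with `n_sim` and `n_snap` set):

    arr = numpy.zeros(10, dtype=[("npart", numpy.float64)])
    dump_split(arr, 0, paths)
    # -> <paths.temp_dumpdir>/ramses_out_<n_sim>_<n_snap>_0.npy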
@@ -64,14 +66,10 @@ def combine_splits(Nsplits, Nsim, Nsnap, outdir, cols_add, remove_splits=False,

     Parameters
     ----------
-    Nsplits : int
+    n_splits : int
         The total number of clump splits.
-    Nsim : int
-        The CSiBORG realisation index.
-    Nsnap : int
-        The index of a redshift snapshot.
-    outdir : str
-        Directory where to save the new array.
+    part_reader : :py:class:`csiborgtools.read.ParticleReader`
+        CSiBORG particle reader.
     cols_add : list of `(str, dtype)`
         Columns to add. Must be formatted as, for example,
         `[("npart", numpy.float64), ("totpartmass", numpy.float64)]`.
@@ -86,8 +84,10 @@ def combine_splits(Nsplits, Nsim, Nsnap, outdir, cols_add, remove_splits=False,
         Clump array with appended results from the splits.
     """
     # Load clumps to see how many there are and will add to this array
-    simpath = get_sim_path(Nsim)
-    clumps = read_clumps(Nsnap, simpath, cols=None)
+    n_sim = part_reader.paths.n_sim
+    n_snap = part_reader.paths.n_snap
+    clumps = part_reader.read_clumps(cols=None)

     # Get the old + new dtypes and create an empty array
     descr = clumps.dtype.descr + cols_add
     out = numpy.full(clumps.size, numpy.nan, dtype=descr)
@@ -96,12 +96,13 @@ def combine_splits(Nsplits, Nsim, Nsnap, outdir, cols_add, remove_splits=False,
         out[par] = clumps[par]

     # Filename of splits data
-    froot = "ramses_out_{}_{}".format(str(Nsim).zfill(5), str(Nsnap).zfill(5))
-    fname = join(outdir, froot + "_{}.npy")
+    froot = "ramses_out_{}_{}".format(
+        str(n_sim).zfill(5), str(n_snap).zfill(5))
+    fname = join(part_reader.paths.temp_dumpdir, froot + "_{}.npy")

     # Iterate over splits and add to the output array
     cols_add_names = [col[0] for col in cols_add]
-    iters = trange(Nsplits) if verbose else range(Nsplits)
+    iters = trange(n_splits) if verbose else range(n_splits)
     for n in iters:
         fnamesplit = fname.format(n)
         arr = numpy.load(fnamesplit)
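Putting it together, the combining step is now driven entirely by the reader (a sketch; `part_reader` is assumed to be a `ParticleReader` whose `paths` has `n_sim` and `n_snap` set):

    cols_add = [("npart", numpy.float64), ("totpartmass", numpy.float64)]
    out = combine_splits(100, part_reader, cols_add, remove_splits=True)
    # out: the clumps array with the two extra columns filled from the splits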
(File diff suppressed because it is too large.)
@@ -20,7 +20,7 @@ import numpy
 from scipy.interpolate import interp1d
 from astropy.cosmology import LambdaCDM
 from astropy import (constants, units)
-from ..read import read_info
+from ..read import ParticleReader


 # Map of unit conversions
@@ -38,18 +38,17 @@ class BoxUnits:

     Parameters
     ----------
-    Nsnap : int
-        Snapshot index.
-    simpath : str
-        Path to the simulation where its snapshot index folders are stored.
+    paths : :py:class:`csiborgtools.read.CSiBORGPaths`
+        CSiBORG paths-handling object with set `n_sim` and `n_snap`.
     """
     _cosmo = None

-    def __init__(self, Nsnap, simpath):
+    def __init__(self, paths):
         """
         Read in the snapshot info file and set the units from it.
         """
-        info = read_info(Nsnap, simpath)
+        partreader = ParticleReader(paths)
+        info = partreader.read_info()
         pars = ["boxlen", "time", "aexp", "H0",
                 "omega_m", "omega_l", "omega_k", "omega_b",
                 "unit_l", "unit_d", "unit_t"]
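A box-units object now comes straight from the paths (a sketch; exposing the `info` keys such as `H0` as attributes is an assumption based on the `pars` list above, not confirmed by the diff):

    box = BoxUnits(paths)    # reads the snapshot info via ParticleReader
    H0 = box.H0              # hypothetical attribute access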
@@ -220,6 +219,8 @@ class BoxUnits:
         r"""
         Convert the box comoving distance to cosmological redshift.

+        NOTE: this likely is already the observed redshift.
+
         Parameters
         ----------
         dist : float
@@ -236,8 +237,24 @@ class BoxUnits:

     def box2pecredshift(self, vx, vy, vz, px, py, pz, p0x=0, p0y=0, p0z=0):
         """
-        TODO: docs
+        Convert the box phase-space information to a peculiar redshift.
+
+        NOTE: there is some confusion about this.
+
+        Parameters
+        ----------
+        vx, vy, vz : 1-dimensional arrays
+            The Cartesian velocity components.
+        px, py, pz : 1-dimensional arrays
+            The Cartesian position vector components.
+        p0x, p0y, p0z : floats
+            The centre of the box. By default 0, in which case it is assumed
+            that the coordinates are already centred.
+
+        Returns
+        -------
+        pec_redshift : 1-dimensional array
+            The peculiar redshift.
         """
         # Peculiar velocity along the radial distance
         r = numpy.vstack([px - p0x, py - p0y, pz - p0z]).T
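The truncated body presumably completes the standard first-order relation, projecting the velocity onto the radial direction, :math:`v_r = \mathbf{v} \cdot \mathbf{r} / |\mathbf{r}|`, and then :math:`z_{\rm pec} \approx v_r / c`. This is an assumption; the final expression is cut off in this hunk.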
@@ -251,8 +268,24 @@ class BoxUnits:

     def box2obsredshift(self, vx, vy, vz, px, py, pz, p0x=0, p0y=0, p0z=0):
         """
-        TODO: docs
+        Convert the box phase-space information to an 'observed' redshift.
+
+        NOTE: there is some confusion about this.
+
+        Parameters
+        ----------
+        vx, vy, vz : 1-dimensional arrays
+            The Cartesian velocity components.
+        px, py, pz : 1-dimensional arrays
+            The Cartesian position vector components.
+        p0x, p0y, p0z : floats
+            The centre of the box. By default 0, in which case it is assumed
+            that the coordinates are already centred.
+
+        Returns
+        -------
+        obs_redshift : 1-dimensional array
+            The observed redshift.
         """
         r = numpy.vstack([px - p0x, py - p0y, pz - p0z]).T
         zcosmo = self.box2cosmoredshift(numpy.sum(r**2, axis=1)**0.5)
@@ -15,4 +15,4 @@

 from .recarray_manip import (cols_to_structured, add_columns, rm_columns,  # noqa
                              list_to_ndarray, array_to_structured,  # noqa
-                             flip_cols)  # noqa
+                             flip_cols, extract_from_structured)  # noqa
@@ -209,3 +209,35 @@ def flip_cols(arr, col1, col2):
     dum = numpy.copy(arr[col1])
     arr[col1] = arr[col2]
     arr[col2] = dum
+
+
+def extract_from_structured(arr, cols):
+    """
+    Extract columns `cols` from a structured array. The array dtype is set
+    to be that of the first column in `cols`.
+
+    Parameters
+    ----------
+    arr : structured array
+        Array from which to extract columns.
+    cols : list of str or str
+        Columns to extract.
+
+    Returns
+    -------
+    out : 2- or 1-dimensional array
+        Array with shape `(n_particles, len(cols))`. If `len(cols)` is 1
+        flattens the array.
+    """
+    cols = [cols] if isinstance(cols, str) else cols
+    for col in cols:
+        if col not in arr.dtype.names:
+            raise ValueError("Invalid column `{}`!".format(col))
+    # Preallocate an array and populate it
+    out = numpy.zeros((arr.size, len(cols)), dtype=arr[cols[0]].dtype)
+    for i, col in enumerate(cols):
+        out[:, i] = arr[col]
+    # Optionally flatten
+    if len(cols) == 1:
+        return out.reshape(-1,)
+    return out
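Usage sketch for the new helper (hypothetical structured array):

    arr = numpy.zeros(4, dtype=[("x", numpy.float64), ("y", numpy.float64)])
    xy = extract_from_structured(arr, ["x", "y"])   # shape (4, 2)
    x = extract_from_structured(arr, "x")           # shape (4,)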