Mirror of https://github.com/Richard-Sti/csiborgtools_public.git
Synced 2025-05-13 14:11:11 +00:00
Quijote snapshots support (#77)
* Renaming
* Edit docs
* Delete old function
* Add a blank space
* Rename particle reader
* Add comments
* Rename
* Rename
* Edit get_snapshots
* More renaming
* Remove old correction
* Add import
* Add basics of the Quijote reader
* Add a blank space
* Fix paths
* Rename function
* Fix HID and path
* Add more FoF reading
* Move definition
* Add arguments
* Renaming
* Add kwargs for backward compatibility
* FoF Quijote returns only HIDs
* Add sorting of Quijote
* Add path to CSiBORG ICs snapshot
* Add support for Quijote
* initmatch paths for Quijote
* Add kwargs
* Fix blank lines
* Rename kwarg
* Remove unused import
* Remove hardcoded numbers
* Update for Quijote
* Do not store velocities in Quijote ICs
* Box units mass Quijote
* Fix typo
* Ensure particles are not right at the edge
* Add structfit paths for Quijote
* Basic CSiBORG units
* Add more Quijote halo reading
* Add Quijote fitting
* Docs changes
* Docs changes
This commit is contained in:
parent e08c741fc8
commit fb4b4edf19

18 changed files with 800 additions and 300 deletions
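The thread running through the hunks below is that the path and reader helpers now take the simulation name ("csiborg" or "quijote") explicitly, and that the RAMSES-specific ParticleReader is renamed to CSiBORGReader with a QuijoteReader added alongside it. A rough sketch of the calling pattern the changed scripts converge on is shown here; it only mirrors calls that appear in the diff, so treat the argument lists as illustrative rather than the library's documented API:

# Illustrative sketch only -- mirrors the call pattern visible in the diff,
# not the documented csiborgtools API.
import csiborgtools

paths = csiborgtools.read.Paths(**csiborgtools.paths_glamdring)

simname = "quijote"        # or "csiborg"
nsim = 0                   # hypothetical IC realisation index

# Readers are now simulation-specific.
if simname == "csiborg":
    reader = csiborgtools.read.CSiBORGReader(paths)
else:
    reader = csiborgtools.read.QuijoteReader(paths)

# Path helpers take the simulation name explicitly.
nsnap = max(paths.get_snapshots(nsim, simname))
fname = paths.particles(nsim, simname)
fout = paths.structfit(nsnap, nsim, simname)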
@@ -51,7 +51,7 @@ MAS = "CIC"  # mass asignment scheme
 paths = csiborgtools.read.Paths(**csiborgtools.paths_glamdring)
 box = csiborgtools.read.CSiBORGBox(paths)
-reader = csiborgtools.read.ParticleReader(paths)
+reader = csiborgtools.read.CSiBORGReader(paths)
 ics = paths.get_ics("csiborg")
 nsims = len(ics)


@@ -66,8 +66,8 @@ jobs = csiborgtools.utils.split_jobs(nsims, nproc)[rank]
 for n in jobs:
     print(f"Rank {rank} at {datetime.now()}: saving {n}th delta.", flush=True)
     nsim = ics[n]
-    particles = reader.read_particle(max(paths.get_snapshots(nsim)), nsim,
-                                     ["x", "y", "z", "M"], verbose=False)
+    particles = reader.read_particle(max(paths.get_snapshots(nsim, "csiborg")),
+                                     nsim, ["x", "y", "z", "M"], verbose=False)
     # Halfwidth -- particle selection
     if args.halfwidth < 0.5:
         particles = csiborgtools.read.halfwidth_select(
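The script above distributes the simulations over MPI ranks with csiborgtools.utils.split_jobs(nsims, nproc)[rank]. The helper's actual implementation is not part of this diff; the snippet below is only a hypothetical re-implementation of the splitting idea, so that each rank loops over its own chunk of indices:

# Hypothetical stand-in for csiborgtools.utils.split_jobs -- the real helper
# is not shown in this diff and may differ.
import numpy


def split_jobs(njobs, nworkers):
    """Split `njobs` job indices into `nworkers` roughly equal chunks."""
    return [chunk.tolist()
            for chunk in numpy.array_split(numpy.arange(njobs), nworkers)]


# Usage mirroring the script: each MPI rank keeps only its own chunk.
nsims, nproc, rank = 101, 4, 0     # toy values
jobs = split_jobs(nsims, nproc)[rank]
for n in jobs:
    pass                           # e.g. process simulation ics[n]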
@@ -58,9 +58,10 @@ def density_field(nsim, parser_args, to_save=True):
     field : 3-dimensional array
     """
     paths = csiborgtools.read.Paths(**csiborgtools.paths_glamdring)
-    nsnap = max(paths.get_snapshots(nsim))
+    nsnap = max(paths.get_snapshots(nsim, "csiborg"))
     box = csiborgtools.read.CSiBORGBox(nsnap, nsim, paths)
-    parts = csiborgtools.read.read_h5(paths.particles(nsim))["particles"]
+    parts = csiborgtools.read.read_h5(paths.particles(nsim, "csiborg"))
+    parts = parts["particles"]
     gen = csiborgtools.field.DensityField(box, parser_args.MAS)

     if parser_args.kind == "density":

@@ -114,9 +115,10 @@ def velocity_field(nsim, parser_args, to_save=True):
             "Smoothed velocity field is not implemented.")
     paths = csiborgtools.read.Paths(**csiborgtools.paths_glamdring)
     mpart = 1.1641532e-10  # Particle mass in CSiBORG simulations.
-    nsnap = max(paths.get_snapshots(nsim))
+    nsnap = max(paths.get_snapshots(nsim, "csiborg"))
     box = csiborgtools.read.CSiBORGBox(nsnap, nsim, paths)
-    parts = csiborgtools.read.read_h5(paths.particles(nsim))["particles"]
+    parts = csiborgtools.read.read_h5(paths.particles(nsim, "csiborg"))
+    parts = parts["particles"]

     gen = csiborgtools.field.VelocityField(box, parser_args.MAS)
     field = gen(parts, parser_args.grid, mpart, verbose=parser_args.verbose)

@@ -152,7 +154,7 @@ def potential_field(nsim, parser_args, to_save=True):
     potential : 3-dimensional array
     """
     paths = csiborgtools.read.Paths(**csiborgtools.paths_glamdring)
-    nsnap = max(paths.get_snapshots(nsim))
+    nsnap = max(paths.get_snapshots(nsim, "csiborg"))
     box = csiborgtools.read.CSiBORGBox(nsnap, nsim, paths)

     # Load the real space overdensity field

@@ -168,7 +170,8 @@ def potential_field(nsim, parser_args, to_save=True):
     field = gen(rho)

     if parser_args.in_rsp:
-        parts = csiborgtools.read.read_h5(paths.particles(nsim))["particles"]
+        parts = csiborgtools.read.read_h5(paths.particles(nsim, "csiborg"))
+        parts = parts["particles"]
         field = csiborgtools.field.field2rsp(*field, parts=parts, box=box,
                                              verbose=parser_args.verbose)
     if to_save:

@@ -207,7 +210,7 @@ def radvel_field(nsim, parser_args, to_save=True):
         raise NotImplementedError(
             "Smoothed radial vel. field not implemented.")
     paths = csiborgtools.read.Paths(**csiborgtools.paths_glamdring)
-    nsnap = max(paths.get_snapshots(nsim))
+    nsnap = max(paths.get_snapshots(nsim, "csiborg"))
     box = csiborgtools.read.CSiBORGBox(nsnap, nsim, paths)

     vel = numpy.load(paths.field("velocity", parser_args.MAS, parser_args.grid,

@@ -245,7 +248,7 @@ def environment_field(nsim, parser_args, to_save=True):
     env : 3-dimensional array
     """
     paths = csiborgtools.read.Paths(**csiborgtools.paths_glamdring)
-    nsnap = max(paths.get_snapshots(nsim))
+    nsnap = max(paths.get_snapshots(nsim, "csiborg"))
     box = csiborgtools.read.CSiBORGBox(nsnap, nsim, paths)
     density_gen = csiborgtools.field.DensityField(box, parser_args.MAS)
     gen = csiborgtools.field.TidalTensorField(box, parser_args.MAS)

@@ -268,7 +271,8 @@ def environment_field(nsim, parser_args, to_save=True):

     # Optionally drag the field to RSP.
     if parser_args.in_rsp:
-        parts = csiborgtools.read.read_h5(paths.particles(nsim))["particles"]
+        parts = csiborgtools.read.read_h5(paths.particles(nsim, "csiborg"))
+        parts = parts["particles"]
         fields = (tensor_field.T00, tensor_field.T11, tensor_field.T22,
                   tensor_field.T01, tensor_field.T02, tensor_field.T12)
@@ -81,8 +81,8 @@ def _main(nsim, simname, verbose):
     verbose : bool
         Verbosity flag.
     """
-    if simname == "quijote":
-        raise NotImplementedError("Quijote not implemented yet.")
+    # if simname == "quijote":
+    #     raise NotImplementedError("Quijote not implemented yet.")

     cols = [("index", numpy.int32),
             ("npart", numpy.int32),

@@ -95,17 +95,22 @@ def _main(nsim, simname, verbose):
             ("m200c", numpy.float32),
             ("lambda200c", numpy.float32),]

-    nsnap = max(paths.get_snapshots(nsim))
-    box = csiborgtools.read.CSiBORGBox(nsnap, nsim, paths)
+    nsnap = max(paths.get_snapshots(nsim, simname))
+    if simname == "csiborg":
+        box = csiborgtools.read.CSiBORGBox(nsnap, nsim, paths)
+        cat = csiborgtools.read.CSiBORGHaloCatalogue(
+            nsim, paths, with_lagpatch=False, load_initial=False, rawdata=True,
+            load_fitted=False)
+    else:
+        box = csiborgtools.read.QuijoteBox(nsnap, nsim, paths)
+        cat = csiborgtools.read.QuijoteHaloCatalogue(
+            nsim, paths, nsnap, load_initial=False, rawdata=True)

     # Particle archive
-    f = csiborgtools.read.read_h5(paths.particles(nsim))
+    f = csiborgtools.read.read_h5(paths.particles(nsim, simname))
     particles = f["particles"]
     halo_map = f["halomap"]
     hid2map = {hid: i for i, hid in enumerate(halo_map[:, 0])}
-    cat = csiborgtools.read.CSiBORGHaloCatalogue(
-        nsim, paths, with_lagpatch=False, load_initial=False, rawdata=True,
-        load_fitted=False)

     out = csiborgtools.read.cols_to_structured(len(cat), cols)
     for i in trange(len(cat)) if verbose else range(len(cat)):

@@ -121,7 +126,7 @@ def _main(nsim, simname, verbose):
         for key in _out.keys():
             out[key][i] = _out[key]

-    fout = paths.structfit(nsnap, nsim)
+    fout = paths.structfit(nsnap, nsim, simname)
     if verbose:
         print(f"Saving to `{fout}`.", flush=True)
     numpy.save(fout, out)
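In this fitting script, cols is a list of (name, dtype) pairs and csiborgtools.read.cols_to_structured(len(cat), cols) allocates the per-halo output that the loop then fills field by field. The helper's source is not part of this diff; the sketch below shows one plausible way such an allocator could work, assuming it simply builds an empty NumPy structured array:

# Assumed behaviour of cols_to_structured (not shown in this diff): allocate
# a structured array with one row per object, float fields initialised to NaN.
import numpy


def cols_to_structured(nrows, cols):
    dtype = {"names": [name for name, _ in cols],
             "formats": [fmt for _, fmt in cols]}
    out = numpy.zeros(nrows, dtype=dtype)
    for name, fmt in cols:
        if numpy.issubdtype(fmt, numpy.floating):
            out[name] = numpy.nan
    return out


cols = [("index", numpy.int32), ("npart", numpy.int32),
        ("m200c", numpy.float32), ("lambda200c", numpy.float32)]
out = cols_to_structured(5, cols)
out["index"][0] = 42          # filled row by row, as in the script's loop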
@@ -50,9 +50,6 @@ def _main(nsim, simname, verbose):
     verbose : bool
        Verbosity flag.
     """
-    if simname == "quijote":
-        raise NotImplementedError("Quijote not implemented yet.")
-
     paths = csiborgtools.read.Paths(**csiborgtools.paths_glamdring)
     cols = [("index", numpy.int32),
             ("x", numpy.float32),

@@ -61,15 +58,26 @@ def _main(nsim, simname, verbose):
             ("lagpatch_size", numpy.float32),
             ("lagpatch_ncells", numpy.int32),]

-    parts = csiborgtools.read.read_h5(paths.initmatch(nsim, "particles"))
+    fname = paths.initmatch(nsim, simname, "particles")
+    parts = csiborgtools.read.read_h5(fname)
     parts = parts['particles']
-    halo_map = csiborgtools.read.read_h5(paths.particles(nsim))
+    halo_map = csiborgtools.read.read_h5(paths.particles(nsim, simname))
     halo_map = halo_map["halomap"]

-    cat = csiborgtools.read.CSiBORGHaloCatalogue(
-        nsim, paths, rawdata=True, load_fitted=False, load_initial=False)
+    if simname == "csiborg":
+        cat = csiborgtools.read.CSiBORGHaloCatalogue(
+            nsim, paths, rawdata=True, load_fitted=False, load_initial=False)
+    else:
+        cat = csiborgtools.read.QuijoteHaloCatalogue(nsim, paths, nsnap=4)
     hid2map = {hid: i for i, hid in enumerate(halo_map[:, 0])}

+    # Initialise the overlapper.
+    if simname == "csiborg":
+        kwargs = {"box_size": 2048, "bckg_halfsize": 475}
+    else:
+        kwargs = {"box_size": 512, "bckg_halfsize": 256}
+    overlapper = csiborgtools.match.ParticleOverlap(**kwargs)
+
     out = csiborgtools.read.cols_to_structured(len(cat), cols)
     for i, hid in enumerate(tqdm(cat["index"]) if verbose else cat["index"]):
         out["index"][i] = hid

@@ -88,12 +96,11 @@ def _main(nsim, simname, verbose):
         out["lagpatch_size"][i] = numpy.percentile(distances, 99)

         # Calculate the number of cells with > 0 density.
-        overlapper = csiborgtools.match.ParticleOverlap()
         delta = overlapper.make_delta(pos, mass, subbox=True)
         out["lagpatch_ncells"][i] = csiborgtools.fits.delta2ncells(delta)

     # Now save it
-    fout = paths.initmatch(nsim, "fit")
+    fout = paths.initmatch(nsim, simname, "fit")
     if verbose:
         print(f"{datetime.now()}: dumping fits to .. `{fout}`.", flush=True)
     with open(fout, "wb") as f:
@@ -30,12 +30,15 @@ except ModuleNotFoundError:


 def pair_match(nsim0, nsimx, sigma, smoothen, verbose):
+    # TODO fix this.
+    simname = "csiborg"
+    overlapper_kwargs = {"box_size": 512, "bckg_halfsize": 475}
     from csiborgtools.read import CSiBORGHaloCatalogue, read_h5

     paths = csiborgtools.read.Paths(**csiborgtools.paths_glamdring)
     smooth_kwargs = {"sigma": sigma, "mode": "constant", "cval": 0.0}
-    overlapper = csiborgtools.match.ParticleOverlap()
-    matcher = csiborgtools.match.RealisationsMatcher()
+    overlapper = csiborgtools.match.ParticleOverlap(**overlapper_kwargs)
+    matcher = csiborgtools.match.RealisationsMatcher(**overlapper_kwargs)

     # Load the raw catalogues (i.e. no selection) including the initial CM
     # positions and the particle archives.

@@ -45,12 +48,12 @@ def pair_match(nsim0, nsimx, sigma, smoothen, verbose):
     catx = CSiBORGHaloCatalogue(nsimx, paths, load_initial=True, bounds=bounds,
                                 with_lagpatch=True, load_clumps_cat=True)

-    clumpmap0 = read_h5(paths.particles(nsim0))["clumpmap"]
-    parts0 = read_h5(paths.initmatch(nsim0, "particles"))["particles"]
+    clumpmap0 = read_h5(paths.particles(nsim0, simname))["clumpmap"]
+    parts0 = read_h5(paths.initmatch(nsim0, simname, "particles"))["particles"]
     clid2map0 = {clid: i for i, clid in enumerate(clumpmap0[:, 0])}

-    clumpmapx = read_h5(paths.particles(nsimx))["clumpmap"]
-    partsx = read_h5(paths.initmatch(nsimx, "particles"))["particles"]
+    clumpmapx = read_h5(paths.particles(nsimx, simname))["clumpmap"]
+    partsx = read_h5(paths.initmatch(nsimx, simname, "particles"))["particles"]
     clid2mapx = {clid: i for i, clid in enumerate(clumpmapx[:, 0])}

     # We generate the background density fields. Loads halos's particles one by
@@ -57,7 +57,7 @@ def copy_membership(nsim, verbose=True):
         print(f"Loading from ... `{fpath}`.")
     data = numpy.genfromtxt(fpath, dtype=int)

-    fout = paths.fof_membership(nsim)
+    fout = paths.fof_membership(nsim, "csiborg")
     if verbose:
         print(f"Saving to ... `{fout}`.")
     numpy.save(fout, data)

@@ -77,7 +77,7 @@ def copy_catalogue(nsim, verbose=True):
     paths = csiborgtools.read.Paths(**csiborgtools.paths_glamdring)
     source = join("/mnt/extraspace/jeg/greenwhale/Constrained_Sims",
                   f"sim_{nsim}/halo_catalog_{nsim}_FOF.txt")
-    dest = paths.fof_cat(nsim)
+    dest = paths.fof_cat(nsim, "csiborg")
     if verbose:
         print("Copying`{}` to `{}`.".format(source, dest))
     copy(source, dest)

@@ -96,14 +96,14 @@ def sort_fofid(nsim, verbose=True):
         Verbosity flag.
     """
     paths = csiborgtools.read.Paths(**csiborgtools.paths_glamdring)
-    nsnap = max(paths.get_snapshots(nsim))
-    fpath = paths.fof_membership(nsim)
+    nsnap = max(paths.get_snapshots(nsim, "csiborg"))
+    fpath = paths.fof_membership(nsim, "csiborg")
     if verbose:
         print(f"{datetime.now()}: loading from ... `{fpath}`.")
     # Columns are halo ID, particle ID.
     fof = numpy.load(fpath)

-    reader = csiborgtools.read.ParticleReader(paths)
+    reader = csiborgtools.read.CSiBORGReader(paths)
     pars_extract = ["x"]  # Dummy variable
     __, pids = reader.read_particle(nsnap, nsim, pars_extract,
                                     return_structured=False, verbose=verbose)

@@ -123,7 +123,7 @@ def sort_fofid(nsim, verbose=True):
         hid, pid = fof[i]
         fof_hids[pids_idx[pid]] = hid

-    fout = paths.fof_membership(nsim, sorted=True)
+    fout = paths.fof_membership(nsim, "csiborg", sorted=True)
     if verbose:
         print(f"Saving the sorted data to ... `{fout}`")
     numpy.save(fout, fof_hids)
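The sort_fofid hunks above reorder the FoF membership file, whose columns are halo ID and particle ID, so that it lines up with the order in which the snapshot reader returns particle IDs; the key step is the pid-to-position lookup that fills fof_hids. A toy version of that reordering, with invented arrays standing in for the real membership file and snapshot:

# Toy illustration of the reordering done in sort_fofid; the array contents
# are made up, only the indexing pattern follows the diff.
import numpy

# Particle IDs in the order the snapshot reader returns them.
pids = numpy.array([11, 42, 7, 23, 5])
# FoF membership: columns are (halo ID, particle ID), in arbitrary order.
fof = numpy.array([[1, 7], [1, 42], [2, 5]])

# Map each particle ID to its position in the snapshot ordering.
pids_idx = {pid: i for i, pid in enumerate(pids)}

# Halo ID per particle, aligned with the snapshot ordering (0 = unassigned,
# a toy convention for this sketch).
fof_hids = numpy.zeros(pids.size, dtype=numpy.int64)
for hid, pid in fof:
    fof_hids[pids_idx[pid]] = hid

print(fof_hids)   # -> [0 1 1 0 2]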
@@ -58,10 +58,10 @@ for i, nsim in enumerate(nsims):
     if rank == 0:
         now = datetime.now()
         print(f"{now}: calculating {i}th simulation `{nsim}`.", flush=True)
-    nsnap = max(paths.get_snapshots(nsim))
+    nsnap = max(paths.get_snapshots(nsim, "csiborg"))
     box = csiborgtools.read.CSiBORGBox(nsnap, nsim, paths)

-    f = csiborgtools.read.read_h5(paths.particles(nsim))
+    f = csiborgtools.read.read_h5(paths.particles(nsim, "csiborg"))
     particles = f["particles"]
     clump_map = f["clumpmap"]
     clid2map = {clid: i for i, clid in enumerate(clump_map[:, 0])}
@@ -38,7 +38,7 @@ mmain_reader = csiborgtools.read.MmainReader(paths)


 def do_mmain(nsim):
-    nsnap = max(paths.get_snapshots(nsim))
+    nsnap = max(paths.get_snapshots(nsim, "csiborg"))
     # NOTE: currently works for highest snapshot anyway
     mmain, ultimate_parent = mmain_reader.make_mmain(nsim, verbose=False)
     numpy.savez(paths.mmain(nsnap, nsim),
@@ -60,6 +60,11 @@ def minmax_halo(hid, halo_ids, start_loop=0):
     return start, end


+###############################################################################
+#                          Sorting and dumping                                #
+###############################################################################
+
+
 def main(nsim, simname, verbose):
     """
     Read in the snapshot particles, sort them by their FoF halo ID and dump

@@ -81,20 +86,21 @@ def main(nsim, simname, verbose):
     None
     """
     paths = csiborgtools.read.Paths(**csiborgtools.paths_glamdring)
-    partreader = csiborgtools.read.ParticleReader(paths)
+    if simname == "csiborg":
+        partreader = csiborgtools.read.CSiBORGReader(paths)
+    else:
+        partreader = csiborgtools.read.QuijoteReader(paths)

-    if simname == "quijote":
-        raise NotImplementedError("Not implemented for Quijote yet.")
-
-    # Keep "ID" as the last column!
-    pars_extract = ['x', 'y', 'z', 'vx', 'vy', 'vz', 'M', "ID"]
-    nsnap = max(paths.get_snapshots(nsim))
-    fname = paths.particles(nsim)
+    nsnap = max(paths.get_snapshots(nsim, simname))
+    fname = paths.particles(nsim, simname)
     # We first read in the halo IDs of the particles and infer the sorting.
     # Right away we dump the halo IDs to a HDF5 file and clear up memory.
     if verbose:
-        print(f"{datetime.now()}: loading particles {nsim}.", flush=True)
-    part_hids = partreader.read_fof_hids(nsim)
+        print(f"{datetime.now()}: loading PIDs of IC {nsim}.", flush=True)
+    part_hids = partreader.read_fof_hids(
+        nsnap=nsnap, nsim=nsim, verbose=verbose)
     if verbose:
         print(f"{datetime.now()}: sorting PIDs of IC {nsim}.", flush=True)
     sort_indxs = numpy.argsort(part_hids).astype(numpy.int32)
     part_hids = part_hids[sort_indxs]
     with h5py.File(fname, "w") as f:

@@ -106,6 +112,10 @@ def main(nsim, simname, verbose):
     # Next we read in the particles and sort them by their halo ID.
     # We cannot directly read this as an unstructured array because the float32
     # precision is insufficient to capture the halo IDs.
+    if simname == "csiborg":
+        pars_extract = ['x', 'y', 'z', 'vx', 'vy', 'vz', 'M', "ID"]
+    else:
+        pars_extract = None
     parts, pids = partreader.read_particle(
         nsnap, nsim, pars_extract, return_structured=False, verbose=verbose)
     # Now we in two steps save the particles and particle IDs.

@@ -129,11 +139,11 @@ def main(nsim, simname, verbose):
     collect()

     if verbose:
-        print(f"{datetime.now()}: creating halo map for {nsim}.", flush=True)
+        print(f"{datetime.now()}: creating a halo map for {nsim}.", flush=True)
     # Load clump IDs back to memory
     with h5py.File(fname, "r") as f:
         part_hids = f["halo_ids"][:]
-    # We loop over the unique clump IDs.
+    # We loop over the unique halo IDs.
     unique_halo_ids = numpy.unique(part_hids)
     halo_map = numpy.full((unique_halo_ids.size, 3), numpy.nan,
                           dtype=numpy.int32)

@@ -148,7 +158,7 @@ def main(nsim, simname, verbose):
             start_loop = kf

     # We save the mapping to a HDF5 file
-    with h5py.File(paths.particles(nsim), "r+") as f:
+    with h5py.File(fname, "r+") as f:
         f.create_dataset("halomap", data=halo_map)
         f.close()
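Once the particles are sorted by their FoF halo ID, the script records, for every unique halo ID, the contiguous block it occupies and stores that as the three-column "halomap" dataset; minmax_halo finds the block boundaries and start_loop avoids rescanning the array. The diff only shows the array's shape, so the assumption that the columns are (halo ID, first index, last index) is mine; the toy sketch below uses searchsorted in place of minmax_halo:

# Toy sketch of the halo-map idea on a halo-ID-sorted particle array.
# The (halo ID, start, end) column meaning is an assumption, not stated in
# the diff, and numpy.searchsorted stands in for minmax_halo.
import numpy

part_hids = numpy.array([0, 0, 1, 1, 1, 4, 4])   # already sorted by halo ID

unique_halo_ids = numpy.unique(part_hids)
halo_map = numpy.zeros((unique_halo_ids.size, 3), dtype=numpy.int64)
for i, hid in enumerate(unique_halo_ids):
    # Each halo occupies one contiguous block because the array is sorted.
    k0 = numpy.searchsorted(part_hids, hid, side="left")
    kf = numpy.searchsorted(part_hids, hid, side="right")
    halo_map[i] = hid, k0, kf

# With hid2map (as in the fitting scripts) a halo's particles become a slice.
hid2map = {hid: i for i, hid in enumerate(halo_map[:, 0])}
_, k0, kf = halo_map[hid2map[1]]
# particles_of_halo_1 = particles[k0:kf]
print(k0, kf)   # -> 2 5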
@@ -50,34 +50,55 @@ def _main(nsim, simname, verbose):
     verbose : bool
         Verbosity flag.
     """
-    if simname == "quijote":
-        raise NotImplementedError("Quijote not implemented yet.")
-
     paths = csiborgtools.read.Paths(**csiborgtools.paths_glamdring)
-    partreader = csiborgtools.read.ParticleReader(paths)
+    if simname == "csiborg":
+        partreader = csiborgtools.read.CSiBORGReader(paths)
+    else:
+        partreader = csiborgtools.read.QuijoteReader(paths)

     if verbose:
-        print(f"{datetime.now()}: reading and processing simulation {nsim}.",
+        print(f"{datetime.now()}: reading and processing simulation `{nsim}`.",
               flush=True)
     # We first load the particle IDs in the final snapshot.
-    pidf = csiborgtools.read.read_h5(paths.particles(nsim))
+    pidf = csiborgtools.read.read_h5(paths.particles(nsim, simname))
     pidf = pidf["particle_ids"]
     # Then we load the particles in the initil snapshot and make sure that
     # their particle IDs are sorted as in the final snapshot. Again, because of
     # precision this must be read as structured.
     # NOTE: ID has to be the last column.
-    pars_extract = ["x", "y", "z", "M", "ID"]
+    if simname == "csiborg":
+        pars_extract = ["x", "y", "z", "M", "ID"]
+        # CSiBORG's initial snapshot ID
+        nsnap = 1
+    else:
+        pars_extract = None
+        # Use this to point the reader to the ICs snapshot
+        nsnap = -1
     part0, pid0 = partreader.read_particle(
-        1, nsim, pars_extract, return_structured=False, verbose=verbose)
+        nsnap, nsim, pars_extract, return_structured=False, verbose=verbose)
+    # Quijote's initial snapshot information also contains velocities but we
+    # don't need those.
+    if simname == "quijote":
+        part0 = part0[:, [0, 1, 2, 6]]
+        # In Quijote some particles are position precisely at the edge of the
+        # box. Move them to be just inside.
+        pos = part0[:, :3]
+        mask = pos >= 1
+        if numpy.any(mask):
+            spacing = numpy.spacing(pos[mask])
+            assert numpy.max(spacing) <= 1e-5
+            pos[mask] -= spacing

     # First enforce them to already be sorted and then apply reverse
     # sorting from the final snapshot.
     part0 = part0[numpy.argsort(pid0)]
     del pid0
     collect()
     part0 = part0[numpy.argsort(numpy.argsort(pidf))]
+    fout = paths.initmatch(nsim, simname, "particles")
     if verbose:
-        print(f"{datetime.now()}: dumping particles for {nsim}.", flush=True)
-    with h5py.File(paths.initmatch(nsim, "particles"), "w") as f:
+        print(f"{datetime.now()}: dumping particles for `{nsim}` to `{fout}`",
+              flush=True)
+    with h5py.File(fout, "w") as f:
         f.create_dataset("particles", data=part0)
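Two details in the last hunk are worth spelling out: numpy.spacing nudges Quijote particles sitting exactly on the box edge to just inside it, and the double argsort puts the initial-snapshot particles into the same order as the final-snapshot particle IDs. A toy demonstration of both tricks follows; the data are invented and only the indexing mirrors the diff:

# Toy demonstration of the two numpy idioms used above; values are made up.
import numpy

# (1) Nudge positions that sit exactly at the box edge (coordinate >= 1)
#     to just inside it by subtracting one ULP.
pos = numpy.array([[0.25, 0.50, 1.00],
                   [1.00, 0.75, 0.10]], dtype=numpy.float32)
mask = pos >= 1
pos[mask] -= numpy.spacing(pos[mask])
assert numpy.all(pos < 1)

# (2) Reorder initial-snapshot rows to match the final-snapshot PID order.
pid0 = numpy.array([3, 1, 2])            # PIDs as stored in the initial snapshot
pidf = numpy.array([2, 3, 1])            # PIDs in the final-snapshot order
part0 = numpy.array([30.0, 10.0, 20.0])  # toy per-particle quantity, aligned with pid0

part0 = part0[numpy.argsort(pid0)]                 # sort rows by PID: PIDs 1, 2, 3
part0 = part0[numpy.argsort(numpy.argsort(pidf))]  # undo-sort into pidf's order
assert numpy.array_equal(part0, numpy.array([20.0, 30.0, 10.0]))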