Mirror of https://github.com/Richard-Sti/csiborgtools_public.git
Synced 2025-05-14 06:31:11 +00:00
Improve paths and documentation (#38)
* pep8
* style issue
* Documentation edits
* Remove old files
* Remove more old files
* Doc & stop setting snap and nsim in paths
* add nsnap, nsim support
* Remove blank space
* docs
* Docs edits
* Reduce redundant code
* docs
* Documentation
* Docs
* Simplify
* add nsnap nsim arguments
* Remove redundant docs
* Fix typos
* Shorten catalogue
* Remove import
* Remove blank line
* Docs only edits
* Rearrange imports
* Remove blank space
* Rearrange imports
* Rearrange imports
* Remove blank space
* Update submission script
* Edit docs
* Remove blank line
* new paths
* Update submission scripts
* Update file handling
* Remove blank line
* Move things around
* New paths
* Edit submission script
* edit paths
* Fix typo
* Fix bug
* Remove import
* Update nb
This commit is contained in:
Parent: 035d7e0071
Commit: 8d34b832af
32 changed files with 954 additions and 5358 deletions
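The recurring change across these scripts is that CSiBORGPaths no longer carries a current snapshot and simulation via set_info; instead the snapshot index nsnap and the IC realisation index nsim are passed explicitly to the readers and unit converters. A minimal sketch of the new calling convention, pieced together from the hunks below (column names and signatures are read off this diff, not checked against the current API):

import csiborgtools

paths = csiborgtools.read.CSiBORGPaths()
nsim = paths.ic_ids(tonew=False)[0]     # pick one IC realisation
nsnap = max(paths.get_snapshots(nsim))  # its final snapshot
reader = csiborgtools.read.ParticleReader(paths)
# Readers and box-unit converters now take nsnap and nsim explicitly.
particles = reader.read_particle(nsnap, nsim, ["x", "y", "z", "M"], verbose=False)
box = csiborgtools.units.BoxUnits(nsnap, nsim, paths)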
@@ -1,68 +0,0 @@
# Copyright (C) 2022 Richard Stiskalek
# This program is free software; you can redistribute it and/or modify it
# under the terms of the GNU General Public License as published by the
# Free Software Foundation; either version 3 of the License, or (at your
# option) any later version.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General
# Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
"""A script to dump or remove files for POWMES."""

from os.path import join, exists
from argparse import ArgumentParser
import numpy
from datetime import datetime
from os import remove
from mpi4py import MPI
try:
import csiborgtools
except ModuleNotFoundError:
import sys
sys.path.append("../")
import csiborgtools
import utils

parser = ArgumentParser()
parser.add_argument("--mode", type=str, choices=["dump", "remove"])
args = parser.parse_args()

F64 = numpy.float64
I64 = numpy.int64

# Get MPI things
comm = MPI.COMM_WORLD
rank = comm.Get_rank()
nproc = comm.Get_size()


dumpdir = join(utils.dumpdir, "temp_powmes")
fout = join(dumpdir, "out_{}_{}.ascii")
paths = csiborgtools.read.CSiBORGPaths()


n_sims = paths.ic_ids[:1]
for i in csiborgtools.fits.split_jobs(len(n_sims), nproc)[rank]:
print("{}: calculating {}th simulation.".format(datetime.now(), i))
n_sim = n_sims[i]
n_snap = paths.get_maximum_snapshot(n_sim)
paths.set_info(n_sim, n_snap)

f = fout.format(n_sim, n_snap)
if args.mode == "dump":
# Read the particles
reader = csiborgtools.read.ParticleReader(paths)
particles = reader.read_particle(["x", "y", "z", "M"])
csiborgtools.read.make_ascii_powmes(particles, f, verbose=True)
else:
if exists(f):
remove(f)

comm.Barrier()
if rank == 0:
print("All finished! See you!")
@@ -1,13 +0,0 @@
nthreads=1
memory=75
queue="berg"
env="/mnt/zfsusers/rstiskalek/csiborgtools/venv_galomatch/bin/python"
file="run_asciipos.py"
mode="dump"

cm="addqueue -q $queue -n $nthreads -m $memory $env $file --mode $mode"

echo "Submitting:"
echo $cm
echo
$cm
@@ -1,71 +0,0 @@
# Copyright (C) 2022 Richard Stiskalek
# This program is free software; you can redistribute it and/or modify it
# under the terms of the GNU General Public License as published by the
# Free Software Foundation; either version 3 of the License, or (at your
# option) any later version.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General
# Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
"""
MPI script to run the CSiBORG realisations matcher.

TODO
----
- [ ] Update this script
"""
import numpy
from datetime import datetime
from mpi4py import MPI
from os.path import join
try:
import csiborgtools
except ModuleNotFoundError:
import sys
sys.path.append("../")
import csiborgtools
import utils

# Get MPI things
comm = MPI.COMM_WORLD
rank = comm.Get_rank()
nproc = comm.Get_size()

# File paths
fperm = join(utils.dumpdir, "overlap", "cross_{}.npy")
# fperm = join(utils.dumpdir, "match", "cross_matches.npy")

# Set up the catalogue
paths = csiborgtools.read.CSiBORGPaths(to_new=False)
print("{}: started reading in the combined catalogue.".format(datetime.now()),
flush=True)
cat = csiborgtools.read.CombinedHaloCatalogue(
paths, min_m500=None, max_dist=None, verbose=False)
print("{}: finished reading in the combined catalogue with `{}`."
.format(datetime.now(), cat.n_sims), flush=True)
matcher = csiborgtools.match.RealisationsMatcher(cat)


for i in csiborgtools.fits.split_jobs(len(cat.n_sims), nproc)[rank]:
n = cat.n_sims[i]
print("{}: rank {} working on simulation `{}`."
.format(datetime.now(), rank, n), flush=True)
out = matcher.cross_knn_position_single(
i, nmult=15, dlogmass=2, init_dist=True, overlap=False, verbose=False,
overlapper_kwargs={"smooth_scale": 1})

# Dump the result
fout = fperm.format(n)
print("Saving results to `{}`.".format(fout))
with open(fout, "wb") as f:
numpy.save(fout, out)


comm.Barrier()
if rank == 0:
print("All finished.")
@@ -1,17 +0,0 @@
nthreads=1
memory=32
queue="berg"
env="/mnt/zfsusers/rstiskalek/csiborgtools/venv_galomatch/bin/python"
file="run_crossmatch.py"

pythoncm="$env $file"
# echo "Submitting:"
# echo $pythoncm
# echo
# $pythoncm

cm="addqueue -q $queue -n $nthreads -m $memory $pythoncm"
echo "Submitting:"
echo $cm
echo
$cm
@@ -16,14 +16,14 @@
MPI script to calculate the matter cross power spectrum between CSiBORG
IC realisations. Units are Mpc/h.
"""
from gc import collect
from argparse import ArgumentParser
from os import remove
from os.path import join
from itertools import combinations
from datetime import datetime
import numpy
import joblib
from datetime import datetime
from itertools import combinations
from os.path import join
from os import remove
from gc import collect
from mpi4py import MPI
import Pk_library as PKL
try:

@@ -32,9 +32,9 @@ except ModuleNotFoundError:
import sys
sys.path.append("../")
import csiborgtools
import utils


dumpdir = "/mnt/extraspace/rstiskalek/csiborg/"
parser = ArgumentParser()
parser.add_argument("--grid", type=int)
parser.add_argument("--halfwidth", type=float, default=0.5)

@@ -47,27 +47,24 @@ nproc = comm.Get_size()
MAS = "CIC" # mass asignment scheme

paths = csiborgtools.read.CSiBORGPaths()
ics = paths.ic_ids
n_sims = len(ics)
box = csiborgtools.units.BoxUnits(paths)
reader = csiborgtools.read.ParticleReader(paths)
ics = paths.ic_ids(tonew=False)
nsims = len(ics)

# File paths
ftemp = join(utils.dumpdir, "temp_crosspk",
ftemp = join(dumpdir, "temp_crosspk",
"out_{}_{}" + "_{}".format(args.halfwidth))
fout = join(utils.dumpdir, "crosspk",
fout = join(dumpdir, "crosspk",
"out_{}_{}" + "_{}.p".format(args.halfwidth))


jobs = csiborgtools.fits.split_jobs(n_sims, nproc)[rank]
jobs = csiborgtools.fits.split_jobs(nsims, nproc)[rank]
for n in jobs:
print("Rank {}@{}: saving {}th delta.".format(rank, datetime.now(), n))
# Set the paths
n_sim = ics[n]
paths.set_info(n_sim, paths.get_maximum_snapshot(n_sim))
# Set reader and the box
reader = csiborgtools.read.ParticleReader(paths)
box = csiborgtools.units.BoxUnits(paths)
# Read particles
particles = reader.read_particle(["x", "y", "z", "M"], verbose=False)
nsim = ics[n]
particles = reader.read_particle(max(paths.get_snapshots(nsim)), nsim,
["x", "y", "z", "M"], verbose=False)
# Halfwidth -- particle selection
if args.halfwidth < 0.5:
particles = csiborgtools.read.halfwidth_select(

@@ -85,9 +82,9 @@ for n in jobs:
collect()

# Dump the results
with open(ftemp.format(n_sim, "delta") + ".npy", "wb") as f:
with open(ftemp.format(nsim, "delta") + ".npy", "wb") as f:
numpy.save(f, delta)
joblib.dump([aexp, length], ftemp.format(n_sim, "lengths") + ".p")
joblib.dump([aexp, length], ftemp.format(nsim, "lengths") + ".p")

# Try to clean up memory
del delta

@@ -97,8 +94,8 @@ for n in jobs:
comm.Barrier()

# Get off-diagonal elements and append the diagoal
combs = [c for c in combinations(range(n_sims), 2)]
for i in range(n_sims):
combs = [c for c in combinations(range(nsims), 2)]
for i in range(nsims):
combs.append((i, i))
prev_delta = [-1, None, None, None] # i, delta, aexp, length
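The density-field loop above follows a dump-then-collect pattern: each rank writes its delta field and the (aexp, length) metadata to temporary files, every rank meets at comm.Barrier(), and only then does anyone read the others' output. A cut-down sketch of that pattern with hypothetical file names (the numpy and joblib calls mirror the script, everything else is illustrative):

from os.path import join

import joblib
import numpy
from mpi4py import MPI

comm = MPI.COMM_WORLD
rank = comm.Get_rank()

ftemp = join("/tmp", "out_{}_{}")  # hypothetical stand-in for the temp-file template
delta = numpy.zeros((4, 4, 4), dtype=numpy.float32)  # placeholder field

# Each rank dumps its own piece...
with open(ftemp.format(rank, "delta") + ".npy", "wb") as f:
    numpy.save(f, delta)
joblib.dump([1.0, 677.7], ftemp.format(rank, "lengths") + ".p")  # placeholder metadata

comm.Barrier()  # ...and only afterwards are the pieces read back
if rank == 0:
    delta0 = numpy.load(ftemp.format(0, "delta") + ".npy")
    aexp, length = joblib.load(ftemp.format(0, "lengths") + ".p")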
@@ -16,11 +16,11 @@
MPI script to evaluate field properties at the galaxy positions.
"""
from argparse import ArgumentParser
import numpy
from datetime import datetime
from mpi4py import MPI
from os.path import join
from os import remove
from datetime import datetime
import numpy
from mpi4py import MPI
try:
import csiborgtools
except ModuleNotFoundError:

@@ -29,6 +29,7 @@ except ModuleNotFoundError:
import csiborgtools
import utils

dumpdir = "/mnt/extraspace/rstiskalek/csiborg/"
parser = ArgumentParser()
parser.add_argument("--survey", type=str, choices=["SDSS"])
parser.add_argument("--grid", type=int)

@@ -52,30 +53,28 @@ pos = pos.astype(numpy.float32)
# File paths
fname = "out_{}_{}_{}_{}_{}".format(
survey.name, args.grid, args.MAS, args.halfwidth, args.smooth_scale)
ftemp = join(utils.dumpdir, "temp_fields", fname + "_{}.npy")
fperm = join(utils.dumpdir, "fields", fname + ".npy")
ftemp = join(dumpdir, "temp_fields", fname + "_{}.npy")
fperm = join(dumpdir, "fields", fname + ".npy")

# Edit depending on what is calculated
dtype = {"names": ["delta", "phi"], "formats": [numpy.float32] * 2}

# CSiBORG simulation paths
paths = csiborgtools.read.CSiBORGPaths()
ics = paths.ic_ids
n_sims = len(ics)
ics = paths.ic_ids(tonew=False)
nsims = len(ics)

for n in csiborgtools.fits.split_jobs(n_sims, nproc)[rank]:
for n in csiborgtools.fits.split_jobs(nsims, nproc)[rank]:
print("Rank {}@{}: working on {}th IC.".format(rank, datetime.now(), n),
flush=True)
# Set the paths
n_sim = ics[n]
paths.set_info(n_sim, paths.get_maximum_snapshot(n_sim))

# Set reader and the box
nsim = ics[n]
nsnap = max(paths.get_snapshots(nsim))
reader = csiborgtools.read.ParticleReader(paths)
box = csiborgtools.units.BoxUnits(paths)
box = csiborgtools.units.BoxUnits(nsnap, nsim, paths)

# Read particles and select a subset of them
particles = reader.read_particle(["x", "y", "z", "M"], verbose=False)
particles = reader.read_particle(nsnap, nsim, ["x", "y", "z", "M"],
verbose=False)
if args.halfwidth < 0.5:
particles = csiborgtools.read.halfwidth_select(
args.halfwidth, particles)

@@ -101,7 +100,7 @@ for n in csiborgtools.fits.split_jobs(n_sims, nproc)[rank]:
# ...

# Dump the results
with open(ftemp.format(n_sim), "wb") as f:
with open(ftemp.format(nsim), "wb") as f:
numpy.save(f, out)

# Wait for all ranks to finish

@@ -109,16 +108,16 @@ comm.Barrier()
if rank == 0:
print("Collecting files...", flush=True)

out = numpy.full((n_sims, pos.shape[0]), numpy.nan, dtype=dtype)
out = numpy.full((nsims, pos.shape[0]), numpy.nan, dtype=dtype)

for n in range(n_sims):
n_sim = ics[n]
with open(ftemp.format(n_sim), "rb") as f:
for n in range(nsims):
nsim = ics[n]
with open(ftemp.format(nsim), "rb") as f:
fin = numpy.load(f, allow_pickle=True)
for name in dtype["names"]:
out[name][n, ...] = fin[name]
# Remove the temporary file
remove(ftemp.format(n_sim))
remove(ftemp.format(nsim))

print("Saving results to `{}`.".format(fperm), flush=True)
with open(fperm, "wb") as f:
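The collection step above relies on a numpy structured array: the dtype dictionary names one column per evaluated field ("delta", "phi"), and the (nsims, ngalaxies) array is pre-filled with NaN so that anything not overwritten stays visible. A small self-contained sketch of that gather, with made-up shapes and values standing in for the per-simulation temporary files:

import numpy

dtype = {"names": ["delta", "phi"], "formats": [numpy.float32] * 2}
nsims, ngal = 3, 5

out = numpy.full((nsims, ngal), numpy.nan, dtype=dtype)

for n in range(nsims):
    # Pretend this record array was loaded from a temporary file.
    fin = numpy.zeros(ngal, dtype=dtype)
    fin["delta"] = numpy.random.rand(ngal)
    fin["phi"] = numpy.random.rand(ngal)
    for name in dtype["names"]:
        out[name][n, ...] = fin[name]

print(out["delta"].shape)  # (3, 5)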
@@ -14,12 +14,11 @@
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
"""
A script to fit halos (concentration, ...). The particle array of each CSiBORG
realisation must have been split in advance by `run_split_halos`.
realisation must have been split in advance by `runsplit_halos`.
"""

import numpy
from datetime import datetime
from os.path import join
from datetime import datetime
import numpy
from mpi4py import MPI
try:
import csiborgtools

@@ -29,47 +28,48 @@ except ModuleNotFoundError:
import csiborgtools
import utils

F64 = numpy.float64
I64 = numpy.int64


# Get MPI things
comm = MPI.COMM_WORLD
rank = comm.Get_rank()
nproc = comm.Get_size()


dumpdir = utils.dumpdir
loaddir = join(utils.dumpdir, "temp")
cols_collect = [("npart", I64), ("totpartmass", F64), ("Rs", F64),
("vx", F64), ("vy", F64), ("vz", F64),
("Lx", F64), ("Ly", F64), ("Lz", F64),
("rho0", F64), ("conc", F64), ("rmin", F64),
("rmax", F64), ("r200", F64), ("r500", F64),
("m200", F64), ("m500", F64), ("lambda200c", F64)]
paths = csiborgtools.read.CSiBORGPaths()
dumpdir = "/mnt/extraspace/rstiskalek/csiborg/"
loaddir = join(dumpdir, "temp")
cols_collect = [("npart", numpy.int64), ("totpartmass", numpy.float64),
("Rs", numpy.float64), ("vx", numpy.float64),
("vy", numpy.float64), ("vz", numpy.float64),
("Lx", numpy.float64), ("Ly", numpy.float64),
("Lz", numpy.float64), ("rho0", numpy.float64),
("conc", numpy.float64), ("rmin", numpy.float64),
("rmax", numpy.float64), ("r200", numpy.float64),
("r500", numpy.float64), ("m200", numpy.float64),
("m500", numpy.float64), ("lambda200c", numpy.float64)]

for i, n_sim in enumerate(paths.ic_ids):

for i, nsim in enumerate(paths.ic_ids(tonew=False)):
if rank == 0:
print("{}: calculating {}th simulation.".format(datetime.now(), i))
# Correctly set the paths!
n_snap = paths.get_maximum_snapshot(n_sim)
paths.set_info(n_sim, n_snap)

box = csiborgtools.units.BoxUnits(paths)
nsnap = max(paths.get_snapshots(nsim))
box = csiborgtools.units.BoxUnits(nsnap, nsim, paths)

jobs = csiborgtools.fits.split_jobs(utils.Nsplits, nproc)[rank]
for n_split in jobs:
for nsplit in jobs:
parts, part_clumps, clumps = csiborgtools.fits.load_split_particles(
n_split, paths, remove_split=False)
nsplit, nsnap, nsim, paths, remove_split=False)

N = clumps.size
cols = [("index", I64), ("npart", I64), ("totpartmass", F64),
("Rs", F64), ("rho0", F64), ("conc", F64), ("lambda200c", F64),
("vx", F64), ("vy", F64), ("vz", F64),
("Lx", F64), ("Ly", F64), ("Lz", F64),
("rmin", F64), ("rmax", F64),
("r200", F64), ("r500", F64), ("m200", F64), ("m500", F64)]
cols = [("index", numpy.int64), ("npart", numpy.int64),
("totpartmass", numpy.float64), ("Rs", numpy.float64),
("rho0", numpy.float64), ("conc", numpy.float64),
("lambda200c", numpy.float64), ("vx", numpy.float64),
("vy", numpy.float64), ("vz", numpy.float64),
("Lx", numpy.float64), ("Ly", numpy.float64),
("Lz", numpy.float64), ("rmin", numpy.float64),
("rmax", numpy.float64), ("r200", numpy.float64),
("r500", numpy.float64), ("m200", numpy.float64),
("m500", numpy.float64)]
out = csiborgtools.utils.cols_to_structured(N, cols)
out["index"] = clumps["index"]


@@ -106,7 +106,7 @@ for i, n_sim in enumerate(paths.ic_ids):
out["rho0"][n] = nfwpost.rho0_from_Rs(Rs)
out["conc"][n] = out["r200"][n] / Rs

csiborgtools.read.dump_split(out, n_split, paths)
csiborgtools.read.dump_split(out, nsplit, nsnap, nsim, paths)

# Wait until all jobs finished before moving to another simulation
comm.Barrier()

@@ -116,11 +116,10 @@ for i, n_sim in enumerate(paths.ic_ids):
print("Collecting results!")
partreader = csiborgtools.read.ParticleReader(paths)
out_collected = csiborgtools.read.combine_splits(
utils.Nsplits, partreader, cols_collect, remove_splits=True,
verbose=False)
utils.Nsplits, nsnap, nsim, partreader, cols_collect,
remove_splits=True, verbose=False)
fname = join(paths.dumpdir, "ramses_out_{}_{}.npy"
.format(str(paths.n_sim).zfill(5),
str(paths.n_snap).zfill(5)))
.format(str(nsim).zfill(5), str(nsnap).zfill(5)))
print("Saving results to `{}`.".format(fname))
numpy.save(fname, out_collected)
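The (name, dtype) pairs in cols and cols_collect above are turned into numpy structured arrays by csiborgtools.utils.cols_to_structured, whose internals are not part of this diff. A plausible minimal equivalent, for illustration only:

import numpy

def cols_to_structured_sketch(N, cols):
    """Allocate an N-row structured array from (name, dtype) pairs,
    initialised to zero. Illustrative stand-in, not the csiborgtools helper."""
    dtype = {"names": [c[0] for c in cols],
             "formats": [c[1] for c in cols]}
    return numpy.zeros(N, dtype=dtype)

cols = [("index", numpy.int64), ("npart", numpy.int64),
        ("totpartmass", numpy.float64)]
out = cols_to_structured_sketch(10, cols)
out["index"] = numpy.arange(10)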
@@ -19,14 +19,14 @@ are grouped in a clump at present redshift.
Optionally also dumps the clumps information, however watch out as this will
eat up a lot of memory.
"""
import numpy
from argparse import ArgumentParser
from distutils.util import strtobool
from datetime import datetime
from mpi4py import MPI
from gc import collect
from os.path import join
from os import remove
from gc import collect
from argparse import ArgumentParser
from datetime import datetime
from distutils.util import strtobool
import numpy
from mpi4py import MPI
try:
import csiborgtools
except ModuleNotFoundError:

@@ -44,9 +44,8 @@ parser = ArgumentParser()
parser.add_argument("--dump_clumps", type=lambda x: bool(strtobool(x)))
args = parser.parse_args()

init_paths = csiborgtools.read.CSiBORGPaths(to_new=True)
fin_paths = csiborgtools.read.CSiBORGPaths(to_new=False)
nsims = init_paths.ic_ids
paths = csiborgtools.read.CSiBORGPaths()
nsims = paths.ic_ids(tonew=True)

# Output files
dumpdir = "/mnt/extraspace/rstiskalek/csiborg/"

@@ -58,22 +57,18 @@ for nsim in nsims:
if rank == 0:
print("{}: reading simulation {}.".format(datetime.now(), nsim),
flush=True)

# Set the snapshot numbers
init_paths.set_info(nsim, init_paths.get_minimum_snapshot(nsim))
fin_paths.set_info(nsim, fin_paths.get_maximum_snapshot(nsim))
# Set the readers
init_reader = csiborgtools.read.ParticleReader(init_paths)
fin_reader = csiborgtools.read.ParticleReader(fin_paths)
nsnap_min = min(paths.get_snapshots(nsim))
nsnap_max = max(paths.get_snapshots(nsim))
reader = csiborgtools.read.ParticleReader(paths)

# Read and sort the initial particle files by their particle IDs
part0 = init_reader.read_particle(["x", "y", "z", "M", "ID"],
verbose=False)
part0 = reader.read_particle(nsnap_min, nsim, ["x", "y", "z", "M", "ID"],
verbose=False)
part0 = part0[numpy.argsort(part0["ID"])]

# Order the final snapshot clump IDs by the particle IDs
pid = fin_reader.read_particle(["ID"], verbose=False)["ID"]
clump_ids = fin_reader.read_clumpid(verbose=False)
pid = reader.read_particle(nsnap_max, nsim, ["ID"], verbose=False)["ID"]
clump_ids = reader.read_clumpid(nsnap_max, nsim, verbose=False)
clump_ids = clump_ids[numpy.argsort(pid)]

del pid
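The block above relies on the fact that sorting both snapshots by particle ID puts the initial positions and the final clump assignments into the same row order. A toy check of that argsort alignment, independent of the CSiBORG readers:

import numpy

# Final-snapshot particle IDs and their clump IDs, in arbitrary on-disk order.
pid = numpy.array([42, 7, 13])
clump_ids = numpy.array([1, 2, 3])          # clump of particle 42, 7, 13

# Initial-snapshot particles, again in arbitrary order, with their own IDs.
part0_ids = numpy.array([13, 42, 7])

# Sort both sides by particle ID: row i now refers to the same particle.
clump_sorted = clump_ids[numpy.argsort(pid)]    # clumps of IDs 7, 13, 42
part0_order = numpy.argsort(part0_ids)          # initial rows of IDs 7, 13, 42

print(numpy.sort(pid))          # [ 7 13 42]
print(clump_sorted)             # [2 3 1]
print(part0_ids[part0_order])   # [ 7 13 42]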
@@ -62,6 +62,7 @@ ics = [7444, 7468, 7492, 7516, 7540, 7564, 7588, 7612, 7636, 7660, 7684,
dumpdir = "/mnt/extraspace/rstiskalek/csiborg/knn"
fout_auto = join(dumpdir, "auto", "knncdf_{}.p")
fout_cross = join(dumpdir, "cross", "knncdf_{}_{}.p")
paths = csiborgtools.read.CSiBORGPaths()


###############################################################################

@@ -72,11 +73,11 @@ knncdf = csiborgtools.match.kNN_CDF()

def do_auto(ic):
out = {}
cat = csiborgtools.read.HaloCatalogue(ic, max_dist=Rmax)
cat = csiborgtools.read.HaloCatalogue(ic, paths, max_dist=Rmax)

for i, mmin in enumerate(mass_threshold):
knn = NearestNeighbors()
knn.fit(cat.positions[cat["totpartmass"] > mmin, ...])
knn.fit(cat.positions(False)[cat["totpartmass"] > mmin, ...])

rs, cdf = knncdf(knn, nneighbours=args.nneighbours, Rmax=Rmax,
rmin=args.rmin, rmax=args.rmax, nsamples=args.nsamples,

@@ -90,15 +91,15 @@ def do_auto(ic):

def do_cross(ics):
out = {}
cat1 = csiborgtools.read.HaloCatalogue(ics[0], max_dist=Rmax)
cat2 = csiborgtools.read.HaloCatalogue(ics[1], max_dist=Rmax)
cat1 = csiborgtools.read.HaloCatalogue(ics[0], paths, max_dist=Rmax)
cat2 = csiborgtools.read.HaloCatalogue(ics[1], paths, max_dist=Rmax)

for i, mmin in enumerate(mass_threshold):
knn1 = NearestNeighbors()
knn1.fit(cat1.positions[cat1["totpartmass"] > mmin, ...])
knn1.fit(cat1.positions()[cat1["totpartmass"] > mmin, ...])

knn2 = NearestNeighbors()
knn2.fit(cat2.positions[cat2["totpartmass"] > mmin, ...])
knn2.fit(cat2.positions()[cat2["totpartmass"] > mmin, ...])

rs, cdf0, cdf1, joint_cdf = knncdf.joint(
knn1, knn2, nneighbours=args.nneighbours, Rmax=Rmax,
|
|||
args = parser.parse_args()
|
||||
|
||||
# File paths
|
||||
paths = csiborgtools.read.CSiBORGPaths()
|
||||
fout = join(utils.dumpdir, "overlap",
|
||||
"cross_{}_{}.npz".format(args.nsim0, args.nsimx))
|
||||
smooth_kwargs = {"sigma": args.sigma, "mode": "constant", "cval": 0.0}
|
||||
|
@ -43,18 +44,18 @@ overlapper = csiborgtools.match.ParticleOverlap()
|
|||
# Load catalogues
|
||||
print("{}: loading catalogues {} and {}."
|
||||
.format(datetime.now(), args.nsim0, args.nsimx), flush=True)
|
||||
cat0 = csiborgtools.read.HaloCatalogue(args.nsim0)
|
||||
catx = csiborgtools.read.HaloCatalogue(args.nsimx)
|
||||
cat0 = csiborgtools.read.HaloCatalogue(args.nsim0, paths)
|
||||
catx = csiborgtools.read.HaloCatalogue(args.nsimx, paths)
|
||||
|
||||
|
||||
print("{}: loading simulation {} and converting positions to cell numbers."
|
||||
.format(datetime.now(), args.nsim0), flush=True)
|
||||
with open(cat0.paths.clump0_path(args.nsim0), "rb") as f:
|
||||
with open(paths.clump0_path(args.nsim0), "rb") as f:
|
||||
clumps0 = numpy.load(f, allow_pickle=True)
|
||||
overlapper.clumps_pos2cell(clumps0)
|
||||
print("{}: loading simulation {} and converting positions to cell numbers."
|
||||
.format(datetime.now(), args.nsimx), flush=True)
|
||||
with open(catx.paths.clump0_path(args.nsimx), 'rb') as f:
|
||||
with open(paths.clump0_path(args.nsimx), 'rb') as f:
|
||||
clumpsx = numpy.load(f, allow_pickle=True)
|
||||
overlapper.clumps_pos2cell(clumpsx)
|
@@ -15,9 +15,8 @@
"""
Script to split particles into smaller files according to their clump
membership for faster manipulation. Currently does this for the maximum
snapshot of each simulation. Running this will require a lot of memory.
snapshot of each simulation. Running this requires a lot of memory.
"""

from mpi4py import MPI
from datetime import datetime
try:

@@ -34,27 +33,26 @@ rank = comm.Get_rank()
nproc = comm.Get_size()

paths = csiborgtools.read.CSiBORGPaths()
n_sims = paths.ic_ids[:1]
sims = paths.ic_ids(False)
partcols = ["x", "y", "z", "vx", "vy", "vz", "M", "level"]

jobs = csiborgtools.fits.split_jobs(len(n_sims), nproc)[rank]
jobs = csiborgtools.fits.split_jobs(len(sims), nproc)[rank]
for icount, sim_index in enumerate(jobs):
print("{}: rank {} working {} / {} jobs.".format(datetime.now(), rank,
icount + 1, len(jobs)))
n_sim = n_sims[sim_index]
n_snap = paths.get_maximum_snapshot(n_sim)
# Set paths and inifitalise a particle reader
paths.set_info(n_sim, n_snap)
print("{}: rank {} working {} / {} jobs."
.format(datetime.now(), rank, icount + 1, len(jobs)), flush=True)
nsim = sims[sim_index]
nsnap = max(paths.get_snapshots(nsim))
partreader = csiborgtools.read.ParticleReader(paths)
# Load the clumps, particles' clump IDs and particles.
clumps = partreader.read_clumps()
particle_clumps = partreader.read_clumpid(verbose=False)
particles = partreader.read_particle(partcols, verbose=False)
clumps = partreader.read_clumps(nsnap, nsim)
particle_clumps = partreader.read_clumpid(nsnap, nsim, verbose=False)
particles = partreader.read_particle(nsnap, nsim, partcols, verbose=False)
# Drop all particles whose clump index is 0 (not assigned to any halo)
particle_clumps, particles = partreader.drop_zero_indx(
particle_clumps, particles)
# Dump it!
csiborgtools.fits.dump_split_particles(particles, particle_clumps, clumps,
utils.Nsplits, paths, verbose=False)
utils.Nsplits, nsnap, nsim, paths,
verbose=False)

print("All finished!")
print("All finished!", flush=True)