Mirror of https://github.com/Richard-Sti/csiborgtools.git, synced 2024-12-22 12:28:03 +00:00
Dump radial profile information (#48)
* add radial position path
* pep8 (repeated formatting passes)
* Add basic fit profile dumping
* Update TODO
* Fix `parts is None` bug
* Update nb
Parent: 1a115f481d
Commit: f48eb6dcb0
29 changed files with 512 additions and 395 deletions
.gitignore (vendored): 2 changes
@@ -1,4 +1,4 @@
-venv_galomatch/
+venv_csiborg/
 share/
 bin/
 .bashrc
@@ -15,7 +15,6 @@
 from csiborgtools import clustering, field, fits, match, read  # noqa

 # Arguments to csiborgtools.read.CSiBORGPaths.
-paths_glamdring = {
-    "srcdir": "/mnt/extraspace/hdesmond/",
-    "postdir": "/mnt/extraspace/rstiskalek/csiborg/"
-}
+paths_glamdring = {"srcdir": "/mnt/extraspace/hdesmond/",
+                   "postdir": "/mnt/extraspace/rstiskalek/csiborg/",
+                   }
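For context, downstream code unpacks this dictionary straight into the paths constructor; the notebook and scripts later in this diff do exactly this:

    import csiborgtools

    # Machine-specific source and post-processing directories on Glamdring.
    paths = csiborgtools.read.CSiBORGPaths(**csiborgtools.paths_glamdring)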
@@ -28,4 +28,4 @@ try:
     from .tpcf import Mock2PCF  # noqa
 except ImportError:
-    warn("`Corrfunc` not installed. 2PCF modules will not be available (`Mock2PCF`).")  # noqa
+    warn("`Corrfunc` not installed. 2PCF modules will not be available .")  # noqa
@@ -66,7 +66,7 @@ class RVSinsphere(BaseRVS):
     def __call__(self, nsamples, random_state=42, dtype=numpy.float32):
         gen = numpy.random.default_rng(random_state)
         # Spherical
-        r = gen.random(nsamples, dtype=dtype)**(1/3) * self.R
+        r = gen.random(nsamples, dtype=dtype)**(1 / 3) * self.R
         theta = 2 * numpy.arcsin(gen.random(nsamples, dtype=dtype))
         phi = 2 * numpy.pi * gen.random(nsamples, dtype=dtype)
         # Cartesian
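The touched `(1 / 3)` exponent is the inverse-CDF trick for uniform sampling in a ball: the enclosed volume grows as r^3, so r = R * U**(1/3) with U uniform on [0, 1) gives a uniform point density. A self-contained sketch of the standard construction (drawing cos(theta) uniformly for the angular part):

    import numpy

    def sample_in_sphere(nsamples, R, seed=42):
        """Draw points uniformly inside a ball of radius R (sketch)."""
        gen = numpy.random.default_rng(seed)
        r = R * gen.random(nsamples)**(1 / 3)      # P(<r) proportional to r^3
        costheta = 1 - 2 * gen.random(nsamples)    # uniform on [-1, 1]
        phi = 2 * numpy.pi * gen.random(nsamples)
        sintheta = numpy.sqrt(1 - costheta**2)
        return numpy.vstack([r * sintheta * numpy.cos(phi),
                             r * sintheta * numpy.sin(phi),
                             r * costheta]).T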
@@ -105,7 +105,7 @@ class DensityField:
     @box.setter
     def box(self, box):
         try:
-            assert box._name == "box_units"
+            assert box._name == "box_units"
             self._box = box
         except AttributeError as err:
             raise TypeError from err
@@ -40,7 +40,7 @@ class BaseStructure(ABC):

     @particles.setter
     def particles(self, particles):
-        pars = ["x", "y", "z", "vx", "vy", "vz", "M"]
+        pars = ["x", "y", "z", "M"]
         assert all(p in particles.dtype.names for p in pars)
         self._particles = particles

@@ -204,11 +204,12 @@ class BaseStructure(ABC):
             return numpy.nan
         mass = self.enclosed_mass(radius)
         V = numpy.sqrt(self.box.box_G * mass / radius)
-        return numpy.linalg.norm(self.angular_momentum(radius)) / (
-            numpy.sqrt(2) * mass * V * radius
-        )
+        out = numpy.linalg.norm(self.angular_momentum(radius))
+        out /= numpy.sqrt(2) * mass * V * radius
+        return out

-    def spherical_overdensity_mass(self, delta_mult, npart_min=10, kind="crit"):
+    def spherical_overdensity_mass(self, delta_mult, npart_min=10,
+                                   kind="crit"):
         r"""
         Calculate spherical overdensity mass and radius. The mass is defined as
         the enclosed mass within an outermost radius where the mean enclosed
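The rewritten return statement computes a Bullock-style spin parameter, lambda' = |J| / (sqrt(2) M V R) with circular velocity V = sqrt(G M / R). As a standalone sketch (the gravitational constant G, angular momentum vector J, mass and radius are supplied by the caller):

    import numpy

    def bullock_spin(J, mass, radius, G):
        """lambda' = |J| / (sqrt(2) * M * V * R), V the circular velocity."""
        V = numpy.sqrt(G * mass / radius)
        return numpy.linalg.norm(J) / (numpy.sqrt(2) * mass * V * radius)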
@@ -364,9 +364,7 @@ class NFWPosterior(NFWProfile):
         if not (numpy.all(numpy.isfinite(bounds)) and bounds[0] < bounds[1]):
             return numpy.nan, numpy.nan

-        res = minimize_scalar(
-            loss, bounds=(numpy.log10(rmin), numpy.log10(rmax)), method="bounded"
-        )
+        res = minimize_scalar(loss, bounds=bounds, method="bounded")
         # Check whether the fit converged to radius sufficienly far from `rmax`
         # and that its a success. Otherwise return NaNs.
         if numpy.log10(rmax) - res.x < eps:
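`minimize_scalar` with `method="bounded"` performs a derivative-free 1D search, here over log10 of the NFW scale radius. A minimal sketch with a stand-in loss (the real `loss` is the negative log-posterior of the NFW fit; the bounds below are hypothetical):

    import numpy
    from scipy.optimize import minimize_scalar

    def loss(log_rs):            # stand-in for the negative log-posterior
        return (log_rs + 0.5)**2

    bounds = (numpy.log10(0.05), numpy.log10(1.5))   # hypothetical rmin, rmax
    res = minimize_scalar(loss, bounds=bounds, method="bounded")
    print(res.success, 10**res.x)                    # best-fitting scale radius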
@@ -105,13 +105,13 @@ class RealisationsMatcher:
         """
         return self._overlapper

-    def cross(
-        self, cat0, catx, halos0_archive, halosx_archive, delta_bckg, verbose=True
-    ):
+    def cross(self, cat0, catx, halos0_archive, halosx_archive, delta_bckg,
+              verbose=True):
         r"""
         Find all neighbours whose CM separation is less than `nmult` times the
-        sum of their initial Lagrangian patch sizes and calculate their overlap.
-        Enforces that the neighbours' are similar in mass up to `dlogmass` dex.
+        sum of their initial Lagrangian patch sizes and calculate their
+        overlap. Enforces that the neighbours' are similar in mass up to
+        `dlogmass` dex.

         Parameters
         ----------
@@ -121,12 +121,12 @@ class RealisationsMatcher:
             Halo catalogue of the cross simulation.
         halos0_archive : `NpzFile` object
             Archive of halos' particles of the reference simulation, keys must
-            include `x`, `y`, `z` and `M`. The positions must already be converted
-            to cell numbers.
+            include `x`, `y`, `z` and `M`. The positions must already be
+            converted to cell numbers.
         halosx_archive : `NpzFile` object
             Archive of halos' particles of the cross simulation, keys must
-            include `x`, `y`, `z` and `M`. The positions must already be converted
-            to cell numbers.
+            include `x`, `y`, `z` and `M`. The positions must already be
+            converted to cell numbers.
         delta_bckg : 3-dimensional array
             Summed background density field of the reference and cross
             simulations calculated with particles assigned to halos at the
@@ -138,25 +138,23 @@ class RealisationsMatcher:
         Returns
         -------
         match_indxs : 1-dimensional array of arrays
-            The outer array corresponds to halos in the reference catalogue, the
-            inner array corresponds to the array positions of matches in the cross
-            catalogue.
+            The outer array corresponds to halos in the reference catalogue,
+            the inner array corresponds to the array positions of matches in
+            the cross catalogue.
         overlaps : 1-dimensional array of arrays
-            Overlaps with the cross catalogue. Follows similar pattern as `match_indxs`.
+            Overlaps with the cross catalogue. Follows similar pattern as
+            `match_indxs`.
         """
         # We begin by querying the kNN for the nearest neighbours of each halo
         # in the reference simulation from the cross simulation in the initial
         # snapshot.
-        verbose and print("{}: querying the KNN.".format(datetime.now()), flush=True)
+        if verbose:
+            now = datetime.now()
+            print(f"{now}: querying the KNN.", flush=True)
         match_indxs = radius_neighbours(
-            catx.knn(select_initial=True),
-            cat0.positions(in_initial=True),
-            radiusX=cat0["lagpatch"],
-            radiusKNN=catx["lagpatch"],
-            nmult=self.nmult,
-            enforce_int32=True,
-            verbose=verbose,
-        )
+            catx.knn(select_initial=True), cat0.positions(in_initial=True),
+            radiusX=cat0["lagpatch"], radiusKNN=catx["lagpatch"],
+            nmult=self.nmult, enforce_int32=True, verbose=verbose)
         # We next remove neighbours whose mass is too large/small.
         if self.dlogmass is not None:
             for i, indx in enumerate(match_indxs):
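Under the hood `radius_neighbours` wraps `sklearn.neighbors.NearestNeighbors.radius_neighbors`; a toy sketch of the query pattern (random positions stand in for the Lagrangian patch centres):

    import numpy
    from sklearn.neighbors import NearestNeighbors

    pos_cross = numpy.random.random((1000, 3))     # toy cross-sim CM positions
    knn = NearestNeighbors().fit(pos_cross)

    centre = numpy.random.random(3).reshape(-1, 3)
    dist, indx = knn.radius_neighbors(centre, radius=0.1, sort_results=True)
    # Results are wrapped in 1-element object arrays, hence the [0].
    print(indx[0][:5], dist[0][:5])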
@@ -165,34 +163,35 @@ class RealisationsMatcher:
                 aratio = numpy.abs(numpy.log10(catx[p][indx] / cat0[p][i]))
                 match_indxs[i] = match_indxs[i][aratio < self.dlogmass]

-        # We will make a dictionary to keep in memory the halos' particles from the
-        # cross simulations so that they are not loaded in several times and we only
-        # convert their positions to cells once. Possibly make an option to not do
-        # this to lower memory requirements?
+        # We will make a dictionary to keep in memory the halos' particles from
+        # the cross simulations so that they are not loaded in several times
+        # and we only convert their positions to cells once. Possibly make an
+        # option to not do this to lower memory requirements?
         cross_halos = {}
         cross_lims = {}
         cross = [numpy.asanyarray([], dtype=numpy.float32)] * match_indxs.size
-        for i, k0 in enumerate(tqdm(cat0["index"]) if verbose else cat0["index"]):
+        indxs = cat0["index"]
+        for i, k0 in enumerate(tqdm(indxs) if verbose else indxs):
             # If we have no matches continue to the next halo.
             matches = match_indxs[i]
             if matches.size == 0:
                 continue
-            # Next, we find this halo's particles, total mass and minimum/maximum cells
-            # and convert positions to cells.
+            # Next, we find this halo's particles, total mass, minimum and
+            # maximum cells and convert positions to cells.
             halo0 = halos0_archive[str(k0)]
             mass0 = numpy.sum(halo0["M"])
-            mins0, maxs0 = get_halolims(
-                halo0, ncells=self.overlapper.inv_clength, nshift=self.overlapper.nshift
-            )
+            mins0, maxs0 = get_halolims(halo0,
+                                        ncells=self.overlapper.inv_clength,
+                                        nshift=self.overlapper.nshift)
             for p in ("x", "y", "z"):
                 halo0[p] = self.overlapper.pos2cell(halo0[p])
-            # We now loop over matches of this halo and calculate their overlap,
-            # storing them in `_cross`.
+            # We now loop over matches of this halo and calculate their
+            # overlap, storing them in `_cross`.
             _cross = numpy.full(matches.size, numpy.nan, dtype=numpy.float32)
             for j, kf in enumerate(catx["index"][matches]):
-                # Attempt to load this cross halo from memory, if it fails get it from
-                # from the halo archive (and similarly for the limits) and convert the
-                # particle positions to cells.
+                # Attempt to load this cross halo from memory, if it fails get
+                # it from from the halo archive (and similarly for the limits)
+                # and convert the particle positions to cells.
                 try:
                     halox = cross_halos[kf]
                     minsx, maxsx = cross_lims[kf]
@@ -208,17 +207,9 @@ class RealisationsMatcher:
                     cross_halos[kf] = halox
                     cross_lims[kf] = (minsx, maxsx)
                 massx = numpy.sum(halox["M"])
-                _cross[j] = self.overlapper(
-                    halo0,
-                    halox,
-                    delta_bckg,
-                    mins0,
-                    maxs0,
-                    minsx,
-                    maxsx,
-                    mass1=mass0,
-                    mass2=massx,
-                )
+                _cross[j] = self.overlapper(halo0, halox, delta_bckg, mins0,
+                                            maxs0, minsx, maxsx, mass1=mass0,
+                                            mass2=massx)
             cross[i] = _cross

         # We remove all matches that have zero overlap to save space.
@@ -233,17 +224,8 @@ class RealisationsMatcher:
         cross = numpy.asanyarray(cross, dtype=object)
         return match_indxs, cross

-    def smoothed_cross(
-        self,
-        cat0,
-        catx,
-        halos0_archive,
-        halosx_archive,
-        delta_bckg,
-        match_indxs,
-        smooth_kwargs,
-        verbose=True,
-    ):
+    def smoothed_cross(self, cat0, catx, halos0_archive, halosx_archive,
+                       delta_bckg, match_indxs, smooth_kwargs, verbose=True):
         r"""
         Calculate the smoothed overlaps for pair previously identified via
         `self.cross(...)` to have a non-zero overlap.
@@ -256,12 +238,12 @@ class RealisationsMatcher:
             Halo catalogue of the cross simulation.
         halos0_archive : `NpzFile` object
             Archive of halos' particles of the reference simulation, keys must
-            include `x`, `y`, `z` and `M`. The positions must already be converted
-            to cell numbers.
+            include `x`, `y`, `z` and `M`. The positions must already be
+            converted to cell numbers.
         halosx_archive : `NpzFile` object
             Archive of halos' particles of the cross simulation, keys must
-            include `x`, `y`, `z` and `M`. The positions must already be converted
-            to cell numbers.
+            include `x`, `y`, `z` and `M`. The positions must already be
+            converted to cell numbers.
         delta_bckg : 3-dimensional array
             Smoothed summed background density field of the reference and cross
             simulations calculated with particles assigned to halos at the
@@ -287,40 +269,32 @@ class RealisationsMatcher:
         cross_lims = {}
         cross = [numpy.asanyarray([], dtype=numpy.float32)] * match_indxs.size

-        for i, k0 in enumerate(tqdm(cat0["index"]) if verbose else cat0["index"]):
+        indxs = cat0["index"]
+        for i, k0 in enumerate(tqdm(indxs) if verbose else indxs):
             halo0 = halos0_archive[str(k0)]
-            mins0, maxs0 = get_halolims(
-                halo0, ncells=self.overlapper.inv_clength, nshift=self.overlapper.nshift
-            )
+            mins0, maxs0 = get_halolims(halo0,
+                                        ncells=self.overlapper.inv_clength,
+                                        nshift=self.overlapper.nshift)

             # Now loop over the matches and calculate the smoothed overlap.
             _cross = numpy.full(match_indxs[i].size, numpy.nan, numpy.float32)
             for j, kf in enumerate(catx["index"][match_indxs[i]]):
-                # Attempt to load this cross halo from memory, if it fails get it from
-                # from the halo archive (and similarly for the limits).
+                # Attempt to load this cross halo from memory, if it fails get
+                # it from from the halo archive (and similarly for the limits).
                 try:
                     halox = cross_halos[kf]
                     minsx, maxsx = cross_lims[kf]
                 except KeyError:
                     halox = halosx_archive[str(kf)]
                     minsx, maxsx = get_halolims(
-                        halox,
-                        ncells=self.overlapper.inv_clength,
-                        nshift=self.overlapper.nshift,
-                    )
+                        halox, ncells=self.overlapper.inv_clength,
+                        nshift=self.overlapper.nshift)
                     cross_halos[kf] = halox
                     cross_lims[kf] = (minsx, maxsx)

-                _cross[j] = self.overlapper(
-                    halo0,
-                    halox,
-                    delta_bckg,
-                    mins0,
-                    maxs0,
-                    minsx,
-                    maxsx,
-                    smooth_kwargs=smooth_kwargs,
-                )
+                _cross[j] = self.overlapper(halo0, halox, delta_bckg, mins0,
+                                            maxs0, minsx, maxsx,
+                                            smooth_kwargs=smooth_kwargs)
             cross[i] = _cross

         return numpy.asanyarray(cross, dtype=object)
@@ -398,9 +372,9 @@ class ParticleOverlap:

     def make_bckg_delta(self, halo_archive, delta=None, verbose=False):
         """
-        Calculate a NGP density field of particles belonging to halos within the
-        central :math:`1/2^3` high-resolution region of the simulation. Smoothing
-        must be applied separately.
+        Calculate a NGP density field of particles belonging to halos within
+        the central :math:`1/2^3` high-resolution region of the simulation.
+        Smoothing must be applied separately.

         Parameters
         ----------
@@ -417,16 +391,16 @@ class ParticleOverlap:
         -------
         delta : 3-dimensional array
         """
-        # We obtain the minimum/maximum cell IDs and number of cells along each dim.
+        # We obtain the minimum/maximum cell IDs and number of cells
         cellmin = self.inv_clength // 4  # The minimum cell ID
         cellmax = 3 * self.inv_clength // 4  # The maximum cell ID
         ncells = cellmax - cellmin
-        # We then pre-allocate the density field or check it is of the right shape
-        # if already given.
+        # We then pre-allocate the density field/check it is of the right shape
         if delta is None:
             delta = numpy.zeros((ncells,) * 3, dtype=numpy.float32)
         else:
-            assert (delta.shape == (ncells,) * 3) & (delta.dtype == numpy.float32)
+            assert ((delta.shape == (ncells,) * 3)
+                    & (delta.dtype == numpy.float32))

         # We now loop one-by-one over the halos fill the density field.
         files = halo_archive.files
@@ -436,21 +410,20 @@ class ParticleOverlap:
             mass = parts["M"]

             # We mask out particles outside the cubical high-resolution region
-            mask = (
-                (cellmin <= cells[0])
-                & (cells[0] < cellmax)
-                & (cellmin <= cells[1])
-                & (cells[1] < cellmax)
-                & (cellmin <= cells[2])
-                & (cells[2] < cellmax)
-            )
+            mask = ((cellmin <= cells[0])
+                    & (cells[0] < cellmax)
+                    & (cellmin <= cells[1])
+                    & (cells[1] < cellmax)
+                    & (cellmin <= cells[2])
+                    & (cells[2] < cellmax))
             cells = [c[mask] for c in cells]
             mass = mass[mask]
             fill_delta(delta, *cells, *(cellmin,) * 3, mass)

         return delta

-    def make_delta(self, clump, mins=None, maxs=None, subbox=False, smooth_kwargs=None):
+    def make_delta(self, clump, mins=None, maxs=None, subbox=False,
+                   smooth_kwargs=None):
         """
         Calculate a NGP density field of a halo on a cubic grid. Optionally can
         be smoothed with a Gaussian kernel.
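`fill_delta` deposits each particle's mass onto its nearest grid point within the subgrid. An equivalent pure-numpy sketch (the package's actual implementation lives elsewhere in `match`; integer cell indices are assumed):

    import numpy

    def fill_delta_sketch(delta, xcell, ycell, zcell, xmin, ymin, zmin, weights):
        """NGP deposit: add `weights` onto `delta` at shifted cell indices."""
        numpy.add.at(delta, (xcell - xmin, ycell - ymin, zcell - zmin), weights)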
@@ -505,16 +478,8 @@ class ParticleOverlap:
             gaussian_filter(delta, output=delta, **smooth_kwargs)
         return delta

-    def make_deltas(
-        self,
-        clump1,
-        clump2,
-        mins1=None,
-        maxs1=None,
-        mins2=None,
-        maxs2=None,
-        smooth_kwargs=None,
-    ):
+    def make_deltas(self, clump1, clump2, mins1=None, maxs1=None, mins2=None,
+                    maxs2=None, smooth_kwargs=None):
         """
         Calculate a NGP density fields of two halos on a grid that encloses
         them both. Optionally can be smoothed with a Gaussian kernel.
@@ -596,19 +561,9 @@ class ParticleOverlap:
             gaussian_filter(delta2, output=delta2, **smooth_kwargs)
         return delta1, delta2, cellmins, nonzero

-    def __call__(
-        self,
-        clump1,
-        clump2,
-        delta_bckg,
-        mins1=None,
-        maxs1=None,
-        mins2=None,
-        maxs2=None,
-        mass1=None,
-        mass2=None,
-        smooth_kwargs=None,
-    ):
+    def __call__(self, clump1, clump2, delta_bckg, mins1=None, maxs1=None,
+                 mins2=None, maxs2=None, mass1=None, mass2=None,
+                 smooth_kwargs=None):
         """
         Calculate overlap between `clump1` and `clump2`. See
         `calculate_overlap(...)` for further information. Be careful so that
@@ -647,8 +602,8 @@ class ParticleOverlap:
         overlap : float
         """
         delta1, delta2, cellmins, nonzero = self.make_deltas(
-            clump1, clump2, mins1, maxs1, mins2, maxs2, smooth_kwargs=smooth_kwargs
-        )
+            clump1, clump2, mins1, maxs1, mins2, maxs2,
+            smooth_kwargs=smooth_kwargs)

         if smooth_kwargs is not None:
             return calculate_overlap(delta1, delta2, cellmins, delta_bckg)
@@ -656,8 +611,7 @@ class ParticleOverlap:
         mass1 = numpy.sum(clump1["M"]) if mass1 is None else mass1
         mass2 = numpy.sum(clump2["M"]) if mass2 is None else mass2
         return calculate_overlap_indxs(
-            delta1, delta2, cellmins, delta_bckg, nonzero, mass1, mass2
-        )
+            delta1, delta2, cellmins, delta_bckg, nonzero, mass1, mass2)


 ###############################################################################
@@ -963,8 +917,8 @@ def radius_neighbours(

     for i in trange(nsamples) if verbose else range(nsamples):
         dist, indx = knn.radius_neighbors(
-            X[i, :].reshape(-1, 3), radiusX[i] + patchknn_max, sort_results=True
-        )
+            X[i, :].reshape(-1, 3), radiusX[i] + patchknn_max,
+            sort_results=True)
         # Note that `dist` and `indx` are wrapped in 1-element arrays
         # so we take the first item where appropriate
         mask = (dist[0] / (radiusX[i] + radiusKNN[indx[0]])) < nmult
@@ -41,9 +41,9 @@ def concatenate_parts(list_parts, include_velocities=False):
     else:
         posdtype = numpy.float32

-    # We pre-allocate an empty array. By default, we include just particle positions,
-    # which may be specified by cell IDs if integers, and masses. Additionally also
-    # outputs velocities.
+    # We pre-allocate an empty array. By default, we include just particle
+    # positions, which may be specified by cell IDs if integers, and masses.
+    # Additionally also outputs velocities.
     if include_velocities:
         dtype = {
             "names": ["x", "y", "z", "vx", "vy", "vz", "M"],
@@ -24,27 +24,12 @@ from .readsim import ParticleReader

 # Map of unit conversions
 CONV_NAME = {
-    "length": [
-        "x",
-        "y",
-        "z",
-        "peak_x",
-        "peak_y",
-        "peak_z",
-        "Rs",
-        "rmin",
-        "rmax",
-        "r200c",
-        "r500c",
-        "r200m",
-        "x0",
-        "y0",
-        "z0",
-        "lagpatch",
-    ],
-    "mass": ["mass_cl", "totpartmass", "m200c", "m500c", "mass_mmain", "M", "m200m"],
-    "density": ["rho0"],
-}
+    "length": ["x", "y", "z", "peak_x", "peak_y", "peak_z", "Rs", "rmin",
+               "rmax", "r200c", "r500c", "r200m", "x0", "y0", "z0",
+               "lagpatch"],
+    "mass": ["mass_cl", "totpartmass", "m200c", "m500c", "mass_mmain", "M",
+             "m200m"],
+    "density": ["rho0"]}


 class BoxUnits:
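`CONV_NAME` only routes column names to the right converter; the catalogues later in this diff call it as, e.g. (`data` a structured array in box units, `box` a constructed `BoxUnits`):

    names = ["x", "y", "z", "r200c", "m200c"]
    data = box.convert_from_boxunits(data, names)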
@@ -367,7 +352,8 @@ class BoxUnits:
         density : float
             Density in :math:`M_\odot / \mathrm{pc}^3`.
         """
-        return density * self._unit_d / self._Msuncgs * (units.Mpc.to(units.cm)) ** 3
+        return (density * self._unit_d
+                / self._Msuncgs * (units.Mpc.to(units.cm)) ** 3)

     def dens2box(self, density):
         r"""
@@ -384,7 +370,8 @@ class BoxUnits:
         density : float
             Density in box units.
         """
-        return density / self._unit_d * self._Msuncgs / (units.Mpc.to(units.cm)) ** 3
+        return (density / self._unit_d * self._Msuncgs
+                / (units.Mpc.to(units.cm)) ** 3)

     def convert_from_boxunits(self, data, names):
         r"""
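`box2dens` and `dens2box` are exact inverses, which gives a cheap sanity check (assuming `box` is a constructed `BoxUnits` instance):

    import numpy

    rho_box = 1e-5                       # hypothetical density in box units
    rho_phys = box.box2dens(rho_box)     # -> Msun / pc^3 per the docstring
    assert numpy.isclose(box.dens2box(rho_phys), rho_box)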
@@ -412,13 +399,10 @@ class BoxUnits:
             Input array with converted columns.
         """
         names = [names] if isinstance(names, str) else names

         # Shortcut for the transform functions
-        transforms = {
-            "length": self.box2mpc,
-            "mass": self.box2solarmass,
-            "density": self.box2dens,
-        }
+        transforms = {"length": self.box2mpc,
+                      "mass": self.box2solarmass,
+                      "density": self.box2dens,
+                      }

         for name in names:
             # Check that the name is even in the array
@@ -21,7 +21,8 @@ from sklearn.neighbors import NearestNeighbors
 from .box_units import BoxUnits
 from .paths import CSiBORGPaths
 from .readsim import ParticleReader
-from .utils import add_columns, cartesian_to_radec, flip_cols, radec_to_cartesian
+from .utils import (add_columns, cartesian_to_radec, flip_cols,
+                    radec_to_cartesian)


 class BaseCatalogue(ABC):
@@ -249,7 +250,8 @@ class BaseCatalogue(ABC):
         knn.fit(pos)
         # Convert angular radius to cosine difference.
         metric_maxdist = 1 - numpy.cos(numpy.deg2rad(ang_radius))
-        dist, ind = knn.radius_neighbors(X, radius=metric_maxdist, sort_results=True)
+        dist, ind = knn.radius_neighbors(X, radius=metric_maxdist,
+                                         sort_results=True)
         # And the cosine difference to angular distance.
         for i in range(X.shape[0]):
             dist[i] = numpy.rad2deg(numpy.arccos(1 - dist[i]))
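The metric distance used here is the cosine difference d = 1 - cos(theta), which is monotonic in theta on [0, pi], so a fixed angular radius maps to a fixed metric radius; a quick round-trip check:

    import numpy

    theta = 5.0                                  # angular radius in degrees
    d = 1 - numpy.cos(numpy.deg2rad(theta))      # metric search radius
    print(numpy.rad2deg(numpy.arccos(1 - d)))    # -> 5.0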
@@ -302,15 +304,8 @@ class ClumpsCatalogue(BaseCatalogue):
         transformations.
     """

-    def __init__(
-        self,
-        nsim,
-        paths,
-        maxdist=155.5 / 0.705,
-        minmass=("mass_cl", 1e12),
-        load_fitted=True,
-        rawdata=False,
-    ):
+    def __init__(self, nsim, paths, maxdist=155.5 / 0.705,
+                 minmass=("mass_cl", 1e12), load_fitted=True, rawdata=False):
         self.nsim = nsim
         self.paths = paths
         # Read in the clumps from the final snapshot
@@ -333,27 +328,12 @@ class ClumpsCatalogue(BaseCatalogue):
             flip_cols(self._data, "x", "z")
             for p in ("x", "y", "z"):
                 self._data[p] -= 0.5
-            self._data = self.box.convert_from_boxunits(
-                self._data,
-                [
-                    "x",
-                    "y",
-                    "z",
-                    "mass_cl",
-                    "totpartmass",
-                    "rho0",
-                    "r200c",
-                    "r500c",
-                    "m200c",
-                    "m500c",
-                    "r200m",
-                    "m200m",
-                ],
-            )
+            names = ["x", "y", "z", "mass_cl", "totpartmass", "rho0", "r200c",
+                     "r500c", "m200c", "m500c", "r200m", "m200m"]
+            self._data = self.box.convert_from_boxunits(self._data, names)
             if maxdist is not None:
-                dist = numpy.sqrt(
-                    self._data["x"] ** 2 + self._data["y"] ** 2 + self._data["z"] ** 2
-                )
+                dist = numpy.sqrt(self._data["x"]**2 + self._data["y"]**2
+                                  + self._data["z"]**2)
                 self._data = self._data[dist < maxdist]
             if minmass is not None:
                 self._data = self._data[self._data[minmass[0]] > minmass[1]]
@@ -397,16 +377,8 @@ class HaloCatalogue(BaseCatalogue):
         transformations.
     """

-    def __init__(
-        self,
-        nsim,
-        paths,
-        maxdist=155.5 / 0.705,
-        minmass=("M", 1e12),
-        load_fitted=True,
-        load_initial=False,
-        rawdata=False,
-    ):
+    def __init__(self, nsim, paths, maxdist=155.5 / 0.705, minmass=("M", 1e12),
+                 load_fitted=True, load_initial=False, rawdata=False):
         self.nsim = nsim
         self.paths = paths
         # Read in the mmain catalogue of summed substructure
@@ -426,28 +398,13 @@ class HaloCatalogue(BaseCatalogue):
             flip_cols(self._data, "x", "z")
             for p in ("x", "y", "z"):
                 self._data[p] -= 0.5
-            self._data = self.box.convert_from_boxunits(
-                self._data,
-                [
-                    "x",
-                    "y",
-                    "z",
-                    "M",
-                    "totpartmass",
-                    "rho0",
-                    "r200c",
-                    "r500c",
-                    "m200c",
-                    "m500c",
-                    "r200m",
-                    "m200m",
-                ],
-            )
+            names = ["x", "y", "z", "M", "totpartmass", "rho0", "r200c",
+                     "r500c", "m200c", "m500c", "r200m", "m200m"]
+            self._data = self.box.convert_from_boxunits(self._data, names)

             if maxdist is not None:
-                dist = numpy.sqrt(
-                    self._data["x"] ** 2 + self._data["y"] ** 2 + self._data["z"] ** 2
-                )
+                dist = numpy.sqrt(self._data["x"]**2 + self._data["y"]**2
+                                  + self._data["z"]**2)
                 self._data = self._data[dist < maxdist]
             if minmass is not None:
                 self._data = self._data[self._data[minmass[0]] > minmass[1]]
@@ -163,7 +163,7 @@ class TwoMPPGroups(TextSurvey):
         # Convert galactic coordinates to RA, dec
         glon = data[:, 1]
         glat = data[:, 2]
-        coords = SkyCoord(l=glon*units.degree, b=glat*units.degree,
+        coords = SkyCoord(l=glon * units.degree, b=glat * units.degree,
                           frame='galactic')
         coords = coords.transform_to("icrs")
         data["RA"] = coords.ra
@@ -82,7 +82,7 @@ class PairOverlap:
             "match_indxs": match_indxs,
             "ngp_overlap": ngp_overlap,
             "smoothed_overlap": smoothed_overlap,
-        }
+            }

         self._make_refmask(min_mass, max_dist)

@@ -97,7 +97,7 @@ class CSiBORGPaths:
         fpath = join(self.postdir, "temp")
         if not isdir(fpath):
             mkdir(fpath)
-            warn("Created directory `{}`.".format(fpath), UserWarning, stacklevel=1)
+            warn(f"Created directory `{fpath}`.", UserWarning, stacklevel=1)
         return fpath

     def mmain_path(self, nsnap, nsim):
@@ -118,10 +118,9 @@ class CSiBORGPaths:
         fdir = join(self.postdir, "mmain")
         if not isdir(fdir):
             mkdir(fdir)
-            warn("Created directory `{}`.".format(fdir), UserWarning, stacklevel=1)
-        return join(
-            fdir, "mmain_{}_{}.npz".format(str(nsim).zfill(5), str(nsnap).zfill(5))
-        )
+            warn(f"Created directory `{fdir}`.", UserWarning, stacklevel=1)
+        return join(fdir,
+                    f"mmain_{str(nsim).zfill(5)}_{str(nsnap).zfill(5)}.npz")

     def initmatch_path(self, nsim, kind):
         """
@@ -143,8 +142,8 @@ class CSiBORGPaths:
         fdir = join(self.postdir, "initmatch")
         if not isdir(fdir):
             mkdir(fdir)
-            warn("Created directory `{}`.".format(fdir), UserWarning, stacklevel=1)
-        return join(fdir, "{}_{}.npy".format(kind, str(nsim).zfill(5)))
+            warn(f"Created directory `{fdir}`.", UserWarning, stacklevel=1)
+        return join(fdir, f"{kind}_{str(nsim).zfill(5)}.npy")

     def split_path(self, nsnap, nsim):
         """
@@ -164,10 +163,9 @@ class CSiBORGPaths:
         fdir = join(self.postdir, "split")
         if not isdir(fdir):
             mkdir(fdir)
-            warn("Created directory `{}`.".format(fdir), UserWarning, stacklevel=1)
+            warn(f"Created directory `{fdir}`.", UserWarning, stacklevel=1)
         return join(
-            fdir, "clumps_{}_{}.npz".format(str(nsim).zfill(5), str(nsnap).zfill(5))
-        )
+            fdir, f"clumps_{str(nsim).zfill(5)}_{str(nsnap).zfill(5)}.npz")

     def get_ics(self, tonew):
         """
@@ -256,7 +254,7 @@ class CSiBORGPaths:
         """
         tonew = nsnap == 1
         simpath = self.ic_path(nsim, tonew=tonew)
-        return join(simpath, "output_{}".format(str(nsnap).zfill(5)))
+        return join(simpath, f"output_{str(nsnap).zfill(5)}")

     def structfit_path(self, nsnap, nsim, kind):
         """
@@ -279,9 +277,8 @@ class CSiBORGPaths:
         fdir = join(self.postdir, "structfit")
         if not isdir(fdir):
             mkdir(fdir)
-            warn("Created directory `{}`.".format(fdir), UserWarning, stacklevel=1)
-
-        fname = "{}_out_{}_{}.npy".format(kind, str(nsim).zfill(5), str(nsnap).zfill(5))
+            warn(f"Created directory `{fdir}`.", UserWarning, stacklevel=1)
+        fname = f"{kind}_out_{str(nsim).zfill(5)}_{str(nsnap).zfill(5)}.npy"
         return join(fdir, fname)

     def overlap_path(self, nsim0, nsimx):
@@ -302,8 +299,31 @@ class CSiBORGPaths:
         fdir = join(self.postdir, "overlap")
         if not isdir(fdir):
             mkdir(fdir)
-            warn("Created directory `{}`.".format(fdir), UserWarning, stacklevel=1)
-        fname = "ovelrap_{}_{}.npz".format(str(nsim0).zfill(5), str(nsimx).zfill(5))
+            warn(f"Created directory `{fdir}`.", UserWarning, stacklevel=1)
+        fname = f"overlap_{str(nsim0).zfill(5)}_{str(nsimx).zfill(5)}.npz"
         return join(fdir, fname)

+    def radpos_path(self, nsnap, nsim):
+        """
+        Path to the files containing radial positions of halo particles (with
+        summed substructure).
+
+        Parameters
+        ----------
+        nsnap : int
+            Snapshot index.
+        nsim : int
+            IC realisation index.
+
+        Returns
+        -------
+        path : str
+        """
+        fdir = join(self.postdir, "radpos")
+        if not isdir(fdir):
+            mkdir(fdir)
+            warn(f"Created directory `{fdir}`.", UserWarning, stacklevel=1)
+        fname = f"radpos_{str(nsim).zfill(5)}_{str(nsnap).zfill(5)}.npz"
+        return join(fdir, fname)
+
     def knnauto_path(self, run, nsim=None):
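`radpos_path` is the radial-position path named in the commit title; the new `scripts/fit_profiles.py` further down writes a per-halo `.npz` archive there:

    fout = paths.radpos_path(nsnap, nsim)  # .../radpos/radpos_XXXXX_YYYYY.npz
    numpy.savez(fout, **out)               # `out` maps clump index -> profile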
@@ -325,9 +345,9 @@ class CSiBORGPaths:
         fdir = join(self.postdir, "knn", "auto")
         if not isdir(fdir):
             makedirs(fdir)
-            warn("Created directory `{}`.".format(fdir), UserWarning, stacklevel=1)
+            warn(f"Created directory `{fdir}`.", UserWarning, stacklevel=1)
         if nsim is not None:
-            return join(fdir, "knncdf_{}_{}.p".format(str(nsim).zfill(5), run))
+            return join(fdir, f"knncdf_{str(nsim).zfill(5)}_{run}.p")

         files = glob(join(fdir, "knncdf*"))
         run = "__" + run
@@ -352,12 +372,12 @@ class CSiBORGPaths:
         fdir = join(self.postdir, "knn", "cross")
         if not isdir(fdir):
             makedirs(fdir)
-            warn("Created directory `{}`.".format(fdir), UserWarning, stacklevel=1)
+            warn(f"Created directory `{fdir}`.", UserWarning, stacklevel=1)
         if nsims is not None:
             assert isinstance(nsims, (list, tuple)) and len(nsims) == 2
             nsim0 = str(nsims[0]).zfill(5)
             nsimx = str(nsims[1]).zfill(5)
-            return join(fdir, "knncdf_{}_{}__{}.p".format(nsim0, nsimx, run))
+            return join(fdir, f"knncdf_{nsim0}_{nsimx}__{run}.p")

         files = glob(join(fdir, "knncdf*"))
         run = "__" + run
@@ -382,9 +402,9 @@ class CSiBORGPaths:
         fdir = join(self.postdir, "tpcf", "auto")
         if not isdir(fdir):
             makedirs(fdir)
-            warn("Created directory `{}`.".format(fdir), UserWarning, stacklevel=1)
+            warn(f"Created directory `{fdir}`.", UserWarning, stacklevel=1)
         if nsim is not None:
-            return join(fdir, "tpcf{}_{}.p".format(str(nsim).zfill(5), run))
+            return join(fdir, f"tpcf{str(nsim).zfill(5)}_{run}.p")

         files = glob(join(fdir, "tpcf*"))
         run = "__" + run
@@ -345,7 +345,7 @@ class ParticleReader:
                              .format(fname))
         data = numpy.genfromtxt(fname)
         # How the data is stored in the clump file.
-        clump_cols = {"index": (0, numpy.int32),
+        clump_cols = {"index": (0, numpy.int32),
                       "level": (1, numpy.int32),
                       "parent": (2, numpy.int32),
                       "ncell": (3, numpy.float32),
@@ -101,7 +101,8 @@ def cols_to_structured(N, cols):
     if not isinstance(cols, list) and all(isinstance(c, tuple) for c in cols):
         raise TypeError("`cols` must be a list of tuples.")

-    dtype = {"names": [col[0] for col in cols], "formats": [col[1] for col in cols]}
+    dtype = {"names": [col[0] for col in cols],
+             "formats": [col[1] for col in cols]}
    return numpy.full(N, numpy.nan, dtype=dtype)

@@ -236,9 +237,7 @@ def array_to_structured(arr, cols):
     """
     cols = [cols] if isinstance(cols, str) else cols
     if arr.ndim != 2 and arr.shape[1] != len(cols):
-        raise TypeError(
-            "`arr` must be a 2-dimensional array of " "shape `(n_samples, n_cols)`."
-        )
+        raise TypeError("`arr` must be a 2D array `(n_samples, n_cols)`.")

     dtype = {"names": cols, "formats": [arr.dtype] * len(cols)}
     out = numpy.full(arr.shape[0], numpy.nan, dtype=dtype)
notebooks/fits.ipynb (new file): 93 additions
@@ -0,0 +1,93 @@
+{
+ "cells": [
+  {
+   "cell_type": "code",
+   "execution_count": 1,
+   "id": "5a38ed25",
+   "metadata": {
+    "ExecuteTime": {
+     "end_time": "2023-04-12T14:25:46.519408Z",
+     "start_time": "2023-04-12T14:25:03.003304Z"
+    },
+    "scrolled": true
+   },
+   "outputs": [],
+   "source": [
+    "import sys\n",
+    "import numpy as np\n",
+    "import matplotlib.pyplot as plt\n",
+    "\n",
+    "sys.path.append(\"../\")\n",
+    "import csiborgtools\n",
+    "\n",
+    "%matplotlib widget \n",
+    "%load_ext autoreload\n",
+    "%autoreload 2"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 2,
+   "id": "312c96c9",
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "paths = csiborgtools.read.CSiBORGPaths(**csiborgtools.paths_glamdring)\n",
+    "nsim0 = 7444\n",
+    "nsimx = 7444 + 24\n",
+    "nsnap0 = max(paths.get_snapshots(nsim0))\n",
+    "nsnapx = max(paths.get_snapshots(nsimx))\n",
+    "overlapper = csiborgtools.match.ParticleOverlap()"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 3,
+   "id": "b097637b",
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "halo0_archive = np.load(paths.split_path(nsnap0, nsim0))\n",
+    "halox_archive = np.load(paths.split_path(nsnapx, nsimx))"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 13,
+   "id": "c3cf4c91",
+   "metadata": {},
+   "outputs": [
+    {
+     "name": "stderr",
+     "output_type": "stream",
+     "text": [
+      "100%|██████████| 204873/204873 [16:48<00:00, 203.15it/s]\n",
+      "100%|██████████| 204960/204960 [16:42<00:00, 204.55it/s]\n"
+     ]
+    }
+   ],
+   "source": [
+    "# delta_bckg = overlapper.make_bckg_delta(halo0_archive, verbose=True)\n",
+    "# delta_bckg = overlapper.make_bckg_delta(halox_archive, delta=delta_bckg, verbose=True)\n",
+    "# np.save(\"./bckg_{}_{}.npy\".format(nsim0, nsimx), delta_bckg)"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "id": "f2986dd2",
+   "metadata": {},
+   "outputs": [],
+   "source": []
+  }
+ ],
+ "metadata": {
+  "kernelspec": {
+   "display_name": "venv_galomatch",
+   "language": "python",
+   "name": "venv_galomatch"
+  }
+ },
+ "nbformat": 4,
+ "nbformat_minor": 5
+}
@@ -63,7 +63,7 @@ fout = join(dumpdir, "crosspk",

 jobs = csiborgtools.utils.split_jobs(nsims, nproc)[rank]
 for n in jobs:
-    print("Rank {}@{}: saving {}th delta.".format(rank, datetime.now(), n))
+    print(f"Rank {rank} at {datetime.now()}: saving {n}th delta.", flush=True)
     nsim = ics[n]
     particles = reader.read_particle(max(paths.get_snapshots(nsim)), nsim,
                                      ["x", "y", "z", "M"], verbose=False)
@@ -23,7 +23,7 @@ import numpy
 import yaml
 from mpi4py import MPI
 from sklearn.neighbors import NearestNeighbors
-from TaskmasterMPI import master_process, worker_process
+from taskmaster import master_process, worker_process

 try:
     import csiborgtools
@@ -98,7 +98,7 @@ def do_auto(run, cat, ic):
     """Calculate the kNN-CDF single catalgoue autocorrelation."""
     _config = config.get(run, None)
     if _config is None:
-        warn("No configuration for run {}.".format(run), UserWarning, stacklevel=1)
+        warn(f"No configuration for run {run}.", UserWarning, stacklevel=1)
         return

     rvs_gen = csiborgtools.clustering.RVSinsphere(Rmax)
@@ -106,16 +106,10 @@ def do_auto(run, cat, ic):
     knn = NearestNeighbors()
     knn.fit(pos)
     rs, cdf = knncdf(
-        knn,
-        rvs_gen=rvs_gen,
-        nneighbours=config["nneighbours"],
-        rmin=config["rmin"],
-        rmax=config["rmax"],
-        nsamples=int(config["nsamples"]),
-        neval=int(config["neval"]),
-        batch_size=int(config["batch_size"]),
-        random_state=config["seed"],
-    )
+        knn, rvs_gen=rvs_gen, nneighbours=config["nneighbours"],
+        rmin=config["rmin"], rmax=config["rmax"],
+        nsamples=int(config["nsamples"]), neval=int(config["neval"]),
+        batch_size=int(config["batch_size"]), random_state=config["seed"])

     joblib.dump(
         {"rs": rs, "cdf": cdf, "ndensity": pos.shape[0] / totvol},
@@ -127,7 +121,7 @@ def do_cross_rand(run, cat, ic):
     """Calculate the kNN-CDF cross catalogue random correlation."""
     _config = config.get(run, None)
     if _config is None:
-        warn("No configuration for run {}.".format(run), UserWarning, stacklevel=1)
+        warn(f"No configuration for run {run}.", UserWarning, stacklevel=1)
         return

     rvs_gen = csiborgtools.clustering.RVSinsphere(Rmax)
@@ -140,16 +134,10 @@ def do_cross_rand(run, cat, ic):
     knn2.fit(pos2)

     rs, cdf0, cdf1, joint_cdf = knncdf.joint(
-        knn1,
-        knn2,
-        rvs_gen=rvs_gen,
-        nneighbours=int(config["nneighbours"]),
-        rmin=config["rmin"],
-        rmax=config["rmax"],
-        nsamples=int(config["nsamples"]),
-        neval=int(config["neval"]),
-        batch_size=int(config["batch_size"]),
-        random_state=config["seed"],
+        knn1, knn2, rvs_gen=rvs_gen, nneighbours=int(config["nneighbours"]),
+        rmin=config["rmin"], rmax=config["rmax"],
+        nsamples=int(config["nsamples"]), neval=int(config["neval"]),
+        batch_size=int(config["batch_size"]), random_state=config["seed"],
     )
     corr = knncdf.joint_to_corr(cdf0, cdf1, joint_cdf)
     joblib.dump({"rs": rs, "corr": corr}, paths.knnauto_path(run, ic))
@@ -23,7 +23,7 @@ import numpy
 import yaml
 from mpi4py import MPI
 from sklearn.neighbors import NearestNeighbors
-from TaskmasterMPI import master_process, worker_process
+from taskmaster import master_process, worker_process

 try:
     import csiborgtools
@@ -22,7 +22,7 @@ import joblib
 import numpy
 import yaml
 from mpi4py import MPI
-from TaskmasterMPI import master_process, worker_process
+from taskmaster import master_process, worker_process

 try:
     import csiborgtools
@@ -77,9 +77,12 @@ def fit_clump(particles, clump_info, box):
     for i, v in enumerate(["vx", "vy", "vz"]):
         out[v] = numpy.average(obj.vel[:, i], weights=obj["M"])
     # Overdensity masses
-    out["r200c"], out["m200c"] = obj.spherical_overdensity_mass(200, kind="crit")
-    out["r500c"], out["m500c"] = obj.spherical_overdensity_mass(500, kind="crit")
-    out["r200m"], out["m200m"] = obj.spherical_overdensity_mass(200, kind="matter")
+    out["r200c"], out["m200c"] = obj.spherical_overdensity_mass(200,
+                                                                kind="crit")
+    out["r500c"], out["m500c"] = obj.spherical_overdensity_mass(500,
+                                                                kind="crit")
+    out["r200m"], out["m200m"] = obj.spherical_overdensity_mass(200,
+                                                                kind="matter")
     # NFW fit
     if out["npart"] > 10 and numpy.isfinite(out["r200c"]):
         Rs, rho0 = nfwpost.fit(obj)
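Per the docstring earlier in the diff, `spherical_overdensity_mass(delta_mult, kind=...)` finds the outermost radius at which the mean enclosed density equals `delta_mult` times the critical (or matter) density, and the mass within it. Schematically, on sorted particle radii (a hedged sketch, not the package implementation; assumes all radii are positive):

    import numpy

    def spherical_overdensity(r, m, delta_mult, rho_ref):
        """Outermost radius where mean enclosed density = delta_mult * rho_ref."""
        order = numpy.argsort(r)
        r, cmass = r[order], numpy.cumsum(m[order])
        mean_rho = cmass / (4 / 3 * numpy.pi * r**3)
        inside = numpy.where(mean_rho > delta_mult * rho_ref)[0]
        if inside.size == 0:
            return numpy.nan, numpy.nan
        return r[inside[-1]], cmass[inside[-1]]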
@@ -108,8 +111,8 @@ def load_parent_particles(clumpid, particle_archive, clumps_cat):
     Load a parent halo's particles.
     """
     indxs = clumps_cat["index"][clumps_cat["parent"] == clumpid]
-    # We first load the particles of each clump belonging to this parent and then
-    # concatenate them for further analysis.
+    # We first load the particles of each clump belonging to this parent
+    # and then concatenate them for further analysis.
     clumps = []
     for ind in indxs:
         parts = load_clump_particles(ind, particle_archive)
@@ -118,24 +121,23 @@ def load_parent_particles(clumpid, particle_archive, clumps_cat):

     if len(clumps) == 0:
         return None
-    return csiborgtools.match.concatenate_parts(clumps, include_velocities=True)
+    return csiborgtools.match.concatenate_parts(clumps,
+                                                include_velocities=True)


 # We now start looping over all simulations
 for i, nsim in enumerate(paths.get_ics(tonew=False)):
     if rank == 0:
-        print(
-            "{}: calculating {}th simulation `{}`.".format(datetime.now(), i, nsim),
-            flush=True,
-        )
+        print(f"{datetime.now()}: calculating {i}th simulation `{nsim}`.",
+              flush=True)
     nsnap = max(paths.get_snapshots(nsim))
     box = csiborgtools.read.BoxUnits(nsnap, nsim, paths)

     # Archive of clumps, keywords are their clump IDs
     particle_archive = numpy.load(paths.split_path(nsnap, nsim))
-    clumps_cat = csiborgtools.read.ClumpsCatalogue(
-        nsim, paths, maxdist=None, minmass=None, rawdata=True, load_fitted=False
-    )
+    clumps_cat = csiborgtools.read.ClumpsCatalogue(nsim, paths, maxdist=None,
+                                                   minmass=None, rawdata=True,
+                                                   load_fitted=False)
     # We check whether we fit halos or clumps, will be indexing over different
     # iterators.
     if args.kind == "halos":
@@ -171,25 +173,23 @@ for i, nsim in enumerate(paths.get_ics(tonew=False)):

     fout = ftemp.format(str(nsim).zfill(5), str(nsnap).zfill(5), rank)
     if nproc == 0:
-        print(
-            "{}: rank {} saving to `{}`.".format(datetime.now(), rank, fout), flush=True
-        )
+        print(f"{datetime.now()}: rank {rank} saving to `{fout}`.", flush=True)
     numpy.save(fout, out)
     # We saved this CPU's results in a temporary file. Wait now for the other
     # CPUs and then collect results from the 0th rank and save them.
     comm.Barrier()

     if rank == 0:
-        print(
-            "{}: collecting results for simulation `{}`.".format(datetime.now(), nsim),
-            flush=True,
-        )
+        print(f"{datetime.now()}: collecting results for simulation `{nsim}`.",
+              flush=True)
         # We write to the output array. Load data from each CPU and append to
         # the output array.
         out = csiborgtools.read.cols_to_structured(ntasks, cols_collect)
-        clumpid2outpos = {indx: i for i, indx in enumerate(clumps_cat["index"])}
+        clumpid2outpos = {indx: i
+                          for i, indx in enumerate(clumps_cat["index"])}
         for i in range(nproc):
-            inp = numpy.load(ftemp.format(str(nsim).zfill(5), str(nsnap).zfill(5), i))
+            inp = numpy.load(ftemp.format(str(nsim).zfill(5),
+                                          str(nsnap).zfill(5), i))
             for j, clumpid in enumerate(inp["index"]):
                 k = clumpid2outpos[clumpid]
                 for key in inp.dtype.names:
@@ -201,7 +201,7 @@ for i, nsim in enumerate(paths.get_ics(tonew=False)):
         out = out[ismain]

         fout = paths.structfit_path(nsnap, nsim, args.kind)
-        print("Saving to `{}`.".format(fout), flush=True)
+        print(f"Saving to `{fout}`.", flush=True)
         numpy.save(fout, out)

     # We now wait before moving on to another simulation.
scripts/fit_profiles.py (new file): 140 additions
@@ -0,0 +1,140 @@
+# Copyright (C) 2023 Richard Stiskalek
+# This program is free software; you can redistribute it and/or modify it
+# under the terms of the GNU General Public License as published by the
+# Free Software Foundation; either version 3 of the License, or (at your
+# option) any later version.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General
+# Public License for more details.
+#
+# You should have received a copy of the GNU General Public License along
+# with this program; if not, write to the Free Software Foundation, Inc.,
+# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
+"""
+A script to calculate the particle's separation from the CM and save it.
+Currently MPI is not supported.
+"""
+from argparse import ArgumentParser
+from datetime import datetime
+from gc import collect
+
+import numpy
+from mpi4py import MPI
+from tqdm import trange
+
+try:
+    import csiborgtools
+except ModuleNotFoundError:
+    import sys
+
+    sys.path.append("../")
+    import csiborgtools
+
+parser = ArgumentParser()
+parser.add_argument("--ics", type=int, nargs="+", default=None,
+                    help="IC realisatiosn. If `-1` processes all simulations.")
+args = parser.parse_args()
+
+# Get MPI things
+comm = MPI.COMM_WORLD
+rank = comm.Get_rank()
+nproc = comm.Get_size()
+
+if nproc > 1:
+    raise NotImplementedError("MPI is not implemented implemented yet.")
+
+paths = csiborgtools.read.CSiBORGPaths(**csiborgtools.paths_glamdring)
+partreader = csiborgtools.read.ParticleReader(paths)
+cols_collect = [("r", numpy.float32), ("M", numpy.float32)]
+if args.ics is None or args.ics == -1:
+    nsims = paths.get_ics(tonew=False)
+else:
+    nsims = args.ics
+
+
+def load_clump_particles(clumpid, particle_archive):
+    """
+    Load a clump's particles from the particle archive. If it is not there, i.e
+    clump has no associated particles, return `None`.
+    """
+    try:
+        part = particle_archive[str(clumpid)]
+    except KeyError:
+        part = None
+    return part
+
+
+def load_parent_particles(clumpid, particle_archive, clumps_cat):
+    """
+    Load a parent halo's particles.
+    """
+    indxs = clumps_cat["index"][clumps_cat["parent"] == clumpid]
+    # We first load the particles of each clump belonging to this
+    # parent and then concatenate them for further analysis.
+    clumps = []
+    for ind in indxs:
+        parts = load_clump_particles(ind, particle_archive)
+        if parts is not None:
+            clumps.append(parts)
+
+    if len(clumps) == 0:
+        return None
+    return csiborgtools.match.concatenate_parts(clumps)
+
+
+# We loop over simulations. Here later optionlaly add MPI.
+for i, nsim in enumerate(nsims):
+    if rank == 0:
+        now = datetime.now()
+        print(f"{now}: calculating {i}th simulation `{nsim}`.", flush=True)
+    nsnap = max(paths.get_snapshots(nsim))
+    box = csiborgtools.read.BoxUnits(nsnap, nsim, paths)
+
+    # Archive of clumps, keywords are their clump IDs
+    particle_archive = numpy.load(paths.split_path(nsnap, nsim))
+    clumps_cat = csiborgtools.read.ClumpsCatalogue(nsim, paths, maxdist=None,
+                                                   minmass=None, rawdata=True,
+                                                   load_fitted=False)
+    ismain = clumps_cat.ismain
+    ntasks = len(clumps_cat)
+
+    # We loop over halos and add ther particle positions to this dictionary,
+    # which we will later save as an archive.
+    out = {}
+    for j in trange(ntasks) if nproc == 1 else range(ntasks):
+        # If we are fitting halos and this clump is not a main, then continue.
+        if not ismain[j]:
+            continue
+
+        clumpid = clumps_cat["index"][j]
+
+        parts = load_parent_particles(clumpid, particle_archive, clumps_cat)
+        # If we have no particles, then do not save anything.
+        if parts is None:
+            continue
+        obj = csiborgtools.fits.Clump(parts, clumps_cat[j], box)
+        r200m, m200m = obj.spherical_overdensity_mass(200, npart_min=10,
+                                                      kind="matter")
+        r = obj.r()
+        mask = r <= r200m
+
+        _out = csiborgtools.read.cols_to_structured(numpy.sum(mask),
+                                                    cols_collect)
+
+        _out["r"] = r[mask]
+        _out["M"] = obj["M"][mask]
+
+        out[str(clumps_cat["index"][i])] = _out
+
+    # Finished, so we save everything.
+    fout = paths.radpos_path(nsnap, nsim)
+    now = datetime.now()
+    print(f"{now}: saving radial profiles for simulation {nsim} to `{fout}`",
+          flush=True)
+    numpy.savez(fout, **out)
+
+    # Clean up the memory just to be sure.
+    del out
+    collect()
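The archive written by this script is keyed by clump index, with structured fields `r` and `M`; reading one halo back for a cumulative mass profile might look like (a sketch; `paths`, `nsnap` and `nsim` as above):

    import numpy

    arch = numpy.load(paths.radpos_path(nsnap, nsim))
    prof = arch[arch.files[0]]              # structured array: "r" and "M"
    order = numpy.argsort(prof["r"])
    r = prof["r"][order]
    m_enc = numpy.cumsum(prof["M"][order])  # enclosed mass profile M(<r)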
@@ -33,66 +33,55 @@ parser.add_argument("--nsim0", type=int)
 parser.add_argument("--nsimx", type=int)
 parser.add_argument("--nmult", type=float)
 parser.add_argument("--sigma", type=float)
-parser.add_argument("--verbose", type=lambda x: bool(strtobool(x)), default=False)
+parser.add_argument("--verbose", type=lambda x: bool(strtobool(x)),
+                    default=False)
 args = parser.parse_args()
 paths = csiborgtools.read.CSiBORGPaths(**csiborgtools.paths_glamdring)
 smooth_kwargs = {"sigma": args.sigma, "mode": "constant", "cval": 0.0}
 overlapper = csiborgtools.match.ParticleOverlap()
 matcher = csiborgtools.match.RealisationsMatcher()

-# Load the raw catalogues (i.e. no selection) including the initial CM positions
-# and the particle archives.
-cat0 = csiborgtools.read.HaloCatalogue(
-    args.nsim0, paths, load_initial=True, rawdata=True
-)
-catx = csiborgtools.read.HaloCatalogue(
-    args.nsimx, paths, load_initial=True, rawdata=True
-)
+# Load the raw catalogues (i.e. no selection) including the initial CM
+# positions and the particle archives.
+cat0 = csiborgtools.read.HaloCatalogue(args.nsim0, paths, load_initial=True,
+                                       rawdata=True)
+catx = csiborgtools.read.HaloCatalogue(args.nsimx, paths, load_initial=True,
+                                       rawdata=True)
 halos0_archive = paths.initmatch_path(args.nsim0, "particles")
 halosx_archive = paths.initmatch_path(args.nsimx, "particles")

 # We generate the background density fields. Loads halos's particles one by one
 # from the archive, concatenates them and calculates the NGP density field.
-args.verbose and print(
-    "{}: generating the background density fields.".format(datetime.now()), flush=True
-)
+if args.verbose:
+    print(f"{datetime.now()}: generating the background density fields.",
+          flush=True)
 delta_bckg = overlapper.make_bckg_delta(halos0_archive, verbose=args.verbose)
-delta_bckg = overlapper.make_bckg_delta(
-    halosx_archive, delta=delta_bckg, verbose=args.verbose
-)
+delta_bckg = overlapper.make_bckg_delta(halosx_archive, delta=delta_bckg,
+                                        verbose=args.verbose)

 # We calculate the overlap between the NGP fields.
-args.verbose and print(
-    "{}: crossing the simulations.".format(datetime.now()), flush=True
-)
-match_indxs, ngp_overlap = matcher.cross(
-    cat0, catx, halos0_archive, halosx_archive, delta_bckg
-)
+if args.verbose:
+    print(f"{datetime.now()}: crossing the simulations.", flush=True)
+match_indxs, ngp_overlap = matcher.cross(cat0, catx, halos0_archive,
+                                         halosx_archive, delta_bckg)

-# We now smoothen up the background density field for the smoothed overlap calculation.
-args.verbose and print(
-    "{}: smoothing the background field.".format(datetime.now()), flush=True
-)
+# We now smoothen up the background density field for the smoothed overlap
+# calculation.
+if args.verbose:
+    print(f"{datetime.now()}: smoothing the background field.", flush=True)
 gaussian_filter(delta_bckg, output=delta_bckg, **smooth_kwargs)

 # We calculate the smoothed overlap for the pairs whose NGP overlap is > 0.
-args.verbose and print(
-    "{}: calculating smoothed overlaps.".format(datetime.now()), flush=True
-)
-smoothed_overlap = matcher.smoothed_cross(
-    cat0, catx, halos0_archive, halosx_archive, delta_bckg, match_indxs, smooth_kwargs
-)
+if args.verbose:
+    print(f"{datetime.now()}: calculating smoothed overlaps.", flush=True)
+smoothed_overlap = matcher.smoothed_cross(cat0, catx, halos0_archive,
+                                          halosx_archive, delta_bckg,
+                                          match_indxs, smooth_kwargs)

 # We save the results at long last.
 fout = paths.overlap_path(args.nsim0, args.nsimx)
-args.verbose and print(
-    "{}: saving results to `{}`.".format(datetime.now(), fout), flush=True
-)
-numpy.savez(
-    fout,
-    match_indxs=match_indxs,
-    ngp_overlap=ngp_overlap,
-    smoothed_overlap=smoothed_overlap,
-    sigma=args.sigma,
-)
-print("{}: all finished.".format(datetime.now()), flush=True)
+if args.verbose:
+    print(f"{datetime.now()}: saving results to `{fout}`.", flush=True)
+numpy.savez(fout, match_indxs=match_indxs, ngp_overlap=ngp_overlap,
+            smoothed_overlap=smoothed_overlap, sigma=args.sigma)
+print(f"{datetime.now()}: all finished.", flush=True)
@@ -13,8 +13,9 @@
 # with this program; if not, write to the Free Software Foundation, Inc.,
 # 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
 """
-Script to calculate the particle centre of mass and Lagrangian patch size in the initial
-snapshot. Optinally dumps the particle files, however this requires a lot of memory.
+Script to calculate the particle centre of mass and Lagrangian patch size in
+the initial snapshot. Optinally dumps the particle files, however this requires
+a lot of memory.
 """
 from argparse import ArgumentParser
 from datetime import datetime
@@ -54,15 +55,14 @@ ftemp = join(paths.temp_dumpdir, "initmatch_{}_{}_{}.npy")
 # initial snapshot and dumping them.
 for i, nsim in enumerate(paths.get_ics(tonew=True)):
     if rank == 0:
-        print("{}: reading simulation {}.".format(datetime.now(), nsim), flush=True)
+        print(f"{datetime.now()}: reading simulation {nsim}.", flush=True)
     nsnap = max(paths.get_snapshots(nsim))

     # We first load particles in the initial and final snapshots and sort them
     # by their particle IDs so that we can match them by array position.
     # `clump_ids` are the clump IDs of particles.
-    part0 = partreader.read_particle(
-        1, nsim, ["x", "y", "z", "M", "ID"], verbose=verbose
-    )
+    part0 = partreader.read_particle(1, nsim, ["x", "y", "z", "M", "ID"],
+                                     verbose=verbose)
     part0 = part0[numpy.argsort(part0["ID"])]

     pid = partreader.read_particle(nsnap, nsim, ["ID"], verbose=verbose)["ID"]
@@ -85,17 +85,14 @@ for i, nsim in enumerate(paths.get_ics(tonew=True)):
     # size and optionally the initial snapshot particles belonging to this
     # parent halo. Dumping the particles will take majority of time.
     if rank == 0:
-        print(
-            "{}: calculating {}th simulation {}.".format(datetime.now(), i, nsim),
-            flush=True,
-        )
+        print(f"{datetime.now()}: calculating {i}th simulation {nsim}.",
+              flush=True)
     # We load up the clump catalogue which contains information about the
     # ultimate parent halos of each clump. We will loop only over the clump
     # IDs of ultimate parent halos and add their substructure particles and at
     # the end save these.
-    cat = csiborgtools.read.ClumpsCatalogue(
-        nsim, paths, load_fitted=False, rawdata=True
-    )
+    cat = csiborgtools.read.ClumpsCatalogue(nsim, paths, load_fitted=False,
+                                            rawdata=True)
     parent_ids = cat["index"][cat.ismain][:500]
     jobs = csiborgtools.fits.split_jobs(parent_ids.size, nproc)[rank]
     for i in tqdm(jobs) if verbose else jobs:
@@ -106,7 +103,8 @@ for i, nsim in enumerate(paths.get_ics(tonew=True)):
         mmain_particles = part0[mmain_mask]

         raddist, cmpos = csiborgtools.match.dist_centmass(mmain_particles)
-        patchsize = csiborgtools.match.dist_percentile(raddist, [99], distmax=0.075)
+        patchsize = csiborgtools.match.dist_percentile(raddist, [99],
+                                                       distmax=0.075)
         with open(ftemp.format(nsim, clid, "fit"), "wb") as f:
             numpy.savez(f, cmpos=cmpos, patchsize=patchsize)

@@ -118,15 +116,13 @@ for i, nsim in enumerate(paths.get_ics(tonew=True)):
     del part0, clump_ids
     collect()

-    # We now wait for all processes and then use the 0th process to collect the results.
-    # We first collect just the Lagrangian patch size information.
+    # We now wait for all processes and then use the 0th process to collect
+    # the results. We first collect just the Lagrangian patch size information.
     comm.Barrier()
     if rank == 0:
-        print("{}: collecting fits...".format(datetime.now()), flush=True)
-        dtype = {
-            "names": ["index", "x", "y", "z", "lagpatch"],
-            "formats": [numpy.int32] + [numpy.float32] * 4,
-        }
+        print(f"{datetime.now()}: collecting fits...", flush=True)
+        dtype = {"names": ["index", "x", "y", "z", "lagpatch"],
+                 "formats": [numpy.int32] + [numpy.float32] * 4}
         out = numpy.full(parent_ids.size, numpy.nan, dtype=dtype)
         for i, clid in enumerate(parent_ids):
             fpath = ftemp.format(nsim, clid, "fit")
@@ -140,14 +136,15 @@ for i, nsim in enumerate(paths.get_ics(tonew=True)):
             remove(fpath)

         fout = paths.initmatch_path(nsim, "fit")
-        print("{}: dumping fits to .. `{}`.".format(datetime.now(), fout), flush=True)
+        print(f"{datetime.now()}: dumping fits to .. `{fout}`.", flush=True)
         with open(fout, "wb") as f:
             numpy.save(f, out)

-    # We now optionally collect the individual clumps and store them in an archive,
-    # which has the benefit of being a single file that can be easily read in.
+    # We now optionally collect the individual clumps and store them in an
+    # archive, which has the benefit of being a single file that can be
+    # easily read in.
     if args.dump:
-        print("{}: collecting particles...".format(datetime.now()), flush=True)
+        print(f"{datetime.now()}: collecting particles...", flush=True)
         out = {}
         for clid in parent_ids:
             fpath = ftemp.format(nsim, clid, "particles")
@@ -155,10 +152,8 @@ for i, nsim in enumerate(paths.get_ics(tonew=True)):
                 out.update({str(clid): numpy.load(f)})

         fout = paths.initmatch_path(nsim, "particles")
-        print(
-            "{}: dumping particles to .. `{}`.".format(datetime.now(), fout),
-            flush=True,
-        )
+        print(f"{datetime.now()}: dumping particles to .. `{fout}`.",
+              flush=True)
         with open(fout, "wb") as f:
             numpy.savez(f, **out)

@@ -19,7 +19,7 @@ from datetime import datetime

 import numpy
 from mpi4py import MPI
-from TaskmasterMPI import master_process, worker_process
+from taskmaster import master_process, worker_process

 try:
     import csiborgtools
@@ -58,7 +58,7 @@ if nproc > 1:
 else:
     tasks = paths.get_ics(tonew=False)
     for task in tasks:
-        print("{}: completing task `{}`.".format(datetime.now(), task))
+        print(f"{datetime.now()}: completing task `{task}`.", flush=True)
         do_mmain(task)

 comm.Barrier()
@@ -24,7 +24,7 @@ from os.path import join

 import numpy
 from mpi4py import MPI
-from TaskmasterMPI import master_process, worker_process
+from taskmaster import master_process, worker_process
 from tqdm import tqdm

 try:
|
@ -30,12 +30,12 @@ dumpdir = "/mnt/extraspace/rstiskalek/csiborg/"
|
|||
|
||||
|
||||
# Some chosen clusters
|
||||
_coma = {"RA": (12 + 59/60 + 48.7 / 60**2) * 15,
|
||||
_coma = {"RA": (12 + 59 / 60 + 48.7 / 60**2) * 15,
|
||||
"DEC": 27 + 58 / 60 + 50 / 60**2,
|
||||
"COMDIST": 102.975}
|
||||
|
||||
_virgo = {"RA": (12 + 27 / 60) * 15,
|
||||
"DEC": 12 + 43/60,
|
||||
"DEC": 12 + 43 / 60,
|
||||
"COMDIST": 16.5}
|
||||
|
||||
specific_clusters = {"Coma": _coma, "Virgo": _virgo}
|
||||
|
|
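Both touched lines are sexagesimal-to-degree conversions, RA = (h + m/60 + s/3600) * 15 and DEC = d + m/60 + s/3600; e.g. for Coma:

    ra_coma = (12 + 59 / 60 + 48.7 / 60**2) * 15   # hours -> degrees
    dec_coma = 27 + 58 / 60 + 50 / 60**2
    print(ra_coma, dec_coma)                       # approx. 194.95, 27.98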