Dump radial profile information (#48)

* Add radial position path

* pep8

* Add basic fit profile dumping

* pep8

* pep8

* pep8

* pep8

* pep8

* pep8

* Update TODO

* Fix parts is None bug

* Update nb
Richard Stiskalek 2023-04-27 01:18:30 +02:00 committed by GitHub
parent 1a115f481d
commit f48eb6dcb0
29 changed files with 512 additions and 395 deletions

View file

@ -15,7 +15,6 @@
from csiborgtools import clustering, field, fits, match, read # noqa
# Arguments to csiborgtools.read.CSiBORGPaths.
paths_glamdring = {
"srcdir": "/mnt/extraspace/hdesmond/",
"postdir": "/mnt/extraspace/rstiskalek/csiborg/"
}
paths_glamdring = {"srcdir": "/mnt/extraspace/hdesmond/",
"postdir": "/mnt/extraspace/rstiskalek/csiborg/",
}
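The dictionary above supplies, per the comment, the arguments for csiborgtools.read.CSiBORGPaths. A minimal usage sketch (not part of the diff; it assumes the constructor accepts these keys as keyword arguments):

    from csiborgtools import read

    # Build the path handler from the glamdring defaults defined above:
    # `srcdir` holds the raw CSiBORG outputs, `postdir` the post-processed
    # products.
    paths = read.CSiBORGPaths(**paths_glamdring)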

View file

@ -28,4 +28,4 @@ try:
from .tpcf import Mock2PCF # noqa
except ImportError:
warn("`Corrfunc` not installed. 2PCF modules will not be available (`Mock2PCF`).") # noqa
warn("`Corrfunc` not installed. 2PCF modules will not be available .") # noqa

View file

@ -66,7 +66,7 @@ class RVSinsphere(BaseRVS):
def __call__(self, nsamples, random_state=42, dtype=numpy.float32):
gen = numpy.random.default_rng(random_state)
# Spherical
r = gen.random(nsamples, dtype=dtype)**(1/3) * self.R
r = gen.random(nsamples, dtype=dtype)**(1 / 3) * self.R
theta = 2 * numpy.arcsin(gen.random(nsamples, dtype=dtype))
phi = 2 * numpy.pi * gen.random(nsamples, dtype=dtype)
# Cartesian
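The whitespace fix above sits in the inverse-CDF sampler: scaling the radius as U**(1/3) makes the points uniform in enclosed volume. A self-contained sketch of uniform sampling inside a sphere with the standard inverse transforms (illustrative code, not the package's implementation):

    import numpy

    def uniform_in_sphere(nsamples, R, seed=42):
        """Draw `nsamples` points uniformly inside a sphere of radius `R`."""
        gen = numpy.random.default_rng(seed)
        # Radius: P(<r) scales as r^3, so invert the CDF with a cube root.
        r = R * gen.random(nsamples) ** (1 / 3)
        # Polar angle: cos(theta) must be uniform on [-1, 1] for isotropy.
        theta = numpy.arccos(1 - 2 * gen.random(nsamples))
        # Azimuth: uniform on [0, 2 pi).
        phi = 2 * numpy.pi * gen.random(nsamples)
        x = r * numpy.sin(theta) * numpy.cos(phi)
        y = r * numpy.sin(theta) * numpy.sin(phi)
        z = r * numpy.cos(theta)
        return numpy.vstack([x, y, z]).T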

View file

@ -105,7 +105,7 @@ class DensityField:
@box.setter
def box(self, box):
try:
assert box._name == "box_units"
assert box._name == "box_units"
self._box = box
except AttributeError as err:
raise TypeError from err

View file

@ -40,7 +40,7 @@ class BaseStructure(ABC):
@particles.setter
def particles(self, particles):
pars = ["x", "y", "z", "vx", "vy", "vz", "M"]
pars = ["x", "y", "z", "M"]
assert all(p in particles.dtype.names for p in pars)
self._particles = particles
@ -204,11 +204,12 @@ class BaseStructure(ABC):
return numpy.nan
mass = self.enclosed_mass(radius)
V = numpy.sqrt(self.box.box_G * mass / radius)
return numpy.linalg.norm(self.angular_momentum(radius)) / (
numpy.sqrt(2) * mass * V * radius
)
out = numpy.linalg.norm(self.angular_momentum(radius))
out /= numpy.sqrt(2) * mass * V * radius
return out
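The reformatted return above is the Bullock-style spin parameter, lambda = |L| / (sqrt(2) M V R) with V = sqrt(G M(<R) / R). A standalone sketch of the same quantity from raw particle data (illustrative only; `G` here is in Mpc (km/s)^2 / Msun and the inputs are placeholders, not the package's box units):

    import numpy

    def spin_bullock(pos, vel, mass, radius, G=4.30091e-9):
        """Bullock et al. (2001) spin parameter of the particles within `radius`."""
        mask = numpy.linalg.norm(pos, axis=1) < radius
        m = mass[mask]
        M = numpy.sum(m)
        # Angular momentum about the origin; the package presumably centres
        # on the halo first.
        L = numpy.sum(m[:, None] * numpy.cross(pos[mask], vel[mask]), axis=0)
        V = numpy.sqrt(G * M / radius)  # circular velocity from enclosed mass
        return numpy.linalg.norm(L) / (numpy.sqrt(2) * M * V * radius)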
def spherical_overdensity_mass(self, delta_mult, npart_min=10, kind="crit"):
def spherical_overdensity_mass(self, delta_mult, npart_min=10,
kind="crit"):
r"""
Calculate spherical overdensity mass and radius. The mass is defined as
the enclosed mass within an outermost radius where the mean enclosed

View file

@ -364,9 +364,7 @@ class NFWPosterior(NFWProfile):
if not (numpy.all(numpy.isfinite(bounds)) and bounds[0] < bounds[1]):
return numpy.nan, numpy.nan
res = minimize_scalar(
loss, bounds=(numpy.log10(rmin), numpy.log10(rmax)), method="bounded"
)
res = minimize_scalar(loss, bounds=bounds, method="bounded")
# Check whether the fit converged to a radius sufficiently far from `rmax`
# and that it is a success. Otherwise return NaNs.
if numpy.log10(rmax) - res.x < eps:
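The hunk above reduces the NFW fit to a bounded one-dimensional minimisation over log10(Rs), followed by a guard against solutions that pile up within `eps` of the upper bound. A generic sketch of that pattern (the quadratic loss is a stand-in, not the NFW posterior):

    import numpy
    from scipy.optimize import minimize_scalar

    def fit_log_scale(loss, rmin, rmax, eps=1e-4):
        """Minimise `loss(log10_rs)` on a bounded interval, rejecting edge hits."""
        bounds = (numpy.log10(rmin), numpy.log10(rmax))
        res = minimize_scalar(loss, bounds=bounds, method="bounded")
        # Reject fits that failed or converged too close to the upper bound.
        if not res.success or bounds[1] - res.x < eps:
            return numpy.nan
        return 10**res.x

    # Toy usage: the minimum sits at Rs = 0.2, safely inside the bounds.
    rs = fit_log_scale(lambda logr: (logr - numpy.log10(0.2))**2, 0.01, 1.0)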

View file

@ -105,13 +105,13 @@ class RealisationsMatcher:
"""
return self._overlapper
def cross(
self, cat0, catx, halos0_archive, halosx_archive, delta_bckg, verbose=True
):
def cross(self, cat0, catx, halos0_archive, halosx_archive, delta_bckg,
verbose=True):
r"""
Find all neighbours whose CM separation is less than `nmult` times the
sum of their initial Lagrangian patch sizes and calculate their overlap.
Enforces that the neighbours are similar in mass up to `dlogmass` dex.
sum of their initial Lagrangian patch sizes and calculate their
overlap. Enforces that the neighbours are similar in mass up to
`dlogmass` dex.
Parameters
----------
@ -121,12 +121,12 @@ class RealisationsMatcher:
Halo catalogue of the cross simulation.
halos0_archive : `NpzFile` object
Archive of halos' particles of the reference simulation, keys must
include `x`, `y`, `z` and `M`. The positions must already be converted
to cell numbers.
include `x`, `y`, `z` and `M`. The positions must already be
converted to cell numbers.
halosx_archive : `NpzFile` object
Archive of halos' particles of the cross simulation, keys must
include `x`, `y`, `z` and `M`. The positions must already be converted
to cell numbers.
include `x`, `y`, `z` and `M`. The positions must already be
converted to cell numbers.
delta_bckg : 3-dimensional array
Summed background density field of the reference and cross
simulations calculated with particles assigned to halos at the
@ -138,25 +138,23 @@ class RealisationsMatcher:
Returns
-------
match_indxs : 1-dimensional array of arrays
The outer array corresponds to halos in the reference catalogue, the
inner array corresponds to the array positions of matches in the cross
catalogue.
The outer array corresponds to halos in the reference catalogue,
the inner array corresponds to the array positions of matches in
the cross catalogue.
overlaps : 1-dimensional array of arrays
Overlaps with the cross catalogue. Follows similar pattern as `match_indxs`.
Overlaps with the cross catalogue. Follows similar pattern as
`match_indxs`.
"""
# We begin by querying the kNN for the nearest neighbours of each halo
# in the reference simulation from the cross simulation in the initial
# snapshot.
verbose and print("{}: querying the KNN.".format(datetime.now()), flush=True)
if verbose:
now = datetime.now()
print(f"{now}: querying the KNN.", flush=True)
match_indxs = radius_neighbours(
catx.knn(select_initial=True),
cat0.positions(in_initial=True),
radiusX=cat0["lagpatch"],
radiusKNN=catx["lagpatch"],
nmult=self.nmult,
enforce_int32=True,
verbose=verbose,
)
catx.knn(select_initial=True), cat0.positions(in_initial=True),
radiusX=cat0["lagpatch"], radiusKNN=catx["lagpatch"],
nmult=self.nmult, enforce_int32=True, verbose=verbose)
# We next remove neighbours whose mass is too large/small.
if self.dlogmass is not None:
for i, indx in enumerate(match_indxs):
@ -165,34 +163,35 @@ class RealisationsMatcher:
aratio = numpy.abs(numpy.log10(catx[p][indx] / cat0[p][i]))
match_indxs[i] = match_indxs[i][aratio < self.dlogmass]
# We will make a dictionary to keep in memory the halos' particles from the
# cross simulations so that they are not loaded several times and we only
# convert their positions to cells once. Possibly make an option to not do
# this to lower memory requirements?
# We will make a dictionary to keep in memory the halos' particles from
# the cross simulations so that they are not loaded several times
# and we only convert their positions to cells once. Possibly make an
# option to not do this to lower memory requirements?
cross_halos = {}
cross_lims = {}
cross = [numpy.asanyarray([], dtype=numpy.float32)] * match_indxs.size
for i, k0 in enumerate(tqdm(cat0["index"]) if verbose else cat0["index"]):
indxs = cat0["index"]
for i, k0 in enumerate(tqdm(indxs) if verbose else indxs):
# If we have no matches continue to the next halo.
matches = match_indxs[i]
if matches.size == 0:
continue
# Next, we find this halo's particles, total mass and minimum/maximum cells
# and convert positions to cells.
# Next, we find this halo's particles, total mass, minimum and
# maximum cells and convert positions to cells.
halo0 = halos0_archive[str(k0)]
mass0 = numpy.sum(halo0["M"])
mins0, maxs0 = get_halolims(
halo0, ncells=self.overlapper.inv_clength, nshift=self.overlapper.nshift
)
mins0, maxs0 = get_halolims(halo0,
ncells=self.overlapper.inv_clength,
nshift=self.overlapper.nshift)
for p in ("x", "y", "z"):
halo0[p] = self.overlapper.pos2cell(halo0[p])
# We now loop over matches of this halo and calculate their overlap,
# storing them in `_cross`.
# We now loop over matches of this halo and calculate their
# overlap, storing them in `_cross`.
_cross = numpy.full(matches.size, numpy.nan, dtype=numpy.float32)
for j, kf in enumerate(catx["index"][matches]):
# Attempt to load this cross halo from memory; if it fails, get it
# from the halo archive (and similarly for the limits) and convert the
# particle positions to cells.
# Attempt to load this cross halo from memory; if it fails, get
# it from the halo archive (and similarly for the limits)
# and convert the particle positions to cells.
try:
halox = cross_halos[kf]
minsx, maxsx = cross_lims[kf]
@ -208,17 +207,9 @@ class RealisationsMatcher:
cross_halos[kf] = halox
cross_lims[kf] = (minsx, maxsx)
massx = numpy.sum(halox["M"])
_cross[j] = self.overlapper(
halo0,
halox,
delta_bckg,
mins0,
maxs0,
minsx,
maxsx,
mass1=mass0,
mass2=massx,
)
_cross[j] = self.overlapper(halo0, halox, delta_bckg, mins0,
maxs0, minsx, maxsx, mass1=mass0,
mass2=massx)
cross[i] = _cross
# We remove all matches that have zero overlap to save space.
@ -233,17 +224,8 @@ class RealisationsMatcher:
cross = numpy.asanyarray(cross, dtype=object)
return match_indxs, cross
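As the method shows, matching proceeds in two cuts: a geometric kNN query within `nmult` times the summed Lagrangian patch sizes, followed by a mass cut that keeps only neighbours with |log10(Mx / M0)| < `dlogmass`. A toy sketch of the mass cut on its own (array names are illustrative):

    import numpy

    def mass_filter(match_indxs, mass0, massx, dlogmass):
        """Drop candidate matches whose mass ratio exceeds `dlogmass` dex."""
        out = []
        for i, indx in enumerate(match_indxs):
            aratio = numpy.abs(numpy.log10(massx[indx] / mass0[i]))
            out.append(indx[aratio < dlogmass])
        return numpy.asanyarray(out, dtype=object)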
def smoothed_cross(
self,
cat0,
catx,
halos0_archive,
halosx_archive,
delta_bckg,
match_indxs,
smooth_kwargs,
verbose=True,
):
def smoothed_cross(self, cat0, catx, halos0_archive, halosx_archive,
delta_bckg, match_indxs, smooth_kwargs, verbose=True):
r"""
Calculate the smoothed overlaps for pairs previously identified via
`self.cross(...)` to have a non-zero overlap.
@ -256,12 +238,12 @@ class RealisationsMatcher:
Halo catalogue of the cross simulation.
halos0_archive : `NpzFile` object
Archive of halos' particles of the reference simulation, keys must
include `x`, `y`, `z` and `M`. The positions must already be converted
to cell numbers.
include `x`, `y`, `z` and `M`. The positions must already be
converted to cell numbers.
halosx_archive : `NpzFile` object
Archive of halos' particles of the cross simulation, keys must
include `x`, `y`, `z` and `M`. The positions must already be converted
to cell numbers.
include `x`, `y`, `z` and `M`. The positions must already be
converted to cell numbers.
delta_bckg : 3-dimensional array
Smoothed summed background density field of the reference and cross
simulations calculated with particles assigned to halos at the
@ -287,40 +269,32 @@ class RealisationsMatcher:
cross_lims = {}
cross = [numpy.asanyarray([], dtype=numpy.float32)] * match_indxs.size
for i, k0 in enumerate(tqdm(cat0["index"]) if verbose else cat0["index"]):
indxs = cat0["index"]
for i, k0 in enumerate(tqdm(indxs) if verbose else indxs):
halo0 = halos0_archive[str(k0)]
mins0, maxs0 = get_halolims(
halo0, ncells=self.overlapper.inv_clength, nshift=self.overlapper.nshift
)
mins0, maxs0 = get_halolims(halo0,
ncells=self.overlapper.inv_clength,
nshift=self.overlapper.nshift)
# Now loop over the matches and calculate the smoothed overlap.
_cross = numpy.full(match_indxs[i].size, numpy.nan, numpy.float32)
for j, kf in enumerate(catx["index"][match_indxs[i]]):
# Attempt to load this cross halo from memory; if it fails, get it
# from the halo archive (and similarly for the limits).
# Attempt to load this cross halo from memory; if it fails, get
# it from the halo archive (and similarly for the limits).
try:
halox = cross_halos[kf]
minsx, maxsx = cross_lims[kf]
except KeyError:
halox = halosx_archive[str(kf)]
minsx, maxsx = get_halolims(
halox,
ncells=self.overlapper.inv_clength,
nshift=self.overlapper.nshift,
)
halox, ncells=self.overlapper.inv_clength,
nshift=self.overlapper.nshift)
cross_halos[kf] = halox
cross_lims[kf] = (minsx, maxsx)
_cross[j] = self.overlapper(
halo0,
halox,
delta_bckg,
mins0,
maxs0,
minsx,
maxsx,
smooth_kwargs=smooth_kwargs,
)
_cross[j] = self.overlapper(halo0, halox, delta_bckg, mins0,
maxs0, minsx, maxsx,
smooth_kwargs=smooth_kwargs)
cross[i] = _cross
return numpy.asanyarray(cross, dtype=object)
@ -398,9 +372,9 @@ class ParticleOverlap:
def make_bckg_delta(self, halo_archive, delta=None, verbose=False):
"""
Calculate a NGP density field of particles belonging to halos within the
central :math:`1/2^3` high-resolution region of the simulation. Smoothing
must be applied separately.
Calculate a NGP density field of particles belonging to halos within
the central :math:`1/2^3` high-resolution region of the simulation.
Smoothing must be applied separately.
Parameters
----------
@ -417,16 +391,16 @@ class ParticleOverlap:
-------
delta : 3-dimensional array
"""
# We obtain the minimum/maximum cell IDs and number of cells along each dim.
# We obtain the minimum/maximum cell IDs and number of cells
cellmin = self.inv_clength // 4 # The minimum cell ID
cellmax = 3 * self.inv_clength // 4 # The maximum cell ID
ncells = cellmax - cellmin
# We then pre-allocate the density field or check it is of the right shape
# if already given.
# We then pre-allocate the density field/check it is of the right shape
if delta is None:
delta = numpy.zeros((ncells,) * 3, dtype=numpy.float32)
else:
assert (delta.shape == (ncells,) * 3) & (delta.dtype == numpy.float32)
assert ((delta.shape == (ncells,) * 3)
& (delta.dtype == numpy.float32))
# We now loop one-by-one over the halos and fill the density field.
files = halo_archive.files
@ -436,21 +410,20 @@ class ParticleOverlap:
mass = parts["M"]
# We mask out particles outside the cubical high-resolution region
mask = (
(cellmin <= cells[0])
& (cells[0] < cellmax)
& (cellmin <= cells[1])
& (cells[1] < cellmax)
& (cellmin <= cells[2])
& (cells[2] < cellmax)
)
mask = ((cellmin <= cells[0])
& (cells[0] < cellmax)
& (cellmin <= cells[1])
& (cells[1] < cellmax)
& (cellmin <= cells[2])
& (cells[2] < cellmax))
cells = [c[mask] for c in cells]
mass = mass[mask]
fill_delta(delta, *cells, *(cellmin,) * 3, mass)
return delta
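The loop above is a nearest-grid-point (NGP) deposit restricted to the central high-resolution cube: cells outside [cellmin, cellmax) are masked out and the surviving masses are accumulated at their shifted cell indices. A self-contained sketch of that deposit, using numpy.add.at in place of the package's `fill_delta` (whose full signature is not shown in this hunk):

    import numpy

    def ngp_deposit(delta, xcell, ycell, zcell, mass, cellmin, cellmax):
        """Add particle masses to `delta` for cells inside [cellmin, cellmax)."""
        mask = ((cellmin <= xcell) & (xcell < cellmax)
                & (cellmin <= ycell) & (ycell < cellmax)
                & (cellmin <= zcell) & (zcell < cellmax))
        # Shift to the sub-box frame and accumulate; repeated cells add up.
        numpy.add.at(delta, (xcell[mask] - cellmin,
                             ycell[mask] - cellmin,
                             zcell[mask] - cellmin), mass[mask])
        return delta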
def make_delta(self, clump, mins=None, maxs=None, subbox=False, smooth_kwargs=None):
def make_delta(self, clump, mins=None, maxs=None, subbox=False,
smooth_kwargs=None):
"""
Calculate a NGP density field of a halo on a cubic grid. Optionally can
be smoothed with a Gaussian kernel.
@ -505,16 +478,8 @@ class ParticleOverlap:
gaussian_filter(delta, output=delta, **smooth_kwargs)
return delta
def make_deltas(
self,
clump1,
clump2,
mins1=None,
maxs1=None,
mins2=None,
maxs2=None,
smooth_kwargs=None,
):
def make_deltas(self, clump1, clump2, mins1=None, maxs1=None, mins2=None,
maxs2=None, smooth_kwargs=None):
"""
Calculate NGP density fields of two halos on a grid that encloses
them both. Optionally can be smoothed with a Gaussian kernel.
@ -596,19 +561,9 @@ class ParticleOverlap:
gaussian_filter(delta2, output=delta2, **smooth_kwargs)
return delta1, delta2, cellmins, nonzero
def __call__(
self,
clump1,
clump2,
delta_bckg,
mins1=None,
maxs1=None,
mins2=None,
maxs2=None,
mass1=None,
mass2=None,
smooth_kwargs=None,
):
def __call__(self, clump1, clump2, delta_bckg, mins1=None, maxs1=None,
mins2=None, maxs2=None, mass1=None, mass2=None,
smooth_kwargs=None):
"""
Calculate overlap between `clump1` and `clump2`. See
`calculate_overlap(...)` for further information. Be careful so that
@ -647,8 +602,8 @@ class ParticleOverlap:
overlap : float
"""
delta1, delta2, cellmins, nonzero = self.make_deltas(
clump1, clump2, mins1, maxs1, mins2, maxs2, smooth_kwargs=smooth_kwargs
)
clump1, clump2, mins1, maxs1, mins2, maxs2,
smooth_kwargs=smooth_kwargs)
if smooth_kwargs is not None:
return calculate_overlap(delta1, delta2, cellmins, delta_bckg)
@ -656,8 +611,7 @@ class ParticleOverlap:
mass1 = numpy.sum(clump1["M"]) if mass1 is None else mass1
mass2 = numpy.sum(clump2["M"]) if mass2 is None else mass2
return calculate_overlap_indxs(
delta1, delta2, cellmins, delta_bckg, nonzero, mass1, mass2
)
delta1, delta2, cellmins, delta_bckg, nonzero, mass1, mass2)
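`__call__` wraps the full pipeline: build NGP fields of the two haloes on a grid that encloses them both, optionally smooth them, and evaluate the overlap against the background field. A heavily simplified toy version of that flow is sketched below; the statistic it returns (shared mass fraction in jointly occupied cells) is only a stand-in and is not the estimator implemented in `calculate_overlap`:

    import numpy
    from scipy.ndimage import gaussian_filter

    def toy_overlap(cells1, mass1, cells2, mass2, smooth_kwargs=None):
        """Toy overlap of two particle sets deposited on a shared NGP grid."""
        # Common enclosing grid for both haloes.
        lo = numpy.minimum(cells1.min(axis=0), cells2.min(axis=0))
        hi = numpy.maximum(cells1.max(axis=0), cells2.max(axis=0))
        shape = tuple(hi - lo + 1)
        delta1 = numpy.zeros(shape, dtype=numpy.float32)
        delta2 = numpy.zeros(shape, dtype=numpy.float32)
        numpy.add.at(delta1, tuple((cells1 - lo).T), mass1)
        numpy.add.at(delta2, tuple((cells2 - lo).T), mass2)
        if smooth_kwargs is not None:
            gaussian_filter(delta1, output=delta1, **smooth_kwargs)
            gaussian_filter(delta2, output=delta2, **smooth_kwargs)
        # Fraction of the total mass in cells occupied by both haloes.
        both = (delta1 > 0) & (delta2 > 0)
        return (delta1[both].sum() + delta2[both].sum()) / (mass1.sum() + mass2.sum())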
###############################################################################
@ -963,8 +917,8 @@ def radius_neighbours(
for i in trange(nsamples) if verbose else range(nsamples):
dist, indx = knn.radius_neighbors(
X[i, :].reshape(-1, 3), radiusX[i] + patchknn_max, sort_results=True
)
X[i, :].reshape(-1, 3), radiusX[i] + patchknn_max,
sort_results=True)
# Note that `dist` and `indx` are wrapped in 1-element arrays
# so we take the first item where appropriate
mask = (dist[0] / (radiusX[i] + radiusKNN[indx[0]])) < nmult
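The final mask encodes the matching criterion used throughout: a neighbour is kept only if its separation is smaller than `nmult` times the sum of the two Lagrangian patch radii. A compact sketch of that query with scikit-learn (illustrative, mirroring the visible logic rather than reproducing `radius_neighbours` verbatim):

    import numpy
    from sklearn.neighbors import NearestNeighbors

    def patch_matches(pos0, radius0, posx, radiusx, nmult=1.0):
        """For each point of `pos0`, indices of `posx` within nmult * (R0 + Rx)."""
        knn = NearestNeighbors().fit(posx)
        rmax = radius0 + radiusx.max()  # conservative per-point search radius
        out = []
        for i in range(pos0.shape[0]):
            dist, indx = knn.radius_neighbors(pos0[i].reshape(1, 3), rmax[i],
                                              sort_results=True)
            keep = dist[0] / (radius0[i] + radiusx[indx[0]]) < nmult
            out.append(indx[0][keep])
        return numpy.asanyarray(out, dtype=object)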

View file

@ -41,9 +41,9 @@ def concatenate_parts(list_parts, include_velocities=False):
else:
posdtype = numpy.float32
# We pre-allocate an empty array. By default, we include just particle positions,
# which may be specified by cell IDs if integers, and masses. Optionally also
# outputs velocities.
# We pre-allocate an empty array. By default, we include just particle
# positions, which may be specified by cell IDs if integers, and masses.
# Optionally also outputs velocities.
if include_velocities:
dtype = {
"names": ["x", "y", "z", "vx", "vy", "vz", "M"],

View file

@ -24,27 +24,12 @@ from .readsim import ParticleReader
# Map of unit conversions
CONV_NAME = {
"length": [
"x",
"y",
"z",
"peak_x",
"peak_y",
"peak_z",
"Rs",
"rmin",
"rmax",
"r200c",
"r500c",
"r200m",
"x0",
"y0",
"z0",
"lagpatch",
],
"mass": ["mass_cl", "totpartmass", "m200c", "m500c", "mass_mmain", "M", "m200m"],
"density": ["rho0"],
}
"length": ["x", "y", "z", "peak_x", "peak_y", "peak_z", "Rs", "rmin",
"rmax", "r200c", "r500c", "r200m", "x0", "y0", "z0",
"lagpatch"],
"mass": ["mass_cl", "totpartmass", "m200c", "m500c", "mass_mmain", "M",
"m200m"],
"density": ["rho0"]}
class BoxUnits:
@ -367,7 +352,8 @@ class BoxUnits:
density : float
Density in :math:`M_\odot / \mathrm{pc}^3`.
"""
return density * self._unit_d / self._Msuncgs * (units.Mpc.to(units.cm)) ** 3
return (density * self._unit_d
/ self._Msuncgs * (units.Mpc.to(units.cm)) ** 3)
def dens2box(self, density):
r"""
@ -384,7 +370,8 @@ class BoxUnits:
density : float
Density in box units.
"""
return density / self._unit_d * self._Msuncgs / (units.Mpc.to(units.cm)) ** 3
return (density / self._unit_d * self._Msuncgs
/ (units.Mpc.to(units.cm)) ** 3)
def convert_from_boxunits(self, data, names):
r"""
@ -412,13 +399,10 @@ class BoxUnits:
Input array with converted columns.
"""
names = [names] if isinstance(names, str) else names
# Shortcut for the transform functions
transforms = {
"length": self.box2mpc,
"mass": self.box2solarmass,
"density": self.box2dens,
}
transforms = {"length": self.box2mpc,
"mass": self.box2solarmass,
"density": self.box2dens,
}
for name in names:
# Check that the name is even in the array
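`convert_from_boxunits` dispatches every requested column through one of three converters, chosen by whether `CONV_NAME` lists the column as a length, mass or density. A standalone sketch of that dict-dispatch pattern (the converters and factors below are placeholders, not the BoxUnits methods):

    # Placeholders standing in for box2mpc, box2solarmass and box2dens.
    transforms = {"length": lambda x: x * 1.0,   # box units -> Mpc (dummy factor)
                  "mass": lambda x: x * 1.0,     # box units -> Msun (dummy factor)
                  "density": lambda x: x * 1.0}  # box units -> Msun / pc^3 (dummy)

    conv_name = {"length": ["x", "y", "z", "r200c"],
                 "mass": ["M", "m200c"],
                 "density": ["rho0"]}

    def convert_columns(data, names):
        """Apply the matching unit transform to each named column in-place."""
        names = [names] if isinstance(names, str) else names
        for name in names:
            for kind, cols in conv_name.items():
                if name in cols:
                    data[name] = transforms[kind](data[name])
                    break
        return data

This works equally on a structured array or a plain dict of columns.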

View file

@ -21,7 +21,8 @@ from sklearn.neighbors import NearestNeighbors
from .box_units import BoxUnits
from .paths import CSiBORGPaths
from .readsim import ParticleReader
from .utils import add_columns, cartesian_to_radec, flip_cols, radec_to_cartesian
from .utils import (add_columns, cartesian_to_radec, flip_cols,
radec_to_cartesian)
class BaseCatalogue(ABC):
@ -249,7 +250,8 @@ class BaseCatalogue(ABC):
knn.fit(pos)
# Convert angular radius to cosine difference.
metric_maxdist = 1 - numpy.cos(numpy.deg2rad(ang_radius))
dist, ind = knn.radius_neighbors(X, radius=metric_maxdist, sort_results=True)
dist, ind = knn.radius_neighbors(X, radius=metric_maxdist,
sort_results=True)
# And the cosine difference to angular distance.
for i in range(X.shape[0]):
dist[i] = numpy.rad2deg(numpy.arccos(1 - dist[i]))
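The angular query works by converting an angular radius alpha into the cosine difference 1 - cos(alpha), running the radius query with that threshold, and converting the returned distances back with arccos. A self-contained sketch of the same trick on unit vectors with a cosine metric (the catalogue's own kNN construction is not shown in this hunk, so the setup below is an assumption):

    import numpy
    from sklearn.neighbors import NearestNeighbors

    def angular_neighbours(radec_cat, radec_query, ang_radius):
        """Catalogue neighbours within `ang_radius` degrees of each query point."""
        def to_unit(radec):
            ra, dec = numpy.deg2rad(radec[:, 0]), numpy.deg2rad(radec[:, 1])
            return numpy.vstack([numpy.cos(dec) * numpy.cos(ra),
                                 numpy.cos(dec) * numpy.sin(ra),
                                 numpy.sin(dec)]).T

        # Cosine distance between unit vectors is exactly 1 - cos(separation).
        knn = NearestNeighbors(metric="cosine", algorithm="brute")
        knn.fit(to_unit(radec_cat))
        maxdist = 1 - numpy.cos(numpy.deg2rad(ang_radius))
        dist, ind = knn.radius_neighbors(to_unit(radec_query), radius=maxdist,
                                         sort_results=True)
        # Convert the cosine differences back to angular separations in degrees.
        for i in range(len(dist)):
            dist[i] = numpy.rad2deg(numpy.arccos(numpy.clip(1 - dist[i], -1, 1)))
        return dist, ind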
@ -302,15 +304,8 @@ class ClumpsCatalogue(BaseCatalogue):
transformations.
"""
def __init__(
self,
nsim,
paths,
maxdist=155.5 / 0.705,
minmass=("mass_cl", 1e12),
load_fitted=True,
rawdata=False,
):
def __init__(self, nsim, paths, maxdist=155.5 / 0.705,
minmass=("mass_cl", 1e12), load_fitted=True, rawdata=False):
self.nsim = nsim
self.paths = paths
# Read in the clumps from the final snapshot
@ -333,27 +328,12 @@ class ClumpsCatalogue(BaseCatalogue):
flip_cols(self._data, "x", "z")
for p in ("x", "y", "z"):
self._data[p] -= 0.5
self._data = self.box.convert_from_boxunits(
self._data,
[
"x",
"y",
"z",
"mass_cl",
"totpartmass",
"rho0",
"r200c",
"r500c",
"m200c",
"m500c",
"r200m",
"m200m",
],
)
names = ["x", "y", "z", "mass_cl", "totpartmass", "rho0", "r200c",
"r500c", "m200c", "m500c", "r200m", "m200m"]
self._data = self.box.convert_from_boxunits(self._data, names)
if maxdist is not None:
dist = numpy.sqrt(
self._data["x"] ** 2 + self._data["y"] ** 2 + self._data["z"] ** 2
)
dist = numpy.sqrt(self._data["x"]**2 + self._data["y"]**2
+ self._data["z"]**2)
self._data = self._data[dist < maxdist]
if minmass is not None:
self._data = self._data[self._data[minmass[0]] > minmass[1]]
@ -397,16 +377,8 @@ class HaloCatalogue(BaseCatalogue):
transformations.
"""
def __init__(
self,
nsim,
paths,
maxdist=155.5 / 0.705,
minmass=("M", 1e12),
load_fitted=True,
load_initial=False,
rawdata=False,
):
def __init__(self, nsim, paths, maxdist=155.5 / 0.705, minmass=("M", 1e12),
load_fitted=True, load_initial=False, rawdata=False):
self.nsim = nsim
self.paths = paths
# Read in the mmain catalogue of summed substructure
@ -426,28 +398,13 @@ class HaloCatalogue(BaseCatalogue):
flip_cols(self._data, "x", "z")
for p in ("x", "y", "z"):
self._data[p] -= 0.5
self._data = self.box.convert_from_boxunits(
self._data,
[
"x",
"y",
"z",
"M",
"totpartmass",
"rho0",
"r200c",
"r500c",
"m200c",
"m500c",
"r200m",
"m200m",
],
)
names = ["x", "y", "z", "M", "totpartmass", "rho0", "r200c",
"r500c", "m200c", "m500c", "r200m", "m200m"]
self._data = self.box.convert_from_boxunits(self._data, names)
if maxdist is not None:
dist = numpy.sqrt(
self._data["x"] ** 2 + self._data["y"] ** 2 + self._data["z"] ** 2
)
dist = numpy.sqrt(self._data["x"]**2 + self._data["y"]**2
+ self._data["z"]**2)
self._data = self._data[dist < maxdist]
if minmass is not None:
self._data = self._data[self._data[minmass[0]] > minmass[1]]

View file

@ -163,7 +163,7 @@ class TwoMPPGroups(TextSurvey):
# Convert galactic coordinates to RA, dec
glon = data[:, 1]
glat = data[:, 2]
coords = SkyCoord(l=glon*units.degree, b=glat*units.degree,
coords = SkyCoord(l=glon * units.degree, b=glat * units.degree,
frame='galactic')
coords = coords.transform_to("icrs")
data["RA"] = coords.ra

View file

@ -82,7 +82,7 @@ class PairOverlap:
"match_indxs": match_indxs,
"ngp_overlap": ngp_overlap,
"smoothed_overlap": smoothed_overlap,
}
}
self._make_refmask(min_mass, max_dist)

View file

@ -97,7 +97,7 @@ class CSiBORGPaths:
fpath = join(self.postdir, "temp")
if not isdir(fpath):
mkdir(fpath)
warn("Created directory `{}`.".format(fpath), UserWarning, stacklevel=1)
warn(f"Created directory `{fpath}`.", UserWarning, stacklevel=1)
return fpath
def mmain_path(self, nsnap, nsim):
@ -118,10 +118,9 @@ class CSiBORGPaths:
fdir = join(self.postdir, "mmain")
if not isdir(fdir):
mkdir(fdir)
warn("Created directory `{}`.".format(fdir), UserWarning, stacklevel=1)
return join(
fdir, "mmain_{}_{}.npz".format(str(nsim).zfill(5), str(nsnap).zfill(5))
)
warn(f"Created directory `{fdir}`.", UserWarning, stacklevel=1)
return join(fdir,
f"mmain_{str(nsim).zfill(5)}_{str(nsnap).zfill(5)}.npz")
def initmatch_path(self, nsim, kind):
"""
@ -143,8 +142,8 @@ class CSiBORGPaths:
fdir = join(self.postdir, "initmatch")
if not isdir(fdir):
mkdir(fdir)
warn("Created directory `{}`.".format(fdir), UserWarning, stacklevel=1)
return join(fdir, "{}_{}.npy".format(kind, str(nsim).zfill(5)))
warn(f"Created directory `{fdir}`.", UserWarning, stacklevel=1)
return join(fdir, f"{kind}_{str(nsim).zfill(5)}.npy")
def split_path(self, nsnap, nsim):
"""
@ -164,10 +163,9 @@ class CSiBORGPaths:
fdir = join(self.postdir, "split")
if not isdir(fdir):
mkdir(fdir)
warn("Created directory `{}`.".format(fdir), UserWarning, stacklevel=1)
warn(f"Created directory `{fdir}`.", UserWarning, stacklevel=1)
return join(
fdir, "clumps_{}_{}.npz".format(str(nsim).zfill(5), str(nsnap).zfill(5))
)
fdir, f"clumps_{str(nsim).zfill(5)}_{str(nsnap).zfill(5)}.npz")
def get_ics(self, tonew):
"""
@ -256,7 +254,7 @@ class CSiBORGPaths:
"""
tonew = nsnap == 1
simpath = self.ic_path(nsim, tonew=tonew)
return join(simpath, "output_{}".format(str(nsnap).zfill(5)))
return join(simpath, f"output_{str(nsnap).zfill(5)}")
def structfit_path(self, nsnap, nsim, kind):
"""
@ -279,9 +277,8 @@ class CSiBORGPaths:
fdir = join(self.postdir, "structfit")
if not isdir(fdir):
mkdir(fdir)
warn("Created directory `{}`.".format(fdir), UserWarning, stacklevel=1)
fname = "{}_out_{}_{}.npy".format(kind, str(nsim).zfill(5), str(nsnap).zfill(5))
warn(f"Created directory `{fdir}`.", UserWarning, stacklevel=1)
fname = f"{kind}_out_{str(nsim).zfill(5)}_{str(nsnap).zfill(5)}.npy"
return join(fdir, fname)
def overlap_path(self, nsim0, nsimx):
@ -302,8 +299,31 @@ class CSiBORGPaths:
fdir = join(self.postdir, "overlap")
if not isdir(fdir):
mkdir(fdir)
warn("Created directory `{}`.".format(fdir), UserWarning, stacklevel=1)
fname = "ovelrap_{}_{}.npz".format(str(nsim0).zfill(5), str(nsimx).zfill(5))
warn(f"Created directory `{fdir}`.", UserWarning, stacklevel=1)
fname = f"overlap_{str(nsim0).zfill(5)}_{str(nsimx).zfill(5)}.npz"
return join(fdir, fname)
def radpos_path(self, nsnap, nsim):
"""
Path to the files containing radial positions of halo particles (with
summed substructure).
Parameters
----------
nsnap : int
Snapshot index.
nsim : int
IC realisation index.
Returns
-------
path : str
"""
fdir = join(self.postdir, "radpos")
if not isdir(fdir):
mkdir(fdir)
warn(f"Created directory `{fdir}`.", UserWarning, stacklevel=1)
fname = f"radpos_{str(nsim).zfill(5)}_{str(nsnap).zfill(5)}.npz"
return join(fdir, fname)
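This new method is the plumbing for the PR's headline feature: dumping radial positions of halo particles (with summed substructure) into per-snapshot .npz archives. A usage sketch follows; the snapshot/realisation indices and the key layout inside the archive (one array per halo index) are assumptions for illustration, not fixed by this diff:

    import numpy
    from csiborgtools import read

    # `paths_glamdring` is the dictionary defined at the top of this diff.
    paths = read.CSiBORGPaths(**paths_glamdring)
    nsnap, nsim = 951, 7444  # hypothetical snapshot / IC realisation indices

    # Dump: one array of particle radial positions per halo index (assumed layout).
    radial = {"100": numpy.random.rand(1000), "101": numpy.random.rand(500)}
    numpy.savez(paths.radpos_path(nsnap, nsim), **radial)

    # Load back lazily through numpy's NpzFile interface.
    archive = numpy.load(paths.radpos_path(nsnap, nsim))
    r100 = archive["100"]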
def knnauto_path(self, run, nsim=None):
@ -325,9 +345,9 @@ class CSiBORGPaths:
fdir = join(self.postdir, "knn", "auto")
if not isdir(fdir):
makedirs(fdir)
warn("Created directory `{}`.".format(fdir), UserWarning, stacklevel=1)
warn(f"Created directory `{fdir}`.", UserWarning, stacklevel=1)
if nsim is not None:
return join(fdir, "knncdf_{}_{}.p".format(str(nsim).zfill(5), run))
return join(fdir, f"knncdf_{str(nsim).zfill(5)}_{run}.p")
files = glob(join(fdir, "knncdf*"))
run = "__" + run
@ -352,12 +372,12 @@ class CSiBORGPaths:
fdir = join(self.postdir, "knn", "cross")
if not isdir(fdir):
makedirs(fdir)
warn("Created directory `{}`.".format(fdir), UserWarning, stacklevel=1)
warn(f"Created directory `{fdir}`.", UserWarning, stacklevel=1)
if nsims is not None:
assert isinstance(nsims, (list, tuple)) and len(nsims) == 2
nsim0 = str(nsims[0]).zfill(5)
nsimx = str(nsims[1]).zfill(5)
return join(fdir, "knncdf_{}_{}__{}.p".format(nsim0, nsimx, run))
return join(fdir, f"knncdf_{nsim0}_{nsimx}__{run}.p")
files = glob(join(fdir, "knncdf*"))
run = "__" + run
@ -382,9 +402,9 @@ class CSiBORGPaths:
fdir = join(self.postdir, "tpcf", "auto")
if not isdir(fdir):
makedirs(fdir)
warn("Created directory `{}`.".format(fdir), UserWarning, stacklevel=1)
warn(f"Created directory `{fdir}`.", UserWarning, stacklevel=1)
if nsim is not None:
return join(fdir, "tpcf{}_{}.p".format(str(nsim).zfill(5), run))
return join(fdir, f"tpcf{str(nsim).zfill(5)}_{run}.p")
files = glob(join(fdir, "tpcf*"))
run = "__" + run

View file

@ -345,7 +345,7 @@ class ParticleReader:
.format(fname))
data = numpy.genfromtxt(fname)
# How the data is stored in the clump file.
clump_cols = {"index": (0, numpy.int32),
clump_cols = {"index": (0, numpy.int32),
"level": (1, numpy.int32),
"parent": (2, numpy.int32),
"ncell": (3, numpy.float32),

View file

@ -101,7 +101,8 @@ def cols_to_structured(N, cols):
if not isinstance(cols, list) and all(isinstance(c, tuple) for c in cols):
raise TypeError("`cols` must be a list of tuples.")
dtype = {"names": [col[0] for col in cols], "formats": [col[1] for col in cols]}
dtype = {"names": [col[0] for col in cols],
"formats": [col[1] for col in cols]}
return numpy.full(N, numpy.nan, dtype=dtype)
@ -236,9 +237,7 @@ def array_to_structured(arr, cols):
"""
cols = [cols] if isinstance(cols, str) else cols
if arr.ndim != 2 and arr.shape[1] != len(cols):
raise TypeError(
"`arr` must be a 2-dimensional array of " "shape `(n_samples, n_cols)`."
)
raise TypeError("`arr` must be a 2D array `(n_samples, n_cols)`.")
dtype = {"names": cols, "formats": [arr.dtype] * len(cols)}
out = numpy.full(arr.shape[0], numpy.nan, dtype=dtype)
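`array_to_structured` turns a plain (n_samples, n_cols) array into a structured array with named columns; the hunk shows the dtype construction and pre-allocation, and the column-copy loop is the natural completion. A standalone sketch of the conversion (illustrative, not an import from the package):

    import numpy

    arr = numpy.random.rand(4, 3).astype(numpy.float32)  # shape (n_samples, n_cols)
    cols = ["x", "y", "z"]

    dtype = {"names": cols, "formats": [arr.dtype] * len(cols)}
    out = numpy.full(arr.shape[0], numpy.nan, dtype=dtype)
    for i, col in enumerate(cols):
        out[col] = arr[:, i]  # copy each plain column into its named field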