Add galaxy sampling (#88)

* Improve calculations

* Improve flags

* Add smoothed options

* Remove some old comments

* Minor edits

* Save smoothed

* Move files

* Edit imports

* Edit imports

* Renaming imports

* Renaming imports

* Sort imports

* Sort files

* Sorting

* Optionally make copies of the field

* Add quijote backup check

* Add direct field smoothing

* Shorten documentation

* Shorten docs

* Update conversion

* Add particles to ASCII conversion

* Add a short comment

* Add SDSS uncorrected distance

* Adjust comment

* Add FITS index to galaxies

* Remove spare space

* Remove an unnecessary line

* Remove blank line

* Make space separated

* Add interpolated field path

* Add field sampling

* Sort imports

* Return density in cells

* Clear out observer velocity

* Add 170817 sampling

* Fix normalization

* Update plot
Author: Richard Stiskalek, 2023-09-01 16:29:50 +01:00 (committed by GitHub)
Commit eccd8e3507 (parent 0af925e26a)
26 changed files with 610 additions and 365 deletions


@ -111,7 +111,7 @@ class DensityField(BaseField):
Returns
-------
overdensity : 3-dimensional array of shape `(grid, grid, grid)`.
3-dimensional array of shape `(grid, grid, grid)`.
"""
delta /= delta.mean()
delta -= 1
@ -140,8 +140,7 @@ class DensityField(BaseField):
Returns
-------
rho : 3-dimensional array of shape `(grid, grid, grid)`.
Density field.
3-dimensional array of shape `(grid, grid, grid)`.
References
----------
@ -154,7 +153,9 @@ class DensityField(BaseField):
nparts = parts.shape[0]
batch_size = nparts // nbatch
start = 0
for __ in trange(nbatch + 1) if verbose else range(nbatch + 1):
for __ in trange(nbatch + 1, disable=not verbose,
desc="Loading particles for the density field"):
end = min(start + batch_size, nparts)
pos = parts[start:end]
pos, vel, mass = pos[:, :3], pos[:, 3:6], pos[:, 6]
@ -170,6 +171,10 @@ class DensityField(BaseField):
if end == nparts:
break
start = end
# Divide by the cell volume in (kpc / h)^3
rho /= (self.box.boxsize / grid * 1e3)**3
return rho
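For orientation, a quick numeric check of the new cell-volume normalisation (the 677.7 Mpc / h box size is taken from elsewhere in this PR; the grid value is illustrative):

boxsize = 677.7                            # Mpc / h, CSiBORG box size used elsewhere in this PR
grid = 512                                 # illustrative grid resolution
cell_volume = (boxsize / grid * 1e3) ** 3  # (kpc / h)^3 per cell, roughly 2.3e9
# Dividing the accumulated mass per cell by `cell_volume` turns it into a density.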
@ -216,8 +221,7 @@ class VelocityField(BaseField):
Returns
-------
radvel : 3-dimensional array of shape `(grid, grid, grid)`.
Radial velocity field.
3-dimensional array of shape `(grid, grid, grid)`.
"""
grid = rho_vel.shape[1]
radvel = numpy.zeros((grid, grid, grid), dtype=numpy.float32)
@ -261,8 +265,7 @@ class VelocityField(BaseField):
Returns
-------
rho_vel : 4-dimensional array of shape `(3, grid, grid, grid)`.
Velocity field along each axis.
4-dimensional array of shape `(3, grid, grid, grid)`.
References
----------
@ -341,7 +344,7 @@ class PotentialField(BaseField):
Returns
-------
potential : 3-dimensional array of shape `(grid, grid, grid)`.
3-dimensional array of shape `(grid, grid, grid)`.
"""
return MASL.potential(overdensity_field, self.box._omega_m,
self.box._aexp, self.MAS)
@ -383,19 +386,11 @@ class TidalTensorField(BaseField):
Returns
-------
eigvals : 3-dimensional array of shape `(grid, grid, grid)`
3-dimensional array of shape `(grid, grid, grid)`
"""
grid = tidal_tensor.T00.shape[0]
eigvals = numpy.full((grid, grid, grid, 3), numpy.nan,
dtype=numpy.float32)
dummy_vector = numpy.full(3, numpy.nan, dtype=numpy.float32)
dummy_tensor = numpy.full((3, 3), numpy.nan, dtype=numpy.float32)
tidal_tensor_to_eigenvalues(eigvals, dummy_vector, dummy_tensor,
tidal_tensor.T00, tidal_tensor.T01,
tidal_tensor.T02, tidal_tensor.T11,
tidal_tensor.T12, tidal_tensor.T22)
return eigvals
return tidal_tensor_to_eigenvalues(
tidal_tensor.T00, tidal_tensor.T01, tidal_tensor.T02,
tidal_tensor.T11, tidal_tensor.T12, tidal_tensor.T22)
@staticmethod
def eigvals_to_environment(eigvals, threshold=0.0):
@ -410,14 +405,11 @@ class TidalTensorField(BaseField):
Returns
-------
environment : 3-dimensional array of shape `(grid, grid, grid)`
3-dimensional array of shape `(grid, grid, grid)`
The environment of each grid cell. Possible values are 0 (void),
1 (sheet), 2 (filament), 3 (knot).
"""
environment = numpy.full(eigvals.shape[:-1], numpy.nan,
dtype=numpy.float32)
eigenvalues_to_environment(environment, eigvals, threshold)
return environment
return eigenvalues_to_environment(eigvals, threshold)
def __call__(self, overdensity_field):
"""
@ -430,7 +422,7 @@ class TidalTensorField(BaseField):
Returns
-------
tidal_tensor : :py:class:`MAS_library.tidal_tensor`
:py:class:`MAS_library.tidal_tensor`
Tidal tensor object, whose attributes `tidal_tensor.Tij` contain
the relevant tensor components.
"""
@ -439,38 +431,25 @@ class TidalTensorField(BaseField):
@jit(nopython=True)
def tidal_tensor_to_eigenvalues(eigvals, dummy_vector, dummy_tensor,
T00, T01, T02, T11, T12, T22):
def tidal_tensor_to_eigenvalues(T00, T01, T02, T11, T12, T22):
"""
Calculate eigenvalues of the tidal tensor field, sorted in decreasing
absolute value order. JIT implementation to speed up the work.
absolute value order.
Parameters
----------
eigvals : 3-dimensional array of shape `(grid, grid, grid)`
Array to store the eigenvalues.
dummy_vector : 1-dimensional array of shape `(3,)`
Dummy vector to store the eigenvalues.
dummy_tensor : 2-dimensional array of shape `(3, 3)`
Dummy tensor to store the tidal tensor.
T00 : 3-dimensional array of shape `(grid, grid, grid)`
Tidal tensor component :math:`T_{00}`.
T01 : 3-dimensional array of shape `(grid, grid, grid)`
Tidal tensor component :math:`T_{01}`.
T02 : 3-dimensional array of shape `(grid, grid, grid)`
Tidal tensor component :math:`T_{02}`.
T11 : 3-dimensional array of shape `(grid, grid, grid)`
Tidal tensor component :math:`T_{11}`.
T12 : 3-dimensional array of shape `(grid, grid, grid)`
Tidal tensor component :math:`T_{12}`.
T22 : 3-dimensional array of shape `(grid, grid, grid)`
Tidal tensor component :math:`T_{22}`.
T00, T01, T02, T11, T12, T22 : 3-dimensional array `(grid, grid, grid)`
Tidal tensor components.
Returns
-------
eigvals : 3-dimensional array of shape `(grid, grid, grid)`
3-dimensional array of shape `(grid, grid, grid)`
"""
grid = T00.shape[0]
eigvals = numpy.full((grid, grid, grid, 3), numpy.nan, dtype=numpy.float32)
dummy_vector = numpy.full(3, numpy.nan, dtype=numpy.float32)
dummy_tensor = numpy.full((3, 3), numpy.nan, dtype=numpy.float32)
for i in range(grid):
for j in range(grid):
for k in range(grid):
@ -494,15 +473,13 @@ def tidal_tensor_to_eigenvalues(eigvals, dummy_vector, dummy_tensor,
@jit(nopython=True)
def eigenvalues_to_environment(environment, eigvals, th):
def eigenvalues_to_environment(eigvals, th):
"""
Classify the environment of each grid cell based on the eigenvalues of the
tidal tensor field.
Parameters
----------
environment : 3-dimensional array of shape `(grid, grid, grid)`
Array to store the environment.
eigvals : 4-dimensional array of shape `(grid, grid, grid, 3)`
The eigenvalues of the tidal tensor field.
th : float
@ -510,19 +487,21 @@ def eigenvalues_to_environment(environment, eigvals, th):
Returns
-------
environment : 3-dimensional array of shape `(grid, grid, grid)`
3-dimensional array of shape `(grid, grid, grid)`
"""
env = numpy.full(eigvals.shape[:-1], numpy.nan, dtype=numpy.float32)
grid = eigvals.shape[0]
for i in range(grid):
for j in range(grid):
for k in range(grid):
lmbda1, lmbda2, lmbda3 = eigvals[i, j, k, :]
if lmbda1 < th and lmbda2 < th and lmbda3 < th:
environment[i, j, k] = 0
env[i, j, k] = 0
elif lmbda1 < th and lmbda2 < th:
environment[i, j, k] = 1
env[i, j, k] = 1
elif lmbda1 < th:
environment[i, j, k] = 2
env[i, j, k] = 2
else:
environment[i, j, k] = 3
return environment
env[i, j, k] = 3
return env
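For reference, a minimal sketch of the simplified call pattern after this refactor: both helpers now allocate and return their outputs instead of filling caller-provided arrays. The toy tensor below is made up purely to illustrate shapes; the function names are those defined in this file.

import numpy

grid = 4
T = numpy.random.rand(grid, grid, grid).astype(numpy.float32)  # toy tensor component

eigvals = tidal_tensor_to_eigenvalues(T, T, T, T, T, T)  # shape (grid, grid, grid, 3)
environment = eigenvalues_to_environment(eigvals, 0.0)   # 0 = void, 1 = sheet, 2 = filament, 3 = knot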


@ -18,13 +18,13 @@ Tools for interpolating 3D fields at arbitrary positions.
import MAS_library as MASL
import numpy
from numba import jit
from tqdm import trange
from tqdm import trange, tqdm
from .utils import force_single_precision
from .utils import force_single_precision, smoothen_field
from ..utils import periodic_wrap_grid, radec_to_cartesian
def evaluate_cartesian(*fields, pos, interp="CIC"):
def evaluate_cartesian(*fields, pos, smooth_scales=None, verbose=False):
"""
Evaluate a scalar field(s) at Cartesian coordinates `pos`.
@ -34,36 +34,49 @@ def evaluate_cartesian(*fields, pos, interp="CIC"):
Fields to be interpolated.
pos : 2-dimensional array of shape `(n_samples, 3)`
Query positions in box units.
interp : str, optional
Interpolation method, `NGP` or `CIC`.
smooth_scales : (list of) float, optional
Smoothing scales in box units. If `None`, no smoothing is performed.
verbose : bool, optional
Smoothing verbosity flag.
Returns
-------
interp_fields : (list of) 1-dimensional array of shape `(n_samples,).
(list of) 1-dimensional array of shape `(n_samples, len(smooth_scales))`
"""
assert interp in ["CIC", "NGP"]
boxsize = 1.
pos = force_single_precision(pos)
nsamples = pos.shape[0]
interp_fields = [numpy.full(nsamples, numpy.nan, dtype=numpy.float32)
if isinstance(smooth_scales, (int, float)):
smooth_scales = [smooth_scales]
if smooth_scales is None:
shape = (pos.shape[0],)
else:
shape = (pos.shape[0], len(smooth_scales))
interp_fields = [numpy.full(shape, numpy.nan, dtype=numpy.float32)
for __ in range(len(fields))]
if interp == "CIC":
for i, field in enumerate(fields):
MASL.CIC_interp(field, boxsize, pos, interp_fields[i])
else:
pos = numpy.floor(pos * fields[0].shape[0]).astype(numpy.int32)
for i, field in enumerate(fields):
for j in range(nsamples):
interp_fields[i][j] = field[pos[j, 0], pos[j, 1], pos[j, 2]]
for i, field in enumerate(fields):
if smooth_scales is None:
MASL.CIC_interp(field, 1., pos, interp_fields[i])
else:
desc = f"Smoothing and interpolating field {i + 1}/{len(fields)}"
iterator = tqdm(smooth_scales, desc=desc, disable=not verbose)
for j, scale in enumerate(iterator):
if not scale > 0:
fsmooth = numpy.copy(field)
else:
fsmooth = smoothen_field(field, scale, 1., make_copy=True)
MASL.CIC_interp(fsmooth, 1., pos, interp_fields[i][:, j])
if len(fields) == 1:
return interp_fields[0]
return interp_fields
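A short usage sketch of the updated interpolation signature, assuming `density` is a `(grid, grid, grid)` float32 field already in memory (the positions and smoothing scales below are illustrative):

import numpy

pos = numpy.array([[0.50, 0.50, 0.50],
                   [0.25, 0.75, 0.10]], dtype=numpy.float32)  # query points in box units

vals = evaluate_cartesian(density, pos=pos)                   # shape (2,)
vals_sm = evaluate_cartesian(density, pos=pos,
                             smooth_scales=[0.0, 0.01],       # scales in box units
                             verbose=True)                    # shape (2, 2)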
def evaluate_sky(*fields, pos, box):
def evaluate_sky(*fields, pos, mpc2box, smooth_scales=None, verbose=False):
"""
Evaluate a scalar field(s) at radial distance `Mpc / h`, right ascensions
[0, 360) deg and declinations [-90, 90] deg.
@ -74,19 +87,33 @@ def evaluate_sky(*fields, pos, box):
Field to be interpolated.
pos : 2-dimensional array of shape `(n_samples, 3)`
Query spherical coordinates.
box : :py:class:`csiborgtools.read.CSiBORGBox`
The simulation box information and transformations.
mpc2box : float
Conversion factor to multiply the radial distance by to get box units.
smooth_scales : (list of) float, optional
Smoothing scales in `Mpc / h`. If `None`, no smoothing is performed.
verbose : bool, optional
Smoothing verbosity flag.
Returns
-------
interp_fields : (list of) 1-dimensional array of shape `(n_samples,).
(list of) 1-dimensional array of shape `(n_samples, len(smooth_scales))`
"""
pos = force_single_precision(pos)
pos[:, 0] = box.mpc2box(pos[:, 0])
pos[:, 0] *= mpc2box
cart_pos = radec_to_cartesian(pos) + 0.5
return evaluate_cartesian(*fields, pos=cart_pos)
if smooth_scales is not None:
if isinstance(smooth_scales, (int, float)):
smooth_scales = [smooth_scales]
if isinstance(smooth_scales, list):
smooth_scales = numpy.array(smooth_scales, dtype=numpy.float32)
smooth_scales *= mpc2box
return evaluate_cartesian(*fields, pos=cart_pos,
smooth_scales=smooth_scales, verbose=verbose)
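And a corresponding sketch for the sky-coordinate wrapper, which now takes an explicit `mpc2box` conversion factor instead of a box object (the 677.7 Mpc / h box size appears elsewhere in this diff; the query point is illustrative):

import numpy

boxsize = 677.7                                                      # Mpc / h
pos_sky = numpy.array([[100.0, 180.0, 30.0]], dtype=numpy.float32)   # dist [Mpc/h], RA [deg], dec [deg]

vals = evaluate_sky(density, pos=pos_sky, mpc2box=1 / boxsize,
                    smooth_scales=[0.0, 8.0])                        # scales in Mpc / h, rescaled internally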
def observer_vobs(velocity_field):
@ -142,6 +169,7 @@ def make_sky(field, angpos, dist, box, volume_weight=True, verbose=True):
# of distances. We pre-allocate arrays for speed.
dir_loop = numpy.full((dist.size, 3), numpy.nan, dtype=numpy.float32)
boxdist = box.mpc2box(dist)
boxsize = box.box2mpc(1.)
ndir = angpos.shape[0]
out = numpy.full(ndir, numpy.nan, dtype=numpy.float32)
for i in trange(ndir) if verbose else range(ndir):
@ -151,10 +179,10 @@ def make_sky(field, angpos, dist, box, volume_weight=True, verbose=True):
if volume_weight:
out[i] = numpy.sum(
boxdist**2
* evaluate_sky(field, pos=dir_loop, box=box, isdeg=True))
* evaluate_sky(field, pos=dir_loop, mpc2box=1 / boxsize))
else:
out[i] = numpy.sum(
evaluate_sky(field, pos=dir_loop, box=box, isdeg=True))
evaluate_sky(field, pos=dir_loop, mpc2box=1 / boxsize))
out *= dx
return out


@ -29,12 +29,16 @@ def force_single_precision(x):
return x
def smoothen_field(field, smooth_scale, boxsize, threads=1):
def smoothen_field(field, smooth_scale, boxsize, threads=1, make_copy=False):
"""
Smooth a field with a Gaussian filter.
"""
W_k = SL.FT_filter(boxsize, smooth_scale, field.shape[0], "Gaussian",
threads)
if make_copy:
field = numpy.copy(field)
return SL.field_smoothing(field, W_k, threads)
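A minimal sketch of the new `make_copy` flag, which avoids modifying the input field in place (field contents and box size are placeholders):

import numpy

field = numpy.ones((256, 256, 256), dtype=numpy.float32)   # placeholder field
smoothed = smoothen_field(field, smooth_scale=8.0, boxsize=677.7,
                          threads=1, make_copy=True)        # `field` itself is left untouched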


@ -13,17 +13,8 @@
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
from .box_units import CSiBORGBox, QuijoteBox # noqa
from .halo_cat import (CSiBORGHaloCatalogue, QuijoteHaloCatalogue, fiducial_observers) # noqa
from .knn_summary import kNNCDFReader # noqa
from .nearest_neighbour_summary import NearestNeighbourReader # noqa
from .obs import (SDSS, MCXCClusters, PlanckClusters, TwoMPPGalaxies, # noqa
TwoMPPGroups)
from .overlap_summary import weighted_stats # noqa
from .overlap_summary import (NPairsOverlap, PairOverlap, # noqa
binned_resample_mean, get_cross_sims) # noqa
from .halo_cat import CSiBORGHaloCatalogue, QuijoteHaloCatalogue, fiducial_observers # noqa
from .obs import SDSS, MCXCClusters, PlanckClusters, TwoMPPGalaxies, TwoMPPGroups # noqa
from .paths import Paths # noqa
from .pk_summary import PKReader # noqa
from .readsim import (MmainReader, CSiBORGReader, QuijoteReader, halfwidth_mask, # noqa
load_halo_particles) # noqa
from .tpcf_summary import TPCFReader # noqa
from .utils import (cols_to_structured, read_h5) # noqa
from .readsim import MmainReader, CSiBORGReader, QuijoteReader, halfwidth_mask, load_halo_particles # noqa
from .utils import cols_to_structured, read_h5 # noqa


@ -22,7 +22,6 @@ from astropy.cosmology import LambdaCDM
from .readsim import CSiBORGReader, QuijoteReader
###############################################################################
# Base box #
###############################################################################


@ -24,16 +24,15 @@ from itertools import product
from math import floor
import numpy
from readfof import FoF_catalog
from sklearn.neighbors import NearestNeighbors
from ..utils import (cartesian_to_radec, periodic_distance_two_points,
radec_to_cartesian, real2redshift)
from .box_units import CSiBORGBox, QuijoteBox
from .paths import Paths
from .readsim import CSiBORGReader
from .utils import add_columns, cols_to_structured, flip_cols
from ..utils import (periodic_distance_two_points, real2redshift,
cartesian_to_radec, radec_to_cartesian)
class BaseCatalogue(ABC):
@ -43,6 +42,8 @@ class BaseCatalogue(ABC):
_data = None
_paths = None
_nsim = None
_observer_location = None
_observer_velocity = None
@property
def nsim(self):
@ -51,7 +52,7 @@ class BaseCatalogue(ABC):
Returns
-------
nsim : int
int
"""
if self._nsim is None:
raise RuntimeError("`nsim` is not set!")
@ -70,7 +71,7 @@ class BaseCatalogue(ABC):
Returns
-------
nsnap : int
int
"""
pass
@ -81,7 +82,7 @@ class BaseCatalogue(ABC):
Returns
-------
simname : str
str
"""
pass
@ -92,7 +93,7 @@ class BaseCatalogue(ABC):
Returns
-------
paths : :py:class:`csiborgtools.read.Paths`
:py:class:`csiborgtools.read.Paths`
"""
if self._paths is None:
raise RuntimeError("`paths` is not set!")
@ -110,7 +111,7 @@ class BaseCatalogue(ABC):
Returns
-------
data : structured array
structured array
"""
if self._data is None:
raise RuntimeError("`data` is not set!")
@ -123,7 +124,7 @@ class BaseCatalogue(ABC):
Returns
-------
box : instance of :py:class:`csiborgtools.units.BoxUnits`
instance of :py:class:`csiborgtools.units.BoxUnits`
"""
pass
@ -142,7 +143,7 @@ class BaseCatalogue(ABC):
Returns
-------
data : structured array
structured array
"""
fits = numpy.load(paths.initmatch(self.nsim, simname, "fit"))
X, cols = [], []
@ -174,7 +175,7 @@ class BaseCatalogue(ABC):
Returns
-------
data : structured array
structured array
"""
fits = numpy.load(paths.structfit(self.nsnap, self.nsim, simname))
@ -203,8 +204,7 @@ class BaseCatalogue(ABC):
Returns
-------
data : structured array
The filtered data based on the provided bounds.
structured array
"""
for key, (xmin, xmax) in bounds.items():
if key == "dist":
@ -229,7 +229,7 @@ class BaseCatalogue(ABC):
Returns
-------
obs_pos : 1-dimensional array of shape `(3,)`
1-dimensional array of shape `(3,)`
"""
if self._observer_location is None:
raise RuntimeError("`observer_location` is not set!")
@ -242,6 +242,25 @@ class BaseCatalogue(ABC):
assert obs_pos.shape == (3,)
self._observer_location = obs_pos
@property
def observer_velocity(self):
r"""
Velocity of the observer in units :math:`\mathrm{km} / \mathrm{s}`.
Returns
-------
1-dimensional array of shape `(3,)`
"""
if self._observer_velocity is None:
raise RuntimeError("`observer_velocity` is not set!")
return self._observer_velocity
@observer_velocity.setter
def observer_velocity(self, obs_vel):
assert isinstance(obs_vel, (list, tuple, numpy.ndarray))
obs_vel = numpy.asanyarray(obs_vel)
assert obs_vel.shape == (3,)
self._observer_velocity = obs_vel
def position(self, in_initial=False, cartesian=True,
subtract_observer=False):
r"""
@ -261,8 +280,7 @@ class BaseCatalogue(ABC):
Returns
-------
pos : ndarray, shape `(nobjects, 3)`
Position components.
ndarray, shape `(nobjects, 3)`
"""
suffix = '0' if in_initial else ''
component_keys = [f"{comp}{suffix}" for comp in ('x', 'y', 'z')]
@ -285,7 +303,7 @@ class BaseCatalogue(ABC):
Returns
-------
radial_distance : 1-dimensional array of shape `(nobjects,)`
1-dimensional array of shape `(nobjects,)`
"""
pos = self.position(in_initial=in_initial, cartesian=True,
subtract_observer=True)
@ -301,7 +319,7 @@ class BaseCatalogue(ABC):
"""
return numpy.vstack([self["v{}".format(p)] for p in ("x", "y", "z")]).T
def redshift_space_position(self, cartesian=True, subtract_observer=False):
def redshift_space_position(self, cartesian=True):
"""
Calculates the position of objects in redshift space. Positions can be
returned in either Cartesian coordinates (default) or spherical
@ -318,14 +336,19 @@ class BaseCatalogue(ABC):
Returns
-------
pos : 2-dimensional array of shape `(nobjects, 3)`
Position of objects in the desired coordinate system.
2-dimensional array of shape `(nobjects, 3)`
"""
# Force subtraction of observer if not in Cartesian coordinates
subtract_observer = subtract_observer or not cartesian
if self.simname == "quijote":
raise NotImplementedError("Redshift space positions not "
"implemented for Quijote.")
rsp = real2redshift(self.position(cartesian=True), self.velocity(),
self.observer_location, self.box, make_copy=False)
return rsp if cartesian else cartesian_to_radec(rsp)
self.observer_location, self.observer_velocity,
self.box, make_copy=False)
if cartesian:
return rsp
return cartesian_to_radec(rsp - self.observer_location)
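A hedged sketch of how the new `observer_velocity` keyword feeds into redshift-space positions (the velocity values are placeholders and `paths` is assumed to be an existing `Paths` instance):

cat = CSiBORGHaloCatalogue(7444, paths,
                           observer_velocity=[131.0, -170.0, 77.0])  # km / s, illustrative
pos_rsp = cat.redshift_space_position(cartesian=True)    # (nobjects, 3) Cartesian positions
pos_sky = cat.redshift_space_position(cartesian=False)   # distance / RA / dec about the observer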
def angmomentum(self):
"""
@ -334,7 +357,7 @@ class BaseCatalogue(ABC):
Returns
-------
angmom : 2-dimensional array of shape `(nobjects, 3)`
2-dimensional array of shape `(nobjects, 3)`
"""
return numpy.vstack([self["L{}".format(p)] for p in ("x", "y", "z")]).T
@ -355,8 +378,7 @@ class BaseCatalogue(ABC):
Returns
-------
knn : :py:class:`sklearn.neighbors.NearestNeighbors`
kNN object fitted with object positions.
:py:class:`sklearn.neighbors.NearestNeighbors`
"""
if subtract_observer and periodic:
raise ValueError("Subtracting observer is not supported for "
@ -533,8 +555,6 @@ class CSiBORGHaloCatalogue(BaseCatalogue):
IC realisation index.
paths : py:class`csiborgtools.read.Paths`
Paths object.
observer_location : array, optional
Observer's location in :math:`\mathrm{Mpc} / h`.
bounds : dict
Parameter bounds; keys as names, values as (min, max) tuples. Use
`dist` for radial distance, `None` for no bound.
@ -544,14 +564,17 @@ class CSiBORGHaloCatalogue(BaseCatalogue):
Load initial positions.
with_lagpatch : bool, optional
Load halos with a resolved Lagrangian patch.
observer_velocity : 1-dimensional array, optional
Observer's velocity in :math:`\mathrm{km} / \mathrm{s}`.
"""
def __init__(self, nsim, paths, observer_location=[338.85, 338.85, 338.85],
bounds={"dist": (0, 155.5)},
load_fitted=True, load_initial=True, with_lagpatch=False):
def __init__(self, nsim, paths, bounds={"dist": (0, 155.5)},
load_fitted=True, load_initial=True, with_lagpatch=False,
observer_velocity=None):
self.nsim = nsim
self.paths = paths
self.observer_location = observer_location
self.observer_location = [338.85, 338.85, 338.85]
self.observer_velocity = observer_velocity
reader = CSiBORGReader(paths)
data = reader.read_fof_halos(self.nsim)
box = self.box
@ -695,7 +718,7 @@ class QuijoteHaloCatalogue(BaseCatalogue):
Returns
-------
nsnap : int
int
"""
return self._nsnap
@ -715,7 +738,7 @@ class QuijoteHaloCatalogue(BaseCatalogue):
Returns
-------
redshift : float
float
"""
return {4: 0.0, 3: 0.5, 2: 1.0, 1: 2.0, 0: 3.0}[self.nsnap]
@ -737,7 +760,7 @@ class QuijoteHaloCatalogue(BaseCatalogue):
Returns
-------
cat : instance of csiborgtools.read.QuijoteHaloCatalogue
instance of `csiborgtools.read.QuijoteHaloCatalogue`
"""
cat = deepcopy(self)
cat.observer_location = fiducial_observers(self.box.boxsize, rmax)[n]


@ -23,6 +23,7 @@ import numpy
from astropy import units
from astropy.coordinates import SkyCoord
from astropy.io import fits
from astropy.cosmology import FlatLambdaCDM
from scipy import constants
from .utils import cols_to_structured
@ -143,7 +144,7 @@ class TwoMPPGroups(TextSurvey):
"""
name = "2M++_groups"
def __init__(self, fpath):
def __init__(self, fpath=None):
if fpath is None:
fpath = join("/mnt/extraspace/rstiskalek/catalogs",
"2M++_group_catalog.dat")
@ -323,7 +324,7 @@ class FitsSurvey(ABC):
-------
keys : list of str
"""
return self.routine_keys + self.fits_keys
return self.routine_keys + self.fits_keys + ["INDEX"]
def make_mask(self, steps):
"""
@ -356,20 +357,27 @@ class FitsSurvey(ABC):
return out
def __getitem__(self, key):
if key == "INDEX":
mask = self.selection_mask
if mask is None:
return numpy.arange(self.size)
else:
return numpy.arange(mask.size)[mask]
# Check duplicates
if key in self.routine_keys and key in self.fits_keys:
warn("Key `{}` found in both `routine_keys` and `fits_keys`. "
"Returning `routine_keys` value.".format(key), stacklevel=1)
warn(f"Key `{key}` found in both `routine_keys` and `fits_keys`. "
"Returning `routine_keys` value.")
if key in self.routine_keys:
func, args = self.routines[key]
out = func(*args)
elif key in self.fits_keys:
warn("Returning a FITS property. Be careful about little h!",
stacklevel=1)
warn(f"Returning a FITS property `{key}`. "
"Be careful about little h!")
out = self.get_fitsitem(key)
else:
raise KeyError("Unrecognised key `{}`.".format(key))
raise KeyError(f"Unrecognised key `{key}`.")
if self.selection_mask is None:
return out
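A brief sketch of the new `INDEX` key, which maps rows of a masked survey back to rows of the full FITS catalogue (the selection step here mirrors the one used in `field_sample.py` below):

survey = SDSS(h=1, sel_steps=lambda cls: [(lambda x: cls[x] < 155.5, ("DIST",))])
fits_rows = survey["INDEX"]   # indices into the unmasked catalogue, shape (survey.size,)
dist = survey["DIST"]         # other keys come back in the same masked order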
@ -538,6 +546,7 @@ class MCXCClusters(FitsSurvey):
"""Get luminosity, puts back units to be in ergs/s."""
return self.get_fitsitem(key) * 1e44 * (self._hdata / self.h)**2
###############################################################################
# SDSS galaxies #
###############################################################################
@ -556,6 +565,8 @@ class SDSS(FitsSurvey):
h : float, optional
Little h. By default `h = 1`. The catalogue assumes this value.
The routine properties should take care of little h conversion.
Om0 : float, optional
Matter density. By default `Om0 = 0.3175`, matching CSiBORG.
sel_steps : py:function:
Steps to mask the survey. Expected to look for example like
```
@ -570,7 +581,7 @@ class SDSS(FitsSurvey):
"""
name = "SDSS"
def __init__(self, fpath=None, h=1, sel_steps=None):
def __init__(self, fpath=None, h=1, Om0=0.3175, sel_steps=None):
if fpath is None:
fpath = "/mnt/extraspace/rstiskalek/catalogs/nsa_v1_0_1.fits"
self._file = fits.open(fpath, memmap=False)
@ -603,6 +614,8 @@ class SDSS(FitsSurvey):
self.routines.update({key: val})
# Set DIST routine
self.routines.update({"DIST": (self._dist, ())})
self.routines.update(
{"DIST_UNCORRECTED": (self._dist_uncorrected, (Om0,))})
# Set MASS routines
for photo in self._photos:
key = "{}_MASS".format(photo)
@ -623,8 +636,11 @@ class SDSS(FitsSurvey):
@property
def size(self):
# Here pick some property that is in the catalogue..
return self.get_fitsitem("ZDIST").size
mask = self.selection_mask
if mask is not None:
return numpy.sum(mask)
else:
return self.get_fitsitem("ZDIST").size
def _absmag(self, photo, band):
"""
@ -674,6 +690,14 @@ class SDSS(FitsSurvey):
"""
return self.get_fitsitem("ZDIST") * constants.c * 1e-3 / (100 * self.h)
def _dist_uncorrected(self, Om0):
"""
Get the comoving distance estimate from `Z`, i.e. redshift uncorrected
for peculiar motion in the heliocentric frame.
"""
cosmo = FlatLambdaCDM(H0=100 * self.h, Om0=Om0)
return cosmo.comoving_distance(self.get_fitsitem("Z")).value
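For context, the uncorrected distance is simply the comoving distance at the heliocentric redshift; a standalone sketch of the same calculation with an illustrative redshift:

from astropy.cosmology import FlatLambdaCDM

cosmo = FlatLambdaCDM(H0=100, Om0=0.3175)        # H0 = 100 h km/s/Mpc, so distances come out in Mpc / h
d_uncorr = cosmo.comoving_distance(0.02).value   # roughly 60 Mpc / h for z = 0.02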
def _solmass(self, photo):
"""
Get solar mass of a given photometry. Converts little h.


@ -561,9 +561,9 @@ class Paths:
return join(fdir, fname)
def field(self, kind, MAS, grid, nsim, in_rsp):
def field(self, kind, MAS, grid, nsim, in_rsp, smooth_scale=None):
r"""
Path to the files containing the calculated density fields in CSiBORG.
Path to the files containing the calculated fields in CSiBORG.
Parameters
----------
@ -578,6 +578,8 @@ class Paths:
IC realisation index.
in_rsp : bool
Whether the calculation is performed in redshift space.
smooth_scale : float, optional
Smoothing scale in Mpc/h.
Returns
-------
@ -593,6 +595,54 @@ class Paths:
kind = kind + "_rsp"
fname = f"{kind}_{MAS}_{str(nsim).zfill(5)}_grid{grid}.npy"
if smooth_scale is not None:
fname = fname.replace(".npy", f"_smooth{smooth_scale}.npy")
return join(fdir, fname)
def field_interpolated(self, survey, kind, MAS, grid, nsim, in_rsp,
smooth_scale=None):
"""
Path to the files containing the CSiBORG interpolated field for a given
survey.
Parameters
----------
survey : str
Survey name.
kind : str
Field type. Must be one of: `density`, `velocity`, `potential`,
`radvel`, `environment`.
MAS : str
Mass-assignment scheme.
grid : int
Grid size.
nsim : int
IC realisation index.
in_rsp : bool
Whether the calculation is performed in redshift space.
smooth_scale : float, optional
Smoothing scale in Mpc/h.
Returns
-------
str
"""
assert kind in ["density", "velocity", "potential", "radvel",
"environment"]
fdir = join(self.postdir, "environment_interpolated")
try_create_directory(fdir)
if in_rsp:
kind = kind + "_rsp"
fname = f"{survey}_{kind}_{MAS}_{str(nsim).zfill(5)}_grid{grid}.npz"
if smooth_scale is not None:
fname = fname.replace(".npz", f"_smooth{smooth_scale}.npz")
return join(fdir, fname)
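For illustration, the kind of file names these path helpers now produce (the directory roots depend on the local `Paths` configuration; the arguments below are placeholders):

import csiborgtools

paths = csiborgtools.read.Paths(**csiborgtools.paths_glamdring)
paths.field("density", "PCS", 512, 7444, in_rsp=False, smooth_scale=8.0)
# -> .../density_PCS_07444_grid512_smooth8.0.npy
paths.field_interpolated("SDSS", "density", "PCS", 512, 7444, in_rsp=False)
# -> .../environment_interpolated/SDSS_density_PCS_07444_grid512.npz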
def observer_peculiar_velocity(self, MAS, grid, nsim):


@ -17,7 +17,6 @@ from os.path import isfile
import numpy
from h5py import File
###############################################################################
# Array manipulation #
###############################################################################


@ -0,0 +1,21 @@
# Copyright (C) 2023 Richard Stiskalek
# This program is free software; you can redistribute it and/or modify it
# under the terms of the GNU General Public License as published by the
# Free Software Foundation; either version 3 of the License, or (at your
# option) any later version.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General
# Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
from .knn_summary import kNNCDFReader # noqa
from .nearest_neighbour_summary import NearestNeighbourReader # noqa
from .overlap_summary import weighted_stats # noqa
from .overlap_summary import NPairsOverlap, PairOverlap, get_cross_sims # noqa
from .pk_summary import PKReader # noqa
from .tpcf_summary import TPCFReader # noqa


@ -19,11 +19,10 @@ the final snapshot.
from math import floor
import numpy
from numba import jit
from scipy.integrate import cumulative_trapezoid, quad
from scipy.interpolate import interp1d
from scipy.stats import gaussian_kde, kstest
from numba import jit
from tqdm import tqdm


@ -778,7 +778,7 @@ class NPairsOverlap:
Returns
-------
pairs : list of :py:class:`csiborgtools.read.PairOverlap`
pairs : list of :py:class:`csiborgtools.summary.PairOverlap`
"""
return self._pairs
@ -827,53 +827,3 @@ def get_cross_sims(simname, nsim0, paths, min_logmass, smoothed):
if isfile(f1) or isfile(f2):
nsimxs.append(nsimx)
return nsimxs
def binned_resample_mean(x, y, prob, bins, nresample=50, seed=42):
"""
Calculate binned average of `y` by MC resampling. Each point is kept with
probability `prob`.
Parameters
----------
x : 1-dimensional array
Independent variable.
y : 1-dimensional array
Dependent variable.
prob : 1-dimensional array
Sample probability.
bins : 1-dimensional array
Bin edges to bin `x`.
nresample : int, optional
Number of MC resamples. By default 50.
seed : int, optional
Random seed.
Returns
-------
bin_centres : 1-dimensional array
Bin centres.
stat : 2-dimensional array
Mean and its standard deviation from MC resampling.
"""
assert (x.ndim == 1) & (x.shape == y.shape == prob.shape)
gen = numpy.random.RandomState(seed)
loop_stat = numpy.full(nresample, numpy.nan) # Preallocate loop arr
stat = numpy.full((bins.size - 1, 2), numpy.nan) # Preallocate output
for i in range(bins.size - 1):
mask = (x > bins[i]) & (x <= bins[i + 1])
nsamples = numpy.sum(mask)
loop_stat[:] = numpy.nan # Clear it
for j in range(nresample):
loop_stat[j] = numpy.mean(y[mask][gen.rand(nsamples) < prob[mask]])
stat[i, 0] = numpy.mean(loop_stat)
stat[i, 1] = numpy.std(loop_stat)
bin_centres = (bins[1:] + bins[:-1]) / 2
return bin_centres, stat


@ -16,8 +16,6 @@
import joblib
import numpy
from .paths import Paths
class TPCFReader:
"""
@ -45,7 +43,6 @@ class TPCFReader:
@paths.setter
def paths(self, paths):
assert isinstance(paths, Paths)
self._paths = paths
def read(self, run):


@ -78,7 +78,7 @@
},
"outputs": [],
"source": [
"pkreader = csiborgtools.read.PKReader(paths.get_ics, hw)\n",
"pkreader = csiborgtools.summary.PKReader(paths.get_ics, hw)\n",
"\n",
"autoks, pks = pkreader.read_autos()\n",
"\n",


@ -23,16 +23,9 @@ from gc import collect
import numpy
from mpi4py import MPI
try:
import csiborgtools
except ModuleNotFoundError:
import sys
sys.path.append("../")
import csiborgtools
from taskmaster import work_delegation
import csiborgtools
from utils import get_nsims
###############################################################################
@ -60,6 +53,10 @@ def density_field(nsim, parser_args, to_save=True):
radvel_field = numpy.load(paths.field(
"radvel", parser_args.MAS, parser_args.grid, nsim, False))
if parser_args.verbose:
print(f"{datetime.now()}: converting density field to RSP.",
flush=True)
field = csiborgtools.field.field2rsp(field, radvel_field, box,
parser_args.MAS)
@ -187,6 +184,10 @@ def environment_field(nsim, parser_args, to_save=True):
density_gen = csiborgtools.field.DensityField(box, parser_args.MAS)
rho = density_gen.overdensity_field(rho)
if parser_args.smooth_scale > 0.0:
rho = csiborgtools.field.smoothen_field(
rho, parser_args.smooth_scale, box.box2mpc(1.))
gen = csiborgtools.field.TidalTensorField(box, parser_args.MAS)
field = gen(rho)
@ -217,7 +218,7 @@ def environment_field(nsim, parser_args, to_save=True):
if to_save:
fout = paths.field("environment", parser_args.MAS, parser_args.grid,
nsim, parser_args.in_rsp)
nsim, parser_args.in_rsp, parser_args.smooth_scale)
print(f"{datetime.now()}: saving output to `{fout}`.")
numpy.save(fout, env)
return env
@ -241,6 +242,8 @@ if __name__ == "__main__":
parser.add_argument("--grid", type=int, help="Grid resolution.")
parser.add_argument("--in_rsp", type=lambda x: bool(strtobool(x)),
help="Calculate in RSP?")
parser.add_argument("--smooth_scale", type=float, default=0.0,
help="Smoothing scale in Mpc / h. Only used for the environment field.") # noqa
parser.add_argument("--verbose", type=lambda x: bool(strtobool(x)),
help="Verbosity flag for reading in particles.")
parser.add_argument("--simname", type=str, default="csiborg",

scripts/field_sample.py (new file, 194 lines)

@ -0,0 +1,194 @@
# Copyright (C) 2023 Richard Stiskalek
# This program is free software; you can redistribute it and/or modify it
# under the terms of the GNU General Public License as published by the
# Free Software Foundation; either version 3 of the License, or (at your
# option) any later version.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General
# Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
"""
Sample a CSiBORG field at galaxy positions and save the result to disk.
"""
from argparse import ArgumentParser
from distutils.util import strtobool
from os.path import join
import numpy
from astropy.cosmology import FlatLambdaCDM
from h5py import File
from mpi4py import MPI
from taskmaster import work_delegation
from tqdm import tqdm
import csiborgtools
from utils import get_nsims
MPC2BOX = 1 / 677.7
def steps(cls, survey_name):
"""Make a list of selection criteria to apply to a survey."""
if survey_name == "SDSS":
return [
# (lambda x: cls[x], ("IN_DR7_LSS",)),
# (lambda x: cls[x] < 17.6, ("ELPETRO_APPMAG_r", )),
(lambda x: cls[x] < 155.5, ("DIST", ))
]
else:
raise NotImplementedError(f"Survey `{survey_name}` not implemented.")
def open_galaxy_positions(survey_name, comm):
"""
Load the survey galaxy positions and indices, broadcasting them to all
ranks.
"""
rank, size = comm.Get_rank(), comm.Get_size()
if rank == 0:
if survey_name == "SDSS":
survey = csiborgtools.read.SDSS(
h=1, sel_steps=lambda cls: steps(cls, survey_name))
pos = numpy.vstack([survey["DIST_UNCORRECTED"],
survey["RA"],
survey["DEC"]],
).T
indxs = survey["INDEX"]
elif survey_name == "GW170817":
samples = File("/mnt/extraspace/rstiskalek/GWLSS/H1L1V1-EXTRACT_POSTERIOR_GW170817-1187008600-400.hdf", 'r')["samples"] # noqa
cosmo = FlatLambdaCDM(H0=100, Om0=0.3175)
pos = numpy.vstack([
cosmo.comoving_distance(samples["redshift"][:]).value,
samples["ra"][:] * 180 / numpy.pi,
samples["dec"][:] * 180 / numpy.pi],
).T
indxs = numpy.arange(pos.shape[0])
else:
raise NotImplementedError(f"Survey `{survey_name}` not "
"implemented.")
else:
pos = None
indxs = None
comm.Barrier()
if size > 1:
pos = comm.bcast(pos, root=0)
indxs = comm.bcast(indxs, root=0)
return pos, indxs
def evaluate_field(field, pos, nrand, smooth_scales=None, seed=42,
verbose=True):
"""
Evaluate the field at the given sky positions. Additionally, evaluate the
field at `nrand` random positions.
"""
if smooth_scales is None:
smooth_scales = [0.]
nsample = pos.shape[0]
nsmooth = len(smooth_scales)
val = numpy.full((nsample, nsmooth), numpy.nan, dtype=field.dtype)
if nrand > 0:
rand_val = numpy.full((nsample, nsmooth, nrand), numpy.nan,
dtype=field.dtype)
else:
rand_val = None
for i, scale in enumerate(tqdm(smooth_scales, desc="Smoothing",
disable=not verbose)):
if scale > 0:
field_smoothed = csiborgtools.field.smoothen_field(
field, scale * MPC2BOX, boxsize=1, make_copy=True)
else:
field_smoothed = field
val[:, i] = csiborgtools.field.evaluate_sky(
field_smoothed, pos=pos, mpc2box=MPC2BOX)
if nrand == 0:
continue
for j in range(nrand):
gen = numpy.random.default_rng(seed)
pos_rand = numpy.vstack([
gen.permutation(pos[:, 0]),
gen.uniform(0, 360, nsample),
90 - numpy.rad2deg(numpy.arccos(gen.uniform(-1, 1, nsample))),
]).T
rand_val[:, i, j] = csiborgtools.field.evaluate_sky(
field_smoothed, pos=pos_rand, mpc2box=MPC2BOX)
return val, rand_val, smooth_scales
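A short sketch of what `evaluate_field` returns (the field and positions are whatever was loaded above; the scales and `nrand` value are illustrative):

val, rand_val, scales = evaluate_field(field, pos, nrand=10,
                                       smooth_scales=[0.0, 4.0, 8.0])
# val      : (n_galaxies, 3)      field at the galaxy positions, one column per smoothing scale
# rand_val : (n_galaxies, 3, 10)  same radial distances, but at random sky positions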
def main(nsim, parser_args, pos, indxs, paths, verbose):
"""Load the field, interpolate it and save it to disk."""
fpath_field = paths.field(parser_args.kind, parser_args.MAS,
parser_args.grid, nsim, parser_args.in_rsp)
field = numpy.load(fpath_field)
val, rand_val, smooth_scales = evaluate_field(
field, pos, nrand=parser_args.nrand,
smooth_scales=parser_args.smooth_scales, verbose=verbose)
if parser_args.survey == "GW170817":
kind = parser_args.kind
kind = kind + "_rsp" if parser_args.in_rsp else kind
fout = join(
"/mnt/extraspace/rstiskalek/GWLSS/",
f"{kind}_{parser_args.MAS}_{parser_args.grid}_{nsim}_H1L1V1-EXTRACT_POSTERIOR_GW170817-1187008600-400.npz") # noqa
else:
fout = paths.field_interpolated(parser_args.survey, parser_args.kind,
parser_args.MAS, parser_args.grid,
nsim, parser_args.in_rsp)
if verbose:
print(f"Saving to ... `{fout}`.")
numpy.savez(fout, val=val, rand_val=rand_val, indxs=indxs,
smooth_scales=smooth_scales)
if __name__ == "__main__":
parser = ArgumentParser()
parser.add_argument("--nsims", type=int, nargs="+", default=None,
help="IC realisations. If `-1` processes all.")
parser.add_argument("--survey", type=str, required=True,
choices=["SDSS", "GW170817"],
help="Galaxy survey")
parser.add_argument("--smooth_scales", type=float, nargs="+", default=None,
help="Smoothing scales in Mpc / h.")
parser.add_argument("--kind", type=str,
choices=["density", "rspdensity", "velocity", "radvel",
"potential"],
help="What field to interpolate.")
parser.add_argument("--MAS", type=str,
choices=["NGP", "CIC", "TSC", "PCS"],
help="Mass assignment scheme.")
parser.add_argument("--grid", type=int, help="Grid resolution.")
parser.add_argument("--in_rsp", type=lambda x: bool(strtobool(x)),
help="Field in RSP?")
parser.add_argument("--nrand", type=int, required=True,
help="Number of rand. positions to evaluate the field")
args = parser.parse_args()
paths = csiborgtools.read.Paths(**csiborgtools.paths_glamdring)
nsims = get_nsims(args, paths)
pos, indxs = open_galaxy_positions(args.survey, MPI.COMM_WORLD)
def _main(nsim):
main(nsim, args, pos, indxs, paths,
verbose=MPI.COMM_WORLD.Get_size() == 1)
work_delegation(_main, nsims, MPI.COMM_WORLD)


@ -81,7 +81,7 @@ def find_neighbour(args, nsim, cats, paths, comm, save_kind):
numpy.savez(fout, **out)
paths = csiborgtools.read.Paths(**csiborgtools.paths_glamdring)
reader = csiborgtools.read.NearestNeighbourReader(
reader = csiborgtools.summary.NearestNeighbourReader(
paths=paths, **csiborgtools.neighbour_kwargs)
counts = numpy.zeros((reader.nbins_radial, reader.nbins_neighbour),
dtype=numpy.float32)


@ -68,7 +68,7 @@ def pair_match_max(nsim0, nsimx, simname, min_logmass, mult, verbose):
else:
raise ValueError(f"Unknown simulation `{simname}`.")
reader = csiborgtools.read.PairOverlap(cat0, catx, paths, min_logmass,
reader = csiborgtools.summary.PairOverlap(cat0, catx, paths, min_logmass,
maxdist=maxdist)
out = csiborgtools.match.matching_max(
cat0, catx, mass_kind, mult=mult, periodic=periodic,


@ -0,0 +1,82 @@
# Copyright (C) 2023 Richard Stiskalek
# This program is free software; you can redistribute it and/or modify it
# under the terms of the GNU General Public License as published by the
# Free Software Foundation; either version 3 of the License, or (at your
# option) any later version.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General
# Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
"""Convert the HDF5 CSiBORG particle file to an ASCII file."""
from argparse import ArgumentParser
import csiborgtools
import h5py
from mpi4py import MPI
from utils import get_nsims
from tqdm import trange
from taskmaster import work_delegation
def h5_to_ascii(nsim, paths, chunk_size=50_000, verbose=True):
"""
Convert the HDF5 CSiBORG particle file to an ASCII file. Outputs only
particle positions in Mpc / h. Ignores the unequal particle masses.
"""
fname = paths.particles(nsim, args.simname)
boxsize = 677.7
fname_out = fname.replace(".h5", ".txt")
with h5py.File(fname, 'r') as f:
dataset = f["particles"]
total_size = dataset.shape[0]
if verbose:
print(f"Number of rows to write: {total_size}")
with open(fname_out, 'w') as out_file:
# Write the header
out_file.write("#px py pz\n")
# Loop through data in chunks
for i in trange(0, total_size, chunk_size,
desc=f"Writing to ... `{fname_out}`",
disable=not verbose):
end = i + chunk_size
if end > total_size:
end = total_size
data_chunk = dataset[i:end]
# Convert to positions Mpc / h
data_chunk = data_chunk[:, :3] * boxsize
chunk_str = "\n".join([f"{x:.4f} {y:.4f} {z:.4f}"
for x, y, z in data_chunk])
out_file.write(chunk_str + "\n")
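The resulting ASCII file is plain space-separated columns and can be read back with numpy; a sketch, with a hypothetical output filename:

import numpy

pos = numpy.loadtxt("particles_07444.txt", comments="#")  # hypothetical filename
px, py, pz = pos.T                                         # particle positions in Mpc / h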
if __name__ == "__main__":
parser = ArgumentParser()
parser.add_argument("--nsims", type=int, nargs="+", default=None,
help="IC realisations. If `-1` processes all.")
parser.add_argument("--simname", type=str, default="csiborg",
choices=["csiborg"],
help="Simulation name")
args = parser.parse_args()
paths = csiborgtools.read.Paths(**csiborgtools.paths_glamdring)
nsims = get_nsims(args, paths)
def main(nsim):
h5_to_ascii(nsim, paths, verbose=MPI.COMM_WORLD.Get_size() == 1)
work_delegation(main, nsims, MPI.COMM_WORLD)


@ -37,22 +37,14 @@ except ModuleNotFoundError:
def get_nsims(args, paths):
"""
Get simulation indices from the command line arguments.
Parameters
----------
args : argparse.Namespace
Command line arguments. Must include `nsims` and `simname`. If `nsims`
is `None` or `-1`, all simulations in `simname` are used.
paths : :py:class`csiborgtools.paths.Paths`
Paths object.
Returns
-------
nsims : list of int
Simulation indices.
"""
try:
from_quijote_backup = args.from_quijote_backup
except AttributeError:
from_quijote_backup = False
if args.nsims is None or args.nsims[0] == -1:
nsims = paths.get_ics(args.simname, args.from_quijote_backup)
nsims = paths.get_ics(args.simname, from_quijote_backup)
else:
nsims = args.nsims
return list(nsims)
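A sketch of the new fallback behaviour: scripts that do not define `--from_quijote_backup` still work, since the attribute lookup now falls back to `False` (the namespace below stands in for parsed arguments and `paths` for an existing `Paths` instance):

from argparse import Namespace

args = Namespace(nsims=None, simname="csiborg")  # no `from_quijote_backup` attribute
nsims = get_nsims(args, paths)                   # uses from_quijote_backup=False internally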
@ -81,8 +73,7 @@ def read_single_catalogue(args, config, nsim, run, rmax, paths, nobs=None):
Returns
-------
cat : csiborgtools.read.CSiBORGHaloCatalogue or csiborgtools.read.QuijoteHaloCatalogue # noqa
Halo catalogue with selection criteria applied.
`csiborgtools.read.CSiBORGHaloCatalogue` or `csiborgtools.read.QuijoteHaloCatalogue` # noqa
"""
selection = config.get(run, None)
if selection is None:


@ -62,7 +62,7 @@ def open_cats(nsims, simname):
def read_dist(simname, run, kind, kwargs):
paths = csiborgtools.read.Paths(**kwargs["paths_kind"])
reader = csiborgtools.read.NearestNeighbourReader(**kwargs, paths=paths)
reader = csiborgtools.summary.NearestNeighbourReader(**kwargs, paths=paths)
fpath = paths.cross_nearest(simname, run, "tot_counts", nsim=0, nobs=0)
counts = numpy.load(fpath)["tot_counts"]
@ -102,7 +102,7 @@ def plot_dist(run, kind, kwargs, runs_to_mass, pulled_cdf=False, r200=None):
"""
assert kind in ["pdf", "cdf"]
print(f"Plotting the {kind} for {run}...", flush=True)
reader = csiborgtools.read.NearestNeighbourReader(
reader = csiborgtools.summary.NearestNeighbourReader(
**kwargs, paths=csiborgtools.read.Paths(**kwargs["paths_kind"]))
raddist = reader.bin_centres("radial")
r = reader.bin_centres("neighbour")
@ -241,7 +241,7 @@ def plot_cdf_diff(runs, kwargs, pulled_cdf, runs_to_mass):
"""
print("Plotting the CDF difference...", flush=True)
paths = csiborgtools.read.Paths(**kwargs["paths_kind"])
reader = csiborgtools.read.NearestNeighbourReader(**kwargs, paths=paths)
reader = csiborgtools.summary.NearestNeighbourReader(**kwargs, paths=paths)
r = reader.bin_centres("neighbour")
runs_to_mass = [numpy.mean(runs_to_mass[run]) for run in runs]
@ -320,7 +320,7 @@ def make_kl(simname, run, nsim, nobs, kwargs):
of each halo in the reference simulation.
"""
paths = csiborgtools.read.Paths(**kwargs["paths_kind"])
reader = csiborgtools.read.NearestNeighbourReader(**kwargs, paths=paths)
reader = csiborgtools.summary.NearestNeighbourReader(**kwargs, paths=paths)
# This is the reference PDF. Must be Quijote!
pdf = read_dist("quijote", run, "pdf", kwargs)
return reader.kl_divergence(simname, run, nsim, pdf, nobs=nobs)
@ -352,7 +352,7 @@ def make_ks(simname, run, nsim, nobs, kwargs):
each halo in the reference simulation.
"""
paths = csiborgtools.read.Paths(**kwargs["paths_kind"])
reader = csiborgtools.read.NearestNeighbourReader(**kwargs, paths=paths)
reader = csiborgtools.summary.NearestNeighbourReader(**kwargs, paths=paths)
# This is the reference CDF. Must be Quijote!
cdf = read_dist("quijote", run, "cdf", kwargs)
return reader.ks_significance(simname, run, nsim, cdf, nobs=nobs)
@ -551,7 +551,7 @@ def plot_significance_vs_mass(simname, runs, nsim, nobs, kind, kwargs,
print(f"Plotting {kind} significance vs mass.")
assert kind in ["kl", "ks"]
paths = csiborgtools.read.Paths(**kwargs["paths_kind"])
reader = csiborgtools.read.NearestNeighbourReader(**kwargs, paths=paths)
reader = csiborgtools.summary.NearestNeighbourReader(**kwargs, paths=paths)
with plt.style.context(plt_utils.mplstyle):
plt.figure()
@ -627,7 +627,7 @@ def plot_kl_vs_ks(simname, runs, nsim, nobs, kwargs, runs_to_mass,
None
"""
paths = csiborgtools.read.Paths(**kwargs["paths_kind"])
reader = csiborgtools.read.NearestNeighbourReader(**kwargs, paths=paths)
reader = csiborgtools.summary.NearestNeighbourReader(**kwargs, paths=paths)
xs, ys, cs = [], [], []
for run in runs:
@ -697,7 +697,7 @@ def plot_kl_vs_overlap(runs, nsim, kwargs, runs_to_mass, plot_std=True,
None
"""
paths = csiborgtools.read.Paths(**kwargs["paths_kind"])
nn_reader = csiborgtools.read.NearestNeighbourReader(**kwargs, paths=paths)
nn_reader = csiborgtools.summary.NearestNeighbourReader(**kwargs, paths=paths)
xs, ys1, ys2, cs = [], [], [], []
for run in runs:


@ -38,15 +38,6 @@ except ModuleNotFoundError:
def open_csiborg(nsim):
"""
Open a CSiBORG halo catalogue. Applies mass and distance selection.
Parameters
----------
nsim : int
Simulation index.
Returns
-------
cat : csiborgtools.read.CSiBORGHaloCatalogue
"""
paths = csiborgtools.read.Paths(**csiborgtools.paths_glamdring)
bounds = {"totpartmass": (None, None), "dist": (0, 155)}
@ -58,17 +49,6 @@ def open_csiborg(nsim):
def open_quijote(nsim, nobs=None):
"""
Open a Quijote halo catalogue. Applies mass and distance selection.
Parameters
----------
nsim : int
Simulation index.
nobs : int, optional
Fiducial observer index.
Returns
-------
cat : csiborgtools.read.QuijoteHaloCatalogue
"""
paths = csiborgtools.read.Paths(**csiborgtools.paths_glamdring)
cat = csiborgtools.read.QuijoteHaloCatalogue(
@ -82,17 +62,6 @@ def open_quijote(nsim, nobs=None):
def plot_mass_vs_ncells(nsim, pdf=False):
"""
Plot the halo mass vs. number of occupied cells in the initial snapshot.
Parameters
----------
nsim : int
Simulation index.
pdf : bool, optional
Whether to save the figure as a PDF file.
Returns
-------
None
"""
cat = open_csiborg(nsim)
mpart = 4.38304044e+09
@ -123,15 +92,6 @@ def plot_mass_vs_ncells(nsim, pdf=False):
def plot_hmf(pdf=False):
"""
Plot the FoF halo mass function of CSiBORG and Quijote.
Parameters
----------
pdf : bool, optional
Whether to save the figure as a PDF file.
Returns
-------
None
"""
print("Plotting the HMF...", flush=True)
paths = csiborgtools.read.Paths(**csiborgtools.paths_glamdring)
@ -189,13 +149,15 @@ def plot_hmf(pdf=False):
std_csiborg = numpy.std(csiborg_counts, axis=0)
ax[0].plot(x, mean_csiborg, label="CSiBORG", c=cols[0])
ax[0].fill_between(x, mean_csiborg - std_csiborg,
mean_csiborg + std_csiborg, alpha=0.5, color=cols[0])
mean_csiborg + std_csiborg,
alpha=0.5, color=cols[0])
mean_quijote = numpy.mean(quijote_counts, axis=0)
std_quijote = numpy.std(quijote_counts, axis=0)
ax[0].plot(x, mean_quijote, label="Quijote", c=cols[1])
ax[0].fill_between(x, mean_quijote - std_quijote,
mean_quijote + std_quijote, alpha=0.5, color=cols[1])
mean_quijote + std_quijote, alpha=0.5,
color=cols[1])
ax[0].plot(x, csiborg5511, label="CSiBORG 5511", c="k", ls="--")
std5511 = numpy.sqrt(csiborg5511)
@ -207,7 +169,7 @@ def plot_hmf(pdf=False):
+ (std_quijote / mean_quijote / numpy.log(10))**2)
ax[1].plot(x, 10**log_y, c=cols[0])
ax[1].fill_between(x, 10**(log_y - err), 10**(log_y + err), alpha=0.5,
color=col[0])
color=cols[0])
ax[1].plot(x, csiborg5511 / mean_quijote, c="k", ls="--")
@ -239,11 +201,6 @@ def plot_hmf_quijote_full(pdf=False):
"""
Plot the FoF halo mass function of Quijote full run.
Parameters
----------
pdf : bool, optional
Whether to save the figure as a PDF file.
Returns
-------
None
@ -305,29 +262,13 @@ def plot_hmf_quijote_full(pdf=False):
plt.close()
def load_field(kind, nsim, grid, MAS, in_rsp=False):
def load_field(kind, nsim, grid, MAS, in_rsp=False, smooth_scale=None):
r"""
Load a single field.
Parameters
----------
kind : str
Field kind.
nsim : int
Simulation index.
grid : int
Grid size.
MAS : str
Mass assignment scheme.
in_rsp : bool, optional
Whether to load the field in redshift space.
Returns
-------
field : n-dimensional array
"""
paths = csiborgtools.read.Paths(**csiborgtools.paths_glamdring)
return numpy.load(paths.field(kind, MAS, grid, nsim, in_rsp=in_rsp))
return numpy.load(paths.field(kind, MAS, grid, nsim, in_rsp=in_rsp,
smooth_scale=smooth_scale))
###############################################################################
@ -338,35 +279,8 @@ def load_field(kind, nsim, grid, MAS, in_rsp=False):
def plot_projected_field(kind, nsim, grid, in_rsp, smooth_scale, MAS="PCS",
vel_component=0, highres_only=True, slice_find=None,
pdf=False):
r"""
"""
Plot the mean projected field, however can also plot a single slice.
Parameters
----------
kind : str
Field kind.
nsim : int
Simulation index.
grid : int
Grid size.
in_rsp : bool
Whether to load the field in redshift space.
smooth_scale : float
Smoothing scale in :math:`\mathrm{Mpc} / h`.
MAS : str, optional
Mass assignment scheme.
vel_component : int, optional
Which velocity field component to plot.
highres_only : bool, optional
Whether to only plot the high-resolution region.
slice_find : float, optional
Which slice to plot in fractional units (i.e. 1. is the last slice)
pdf : bool, optional
Whether to save the figure as a PDF.
Returns
-------
None
"""
print(f"Plotting projected field `{kind}`. ", flush=True)
paths = csiborgtools.read.Paths(**csiborgtools.paths_glamdring)
@ -383,7 +297,8 @@ def plot_projected_field(kind, nsim, grid, in_rsp, smooth_scale, MAS="PCS",
field = File(paths.borg_mcmc(nsim), 'r')
field = field["scalars"]["BORG_final_density"][...]
else:
field = load_field(kind, nsim, grid, MAS=MAS, in_rsp=in_rsp)
field = load_field(kind, nsim, grid, MAS=MAS, in_rsp=in_rsp,
smooth_scale=smooth_scale)
if kind == "velocity":
field = field[vel_component, ...]
@ -487,7 +402,6 @@ def plot_projected_field(kind, nsim, grid, in_rsp, smooth_scale, MAS="PCS",
else:
fig.colorbar(im, cax=cbar_ax, label=clabel)
fig.tight_layout(h_pad=0, w_pad=0)
for ext in ["png"] if pdf is False else ["png", "pdf"]:
fout = join(
@ -506,20 +420,9 @@ def plot_projected_field(kind, nsim, grid, in_rsp, smooth_scale, MAS="PCS",
###############################################################################
def get_sky_label(kind, volume_weight):
def get_sky_label(kind, volume_weight: bool):
"""
Get the sky label for a given field kind.
Parameters
----------
kind : str
Field kind.
volume_weight : bool
Whether to volume weight the field.
Returns
-------
label : str
"""
if volume_weight:
if kind == "density":
@ -667,14 +570,16 @@ if __name__ == "__main__":
plot_halos=5e13, volume_weight=True)
if True:
kind = "potential"
kind = "environment"
grid = 512
smooth_scale = 0
smooth_scale = 8.0
# plot_projected_field("overdensity", 7444, grid, in_rsp=True,
# highres_only=False)
# for in_rsp in [True, False]:
for in_rsp in [True, False]:
plot_projected_field(kind, 7444, grid, in_rsp=in_rsp,
# nsims = [7444 + n * 24 for n in range(101)]
nsim = 7444
for in_rsp in [False]:
plot_projected_field(kind, nsim, grid, in_rsp=in_rsp,
smooth_scale=smooth_scale, slice_find=0.5,
MAS="PCS", highres_only=True)


@ -49,7 +49,7 @@ def plot_knn(runname):
print(f"Plotting kNN CDF for {runname}.")
cols = plt.rcParams["axes.prop_cycle"].by_key()["color"]
paths = csiborgtools.read.Paths(**csiborgtools.paths_glamdring)
reader = csiborgtools.read.kNNCDFReader(paths)
reader = csiborgtools.summary.kNNCDFReader(paths)
with plt.style.context(plt_utils.mplstyle):
plt.figure()


@ -64,7 +64,8 @@ def get_overlap_summary(nsim0, simname, min_logmass, smoothed):
cat0 = open_cat(nsim0, simname)
catxs = open_cats(nsimxs, simname)
reader = csiborgtools.read.NPairsOverlap(cat0, catxs, paths, min_logmass)
reader = csiborgtools.summary.NPairsOverlap(cat0, catxs, paths,
min_logmass)
mass0 = reader.cat0(MASS_KINDS[simname])
mask = mass0 > 10**min_logmass
@ -85,7 +86,8 @@ def get_expected_mass(nsim0, simname, min_overlap, min_logmass, smoothed):
cat0 = open_cat(nsim0, simname)
catxs = open_cats(nsimxs, simname)
reader = csiborgtools.read.NPairsOverlap(cat0, catxs, paths, min_logmass)
reader = csiborgtools.summary.NPairsOverlap(cat0, catxs, paths,
min_logmass)
mass0 = reader.cat0(MASS_KINDS[simname])
mask = mass0 > 10**min_logmass
mu, std = reader.counterpart_mass(
@ -115,7 +117,8 @@ def get_mtot_vs_all_pairoverlap(nsim0, simname, mass_kind, min_logmass,
cat0 = open_cat(nsim0, simname)
catxs = open_cats(nsimxs, simname)
reader = csiborgtools.read.NPairsOverlap(cat0, catxs, paths, min_logmass)
reader = csiborgtools.summary.NPairsOverlap(cat0, catxs, paths,
min_logmass)
x = [None] * len(catxs)
y = [None] * len(catxs)
@ -193,7 +196,8 @@ def get_mtot_vs_maxpairoverlap(nsim0, simname, mass_kind, min_logmass,
return 0
return numpy.nanmax(y_)
reader = csiborgtools.read.NPairsOverlap(cat0, catxs, paths, min_logmass)
reader = csiborgtools.summary.NPairsOverlap(cat0, catxs, paths,
min_logmass)
x = [None] * len(catxs)
y = [None] * len(catxs)
@ -266,7 +270,8 @@ def get_mtot_vs_maxpairoverlap_consistency(nsim0, simname, mass_kind,
cat0 = open_cat(nsim0, simname)
catxs = open_cats(nsimxs, simname)
reader = csiborgtools.read.NPairsOverlap(cat0, catxs, paths, min_logmass)
reader = csiborgtools.summary.NPairsOverlap(cat0, catxs, paths,
min_logmass)
x = numpy.log10(cat0[mass_kind])
mask = x > min_logmass
@ -534,12 +539,12 @@ def get_mass_vs_separation(nsim0, nsimx, simname, min_logmass, boxsize,
cat0 = open_cat(nsim0, simname)
catx = open_cat(nsimx, simname)
reader = csiborgtools.read.PairOverlap(cat0, catx, paths, min_logmass)
reader = csiborgtools.summary.PairOverlap(cat0, catx, paths, min_logmass)
mass = numpy.log10(reader.cat0(MASS_KINDS[simname]))
dist = reader.dist(in_initial=False, boxsize=boxsize, norm_kind="r200c")
overlap = reader.overlap(smoothed)
dist = csiborgtools.read.weighted_stats(dist, overlap, min_weight=0)
dist = csiborgtools.summary.weighted_stats(dist, overlap, min_weight=0)
mask = numpy.isfinite(dist[:, 0])
mass = mass[mask]
@ -618,7 +623,7 @@ def get_mass_vs_max_overlap_separation(nsim0, nsimx, simname, min_logmass,
cat0 = open_cat(nsim0, simname)
catx = open_cat(nsimx, simname)
reader = csiborgtools.read.PairOverlap(cat0, catx, paths, min_logmass)
reader = csiborgtools.summary.PairOverlap(cat0, catx, paths, min_logmass)
mass = numpy.log10(reader.cat0(MASS_KINDS[simname]))
dist = reader.dist(in_initial=False, boxsize=boxsize, norm_kind="r200c")
@ -697,7 +702,8 @@ def get_property_maxoverlap(nsim0, simname, min_logmass, key, min_overlap,
cat0 = open_cat(nsim0, simname)
catxs = open_cats(nsimxs, simname)
reader = csiborgtools.read.NPairsOverlap(cat0, catxs, paths, min_logmass)
reader = csiborgtools.summary.NPairsOverlap(cat0, catxs, paths,
min_logmass)
mass0 = reader.cat0(MASS_KINDS[simname])
mask = mass0 > 10**min_logmass
@ -1044,7 +1050,7 @@ def matching_max_vs_overlap(simname, nsim0, min_logmass):
# None
# """
# paths = csiborgtools.read.Paths(**kwargs["paths_kind"])
# nn_reader = csiborgtools.read.NearestNeighbourReader(**kwargs, paths=paths)
# nn_reader = csiborgtools.summary.NearestNeighbourReader(**kwargs, paths=paths)
#
# xs, ys1, ys2, cs = [], [], [], []
# for run in runs: