Add galaxy sampling (#88)

* Improve calculations

* Improve flags

* Add smoothed options

* Remove some old comments

* Edit little things

* Save smoothed

* Move files

* Edit imports

* Edit imports

* Renaming imports

* Renaming imports

* Sort imports

* Sort files

* Sorting

* Optionally make copies of the field

* Add quijote backup check

* Add direct field smoothing

* Shorten stupid documentation

* Shorten stupid docs

* Update conversion

* Add particles to ASCII conversion

* Add a short comment

* Add SDSS uncorrected distance

* Adjust comment

* Add FITS index to galaxies

* Remove spare space

* Remove a stupid line

* Remove blank line

* Make space separated

* Add interpolated field path

* Add field sampling

* Sort imports

* Return density in cells

* Clear out observer velocity

* Add 170817 sampling

* Fix normalization

* Update plot
Richard Stiskalek 2023-09-01 16:29:50 +01:00 committed by GitHub
parent 0af925e26a
commit eccd8e3507
26 changed files with 610 additions and 365 deletions


@ -111,7 +111,7 @@ class DensityField(BaseField):
Returns
-------
overdensity : 3-dimensional array of shape `(grid, grid, grid)`.
3-dimensional array of shape `(grid, grid, grid)`.
"""
delta /= delta.mean()
delta -= 1
@ -140,8 +140,7 @@ class DensityField(BaseField):
Returns
-------
rho : 3-dimensional array of shape `(grid, grid, grid)`.
Density field.
3-dimensional array of shape `(grid, grid, grid)`.
References
----------
@ -154,7 +153,9 @@ class DensityField(BaseField):
nparts = parts.shape[0]
batch_size = nparts // nbatch
start = 0
for __ in trange(nbatch + 1) if verbose else range(nbatch + 1):
for __ in trange(nbatch + 1, disable=not verbose,
desc="Loading particles for the density field"):
end = min(start + batch_size, nparts)
pos = parts[start:end]
pos, vel, mass = pos[:, :3], pos[:, 3:6], pos[:, 6]
@ -170,6 +171,10 @@ class DensityField(BaseField):
if end == nparts:
break
start = end
# Divide by the cell volume in (kpc / h)^3
rho /= (self.box.boxsize / grid * 1e3)**3
return rho
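The two hunks above fit together: the particle loop now returns `rho` as a density (mass divided by the cell volume in (kpc / h)^3), and `overdensity_field` turns that into the dimensionless contrast. A minimal numpy sketch of both conversions, using a hypothetical box size and grid resolution:

```python
import numpy

# Hypothetical setup: a 677.7 Mpc/h box sampled on a 256^3 grid.
boxsize, grid = 677.7, 256
rho = numpy.random.random((grid, grid, grid)).astype(numpy.float32)  # mass per cell

# Mass per cell -> density, dividing by the cell volume in (kpc / h)^3,
# mirroring `rho /= (self.box.boxsize / grid * 1e3)**3` above.
rho /= (boxsize / grid * 1e3)**3

# Density -> overdensity contrast, as in `overdensity_field`: delta = rho / <rho> - 1.
delta = rho / rho.mean() - 1
```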
@ -216,8 +221,7 @@ class VelocityField(BaseField):
Returns
-------
radvel : 3-dimensional array of shape `(grid, grid, grid)`.
Radial velocity field.
3-dimensional array of shape `(grid, grid, grid)`.
"""
grid = rho_vel.shape[1]
radvel = numpy.zeros((grid, grid, grid), dtype=numpy.float32)
@ -261,8 +265,7 @@ class VelocityField(BaseField):
Returns
-------
rho_vel : 4-dimensional array of shape `(3, grid, grid, grid)`.
Velocity field along each axis.
4-dimensional array of shape `(3, grid, grid, grid)`.
References
----------
@ -341,7 +344,7 @@ class PotentialField(BaseField):
Returns
-------
potential : 3-dimensional array of shape `(grid, grid, grid)`.
3-dimensional array of shape `(grid, grid, grid)`.
"""
return MASL.potential(overdensity_field, self.box._omega_m,
self.box._aexp, self.MAS)
@ -383,19 +386,11 @@ class TidalTensorField(BaseField):
Returns
-------
eigvals : 3-dimensional array of shape `(grid, grid, grid)`
3-dimensional array of shape `(grid, grid, grid)`
"""
grid = tidal_tensor.T00.shape[0]
eigvals = numpy.full((grid, grid, grid, 3), numpy.nan,
dtype=numpy.float32)
dummy_vector = numpy.full(3, numpy.nan, dtype=numpy.float32)
dummy_tensor = numpy.full((3, 3), numpy.nan, dtype=numpy.float32)
tidal_tensor_to_eigenvalues(eigvals, dummy_vector, dummy_tensor,
tidal_tensor.T00, tidal_tensor.T01,
tidal_tensor.T02, tidal_tensor.T11,
tidal_tensor.T12, tidal_tensor.T22)
return eigvals
return tidal_tensor_to_eigenvalues(
tidal_tensor.T00, tidal_tensor.T01, tidal_tensor.T02,
tidal_tensor.T11, tidal_tensor.T12, tidal_tensor.T22)
@staticmethod
def eigvals_to_environment(eigvals, threshold=0.0):
@ -410,14 +405,11 @@ class TidalTensorField(BaseField):
Returns
-------
environment : 3-dimensional array of shape `(grid, grid, grid)`
3-dimensional array of shape `(grid, grid, grid)`
The environment of each grid cell. Possible values are 0 (void),
1 (sheet), 2 (filament), 3 (knot).
"""
environment = numpy.full(eigvals.shape[:-1], numpy.nan,
dtype=numpy.float32)
eigenvalues_to_environment(environment, eigvals, threshold)
return environment
return eigenvalues_to_environment(eigvals, threshold)
def __call__(self, overdensity_field):
"""
@ -430,7 +422,7 @@ class TidalTensorField(BaseField):
Returns
-------
tidal_tensor : :py:class:`MAS_library.tidal_tensor`
:py:class:`MAS_library.tidal_tensor`
Tidal tensor object, whose attributes `tidal_tensor.Tij` contain
the relevant tensor components.
"""
@ -439,38 +431,25 @@ class TidalTensorField(BaseField):
@jit(nopython=True)
def tidal_tensor_to_eigenvalues(eigvals, dummy_vector, dummy_tensor,
T00, T01, T02, T11, T12, T22):
def tidal_tensor_to_eigenvalues(T00, T01, T02, T11, T12, T22):
"""
Calculate eigenvalues of the tidal tensor field, sorted in decreasing
absolute value order. JIT implementation to speed up the work.
absolute value order.
Parameters
----------
eigvals : 3-dimensional array of shape `(grid, grid, grid)`
Array to store the eigenvalues.
dummy_vector : 1-dimensional array of shape `(3,)`
Dummy vector to store the eigenvalues.
dummy_tensor : 2-dimensional array of shape `(3, 3)`
Dummy tensor to store the tidal tensor.
T00 : 3-dimensional array of shape `(grid, grid, grid)`
Tidal tensor component :math:`T_{00}`.
T01 : 3-dimensional array of shape `(grid, grid, grid)`
Tidal tensor component :math:`T_{01}`.
T02 : 3-dimensional array of shape `(grid, grid, grid)`
Tidal tensor component :math:`T_{02}`.
T11 : 3-dimensional array of shape `(grid, grid, grid)`
Tidal tensor component :math:`T_{11}`.
T12 : 3-dimensional array of shape `(grid, grid, grid)`
Tidal tensor component :math:`T_{12}`.
T22 : 3-dimensional array of shape `(grid, grid, grid)`
Tidal tensor component :math:`T_{22}`.
T00, T01, T02, T11, T12, T22 : 3-dimensional array `(grid, grid, grid)`
Tidal tensor components.
Returns
-------
eigvals : 3-dimensional array of shape `(grid, grid, grid)`
3-dimensional array of shape `(grid, grid, grid)`
"""
grid = T00.shape[0]
eigvals = numpy.full((grid, grid, grid, 3), numpy.nan, dtype=numpy.float32)
dummy_vector = numpy.full(3, numpy.nan, dtype=numpy.float32)
dummy_tensor = numpy.full((3, 3), numpy.nan, dtype=numpy.float32)
for i in range(grid):
for j in range(grid):
for k in range(grid):
@ -494,15 +473,13 @@ def tidal_tensor_to_eigenvalues(eigvals, dummy_vector, dummy_tensor,
@jit(nopython=True)
def eigenvalues_to_environment(environment, eigvals, th):
def eigenvalues_to_environment(eigvals, th):
"""
Classify the environment of each grid cell based on the eigenvalues of the
tidal tensor field.
Parameters
----------
environment : 3-dimensional array of shape `(grid, grid, grid)`
Array to store the environment.
eigvals : 4-dimensional array of shape `(grid, grid, grid, 3)`
The eigenvalues of the tidal tensor field.
th : float
@ -510,19 +487,21 @@ def eigenvalues_to_environment(environment, eigvals, th):
Returns
-------
environment : 3-dimensional array of shape `(grid, grid, grid)`
3-dimensional array of shape `(grid, grid, grid)`
"""
env = numpy.full(eigvals.shape[:-1], numpy.nan, dtype=numpy.float32)
grid = eigvals.shape[0]
for i in range(grid):
for j in range(grid):
for k in range(grid):
lmbda1, lmbda2, lmbda3 = eigvals[i, j, k, :]
if lmbda1 < th and lmbda2 < th and lmbda3 < th:
environment[i, j, k] = 0
env[i, j, k] = 0
elif lmbda1 < th and lmbda2 < th:
environment[i, j, k] = 1
env[i, j, k] = 1
elif lmbda1 < th:
environment[i, j, k] = 2
env[i, j, k] = 2
else:
environment[i, j, k] = 3
return environment
env[i, j, k] = 3
return env
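The cascade above assigns 0 (void), 1 (sheet), 2 (filament) or 3 (knot) depending on how many of the ordered eigenvalues fall below the threshold. A plain-Python restatement of the same rule for a single cell, handy as a sanity check against the jitted loop:

```python
def classify_cell(lmbda1, lmbda2, lmbda3, th=0.0):
    # Same cascade as `eigenvalues_to_environment`, applied to one cell.
    if lmbda1 < th and lmbda2 < th and lmbda3 < th:
        return 0  # void
    elif lmbda1 < th and lmbda2 < th:
        return 1  # sheet
    elif lmbda1 < th:
        return 2  # filament
    return 3      # knot


# Eigenvalues (-0.4, -0.2, 0.1) with the default threshold of 0: the first two
# are below the threshold, so the cell is classified as a sheet.
print(classify_cell(-0.4, -0.2, 0.1))  # 1
```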


@ -18,13 +18,13 @@ Tools for interpolating 3D fields at arbitrary positions.
import MAS_library as MASL
import numpy
from numba import jit
from tqdm import trange
from tqdm import trange, tqdm
from .utils import force_single_precision
from .utils import force_single_precision, smoothen_field
from ..utils import periodic_wrap_grid, radec_to_cartesian
def evaluate_cartesian(*fields, pos, interp="CIC"):
def evaluate_cartesian(*fields, pos, smooth_scales=None, verbose=False):
"""
Evaluate a scalar field(s) at Cartesian coordinates `pos`.
@ -34,36 +34,49 @@ def evaluate_cartesian(*fields, pos, interp="CIC"):
Fields to be interpolated.
pos : 2-dimensional array of shape `(n_samples, 3)`
Query positions in box units.
interp : str, optional
Interpolation method, `NGP` or `CIC`.
smooth_scales : (list of) float, optional
Smoothing scales in box units. If `None`, no smoothing is performed.
verbose : bool, optional
Smoothing verbosity flag.
Returns
-------
interp_fields : (list of) 1-dimensional array of shape `(n_samples,).
(list of) 1-dimensional array of shape `(n_samples, len(smooth_scales))`
"""
assert interp in ["CIC", "NGP"]
boxsize = 1.
pos = force_single_precision(pos)
nsamples = pos.shape[0]
interp_fields = [numpy.full(nsamples, numpy.nan, dtype=numpy.float32)
if isinstance(smooth_scales, (int, float)):
smooth_scales = [smooth_scales]
if smooth_scales is None:
shape = (pos.shape[0],)
else:
shape = (pos.shape[0], len(smooth_scales))
interp_fields = [numpy.full(shape, numpy.nan, dtype=numpy.float32)
for __ in range(len(fields))]
if interp == "CIC":
for i, field in enumerate(fields):
MASL.CIC_interp(field, boxsize, pos, interp_fields[i])
else:
pos = numpy.floor(pos * fields[0].shape[0]).astype(numpy.int32)
for i, field in enumerate(fields):
for j in range(nsamples):
interp_fields[i][j] = field[pos[j, 0], pos[j, 1], pos[j, 2]]
for i, field in enumerate(fields):
if smooth_scales is None:
MASL.CIC_interp(field, 1., pos, interp_fields[i])
else:
desc = f"Smoothing and interpolating field {i + 1}/{len(fields)}"
iterator = tqdm(smooth_scales, desc=desc, disable=not verbose)
for j, scale in enumerate(iterator):
if not scale > 0:
fsmooth = numpy.copy(field)
else:
fsmooth = smoothen_field(field, scale, 1., make_copy=True)
MASL.CIC_interp(fsmooth, 1., pos, interp_fields[i][:, j])
if len(fields) == 1:
return interp_fields[0]
return interp_fields
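With the new signature, `smooth_scales` adds a trailing axis to the output: one column per scale, or a plain 1-D array when it is `None`. A hedged usage sketch; the import path is an assumption and the density grid is a random placeholder:

```python
import numpy
from csiborgtools.field import evaluate_cartesian  # import path is an assumption

# Hypothetical 128^3 density grid and three query positions in box units [0, 1).
density = numpy.random.random((128, 128, 128)).astype(numpy.float32)
pos = numpy.array([[0.50, 0.50, 0.50],
                   [0.25, 0.60, 0.10],
                   [0.90, 0.90, 0.90]], dtype=numpy.float32)

# No smoothing: a (3,) array of CIC-interpolated values.
vals = evaluate_cartesian(density, pos=pos)

# Two smoothing scales in box units: a (3, 2) array, one column per scale.
vals_smooth = evaluate_cartesian(density, pos=pos,
                                 smooth_scales=[0.0, 0.01], verbose=True)
```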
def evaluate_sky(*fields, pos, box):
def evaluate_sky(*fields, pos, mpc2box, smooth_scales=None, verbose=False):
"""
Evaluate a scalar field(s) at radial distance `Mpc / h`, right ascensions
[0, 360) deg and declinations [-90, 90] deg.
@ -74,19 +87,33 @@ def evaluate_sky(*fields, pos, box):
Field to be interpolated.
pos : 2-dimensional array of shape `(n_samples, 3)`
Query spherical coordinates.
box : :py:class:`csiborgtools.read.CSiBORGBox`
The simulation box information and transformations.
mpc2box : float
Conversion factor to multiply the radial distance by to get box units.
smooth_scales : (list of) float, optional
Smoothing scales in `Mpc / h`. If `None`, no smoothing is performed.
verbose : bool, optional
Smoothing verbosity flag.
Returns
-------
interp_fields : (list of) 1-dimensional array of shape `(n_samples,).
(list of) 1-dimensional array of shape `(n_samples, len(smooth_scales))`
"""
pos = force_single_precision(pos)
pos[:, 0] = box.mpc2box(pos[:, 0])
pos[:, 0] *= mpc2box
cart_pos = radec_to_cartesian(pos) + 0.5
return evaluate_cartesian(*fields, pos=cart_pos)
if smooth_scales is not None:
if isinstance(smooth_scales, (int, float)):
smooth_scales = [smooth_scales]
if isinstance(smooth_scales, list):
smooth_scales = numpy.array(smooth_scales, dtype=numpy.float32)
smooth_scales *= mpc2box
return evaluate_cartesian(*fields, pos=cart_pos,
smooth_scales=smooth_scales, verbose=verbose)
def observer_vobs(velocity_field):
@ -142,6 +169,7 @@ def make_sky(field, angpos, dist, box, volume_weight=True, verbose=True):
# of distances. We pre-allocate arrays for speed.
dir_loop = numpy.full((dist.size, 3), numpy.nan, dtype=numpy.float32)
boxdist = box.mpc2box(dist)
boxsize = box.box2mpc(1.)
ndir = angpos.shape[0]
out = numpy.full(ndir, numpy.nan, dtype=numpy.float32)
for i in trange(ndir) if verbose else range(ndir):
@ -151,10 +179,10 @@ def make_sky(field, angpos, dist, box, volume_weight=True, verbose=True):
if volume_weight:
out[i] = numpy.sum(
boxdist**2
* evaluate_sky(field, pos=dir_loop, box=box, isdeg=True))
* evaluate_sky(field, pos=dir_loop, mpc2box=1 / boxsize))
else:
out[i] = numpy.sum(
evaluate_sky(field, pos=dir_loop, box=box, isdeg=True))
evaluate_sky(field, pos=dir_loop, mpc2box=1 / boxsize))
out *= dx
return out
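`evaluate_sky` now takes a plain `mpc2box` conversion factor rather than the full box object, which is what `make_sky` computes as `1 / box.box2mpc(1.)` above. A hedged usage sketch with a hypothetical 677.7 Mpc/h box (import path again an assumption):

```python
import numpy
from csiborgtools.field import evaluate_sky  # import path is an assumption

density = numpy.random.random((128, 128, 128)).astype(numpy.float32)

# Query points as (distance [Mpc / h], RA [deg], dec [deg]).
pos = numpy.array([[100., 180.,  30.],
                   [250.,  45., -10.]], dtype=numpy.float32)

# Conversion from Mpc / h to box units for a hypothetical 677.7 Mpc/h box.
mpc2box = 1 / 677.7

# Smoothing scales are given in Mpc / h and rescaled internally by `mpc2box`.
vals = evaluate_sky(density, pos=pos, mpc2box=mpc2box, smooth_scales=[0., 4.])
```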


@ -29,12 +29,16 @@ def force_single_precision(x):
return x
def smoothen_field(field, smooth_scale, boxsize, threads=1):
def smoothen_field(field, smooth_scale, boxsize, threads=1, make_copy=False):
"""
Smooth a field with a Gaussian filter.
"""
W_k = SL.FT_filter(boxsize, smooth_scale, field.shape[0], "Gaussian",
threads)
if make_copy:
field = numpy.copy(field)
return SL.field_smoothing(field, W_k, threads)
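The new `make_copy` flag copies the grid before applying the Pylians Gaussian filter, so callers can keep the unsmoothed field around. A hedged usage sketch (import path is an assumption; the field must be single precision for the filter):

```python
import numpy
from csiborgtools.field import smoothen_field  # import path is an assumption

field = numpy.random.random((128, 128, 128)).astype(numpy.float32)

# Gaussian smoothing with R = 4 Mpc/h in a hypothetical 677.7 Mpc/h box;
# `make_copy=True` leaves `field` itself untouched.
smoothed = smoothen_field(field, smooth_scale=4.0, boxsize=677.7,
                          threads=1, make_copy=True)
```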


@ -13,17 +13,8 @@
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
from .box_units import CSiBORGBox, QuijoteBox # noqa
from .halo_cat import (CSiBORGHaloCatalogue, QuijoteHaloCatalogue, fiducial_observers) # noqa
from .knn_summary import kNNCDFReader # noqa
from .nearest_neighbour_summary import NearestNeighbourReader # noqa
from .obs import (SDSS, MCXCClusters, PlanckClusters, TwoMPPGalaxies, # noqa
TwoMPPGroups)
from .overlap_summary import weighted_stats # noqa
from .overlap_summary import (NPairsOverlap, PairOverlap, # noqa
binned_resample_mean, get_cross_sims) # noqa
from .halo_cat import CSiBORGHaloCatalogue, QuijoteHaloCatalogue, fiducial_observers # noqa
from .obs import SDSS, MCXCClusters, PlanckClusters, TwoMPPGalaxies, TwoMPPGroups # noqa
from .paths import Paths # noqa
from .pk_summary import PKReader # noqa
from .readsim import (MmainReader, CSiBORGReader, QuijoteReader, halfwidth_mask, # noqa
load_halo_particles) # noqa
from .tpcf_summary import TPCFReader # noqa
from .utils import (cols_to_structured, read_h5) # noqa
from .readsim import MmainReader, CSiBORGReader, QuijoteReader, halfwidth_mask, load_halo_particles # noqa
from .utils import cols_to_structured, read_h5 # noqa


@ -22,7 +22,6 @@ from astropy.cosmology import LambdaCDM
from .readsim import CSiBORGReader, QuijoteReader
###############################################################################
# Base box #
###############################################################################


@ -24,16 +24,15 @@ from itertools import product
from math import floor
import numpy
from readfof import FoF_catalog
from sklearn.neighbors import NearestNeighbors
from ..utils import (cartesian_to_radec, periodic_distance_two_points,
radec_to_cartesian, real2redshift)
from .box_units import CSiBORGBox, QuijoteBox
from .paths import Paths
from .readsim import CSiBORGReader
from .utils import add_columns, cols_to_structured, flip_cols
from ..utils import (periodic_distance_two_points, real2redshift,
cartesian_to_radec, radec_to_cartesian)
class BaseCatalogue(ABC):
@ -43,6 +42,8 @@ class BaseCatalogue(ABC):
_data = None
_paths = None
_nsim = None
_observer_location = None
_observer_velocity = None
@property
def nsim(self):
@ -51,7 +52,7 @@ class BaseCatalogue(ABC):
Returns
-------
nsim : int
int
"""
if self._nsim is None:
raise RuntimeError("`nsim` is not set!")
@ -70,7 +71,7 @@ class BaseCatalogue(ABC):
Returns
-------
nsnap : int
int
"""
pass
@ -81,7 +82,7 @@ class BaseCatalogue(ABC):
Returns
-------
simname : str
str
"""
pass
@ -92,7 +93,7 @@ class BaseCatalogue(ABC):
Returns
-------
paths : :py:class:`csiborgtools.read.Paths`
:py:class:`csiborgtools.read.Paths`
"""
if self._paths is None:
raise RuntimeError("`paths` is not set!")
@ -110,7 +111,7 @@ class BaseCatalogue(ABC):
Returns
-------
data : structured array
structured array
"""
if self._data is None:
raise RuntimeError("`data` is not set!")
@ -123,7 +124,7 @@ class BaseCatalogue(ABC):
Returns
-------
box : instance of :py:class:`csiborgtools.units.BoxUnits`
instance of :py:class:`csiborgtools.units.BoxUnits`
"""
pass
@ -142,7 +143,7 @@ class BaseCatalogue(ABC):
Returns
-------
data : structured array
structured array
"""
fits = numpy.load(paths.initmatch(self.nsim, simname, "fit"))
X, cols = [], []
@ -174,7 +175,7 @@ class BaseCatalogue(ABC):
Returns
-------
data : structured array
structured array
"""
fits = numpy.load(paths.structfit(self.nsnap, self.nsim, simname))
@ -203,8 +204,7 @@ class BaseCatalogue(ABC):
Returns
-------
data : structured array
The filtered data based on the provided bounds.
structured array
"""
for key, (xmin, xmax) in bounds.items():
if key == "dist":
@ -229,7 +229,7 @@ class BaseCatalogue(ABC):
Returns
-------
obs_pos : 1-dimensional array of shape `(3,)`
1-dimensional array of shape `(3,)`
"""
if self._observer_location is None:
raise RuntimeError("`observer_location` is not set!")
@ -242,6 +242,25 @@ class BaseCatalogue(ABC):
assert obs_pos.shape == (3,)
self._observer_location = obs_pos
@property
def observer_velocity(self):
r"""
Velocity of the observer in units :math:`\mathrm{km} / \mathrm{s}`.
Returns
-------
1-dimensional array of shape `(3,)`
"""
if self._observer_velocity is None:
raise RuntimeError("`observer_velocity` is not set!")
return self._observer_velocity
@observer_velocity.setter
def observer_velocity(self, obs_vel):
assert isinstance(obs_vel, (list, tuple, numpy.ndarray))
obs_vel = numpy.asanyarray(obs_vel)
assert obs_vel.shape == (3,)
self._observer_velocity = obs_vel
def position(self, in_initial=False, cartesian=True,
subtract_observer=False):
r"""
@ -261,8 +280,7 @@ class BaseCatalogue(ABC):
Returns
-------
pos : ndarray, shape `(nobjects, 3)`
Position components.
ndarray, shape `(nobjects, 3)`
"""
suffix = '0' if in_initial else ''
component_keys = [f"{comp}{suffix}" for comp in ('x', 'y', 'z')]
@ -285,7 +303,7 @@ class BaseCatalogue(ABC):
Returns
-------
radial_distance : 1-dimensional array of shape `(nobjects,)`
1-dimensional array of shape `(nobjects,)`
"""
pos = self.position(in_initial=in_initial, cartesian=True,
subtract_observer=True)
@ -301,7 +319,7 @@ class BaseCatalogue(ABC):
"""
return numpy.vstack([self["v{}".format(p)] for p in ("x", "y", "z")]).T
def redshift_space_position(self, cartesian=True, subtract_observer=False):
def redshift_space_position(self, cartesian=True):
"""
Calculates the position of objects in redshift space. Positions can be
returned in either Cartesian coordinates (default) or spherical
@ -318,14 +336,19 @@ class BaseCatalogue(ABC):
Returns
-------
pos : 2-dimensional array of shape `(nobjects, 3)`
Position of objects in the desired coordinate system.
2-dimensional array of shape `(nobjects, 3)`
"""
# Force subtraction of observer if not in Cartesian coordinates
subtract_observer = subtract_observer or not cartesian
if self.simname == "quijote":
raise NotImplementedError("Redshift space positions not "
"implemented for Quijote.")
rsp = real2redshift(self.position(cartesian=True), self.velocity(),
self.observer_location, self.box, make_copy=False)
return rsp if cartesian else cartesian_to_radec(rsp)
self.observer_location, self.observer_velocity,
self.box, make_copy=False)
if cartesian:
return rsp
return cartesian_to_radec(rsp - self.observer_location)
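`real2redshift` now also receives the observer velocity, and spherical output is taken relative to the observer. For orientation, the underlying mapping is the usual line-of-sight distortion; an illustrative numpy sketch (not the package's exact implementation, with H0 = 100 h km/s/Mpc and positions in Mpc/h assumed):

```python
import numpy

def to_redshift_space(pos, vel, observer_pos, observer_vel, H0=100.0):
    # pos, vel: (n, 3) positions [Mpc / h] and velocities [km / s]. Shift each
    # object along the line of sight by its peculiar velocity relative to the
    # observer, divided by H0.
    rel = pos - observer_pos
    r_hat = rel / numpy.linalg.norm(rel, axis=1, keepdims=True)
    vlos = numpy.sum((vel - observer_vel) * r_hat, axis=1, keepdims=True)
    return pos + vlos / H0 * r_hat

# An object 100 Mpc/h away along x, receding at 500 km/s relative to the
# observer, moves outwards by 5 Mpc/h in redshift space.
pos = numpy.array([[100., 0., 0.]])
vel = numpy.array([[500., 0., 0.]])
print(to_redshift_space(pos, vel, numpy.zeros(3), numpy.zeros(3)))  # [[105. 0. 0.]]
```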
def angmomentum(self):
"""
@ -334,7 +357,7 @@ class BaseCatalogue(ABC):
Returns
-------
angmom : 2-dimensional array of shape `(nobjects, 3)`
2-dimensional array of shape `(nobjects, 3)`
"""
return numpy.vstack([self["L{}".format(p)] for p in ("x", "y", "z")]).T
@ -355,8 +378,7 @@ class BaseCatalogue(ABC):
Returns
-------
knn : :py:class:`sklearn.neighbors.NearestNeighbors`
kNN object fitted with object positions.
:py:class:`sklearn.neighbors.NearestNeighbors`
"""
if subtract_observer and periodic:
raise ValueError("Subtracting observer is not supported for "
@ -533,8 +555,6 @@ class CSiBORGHaloCatalogue(BaseCatalogue):
IC realisation index.
paths : py:class`csiborgtools.read.Paths`
Paths object.
observer_location : array, optional
Observer's location in :math:`\mathrm{Mpc} / h`.
bounds : dict
Parameter bounds; keys as names, values as (min, max) tuples. Use
`dist` for radial distance, `None` for no bound.
@ -544,14 +564,17 @@ class CSiBORGHaloCatalogue(BaseCatalogue):
Load initial positions.
with_lagpatch : bool, optional
Load halos with a resolved Lagrangian patch.
observer_velocity : 1-dimensional array, optional
Observer's velocity in :math:`\mathrm{km} / \mathrm{s}`.
"""
def __init__(self, nsim, paths, observer_location=[338.85, 338.85, 338.85],
bounds={"dist": (0, 155.5)},
load_fitted=True, load_initial=True, with_lagpatch=False):
def __init__(self, nsim, paths, bounds={"dist": (0, 155.5)},
load_fitted=True, load_initial=True, with_lagpatch=False,
observer_velocity=None):
self.nsim = nsim
self.paths = paths
self.observer_location = observer_location
self.observer_location = [338.85, 338.85, 338.85]
self.observer_velocity = observer_velocity
reader = CSiBORGReader(paths)
data = reader.read_fof_halos(self.nsim)
box = self.box
@ -695,7 +718,7 @@ class QuijoteHaloCatalogue(BaseCatalogue):
Returns
-------
nsnap : int
int
"""
return self._nsnap
@ -715,7 +738,7 @@ class QuijoteHaloCatalogue(BaseCatalogue):
Returns
-------
redshift : float
float
"""
return {4: 0.0, 3: 0.5, 2: 1.0, 1: 2.0, 0: 3.0}[self.nsnap]
@ -737,7 +760,7 @@ class QuijoteHaloCatalogue(BaseCatalogue):
Returns
-------
cat : instance of csiborgtools.read.QuijoteHaloCatalogue
instance of `csiborgtools.read.QuijoteHaloCatalogue`
"""
cat = deepcopy(self)
cat.observer_location = fiducial_observers(self.box.boxsize, rmax)[n]
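Tying the constructor change earlier in this file together: the CSiBORG observer location is now fixed at the box centre and only the observer velocity is user-supplied. A hedged instantiation sketch (the `Paths` arguments, IC index and velocity are placeholders):

```python
import csiborgtools  # package name as used throughout these diffs

# `Paths` directory arguments omitted; fill in for your own setup.
paths = csiborgtools.read.Paths()
cat = csiborgtools.read.CSiBORGHaloCatalogue(
    nsim=7444,                              # hypothetical IC realisation index
    paths=paths,
    bounds={"dist": (0, 155.5)},
    observer_velocity=[-100., 250., 300.],  # km / s, hypothetical value
)
print(cat.observer_location)  # fixed at [338.85, 338.85, 338.85] Mpc / h
```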


@ -23,6 +23,7 @@ import numpy
from astropy import units
from astropy.coordinates import SkyCoord
from astropy.io import fits
from astropy.cosmology import FlatLambdaCDM
from scipy import constants
from .utils import cols_to_structured
@ -143,7 +144,7 @@ class TwoMPPGroups(TextSurvey):
"""
name = "2M++_groups"
def __init__(self, fpath):
def __init__(self, fpath=None):
if fpath is None:
fpath = join("/mnt/extraspace/rstiskalek/catalogs",
"2M++_group_catalog.dat")
@ -323,7 +324,7 @@ class FitsSurvey(ABC):
-------
keys : list of str
"""
return self.routine_keys + self.fits_keys
return self.routine_keys + self.fits_keys + ["INDEX"]
def make_mask(self, steps):
"""
@ -356,20 +357,27 @@ class FitsSurvey(ABC):
return out
def __getitem__(self, key):
if key == "INDEX":
mask = self.selection_mask
if mask is None:
return numpy.arange(self.size)
else:
return numpy.arange(mask.size)[mask]
# Check duplicates
if key in self.routine_keys and key in self.fits_keys:
warn("Key `{}` found in both `routine_keys` and `fits_keys`. "
"Returning `routine_keys` value.".format(key), stacklevel=1)
warn(f"Key `{key}` found in both `routine_keys` and `fits_keys`. "
"Returning `routine_keys` value.")
if key in self.routine_keys:
func, args = self.routines[key]
out = func(*args)
elif key in self.fits_keys:
warn("Returning a FITS property. Be careful about little h!",
stacklevel=1)
warn(f"Returning a FITS property `{key}`. "
"Be careful about little h!")
out = self.get_fitsitem(key)
else:
raise KeyError("Unrecognised key `{}`.".format(key))
raise KeyError(f"Unrecognised key `{key}`.")
if self.selection_mask is None:
return out
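The new `INDEX` key exposes the row numbers of the (optionally masked) galaxies within the original FITS table, so derived quantities can be matched back to full-catalogue rows. A hedged usage sketch (the file path is a placeholder):

```python
import csiborgtools  # package name as used throughout these diffs

# Path is a placeholder; by default the reader points at the NSA FITS catalogue.
survey = csiborgtools.read.SDSS(fpath="nsa_v1_0_1.fits")

idx = survey["INDEX"]   # row numbers in the original FITS table of the kept galaxies
dist = survey["DIST"]   # any other key returns arrays of the same length
```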
@ -538,6 +546,7 @@ class MCXCClusters(FitsSurvey):
"""Get luminosity, puts back units to be in ergs/s."""
return self.get_fitsitem(key) * 1e44 * (self._hdata / self.h)**2
###############################################################################
# SDSS galaxies #
###############################################################################
@ -556,6 +565,8 @@ class SDSS(FitsSurvey):
h : float, optional
Little h. By default `h = 1`. The catalogue assumes this value.
The routine properties should take care of little h conversion.
Om0 : float, optional
Matter density. By default `Om0 = 0.3175`, matching CSiBORG.
sel_steps : py:function:
Steps to mask the survey. Expected to look for example like
```
@ -570,7 +581,7 @@ class SDSS(FitsSurvey):
"""
name = "SDSS"
def __init__(self, fpath=None, h=1, sel_steps=None):
def __init__(self, fpath=None, h=1, Om0=0.3175, sel_steps=None):
if fpath is None:
fpath = "/mnt/extraspace/rstiskalek/catalogs/nsa_v1_0_1.fits"
self._file = fits.open(fpath, memmap=False)
@ -603,6 +614,8 @@ class SDSS(FitsSurvey):
self.routines.update({key: val})
# Set DIST routine
self.routines.update({"DIST": (self._dist, ())})
self.routines.update(
{"DIST_UNCORRECTED": (self._dist_uncorrected, (Om0,))})
# Set MASS routines
for photo in self._photos:
key = "{}_MASS".format(photo)
@ -623,8 +636,11 @@ class SDSS(FitsSurvey):
@property
def size(self):
# Here pick some property that is in the catalogue..
return self.get_fitsitem("ZDIST").size
mask = self.selection_mask
if mask is not None:
return numpy.sum(mask)
else:
return self.get_fitsitem("ZDIST").size
def _absmag(self, photo, band):
"""
@ -674,6 +690,14 @@ class SDSS(FitsSurvey):
"""
return self.get_fitsitem("ZDIST") * constants.c * 1e-3 / (100 * self.h)
def _dist_uncorrected(self, Om0):
"""
Get the comoving distance estimate from `Z`, i.e. redshift uncorrected
for peculiar motion in the heliocentric frame.
"""
cosmo = FlatLambdaCDM(H0=100 * self.h, Om0=Om0)
return cosmo.comoving_distance(self.get_fitsitem("Z")).value
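`DIST` converts the peculiar-velocity-corrected redshift `ZDIST` with a linear Hubble law, while the new `DIST_UNCORRECTED` runs the heliocentric redshift `Z` through a FlatLambdaCDM distance-redshift relation. A standalone sketch of the two conversions, assuming h = 1 and Om0 = 0.3175 as in the defaults above:

```python
import numpy
from astropy.cosmology import FlatLambdaCDM
from scipy import constants

z = numpy.array([0.01, 0.03, 0.05])  # hypothetical heliocentric redshifts
h = 1.0

# Linear Hubble-law distance (what `_dist` does with ZDIST), in Mpc / h.
dist_linear = z * constants.c * 1e-3 / (100 * h)

# Comoving distance through the full distance-redshift relation
# (what `_dist_uncorrected` does with Z), also in Mpc / h for H0 = 100 h.
cosmo = FlatLambdaCDM(H0=100 * h, Om0=0.3175)
dist_comoving = cosmo.comoving_distance(z).value
```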
def _solmass(self, photo):
"""
Get solar mass of a given photometry. Converts little h.


@ -561,9 +561,9 @@ class Paths:
return join(fdir, fname)
def field(self, kind, MAS, grid, nsim, in_rsp):
def field(self, kind, MAS, grid, nsim, in_rsp, smooth_scale=None):
r"""
Path to the files containing the calculated density fields in CSiBORG.
Path to the files containing the calculated fields in CSiBORG.
Parameters
----------
@ -578,6 +578,8 @@ class Paths:
IC realisation index.
in_rsp : bool
Whether the calculation is performed in redshift space.
smooth_scale : float, optional
Smoothing scale in Mpc/h.
Returns
-------
@ -593,6 +595,54 @@ class Paths:
kind = kind + "_rsp"
fname = f"{kind}_{MAS}_{str(nsim).zfill(5)}_grid{grid}.npy"
if smooth_scale is not None:
fname = fname.replace(".npy", f"_smooth{smooth_scale}.npy")
return join(fdir, fname)
def field_interpolated(self, survey, kind, MAS, grid, nsim, in_rsp,
smooth_scale=None):
"""
Path to the files containing the CSiBORG interpolated field for a given
survey.
Parameters
----------
survey : str
Survey name.
kind : str
Field type. Must be one of: `density`, `velocity`, `potential`,
`radvel`, `environment`.
MAS : str
Mass-assignment scheme.
grid : int
Grid size.
nsim : int
IC realisation index.
in_rsp : bool
Whether the calculation is performed in redshift space.
smooth_scale : float, optional
Smoothing scale in Mpc/h.
Returns
-------
str
"""
assert kind in ["density", "velocity", "potential", "radvel",
"environment"]
fdir = join(self.postdir, "environment_interpolated")
try_create_directory(fdir)
if in_rsp:
kind = kind + "_rsp"
fname = f"{survey}_{kind}_{MAS}_{str(nsim).zfill(5)}_grid{grid}.npz"
if smooth_scale is not None:
fname = fname.replace(".npz", f"_smooth{smooth_scale}.npz")
return join(fdir, fname)
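Both path helpers append an optional `_smooth{scale}` suffix, so smoothed and unsmoothed grids can coexist on disk, and the interpolated fields gain a survey prefix. A short sketch of the resulting filenames (the parameter values are placeholders):

```python
# Filename patterns produced by `field` and `field_interpolated` above.
kind, MAS, grid, nsim, survey = "density", "PCS", 256, 7444, "SDSS"

fname = f"{kind}_{MAS}_{str(nsim).zfill(5)}_grid{grid}.npy"
# -> 'density_PCS_07444_grid256.npy'

smooth_scale = 4.0
fname = fname.replace(".npy", f"_smooth{smooth_scale}.npy")
# -> 'density_PCS_07444_grid256_smooth4.0.npy'

fname_interp = f"{survey}_{kind}_{MAS}_{str(nsim).zfill(5)}_grid{grid}.npz"
# -> 'SDSS_density_PCS_07444_grid256.npz', stored under `environment_interpolated`
```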
def observer_peculiar_velocity(self, MAS, grid, nsim):


@ -17,7 +17,6 @@ from os.path import isfile
import numpy
from h5py import File
###############################################################################
# Array manipulation #
###############################################################################


@ -0,0 +1,21 @@
# Copyright (C) 2023 Richard Stiskalek
# This program is free software; you can redistribute it and/or modify it
# under the terms of the GNU General Public License as published by the
# Free Software Foundation; either version 3 of the License, or (at your
# option) any later version.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General
# Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
from .knn_summary import kNNCDFReader # noqa
from .nearest_neighbour_summary import NearestNeighbourReader # noqa
from .overlap_summary import weighted_stats # noqa
from .overlap_summary import NPairsOverlap, PairOverlap, get_cross_sims # noqa
from .pk_summary import PKReader # noqa
from .tpcf_summary import TPCFReader # noqa


@ -19,11 +19,10 @@ the final snapshot.
from math import floor
import numpy
from numba import jit
from scipy.integrate import cumulative_trapezoid, quad
from scipy.interpolate import interp1d
from scipy.stats import gaussian_kde, kstest
from numba import jit
from tqdm import tqdm


@ -778,7 +778,7 @@ class NPairsOverlap:
Returns
-------
pairs : list of :py:class:`csiborgtools.read.PairOverlap`
pairs : list of :py:class:`csiborgtools.summary.PairOverlap`
"""
return self._pairs
@ -827,53 +827,3 @@ def get_cross_sims(simname, nsim0, paths, min_logmass, smoothed):
if isfile(f1) or isfile(f2):
nsimxs.append(nsimx)
return nsimxs
def binned_resample_mean(x, y, prob, bins, nresample=50, seed=42):
"""
Calculate binned average of `y` by MC resampling. Each point is kept with
probability `prob`.
Parameters
----------
x : 1-dimensional array
Independent variable.
y : 1-dimensional array
Dependent variable.
prob : 1-dimensional array
Sample probability.
bins : 1-dimensional array
Bin edges to bin `x`.
nresample : int, optional
Number of MC resamples. By default 50.
seed : int, optional
Random seed.
Returns
-------
bin_centres : 1-dimensional array
Bin centres.
stat : 2-dimensional array
Mean and its standard deviation from MC resampling.
"""
assert (x.ndim == 1) & (x.shape == y.shape == prob.shape)
gen = numpy.random.RandomState(seed)
loop_stat = numpy.full(nresample, numpy.nan) # Preallocate loop arr
stat = numpy.full((bins.size - 1, 2), numpy.nan) # Preallocate output
for i in range(bins.size - 1):
mask = (x > bins[i]) & (x <= bins[i + 1])
nsamples = numpy.sum(mask)
loop_stat[:] = numpy.nan # Clear it
for j in range(nresample):
loop_stat[j] = numpy.mean(y[mask][gen.rand(nsamples) < prob[mask]])
stat[i, 0] = numpy.mean(loop_stat)
stat[i, 1] = numpy.std(loop_stat)
bin_centres = (bins[1:] + bins[:-1]) / 2
return bin_centres, stat


@ -16,8 +16,6 @@
import joblib
import numpy
from .paths import Paths
class TPCFReader:
"""
@ -45,7 +43,6 @@ class TPCFReader:
@paths.setter
def paths(self, paths):
assert isinstance(paths, Paths)
self._paths = paths
def read(self, run):