mirror of https://github.com/Richard-Sti/csiborgtools.git
In [12]:
import sys
import numpy as np
import h5py
import matplotlib.pyplot as plt
from numba import jit
import numpy
sys.path.append("../")
import csiborgtools
%matplotlib widget
%load_ext autoreload
%autoreload 2
In [13]:
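# Set up the path manager; `paths_glamdring` presumably holds the directory layout of the
# glamdring cluster storage used by csiborgtools.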
paths = csiborgtools.read.Paths(**csiborgtools.paths_glamdring)
# nsim0 = 7468
# nsimx = 7588
# nsnap0 = max(paths.get_snapshots(nsim0))
# nsnapx = max(paths.get_snapshots(nsimx))
# overlapper = csiborgtools.match.ParticleOverlap()
# partreader = csiborgtools.read.ParticleReader(paths)
In [58]:
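# Load the catalogues to compare: the Quijote FoF haloes (snapshot 4, which in the fiducial
# Quijote outputs should correspond to z = 0) and the CSiBORG (PHEW) haloes of realisation 7444.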
quijote_cat = csiborgtools.read.QuijoteHaloCatalogue(0, paths, nsnap=4, minmass=None)
csiborg_cat = csiborgtools.read.HaloCatalogue(7444, paths, minmass=None)
In [15]:
csiborg_fof = np.genfromtxt("/mnt/extraspace/mhutt/csiborg_halo_catalogues/FOF_catalogues/FOF_catalogue_7444.dat")
In [60]:
quijote_cat.position().min()
Out[60]:
In [1]:
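# Ratio of the Quijote box side (1000 Mpc / h) to the CSiBORG high-resolution radius (~155 Mpc / h).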
1000 / 155
Out[1]:
In [36]:
csiborg_fof[:, -2]
Out[36]:
In [48]:
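# Compare the halo mass functions of the three catalogues; dividing the FoF masses by h = 0.705
# assumes the .dat catalogue stores masses in Msun / h while the other two are already in Msun.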
plt.figure()
plt.hist(np.log10(csiborg_fof[:, -2] / 0.705), bins="auto", log=True, histtype="step", label='CSiBORG FoF')
plt.hist(np.log10(csiborg_cat["totpartmass"]), bins="auto", log=True, histtype="step", label="CSiBORG PHEW")
plt.hist(np.log10(quijote_cat["group_mass"]), bins="auto", histtype="step", label="Quijote FoF")
plt.legend()
plt.show()
In [ ]:
In [ ]:
In [51]:
Out[51]:
In [53]:
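# `dist` and `dist2` are presumably radial halo distances computed in a cell that is not shown
# here; the 1 / 0.705 factor again rescales by h for a like-for-like comparison with Quijote.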
plt.figure()
plt.hist(dist / 0.705, bins="auto", histtype="step")
plt.hist(dist2, bins="auto", histtype="step")
plt.hist(np.linalg.norm(quijote_cat.position(), axis=1), bins="auto", histtype="step")
plt.show()
In [ ]:
In [9]:
X = np.array([0, 0, 0]).reshape(1, -1)
In [11]:
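# Query all Quijote haloes within a radius of 25 (catalogue length units, presumably Mpc / h) of the origin.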
quijote_cat.radius_neigbours(X, 25, False)
Out[11]:
In [ ]:
In [ ]:
In [30]:
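# box2mpc(1) presumably converts one CSiBORG box length unit to Mpc; multiplying by h = 0.705
# then expresses it in Mpc / h.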
csiborg_cat.box.box2mpc(1) * 0.705
Out[30]:
In [31]:
np.sum(np.log10(csiborg_cat["M"]) > 12.38)
Out[31]:
In [34]:
csiborg_cat.keys
Out[34]:
In [46]:
plt.figure()
plt.hist(np.log10(quijote_cat["group_mass"] / 1.1), bins="auto", histtype="step")
plt.hist(np.log10(csiborg_cat["totpartmass"] / 0.8), bins="auto", histtype="step")
plt.yscale("log")
plt.show()
In [ ]:
In [5]:
len(csiborg_cat)
Out[5]:
In [11]:
snapdir = "/mnt/extraspace/mhutt/Quijote/0"
snapnum = 4
In [12]:
import readfof  # FoF catalogue reader from the Pylians3 library
FoF = readfof.FoF_catalog(snapdir, snapnum, long_ids=False,
                          swap=False, SFR=False, read_IDs=False)
In [24]:
nsim0 = 7444
cat0 = csiborgtools.read.HaloCatalogue(nsim0, paths, load_initial=True, minmass=None, with_lagpatch=True, load_clumps_cat=True)
In [31]:
cat0.position()
Out[31]:
In [ ]:
In [22]:
clumpmap0 = csiborgtools.read.read_h5(paths.particles_path(nsim0))["clumpmap"]
parts0 = csiborgtools.read.read_h5(paths.initmatch_path(nsim0, "particles"))["particles"]
clid2map0 = {clid: i for i, clid in enumerate(clumpmap0[:, 0])}
# # clumpmapx = csiborgtools.read.read_h5(paths.particles_path(nsimx))["clumpmap"]
# # partsx = csiborgtools.read.read_h5(paths.initmatch_path(nsimx, "particles"))["particles"]
# # clid2mapx = {clid: i for i, clid in enumerate(clumpmapx[:, 0])}
cat0 = csiborgtools.read.HaloCatalogue(nsim0, paths, load_initial=True, minmass=None, with_lagpatch=True, load_clumps_cat=True)
catx = csiborgtools.read.HaloCatalogue(nsimx, paths, load_initial=True, minmass=None, with_lagpatch=True, load_clumps_cat=True)
In [4]:
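# Load the precomputed potential field of realisation 7444 (PCS mass assignment, 256^3 grid);
# by analogy with the density-field cells further down, the final flag presumably toggles redshift space.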
f = np.load(paths.field_path("potential", "PCS", 256, 7444, False))
In [7]:
plt.figure()
plt.imshow(np.sum(f, axis=0))
plt.colorbar()
plt.show()
In [7]:
f.shape
Out[7]:
In [16]:
plt.figure()
plt.imshow(np.log10(np.abs(np.mean(f[0, ...], axis=-1))))
plt.colorbar()
plt.savefig("../plots/eye.png", dpi=450)
plt.show()
In [ ]:
In [76]:
x = pair.dist(in_initial=True, norm_kind="sum_patch")
y = pair.overlap(False)
x = np.concatenate(x)
y = np.concatenate(y)
In [80]:
from scipy.special import comb
from itertools import combinations
In [81]:
ics = paths.get_ics()
In [88]:
import random
combs = list(combinations(ics, 2))
random.shuffle(combs)
In [91]:
random.Random(32).shuffle(combs)
In [92]:
combs
Out[92]:
In [ ]:
In [77]:
plt.figure()
plt.scatter(x, y, s=0.1)
plt.show()
In [ ]:
In [53]:
for i in list(clid2map0.keys())[50000:]:
    print(i)
    k = clid2map0[i]
    X = csiborgtools.read.load_parent_particles(k, parts0, clumpmap0, clid2map0, cat0.clumps_cat)
    if X is not None:
        break
X
Out[53]:
In [54]:
plt.figure()
plt.scatter(X[:, 0], X[:, 1], s=1)
plt.show()
In [ ]:
In [5]:
parts = csiborgtools.read.read_h5(paths.particles_path(nsim0))["particles"]
In [6]:
box = csiborgtools.read.CSiBORGBox(nsnap0, nsim0, paths)
field = csiborgtools.field.DensityField(box, "CIC")
In [7]:
rho = np.load(paths.density_field_path("PCS", 7444, False))
rho_rsp = np.load(paths.density_field_path("PCS", 7444, True))
In [14]:
plt.figure()
plt.imshow(np.sum(rho, axis=1))
plt.show()
In [13]:
plt.figure()
plt.imshow(np.sum(rho_rsp, axis=1))
plt.show()
In [ ]:
In [25]:
np.abs(x - 1).max()
Out[25]:
In [ ]:
In [94]:
k0 = clumpmap0[clid2map0[0], 2] + 1
# parts.shape[0]
In [96]:
parts.shape[0] - k0
Out[96]:
In [44]:
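# maxdist = 155 / 0.705 presumably converts the ~155 Mpc / h CSiBORG high-resolution radius to Mpc.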
clumps_cat = csiborgtools.read.ClumpsCatalogue(nsim0, paths, load_fitted=True, minmass=None, maxdist=155 / 0.705)
In [119]:
cat = csiborgtools.read.HaloCatalogue(nsim0, paths, minmass=None)
In [124]:
cat['ID']
Out[124]:
In [114]:
len(cat)
Out[114]:
In [118]:
numpy.isnan(cat["lagpatch"]).sum()
Out[118]:
In [ ]:
In [88]:
cat['x0']
Out[88]:
In [ ]:
In [46]:
len(clumps_cat)
Out[46]:
In [50]:
m = numpy.isfinite(clumps_cat["lambda200c"])
plt.figure()
plt.hist(np.log10(clumps_cat["lambda200c"][m]), bins="auto")
plt.show()
In [ ]:
In [ ]:
In [5]:
clumps_cat["index"][:10]
Out[5]:
In [6]:
X = csiborgtools.read.load_parent_particles(3, parts, clumpmap0, clid2map0, clumps_cat)
In [29]:
clump = csiborgtools.fits.Clump(X, clumps_cat[2], box)
clumps_cat[2]["index"]
Out[29]:
In [37]:
clump.spherical_overdensity_mass(200)
Out[37]:
In [26]:
def f(mass):
    return numpy.cumsum(mass)


@jit(nopython=True)
def g(mass):
    return numpy.cumsum(mass)


x = clump['M']
In [25]:
%timeit f(x)
In [28]:
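# The first execution of the numba-jitted g includes JIT compilation; calling g(x) once before
# timing gives a fairer comparison with the plain NumPy f above.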
%timeit g(x)
In [ ]:
In [19]:
%timeit clump.spherical_overdensity_mass(200)
In [14]:
plt.figure()
plt.plot(rs, y)
plt.yscale("log")
plt.xscale("log")
plt.show()
In [16]:
%timeit clump.r()
In [87]:
x, y, z = clump['x'], clump['y'], clump['z']
x0, y0, z0 = clumps_cat[3]['x'], clumps_cat[3]['y'], clumps_cat[3]['z']
In [93]:
%timeit ((x - x0)**2 + (y - y0)**2 + (z - z0)**2)**0.5
In [92]:
%timeit clump.r()
In [267]:
delta_bckg *= 2
In [260]:
%timeit delta1, delta2, cellmins, nonzero = overlapper.make_deltas(pos, pos2, mass, mass)
# __ = overlapper.make_deltas(pos, pos2, mass, mass)
In [271]:
%timeit overlapper(pos, pos2, mass, mass, delta_bckg)
In [259]:
fig, axs = plt.subplots(ncols=2, figsize=(10, 5))
axs[0].imshow(np.sum(delta1, axis=0))
axs[1].imshow(np.sum(delta2, axis=0))
fig.tight_layout()
In [93]:
delta = overlapper.make_bckg_delta(pos, mass / mass)
In [282]:
from functools import cache
from functools import lru_cache
In [283]:
from time import sleep
@lru_cache(maxsize=1024)
def f(x):
    sleep(0.5)
    return x * x
In [286]:
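# After the first call (which sleeps for 0.5 s), f(2) is served from the lru_cache,
# so the timing below mostly measures the cost of the cache lookup.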
%timeit f(2)
In [77]:
clumpmap0[:]
Out[77]:
In [72]:
dens = overlapper.make_delta(pos, mass, subbox=True)
In [75]:
In [ ]:
In [ ]:
In [55]:
x.shape, x0.shape
Out[55]:
In [56]:
plt.figure()
# plt.scatter(x[:, 0], x[:, 1], s=0.1)
plt.scatter(x0[:, 0], x0[:, 1], s=0.1)
plt.show()
In [11]:
unique_clump_ids = numpy.unique(clump_ids)
In [22]:
unique_clump_ids[:100]
Out[22]:
In [26]:
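# IEEE 754 defines NaN != NaN, so `numpy.nan == numpy.nan` is always False;
# numpy.isnan is the correct check. The two cells below compare their cost.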
%timeit numpy.nan == numpy.nan
In [27]:
%timeit numpy.isnan(numpy.nan)
In [29]:
test = {"asd": 1}
with h5py.File("test.hp", 'w') as f:
    f.create_dataset("test", data=test)
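h5py cannot store a Python dict as a dataset (the call above raises a TypeError for object dtypes). A minimal sketch of one workaround, writing each key of the small `test` dict above as an HDF5 attribute of a group:
test = {"asd": 1}
with h5py.File("test.hp", 'w') as f:
    grp = f.create_group("test")
    for key, value in test.items():
        grp.attrs[key] = value  # scalars map naturally onto attributes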
In [ ]:
f
In [23]:
k0, kf = minmax_clump(259, clump_ids)
kf - k0 + 1
Out[23]:
In [24]:
clump_ids[k0:kf+1]
Out[24]:
In [ ]:
In [3]:
import numpy
nsim = nsim0
nsnap = nsnap0
part0 = h5py.File(paths.initmatch_path(nsim, "particles"), 'r')['particles']
cmap = h5py.File(paths.particles_path(nsim, "clumpmap"), 'r')
partf = h5py.File(paths.particles_path(nsim), 'r')["particles"]
In [8]:
part0[cmap[str(558655)], :]
In [5]:
cat = csiborgtools.read.ClumpsCatalogue(nsim, paths, rawdata=True)
In [44]:
from tqdm import tqdm
y = np.zeros((10000, 2), dtype=np.int32)
for i, hid in tqdm(enumerate(cat["index"][cat.ismain][:10000])):
    n = np.sum(cat["parent"] == hid)
    y[i, :] = hid, n
In [8]:
cat["index"][cat["parent"] == 558655]
Out[8]:
In [45]:
y[np.argmax(y[:, 1]), :]
Out[45]:
In [8]:
keys = list(cmap.keys())
In [11]:
csiborgtools.fits.load_parent_particles(558655, part0, cmap, cat)
# Xf = csiborgtools.fits.load_parent_particles(378, partf, cmap, cat)
In [6]:
plt.figure()
plt.scatter(X[:, 0], X[:, 1], s=0.1)
# plt.scatter(Xf[:, 0], Xf[:, 1], s=0.1)
plt.show()
In [ ]:
In [3]:
pid_final = np.load("pid_final.npy")
pid_init = np.load("pid_initial.npy")
In [4]:
pid_order = np.argsort(pid_final).astype(np.int32)
pid_order_2 = np.argsort(pid_order).astype(np.int32)
In [6]:
# Sort them
pid_init = pid_init[np.argsort(pid_init)]
In [11]:
np.alltrue(pid_init[pid_order_2] == pid_final)
Out[11]:
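The check above uses the double-argsort trick: `pid_order` is the permutation that sorts `pid_final` and `pid_order_2` is its inverse, so indexing the sorted initial IDs with `pid_order_2` reproduces the ordering of `pid_final` whenever both arrays contain the same IDs. A tiny self-contained illustration (array values are made up):
import numpy as np

a = np.array([30, 10, 20])      # stand-in for pid_final
order = np.argsort(a)           # permutation that sorts a
inverse = np.argsort(order)     # inverse permutation (the rank of each element)
assert np.all(np.sort(a)[inverse] == a)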
In [10]:
Out[10]:
In [25]:
pid_final[pid_order][pid_order_2]
Out[25]:
In [18]:
part0['ID']
Out[18]:
In [ ]:
In [ ]:
In [8]:
indx1 = numpy.argsort(part0['ID'])
indx2 = numpy.argsort(pid)
indx1_inv = numpy.argsort(indx1)
part0 = part0[indx2][indx1_inv]
# pid = pid[indx2]
In [9]:
part0
Out[9]:
In [10]:
pid
Out[10]:
In [6]:
pid
Out[6]:
In [9]:
cat = csiborgtools.read.ClumpsCatalogue(nsim, paths, load_fitted=False, rawdata=True)
parent_ids = cat["index"][cat.ismain]
parent_ids[:50]
Out[9]:
In [54]:
i = cat["index"][np.argsort(cat["mass_cl"])[::-3][213]]
i
Out[54]:
In [55]:
# clid = 10170336
# clid = 10170336 + 12
clid = i
mmain_indxs = cat["index"][cat["parent"] == clid]
# mask = clump_ids == clid
# print(mask.sum())
print(mmain_indxs)
print([numpy.sum(clump_ids == j) for j in mmain_indxs])
In [56]:
mask = numpy.isin(clump_ids, mmain_indxs)
mask.sum()
Out[56]:
In [57]:
# coords = part0[mask, :]
plt.figure()
plt.scatter(part0['x'][mask], part0['y'][mask], s=1)
plt.show()
In [193]:
particles0 = h5py.File(paths.initmatch_path(nsim0, "particles"), 'r')['particles']
particlesx = h5py.File(paths.initmatch_path(nsimx, "particles"), 'r')['particles']
In [4]:
# delta_bckg = overlapper.make_bckg_delta(particles0, verbose=True)
# delta_bckg = overlapper.make_bckg_delta(particlesx, delta=delta_bckg, verbose=True)
# np.save("./bckg_{}_{}.npy".format(nsim0, nsimx), delta_bckg)
# delta_bckg = np.load("./bckg_{}_{}.npy".format(nsim0, nsimx))
In [194]:
map0 = h5py.File(paths.initmatch_path(nsim0, "halomap"), 'r')
mapx = h5py.File(paths.initmatch_path(nsimx, "halomap"), 'r')
In [174]:
keys = np.array(sorted([int(x) for x in mapx.keys()]))
In [175]:
paths = csiborgtools.read.Paths(**csiborgtools.paths_glamdring)
halocat = csiborgtools.read.HaloCatalogue(nsimx, paths, rawdata=True)
clumpcat = csiborgtools.read.ClumpsCatalogue(nsimx, paths, rawdata=True)
In [176]:
clid = 2
np.sum(clumpcat["parent"] == clid)
# mmain_indxs = clumpcat["index"][clumpcat["parent"] == clid]
# mmain_indxs
# mmain_mask = numpy.isin(clump_ids, mmain_indxs, assume_unique=True)
Out[176]:
In [195]:
fs = list(map0.keys())
In [196]:
from tqdm import tqdm
tot = 0
for f in tqdm(fs):
    tot += map0[f].shape[0]
print(tot)
In [197]:
particles0
Out[197]:
In [198]:
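# Fraction of all particles that were assigned to a halo; 182510395 is presumably the total
# particle count reported for `particles0` in the previous cell.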
tot / 182510395
Out[198]:
In [172]:
particlesx
Out[172]:
In [177]:
i = 10170336
k = np.where(cat["ID"] == i)[0][0]
k
Out[177]:
In [178]:
# i = np.random.choice(keys[keys<100000])
# # i = 10040155 #+ 11
print(i)
# i = 32
mask = map0[str(i)]
mask
Out[178]:
In [183]:
halo = particlesx[mask, :]
In [184]:
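# Total mass of the selected halo divided by 1.1641532e-10, presumably the particle mass in
# box units, i.e. an estimate of the number of particles in the halo.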
np.sum(halo[:, 3]) / 1.1641532e-10
Out[184]:
In [186]:
plt.figure()
plt.scatter(halo[:, 0], halo[:, 1], s=2)
# plt.scatter(cat['x'][k], cat['y'][k], s=10, c='r')
plt.xlabel(r"x")
plt.ylabel(r"y")
plt.show()
In [21]:
plt.figure()
plt.imshow(np.log10(np.sum(delta_bckg, axis=0)))
plt.show()
In [2]:
f = h5py.File("../data/particles_7444.h5", "r")
In [8]:
f["particles"][0, :]
Out[8]:
In [ ]: