import borg_velocity.likelihood as likelihood
import borg_velocity.forwards as forwards
import borg_velocity.utils as utils

import aquila_borg as borg
import configparser
import h5py as h5
import numpy as np
from tqdm import tqdm
import glob
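
# Location of the BORG run to analyse and its configuration file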
dirname = '/data101/bartlett/fsigma8/borg_velocity/basic_run_ic'
ini_file = f'{dirname}/basic_ini.ini'

def get_mcmc_steps(nframe, iter_max, iter_min=0):
    """
    Obtain roughly nframe evenly-spaced MCMC steps (between iter_min and
    iter_max) to make a movie from, always keeping the last available step.
    """
    # Collect the integer step numbers of all mcmc_<step>.h5 files in the run directory
    all_mcmc = glob.glob(dirname + '/mcmc_*.h5')
    all_mcmc = np.sort([int(m[len(dirname + '/mcmc_'):-3]) for m in all_mcmc])
    if iter_max >= 0:
        all_mcmc = all_mcmc[all_mcmc <= iter_max]
    all_mcmc = all_mcmc[all_mcmc >= iter_min]
    if nframe > 0:
        # Thin the list to roughly nframe evenly-spaced steps
        max_out = max(all_mcmc)
        min_out = min(all_mcmc)
        step = max(int((max_out - min_out + 1) / nframe), 1)
        all_mcmc = all_mcmc[::step]
        if max_out not in all_mcmc:
            all_mcmc = np.concatenate([all_mcmc, [max_out]])
    return all_mcmc
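
# Illustrative example (assuming files mcmc_0.h5 ... mcmc_1748.h5 exist on disk):
# get_mcmc_steps(10, 1748) returns roughly 10 evenly-spaced step numbers ending at 1748.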

# Input box
box_in = borg.forward.BoxModel()
config = configparser.ConfigParser()
config.read(ini_file)
box_in.L = (float(config['system']['L0']), float(config['system']['L1']), float(config['system']['L2']))
box_in.N = (int(config['system']['N0']), int(config['system']['N1']), int(config['system']['N2']))
box_in.xmin = (float(config['system']['corner0']), float(config['system']['corner1']), float(config['system']['corner2']))
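
# box_in describes the simulation volume: side lengths (L), mesh sizes (N) and
# corner coordinates (xmin), all read from the [system] section of the ini file.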

# Set up BORG forward model and likelihood
model = likelihood.build_gravity_model(None, box_in, ini_file=ini_file)
cosmo = utils.get_cosmopar(ini_file)
model.setCosmoParams(cosmo)
fwd_param = forwards.NullForward(box_in)
fwd_vel = likelihood.fwd_vel
mylike = likelihood.VelocityBORGLikelihood(model, fwd_param, fwd_vel, ini_file)
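
# Going by the names in borg_velocity: `model` is the gravity forward model,
# `fwd_param` is a null forward model holding extra sampled parameters,
# `fwd_vel` maps the density field to a velocity field, and `mylike`
# combines them into the velocity likelihood.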

# Load s_hat from file
with h5.File(f'{dirname}/mock_data.h5', 'r') as f:
    s_hat = f['scalars/s_hat_field'][:]
    print(f['scalars/hmc_Elh'][:], f['scalars/hmc_Eprior'][:])
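
# s_hat_field is the complex Fourier-space field that seeds the forward model;
# hmc_Elh and hmc_Eprior are the likelihood and prior energies recorded by the
# HMC sampler for this state.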

# Make mock data
state = borg.likelihood.MarkovState()
mylike.initializeLikelihood(state)
mylike.updateCosmology(cosmo)
mylike.generateMockData(s_hat, state)
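
# generateMockData runs the forward model on s_hat and stores the resulting
# mock observations in the MarkovState, so the likelihood evaluations below
# are made against data generated from this field.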

# (debug) inspect the available likelihood methods and stop here
# print(dir(mylike))
# quit()

# Evaluate likelihood
initial_logL = mylike.logLikelihoodComplex(s_hat, None)
print(initial_logL)
print(np.var(s_hat))
# print('var(s_hat): 1.0002235630494603, Call to logLike: 2945.19580078125')
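
# Sanity check: s_hat is drawn from a unit-variance Gaussian prior, so
# np.var(s_hat) should be close to 1 (cf. the reference values above).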

# Load some MCMC samples and see how they compare
nframe = 10
iter_min = 0
iter_max = 1748
all_mcmc = get_mcmc_steps(nframe, iter_max, iter_min=iter_min)
all_logL = np.zeros(len(all_mcmc))
for i in tqdm(range(len(all_mcmc))):
    with h5.File(f'{dirname}/mcmc_{all_mcmc[i]}.h5', 'r') as f:
        s_hat = f['scalars/s_hat_field'][:]
        # print(f['scalars'].keys())
        print(f['scalars/hmc_Elh'][:], f['scalars/hmc_Eprior'][:])
    all_logL[i] = mylike.logLikelihoodComplex(s_hat, None)

print(all_logL)