Initial import
This commit is contained in:
commit
56a50eead3
820 changed files with 192077 additions and 0 deletions
94
extra/hades/src/generic_hades_bundle.hpp
Normal file
94
extra/hades/src/generic_hades_bundle.hpp
Normal file
|
@ -0,0 +1,94 @@
|
|||
#ifndef __TOOLS_GENERIC_HADES_BUNDLE_HPP
|
||||
#define __TOOLS_GENERIC_HADES_BUNDLE_HPP
|
||||
|
||||
#include <string>
|
||||
|
||||
#include "libLSS/samplers/hades/hades_linear_likelihood.hpp"
|
||||
#include "libLSS/samplers/core/powerspec_tools.hpp"
|
||||
|
||||
#include "libLSS/physics/forward_model.hpp"
|
||||
|
||||
#include "libLSS/samplers/rgen/density_sampler.hpp"
|
||||
#include "libLSS/samplers/rgen/hmc/hmc_density_sampler.hpp"
|
||||
|
||||
#include "likelihood_info.hpp"
|
||||
|
||||
#include <boost/algorithm/string.hpp>
|
||||
|
||||
|
||||
|
||||
namespace LibLSS {
|
||||
|
||||
namespace {
|
||||
|
||||
HMCOption::IntegratorScheme get_Scheme(const std::string &s) {
|
||||
std::string scheme = boost::to_upper_copy<std::string>(s);
|
||||
using namespace HMCOption;
|
||||
|
||||
if (scheme == "SI_2A" || scheme == "LEAP_FROG") {
|
||||
return SI_2A;
|
||||
} else if (scheme == "SI_2B") {
|
||||
return SI_2B;
|
||||
} else if (scheme == "SI_2C") {
|
||||
return SI_2C;
|
||||
} else if (scheme == "SI_3A") {
|
||||
return SI_3A;
|
||||
} else if (scheme == "SI_4B") {
|
||||
return SI_4B;
|
||||
} else if (scheme == "SI_4C") {
|
||||
return SI_4C;
|
||||
} else if (scheme == "SI_4D") {
|
||||
return SI_4D;
|
||||
} else if (scheme == "SI_6A") {
|
||||
return SI_6A;
|
||||
} else if (scheme == "CG_89") {
|
||||
return CG_89;
|
||||
} else {
|
||||
error_helper<ErrorBadState>(
|
||||
boost::format("Invalid integration scheme %s") % scheme);
|
||||
}
|
||||
}
|
||||
|
||||
} // namespace
|
||||
|
||||
class DummyPowerSpectrum : public PowerSpectrumSampler_Base {
|
||||
public:
|
||||
DummyPowerSpectrum(MPI_Communication *comm)
|
||||
: PowerSpectrumSampler_Base(comm) {}
|
||||
|
||||
virtual void initialize(MarkovState &state) { initialize_base(state); }
|
||||
virtual void restore(MarkovState &state) { restore_base(state); }
|
||||
|
||||
virtual void sample(MarkovState &state) {}
|
||||
};
|
||||
|
||||
struct GenericHadesBundle {
|
||||
std::shared_ptr<HadesBaseDensityLikelihood> likelihood;
|
||||
std::shared_ptr<MarkovSampler> hades_meta;
|
||||
|
||||
virtual ~GenericHadesBundle() {}
|
||||
};
|
||||
|
||||
template <typename DensityLikelihood>
|
||||
struct HadesBundle : public GenericHadesBundle {
|
||||
typedef DensityLikelihood likelihood_t;
|
||||
typedef typename DensityLikelihood::grid_t grid_t;
|
||||
|
||||
std::shared_ptr<DensityLikelihood> hades_likelihood;
|
||||
|
||||
typedef typename grid_t::GridSizes GridSizes;
|
||||
typedef typename grid_t::GridLengths GridLengths;
|
||||
|
||||
HadesBundle(LikelihoodInfo &info)
|
||||
: hades_likelihood(std::make_shared<likelihood_t>(info)) {
|
||||
this->hades_meta = std::make_shared<HadesMetaSampler>(
|
||||
Likelihood::getMPI(info), hades_likelihood);
|
||||
this->likelihood = hades_likelihood;
|
||||
}
|
||||
|
||||
virtual ~HadesBundle() {}
|
||||
};
|
||||
|
||||
}
|
||||
|
||||
#endif
|
56
extra/hades/src/hades3.cpp
Normal file
56
extra/hades/src/hades3.cpp
Normal file
|
@ -0,0 +1,56 @@
|
|||
/*+
|
||||
ARES/HADES/BORG Package -- ./extra/hades/src/hades3.cpp
|
||||
Copyright (C) 2014-2020 Guilhem Lavaux <guilhem.lavaux@iap.fr>
|
||||
Copyright (C) 2009-2020 Jens Jasche <jens.jasche@fysik.su.se>
|
||||
|
||||
Additional contributions from:
|
||||
Guilhem Lavaux <guilhem.lavaux@iap.fr> (2023)
|
||||
|
||||
+*/
|
||||
#define SAMPLER_DATA_INIT "ares_init.hpp"
|
||||
#define SAMPLER_BUNDLE "hades_bundle.hpp"
|
||||
#define SAMPLER_BUNDLE_INIT "hades_bundle_init.hpp"
|
||||
#define SAMPLER_NAME "HADES3"
|
||||
#define SAMPLER_MOCK_GENERATOR "hades_mock_gen.hpp"
|
||||
#include "common/sampler_base.cpp"
|
||||
|
||||
#include "libLSS/tools/color_mod.hpp"
|
||||
using namespace LibLSS::Color;
|
||||
|
||||
namespace {
|
||||
|
||||
void init_splash() {
|
||||
|
||||
static string splash_str[] = {
|
||||
|
||||
" ",
|
||||
" /\\_/\\____, "
|
||||
"____________________________ ",
|
||||
" ,___/\\_/\\ \\ ~ / " +
|
||||
fg(RED, "HADES3", BRIGHT) + " ",
|
||||
" \\ ~ \\ ) XXX ",
|
||||
" XXX / /\\_/\\___, (c) Jens Jasche 2012 - 2020",
|
||||
" \\o-o/-o-o/ ~ / Guilhem Lavaux 2014 - 2020",
|
||||
" ) / \\ XXX "
|
||||
"____________________________ ",
|
||||
" _| / \\ \\_/ ",
|
||||
" ,-/ _ \\_/ \\ ",
|
||||
" / ( /____,__| ) ",
|
||||
" ( |_ ( ) \\) _| ",
|
||||
" _/ _) \\ \\__/ (_ ",
|
||||
" (,-(,(,(,/ \\,),),) "
|
||||
"",
|
||||
"Please acknowledge XXXX",
|
||||
};
|
||||
|
||||
static const int numSplashStr = sizeof(splash_str) / sizeof(splash_str[0]);
|
||||
|
||||
for (int i = 0; i < numSplashStr; i++)
|
||||
Console::instance().print<LOG_STD>(splash_str[i]);
|
||||
}
|
||||
|
||||
void close_splash() {}
|
||||
|
||||
RegisterStaticInit reg_splash(init_splash, close_splash, 12);
|
||||
|
||||
} // namespace
|
113
extra/hades/src/hades_bundle.hpp
Normal file
113
extra/hades/src/hades_bundle.hpp
Normal file
|
@ -0,0 +1,113 @@
|
|||
/*+
|
||||
ARES/HADES/BORG Package -- ./extra/hades/src/hades_bundle.hpp
|
||||
Copyright (C) 2014-2020 Guilhem Lavaux <guilhem.lavaux@iap.fr>
|
||||
Copyright (C) 2009-2020 Jens Jasche <jens.jasche@fysik.su.se>
|
||||
|
||||
Additional contributions from:
|
||||
Guilhem Lavaux <guilhem.lavaux@iap.fr> (2023)
|
||||
|
||||
+*/
|
||||
#ifndef _HADES_BUNDLE_HPP
|
||||
#define _HADES_BUNDLE_HPP
|
||||
#include "libLSS/samplers/hades/hades_linear_likelihood.hpp"
|
||||
#include "libLSS/samplers/core/powerspec_tools.hpp"
|
||||
#include "hades_option.hpp"
|
||||
#include "libLSS/samplers/ares/synthetic_selection.hpp"
|
||||
|
||||
#include "libLSS/physics/forward_model.hpp"
|
||||
#include "libLSS/physics/hades_log.hpp"
|
||||
#include "libLSS/physics/hades_pt.hpp"
|
||||
|
||||
#include "libLSS/samplers/rgen/density_sampler.hpp"
|
||||
|
||||
#ifdef HADES_SUPPORT_BORG
|
||||
# include "libLSS/borg_version.hpp"
|
||||
|
||||
# include "libLSS/physics/forwards/all_models.hpp"
|
||||
# include "libLSS/physics/modified_ngp.hpp"
|
||||
# include "libLSS/physics/modified_ngp_smooth.hpp"
|
||||
|
||||
# include "libLSS/samplers/borg/borg_poisson_likelihood.hpp"
|
||||
# include "libLSS/samplers/borg/borg_poisson_meta.hpp"
|
||||
|
||||
# include "libLSS/samplers/generic/generic_sigma8.hpp"
|
||||
|
||||
# ifdef HADES_SUPPORT_JULIA
|
||||
# include "libLSS/samplers/julia/julia_likelihood.hpp"
|
||||
# endif
|
||||
|
||||
# include "borg_generic_bundle.hpp"
|
||||
|
||||
# include "likelihood_info.hpp"
|
||||
|
||||
#endif
|
||||
|
||||
#include <boost/algorithm/string.hpp>
|
||||
#include "generic_hades_bundle.hpp"
|
||||
|
||||
namespace LibLSS {
|
||||
|
||||
typedef HadesBundle<HadesLinearDensityLikelihood> LinearBundle;
|
||||
|
||||
#ifdef HADES_SUPPORT_BORG
|
||||
typedef HadesBundle<BorgPoissonLikelihood> PoissonBorgBundle;
|
||||
|
||||
# ifdef HADES_SUPPORT_JULIA
|
||||
struct JuliaBundle {
|
||||
JuliaDensitySampler borg_sampler;
|
||||
|
||||
JuliaBundle(
|
||||
MPI_Communication *comm, std::string code_path, std::string module_name)
|
||||
: borg_sampler(comm, code_path, module_name) {}
|
||||
};
|
||||
# endif
|
||||
|
||||
#endif
|
||||
|
||||
struct SamplerBundle {
|
||||
//BlockLoop foreground_block;
|
||||
typedef std::list<MarkovSampler *> SamplerList;
|
||||
std::function<MarkovSampler *(int, int)> foreground_sampler_generator;
|
||||
DummyPowerSpectrum dummy_ps;
|
||||
SamplerList foreground_samplers;
|
||||
MPI_Communication *comm;
|
||||
std::shared_ptr<GenericHadesBundle> hades_bundle;
|
||||
std::shared_ptr<GenericDensitySampler> density_mc;
|
||||
std::shared_ptr<MarkovSampler> borg_vobs, ap_sampler;
|
||||
std::unique_ptr<MarkovSampler> sigma8_sampler;
|
||||
#ifdef HADES_SUPPORT_BORG
|
||||
std::shared_ptr<VirtualGenericBundle> borg_generic;
|
||||
# ifdef HADES_SUPPORT_JULIA
|
||||
std::unique_ptr<JuliaBundle> borg_julia;
|
||||
# endif
|
||||
#endif
|
||||
BlockLoop foreground_block;
|
||||
SyntheticSelectionUpdater sel_updater;
|
||||
|
||||
SamplerBundle(MPI_Communication *comm) : comm(comm), dummy_ps(comm) {}
|
||||
|
||||
void newForeground(int catalog, int fgmap) {
|
||||
Console::instance().print<LOG_VERBOSE>("Adding new foreground sampler");
|
||||
|
||||
#ifdef HADES_SUPPORT_BORG
|
||||
MarkovSampler *fgsample = foreground_sampler_generator(catalog, fgmap);
|
||||
if (fgsample != 0) {
|
||||
foreground_samplers.push_back(fgsample);
|
||||
foreground_block << (*fgsample);
|
||||
}
|
||||
|
||||
#endif
|
||||
}
|
||||
|
||||
~SamplerBundle() {
|
||||
LIBLSS_AUTO_CONTEXT(LOG_VERBOSE, ctx);
|
||||
for (SamplerList::iterator i = foreground_samplers.begin();
|
||||
i != foreground_samplers.end(); ++i) {
|
||||
delete (*i);
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
} // namespace LibLSS
|
||||
|
||||
#endif
|
477
extra/hades/src/hades_bundle_init.hpp
Normal file
477
extra/hades/src/hades_bundle_init.hpp
Normal file
|
@ -0,0 +1,477 @@
|
|||
/*+
|
||||
ARES/HADES/BORG Package -- ./extra/hades/src/hades_bundle_init.hpp
|
||||
Copyright (C) 2014-2020 Guilhem Lavaux <guilhem.lavaux@iap.fr>
|
||||
Copyright (C) 2009-2020 Jens Jasche <jens.jasche@fysik.su.se>
|
||||
|
||||
Additional contributions from:
|
||||
Guilhem Lavaux <guilhem.lavaux@iap.fr> (2023)
|
||||
|
||||
+*/
|
||||
#ifndef __HADES_BUNDLE_INIT_HPP
|
||||
#define __HADES_BUNDLE_INIT_HPP
|
||||
|
||||
#include "hades_bundle.hpp"
|
||||
#ifdef HADES_SUPPORT_BORG
|
||||
# include "libLSS/samplers/rgen/qnhmc/qnhmc_density_sampler.hpp"
|
||||
# include "libLSS/physics/bias/passthrough.hpp"
|
||||
# include "libLSS/samplers/altair/altair_meta_sampler.hpp"
|
||||
#endif
|
||||
#include "libLSS/samplers/core/generate_random_field.hpp"
|
||||
#include "libLSS/samplers/rgen/frozen/frozen_phase_density_sampler.hpp"
|
||||
#include "libLSS/samplers/generic/generic_sigma8_second.hpp"
|
||||
#include "libLSS/physics/likelihoods/eft.hpp"
|
||||
#include "setup_models.hpp"
|
||||
#include "libLSS/samplers/model_params.hpp"
|
||||
#include "libLSS/physics/haar.hpp"
|
||||
#include "libLSS/samplers/bias_model_params.hpp"
|
||||
|
||||
namespace LibLSS {
|
||||
|
||||
/// Build and wire the full HADES sampler stack from the configuration tree.
///
/// Reads grid geometry and options from the Markov state and `params`,
/// constructs the forward model, selects and instantiates the likelihood
/// (LINEAR, BORG_POISSON, or one of the generic bias/likelihood bundles),
/// chooses the density sampling algorithm (HMC, FROZEN-PHASE, QN-HMC), and
/// schedules everything on the main loop.
///
/// @param mpi_world  communicator for all constructed samplers
/// @param params     parsed configuration tree
/// @param bundle     output: receives the constructed samplers/handles
/// @param loop       main loop onto which samplers are scheduled
/// @param resuming   true when restarting from a saved chain
template <typename ptree>
void sampler_bundle_init(
    MPI_Communication *mpi_world, ptree &params, SamplerBundle &bundle,
    MainLoop &loop, bool resuming) {
  LIBLSS_AUTO_CONTEXT(LOG_INFO_SINGLE, ctx);
  using boost::format;
  using CosmoTool::square;
  auto system_params = params.get_child("system");
  auto block_loop_params = params.get_child_optional("block_loop");
  auto borg_params = params.get_child("gravity");

  int hades_mixing = params.template get<int>("hades.mixing", 20);
  int bias_mixing = params.template get<int>("hades.bias_mixing", 10);
  std::string lh_type =
      params.template get<std::string>("hades.likelihood", "LINEAR");
  std::shared_ptr<MarkovSampler> nmean, bias;
  MarkovSampler *model_error = 0;
  MarkovState &state = loop.get_state();
  Console &cons = Console::instance();
  typedef GridDensityLikelihoodBase<3> grid_t;
  std::shared_ptr<grid_t> likelihood;

  // Grid geometry read back from the Markov state (set up by ares_init).
  grid_t::GridSizes N = {
      size_t(state.getScalar<long>("N0")),
      size_t(state.getScalar<long>("N1")),
      size_t(state.getScalar<long>("N2"))};
  grid_t::GridLengths L = {
      state.getScalar<double>("L0"), state.getScalar<double>("L1"),
      state.getScalar<double>("L2")};
  grid_t::GridLengths corners = {
      state.getScalar<double>("corner0"), state.getScalar<double>("corner1"),
      state.getScalar<double>("corner2")};

  LikelihoodInfo like_info;
  // NOTE(review): raw new — ownership appears to pass to the state via
  // newElement below; confirm MarkovState deletes its elements.
  BorgModelElement *model = new BorgModelElement();
  loop.get_state().newElement("BORG_model", model);

#ifdef HADES_SUPPORT_BORG
  loop.get_state().newScalar("BORG_version", BORG_GIT_VERSION);
#endif
  LibLSS_prepare::setupLikelihoodInfo(
      mpi_world, loop.get_state(), like_info, params, resuming);
  bool rsd = adapt<bool>(state, params, "gravity.do_rsd", true);

  // Physical box passed to the forward model builder.
  BoxModel box;
  box.xmin0 = state.getScalar<double>("corner0");
  box.xmin1 = state.getScalar<double>("corner1");
  box.xmin2 = state.getScalar<double>("corner2");
  box.L0 = L[0];
  box.L1 = L[1];
  box.L2 = L[2];
  box.N0 = N[0];
  box.N1 = N[1];
  box.N2 = N[2];

  model->obj = buildModel(
      MPI_Communication::instance(), state, box, params, borg_params);

  // ---- Likelihood selection ------------------------------------------
  if (lh_type == "LINEAR") {
    bundle.hades_bundle = std::make_unique<LinearBundle>(like_info);
    likelihood = bundle.hades_bundle->likelihood;
  }
#ifdef HADES_SUPPORT_BORG
  else if (lh_type == "BORG_POISSON") {
    auto poisson_bundle = std::make_unique<PoissonBorgBundle>(like_info);
    // Hack the bias sampler
    poisson_bundle->hades_meta =
        std::make_shared<BorgPoissonBiasSampler>(bundle.comm);
    nmean = std::make_shared<BorgPoissonNmeanSampler>(bundle.comm);
    bundle.hades_bundle = std::move(poisson_bundle);
    bundle.borg_vobs = std::make_unique<BorgPoissonVobsSampler>(bundle.comm);
    likelihood = bundle.hades_bundle->likelihood;
  } else {
    // Dispatch table: likelihood name -> factory for a generic
    // bias/likelihood bundle. Each factory fills the by-reference output
    // arguments (likelihood, nmean, bias, vobs, foreground generator).
    typedef std::shared_ptr<MarkovSampler> markov_ptr;
    std::map<
        std::string,
        std::function<std::shared_ptr<VirtualGenericBundle>(
            ptree &, std::shared_ptr<GridDensityLikelihoodBase<3>> &,
            markov_ptr &, markov_ptr &, markov_ptr &,
            std::function<MarkovSampler *(int, int)> &, LikelihoodInfo &)>>
        generic_map{
            {"GAUSSIAN_BROKEN_POWERLAW_BIAS",
             create_generic_bundle<
                 AdaptBias_Gauss<bias::BrokenPowerLaw>, GaussianLikelihood,
                 ptree &>},
            {"GAUSSIAN_MO_WHITE_BIAS",
             create_generic_bundle<
                 AdaptBias_Gauss<bias::DoubleBrokenPowerLaw>,
                 GaussianLikelihood, ptree &>},
            {"GAUSSIAN_POWERLAW_BIAS", create_generic_bundle<
                                           AdaptBias_Gauss<bias::PowerLaw>,
                                           GaussianLikelihood, ptree &>},
            {"POISSON",
             create_generic_bundle<
                 bias::Passthrough, VoxelPoissonLikelihood, ptree &>},
            // FS: for now, disallow bundling of EFTBias to GaussianLikelihood
            // {"EFT_BIAS_WITH_THRESHOLDER",
            //  create_generic_bundle<
            //      bias::EFTBias<true>, GaussianLikelihood,
            //      ptree &>},
            {"GENERIC_POISSON_POWERLAW_BIAS",
             create_generic_bundle<
                 bias::PowerLaw, VoxelPoissonLikelihood, ptree &>},
            {"GENERIC_POISSON_POWERLAW_BIAS_DEGRADE4",
             create_generic_bundle<
                 bias::Downgrader<bias::PowerLaw>, VoxelPoissonLikelihood,
                 ptree &>},
            {"GENERIC_POISSON_BROKEN_POWERLAW_BIAS_DEGRADE4",
             create_generic_bundle<
                 bias::Downgrader<bias::BrokenPowerLaw>,
                 VoxelPoissonLikelihood, ptree &>},
            {"GENERIC_POISSON_BROKEN_POWERLAW_BIAS",
             create_generic_bundle<
                 bias::BrokenPowerLaw, VoxelPoissonLikelihood, ptree &>},
            {"GENERIC_GAUSSIAN_LINEAR_BIAS",
             create_generic_bundle<
                 AdaptBias_Gauss<bias::LinearBias>, GaussianLikelihood,
                 ptree &>},
            {"GENERIC_GAUSSIAN_MANY_POWER_1^1",
             create_generic_bundle<
                 AdaptBias_Gauss<
                     bias::ManyPower<bias::ManyPowerLevels<double, 1>>>,
                 GaussianLikelihood, ptree &>},
            {"GENERIC_GAUSSIAN_MANY_POWER_1^2",
             create_generic_bundle<
                 AdaptBias_Gauss<
                     bias::ManyPower<bias::ManyPowerLevels<double, 1, 1>>>,
                 GaussianLikelihood, ptree &>},
            {"GENERIC_GAUSSIAN_MANY_POWER_1^4",
             create_generic_bundle<
                 AdaptBias_Gauss<bias::ManyPower<
                     bias::ManyPowerLevels<double, 1, 1, 1, 1>>>,
                 GaussianLikelihood, ptree &>},
            {"GENERIC_POISSON_POWER_LAW",
             create_generic_bundle<
                 bias::PowerLaw, VoxelPoissonLikelihood, ptree &>},
            {"GENERIC_POISSON_MANY_POWER_1^1",
             create_generic_bundle<
                 bias::ManyPower<bias::ManyPowerLevels<double, 1>>,
                 VoxelPoissonLikelihood, ptree &>},
            {"GENERIC_POISSON_MANY_POWER_1^2",
             create_generic_bundle<
                 bias::ManyPower<bias::ManyPowerLevels<double, 1, 1>>,
                 VoxelPoissonLikelihood, ptree &>},
            {"GENERIC_POISSON_MANY_POWER_2^2",
             create_generic_bundle<
                 bias::ManyPower<bias::ManyPowerLevels<double, 2, 2>>,
                 VoxelPoissonLikelihood, ptree &>},
            {"GENERIC_POISSON_MANY_POWER_1^4",
             create_generic_bundle<
                 bias::ManyPower<bias::ManyPowerLevels<double, 1, 1, 1, 1>>,
                 VoxelPoissonLikelihood, ptree &>},
            {"GENERIC_POISSON_MANY_POWER_2^4",
             create_generic_bundle<
                 bias::ManyPower<bias::ManyPowerLevels<double, 2, 2, 2, 2>>,
                 VoxelPoissonLikelihood, ptree &>},
            {"ROBUST_POISSON_POWERLAW_BIAS",
             create_generic_bundle<
                 bias::PowerLaw, RobustPoissonLikelihood, ptree &>},
            {"ROBUST_POISSON_BROKEN_POWERLAW_BIAS",
             create_generic_bundle<
                 bias::BrokenPowerLaw, RobustPoissonLikelihood, ptree &>},
            {"ROBUST_POISSON_MANY_POWER_1^1",
             create_generic_bundle<
                 bias::ManyPower<bias::ManyPowerLevels<double, 1>>,
                 RobustPoissonLikelihood, ptree &, true>},
            {"ROBUST_POISSON_MANY_POWER_1^2",
             create_generic_bundle<
                 bias::ManyPower<bias::ManyPowerLevels<double, 1, 1>>,
                 RobustPoissonLikelihood, ptree &, true>},
            {"ROBUST_POISSON_MANY_POWER_2^2",
             create_generic_bundle<
                 bias::ManyPower<bias::ManyPowerLevels<double, 2, 2>>,
                 RobustPoissonLikelihood, ptree &, true>},
            {"GENERIC_POISSON_MANY_POWER_4^1",
             create_generic_bundle<
                 bias::ManyPower<bias::ManyPowerLevels<double, 4>>,
                 VoxelPoissonLikelihood, ptree &>},
            {"GENERIC_POISSON_MANY_POWER_1^2_DEGRADE2",
             create_generic_bundle<
                 bias::Downgrader<
                     bias::ManyPower<bias::ManyPowerLevels<double, 1, 1>>,
                     bias::DegradeGenerator<1, 1>>,
                 VoxelPoissonLikelihood, ptree &>},
            {"GENERIC_POISSON_MANY_POWER_2^2_DEGRADE4",
             create_generic_bundle<
                 bias::Downgrader<
                     bias::ManyPower<bias::ManyPowerLevels<double, 2, 2>>,
                     bias::DegradeGenerator<1, 1, 1>>,
                 VoxelPoissonLikelihood, ptree &>},
            // FS: for now, disallow bundling of EFTBias to GaussianLikelihood
            // {"EFT_BIAS_GAUSS", create_generic_bundle<bias::EFTBiasDefault, GaussianLikelihood,ptree &>},
            {"EFT_BIAS_LIKE",
             create_generic_bundle<
                 bias::EFTBiasDefault, EFTLikelihood, ptree &>}};

    auto iter = generic_map.find(lh_type);
    if (iter != generic_map.end()) {
      bundle.borg_generic = iter->second(
          system_params, likelihood, nmean, bias, bundle.borg_vobs,
          bundle.foreground_sampler_generator, like_info);
      bundle.borg_vobs.reset();
    } else {
      error_helper<ErrorParams>(
          "Unknown Generic Hades likelihood " + lh_type);
    }
  }
#endif

  if (!likelihood) {
    error_helper<ErrorParams>("Unknown Hades likelihood " + lh_type);
  }

  // Fall back to the bundle's own meta sampler if the likelihood branch
  // did not install a bias sampler.
  if (!bias && bundle.hades_bundle && bundle.hades_bundle->hades_meta) {
    bias = bundle.hades_bundle->hades_meta;
  }

  cons.print<LOG_STD>("Selected Hades likelihood: " + lh_type);

  // Initialize foregrounds
  LibLSS_prepare::initForegrounds(
      mpi_world, state,
      [&bundle](int c, int a) { bundle.newForeground(c, a); }, params);

#ifdef HADES_SUPPORT_BORG
  bool sigma8block = adapt_optional<bool>(
      loop.get_state(), block_loop_params, "sigma8_sampler_blocked", true,
      DO_NOT_RESTORE);

  if (!sigma8block) {
    ctx.print("Sampling sigma8");
    bundle.sigma8_sampler =
        std::make_unique<GenericSigma8SecondVariantSampler>(
            bundle.comm, likelihood, like_info);
  }
#endif

  // ---- Density sampling algorithm selection --------------------------
  std::string algorithm_name =
      params.template get<std::string>("hades.algorithm", "HMC");

  if (algorithm_name == "HMC") {
    // -----------------------------------
    // HMC algorithm initialization

    double maxEpsilon =
        params.template get<double>("hades.max_epsilon", 0.02);
    int maxTimeSteps = params.template get<int>("hades.max_timesteps", 100);
    std::string I_scheme_s =
        params.template get<std::string>("hades.scheme", "SI_2A");
    double kmax = params.template get<double>("hades.kmax", 0);
    HMCOption::IntegratorScheme I_scheme = get_Scheme(I_scheme_s);
    auto density_mc =
        std::make_unique<HMCDensitySampler>(mpi_world, likelihood, kmax);
    density_mc->setIntegratorScheme(I_scheme);
    density_mc->setMaxEpsilon(maxEpsilon);
    density_mc->setMaxTimeSteps(maxTimeSteps);
    if (auto phase_file =
            params.template get_optional<std::string>("hades.phases")) {
      // A file containing phases is providing. Schedule for loading.
      density_mc->setPhaseFile(
          *phase_file,
          params.template get<std::string>("hades.phasesDataKey"));
    }
    // HMC algorithm initialization - end
    // -----------------------------------
    if (params.template get("hades.haar", false)) {
      // Optional Haar wavelet transform pair applied around the sampler.
      auto haar = std::make_shared<ForwardHaar>(bundle.comm, box, false);
      auto inverse_haar =
          std::make_shared<ForwardHaar>(bundle.comm, box, true);
      density_mc->setTransforms(haar, inverse_haar);
    }
    bundle.density_mc = std::move(density_mc);
  } else if (algorithm_name == "FROZEN-PHASE") {
    auto density_mc =
        std::make_unique<FrozenPhaseDensitySampler>(mpi_world, likelihood);

    if (auto phase_file =
            params.template get_optional<std::string>("hades.phases")) {
      // A file containing phases is providing. Schedule for loading.
      density_mc->setPhaseFile(
          *phase_file,
          params.template get<std::string>("hades.phasesDataKey"));
    } else {
      if (!params.template get<bool>("hades.noPhasesProvided"))
        error_helper<ErrorParams>("If no phases are provided, "
                                  "noPhasesProvided must be set to true.");
    }
    bundle.density_mc = std::move(density_mc);
#ifdef HADES_SUPPORT_BORG
  } else if (algorithm_name == "QN-HMC") {
    double maxEpsilon =
        params.template get<double>("hades.max_epsilon", 0.02);
    int maxTimeSteps = params.template get<int>("hades.max_timesteps", 100);
    std::string I_scheme_s =
        params.template get<std::string>("hades.scheme", "SI_2A");
    HMCOption::IntegratorScheme I_scheme = get_Scheme(I_scheme_s);
    auto density_mc =
        std::make_unique<QNHMCDensitySampler>(mpi_world, likelihood);
    density_mc->setIntegratorScheme(I_scheme);
    density_mc->setMaxEpsilon(maxEpsilon);
    density_mc->setMaxTimeSteps(maxTimeSteps);
    bundle.density_mc = std::move(density_mc);
#endif
  } else {
    error_helper<ErrorBadState>(
        "Invalid algorithm name: " + algorithm_name +
        " (choice is HMC, FROZEN-PHASE or QN-HMC)");
  }

  bool hblock = adapt_optional<bool>(
      loop.get_state(), block_loop_params, "hades_sampler_blocked", false,
      DO_NOT_RESTORE);
  adapt_optional<bool>(
      loop.get_state(), block_loop_params, "bias_sampler_blocked", false,
      DO_NOT_RESTORE);
  adapt_optional<bool>(
      loop.get_state(), block_loop_params, "nmean_sampler_blocked", false,
      DO_NOT_RESTORE);

  Console::instance().print<LOG_INFO_SINGLE>(
      format("Hades mixing per mcmc step is %d") % hades_mixing);
  Console::instance().print<LOG_INFO_SINGLE>(
      format("Hades density is blocked: %s") % (hblock ? "YES" : "NO"));

  //loop << bundle.dummy_ps;
  loop << bundle.sel_updater;

  // ==================
  // MAIN LOOP PROGRAM
  {
    auto bias_loop = new BlockLoop(bias_mixing);
    if (nmean &&
        loop.get_state().getScalar<bool>("nmean_sampler_blocked") == false)
      *bias_loop << nmean;
    if (bias)
      *bias_loop << bias;

    loop
        << (BlockLoop(hades_mixing)
            << *bundle.density_mc << *bias_loop
            << (BlockLoop(10) << bundle.foreground_block));
    delete bias_loop;
  }

  if (model_error != 0)
    loop << *model_error;

  // Only do observer vobs sampling if we know how to sample vobs and rsd is
  // activated.
  if (rsd && bundle.borg_vobs)
    loop << *bundle.borg_vobs;

#ifdef HADES_SUPPORT_BORG
  // If active, sample sigma8
  if (bundle.sigma8_sampler != 0)
    loop << *bundle.sigma8_sampler;
  if (params.template get<bool>("hades.altair", false)) {
    // Alcock-Paczynski meta sampler with hard bounds on w, w', Omega_m.
    CosmologicalParameters bound_min, bound_max;
    bound_min.w = -1.5;
    bound_max.w = -0.5;
    bound_min.wprime = -1.5;
    bound_max.wprime = 1.5;
    bound_min.omega_m = 0.1;
    bound_max.omega_m = 0.9;
    bundle.ap_sampler = std::make_shared<AltairMetaSampler>(
        mpi_world, likelihood, model->obj, bound_min, bound_max, 0.01);
    loop << *bundle.ap_sampler;
  }
#endif
  {
    // Parse "name=value" pairs to set fixed model parameters.
    auto model_param_list = params.template get_optional<std::string>(
        "hades.model_params_to_set");
    if (model_param_list) {
      auto params_list =
          string_as_vector<std::string>(*model_param_list, ",");
      ModelDictionnary param_map;
      for (auto const &p : params_list) {
        auto equal = p.find("=");
        double value = 0.1; // default when no "=value" suffix is given
        std::string name = p;
        if (equal != std::string::npos) {
          value = boost::lexical_cast<double>(p.substr(equal + 1));
          name = p.substr(0, equal);
        }
        param_map[name] = value;
      }
      model->obj->setModelParams(param_map);
    }
  }
  // Parse "name=value" pairs naming model parameters to SAMPLE (entries
  // prefixed "cosmology." are skipped here).
  auto model_param_list = params.template get_optional<std::string>(
      "hades.model_params_to_sample");
  if (model_param_list) {
    auto params_list = string_as_vector<std::string>(*model_param_list, ",");
    std::vector<std::string> params_list2;

    ModelDictionnary param_map;
    for (auto const &p : params_list) {
      if (p.find("cosmology.", 0) == 0)
        continue;
      auto equal = p.find("=");
      double value = 0.1;
      std::string name = p;
      if (equal != std::string::npos) {
        value = boost::lexical_cast<double>(p.substr(equal + 1));
        name = p.substr(0, equal);
      }
      param_map[name] = value;
      params_list2.push_back(name);
    }
    loop << std::shared_ptr<MarkovSampler>(new ModelParamsSampler(
        bundle.comm, "", params_list2, likelihood, model->obj, param_map));
  }
  auto model_bias = params.template get_optional<int>("hades.model_bias");
  if (model_bias && *model_bias > 0) {
    loop << std::shared_ptr<MarkovSampler>(new BiasModelParamsSampler(
        bundle.comm, likelihood, model->obj, *model_bias, ""));
  }
} // sampler_bundle_init (original comment wrongly said "namespace LibLSS")
|
||||
|
||||
/// Prepare the initial conditions of the chain: optionally draw a random
/// Gaussian field, scale the initial field down, and optionally scramble
/// the galaxy bias parameters.
template <typename ptree>
void
sampler_setup_ic(SamplerBundle &bundle, MainLoop &loop, ptree const &params) {
  MarkovState &state = loop.get_state();
  double initialRandomScaling =
      params.template get<double>("mcmc.init_random_scaling", 0.1);

  bool random_ic = params.template get<bool>("mcmc.random_ic", true);
  if (random_ic)
    generateRandomField(bundle.comm, state);
  // NOTE(review): the scaling below is applied even when random_ic is
  // false, i.e. also to an externally provided field — confirm intended.
  state.get<CArrayType>("s_hat_field")->eigen() *= initialRandomScaling;
  state.get<ArrayType>("s_field")->eigen() *= initialRandomScaling;

  bool scramble_bias = params.template get<bool>("mcmc.scramble_bias", false);
  if (scramble_bias) {
    // Reset every catalog's bias vector to 0.01, keeping the leading
    // coefficient at 1.
    int Ncat = state.getScalar<long>("NCAT");
    for (int i = 0; i < Ncat; i++) {
      auto &a = *(state.formatGet<ArrayType1d>("galaxy_bias_%d", i)->array);
      fwrap(a) = 0.01;
      a[0] = 1;
    }
  }
}
|
||||
|
||||
// Nothing to tear down explicitly: all bundle resources are released by
// their owners' destructors.
void sampler_bundle_cleanup() {}
|
||||
|
||||
} // namespace LibLSS
|
||||
|
||||
#endif
|
47
extra/hades/src/hades_mock_gen.hpp
Normal file
47
extra/hades/src/hades_mock_gen.hpp
Normal file
|
@ -0,0 +1,47 @@
|
|||
/*+
|
||||
ARES/HADES/BORG Package -- ./extra/hades/src/hades_mock_gen.hpp
|
||||
Copyright (C) 2014-2020 Guilhem Lavaux <guilhem.lavaux@iap.fr>
|
||||
Copyright (C) 2009-2020 Jens Jasche <jens.jasche@fysik.su.se>
|
||||
|
||||
Additional contributions from:
|
||||
Guilhem Lavaux <guilhem.lavaux@iap.fr> (2023)
|
||||
|
||||
+*/
|
||||
#ifndef __HADES_MOCK_GEN_HPP
|
||||
#define __HADES_MOCK_GEN_HPP
|
||||
|
||||
#include <CosmoTool/algo.hpp>
|
||||
#include <cmath>
|
||||
|
||||
namespace LibLSS {
|
||||
|
||||
template <typename PTree>
|
||||
void prepareMockData(
|
||||
PTree &ptree, MPI_Communication *comm, MarkovState &state,
|
||||
CosmologicalParameters &cosmo_params, SamplerBundle &bundle) {
|
||||
ConsoleContext<LOG_INFO_SINGLE> ctx("prepareMockData");
|
||||
using boost::format;
|
||||
using CosmoTool::square;
|
||||
|
||||
double Rsmooth = ptree.template get<double>("system.hades_smoothing", 1.0);
|
||||
// createCosmologicalPowerSpectrum(state, cosmo_params);
|
||||
|
||||
bundle.sel_updater.sample(state);
|
||||
bundle.density_mc->generateMockData(state);
|
||||
|
||||
{
|
||||
std::shared_ptr<H5::H5File> f;
|
||||
|
||||
if (comm->rank() == 0) {
|
||||
f = std::make_shared<H5::H5File>("mock_data.h5", H5F_ACC_TRUNC);
|
||||
}
|
||||
state.mpiSaveState(f, comm, false);
|
||||
}
|
||||
|
||||
// bundle.hmc->generateRandomField(state);
|
||||
// state.get<CArrayType>("s_hat_field")->eigen() *= 0.02;
|
||||
// state.get<ArrayType>("s_field")->eigen() *= 0.02;
|
||||
}
|
||||
} // namespace LibLSS
|
||||
|
||||
#endif
|
216
extra/hades/src/likelihood_info.cpp
Normal file
216
extra/hades/src/likelihood_info.cpp
Normal file
|
@ -0,0 +1,216 @@
|
|||
/*+
|
||||
ARES/HADES/BORG Package -- ./extra/hades/src/likelihood_info.cpp
|
||||
Copyright (C) 2018 Natalia Porqueres <natalia_porqueres@hotmail.com>
|
||||
Copyright (C) 2018 Doogesh Kodi Ramanah <ramanah@iap.fr>
|
||||
Copyright (C) 2018 Guilhem Lavaux <guilhem.lavaux@iap.fr>
|
||||
Copyright (C) 2018 Jens Jasche <jens.jasche@fysik.su.se>
|
||||
|
||||
Additional contributions from:
|
||||
Guilhem Lavaux <guilhem.lavaux@iap.fr> (2023)
|
||||
|
||||
+*/
|
||||
#include <string>
|
||||
#include "libLSS/tools/console.hpp"
|
||||
#include "common/preparation_types.hpp"
|
||||
#include "common/preparation_tools.hpp"
|
||||
#include "likelihood_info.hpp"
|
||||
#include "libLSS/physics/likelihoods/base.hpp"
|
||||
#include "libLSS/data/integer_window3d.hpp"
|
||||
#include <healpix_cxx/pointing.h>
|
||||
#include <healpix_cxx/healpix_map.h>
|
||||
#include <healpix_cxx/healpix_map_fitsio.h>
|
||||
#include <H5Cpp.h>
|
||||
#include <CosmoTool/hdf5_array.hpp>
|
||||
#include "libLSS/tools/fusewrapper.hpp"
|
||||
#include "libLSS/tools/fuse/healpix.hpp"
|
||||
|
||||
using namespace LibLSS;
|
||||
|
||||
namespace {
  // Maps a 3D position to an integer "color" label by combining a HEALPix
  // sky map of colors with uniform radial slicing between rmin and rmax.
  class ColorAdaptor {

  public:
    Healpix_Map<double> sky; // per-pixel color index loaded from FITS
    double rmin, rmax;       // radial range covered by the slices
    int slices;              // number of radial slices in [rmin, rmax]
    unsigned int max_color;  // number of distinct sky colors (max value + 1)
    // Use concepts of sky selection

    ColorAdaptor(double rmin_, double rmax_, int slices_)
        : rmin(rmin_), rmax(rmax_), slices(slices_) {}

    // Load the sky color map from a FITS file. NaN pixels are zeroed, and
    // max_color is derived from the largest value present in the map.
    void loadSky(const std::string &fname) {
      read_Healpix_map_from_fits(fname, sky);
      for (long n = 0; n < sky.Npix(); n++) {
        if (std::isnan(sky[n]))
          sky[n] = 0;
      }
      max_color = fwrap(sky).max() + 1;
    }

    // Color of the sky pixel in direction (x, y, z).
    unsigned int get_sky_completeness(double x, double y, double z) const {
      return sky[sky.vec2pix(vec3(x, y, z))];
    }

    int getNumRadial() const { return 0; }

    // Offset (multiple of max_color) identifying the radial slice of r.
    // NOTE(review): the check below accepts slice == slices (r == rmax
    // exactly), producing an offset one past the last slice — confirm this
    // boundary is intentional.
    unsigned int getRadialSelection(double r, int) const {
      int slice = std::floor(slices * (r - rmin) / (rmax - rmin));
      if ((slice < 0) or (slice > slices)) {
        Console::instance().format<LOG_ERROR>(
            "Slice is %d (max=%d) for r=%g", slice, slices, r);
        error_helper<ErrorParams>("Invalid radial position.");
      }
      return max_color * (unsigned int)(slice);
    }
  };

} // namespace
|
||||
|
||||
// Populate the LikelihoodInfo dictionary from the Markov state and the
// configuration tree: physical grid extents, mesh sizes, the MPI
// communicator, optional likelihood tuning scalars, and (optionally) a
// robust-likelihood color map loaded either from a precomputed 3d HDF5 map
// ("colormap_3d") or computed from a HEALPix sky map ("colormap_sky").
void LibLSS_prepare::setupLikelihoodInfo(
    MPI_Communication *comm, LibLSS::MarkovState &state, LikelihoodInfo &info,
    LibLSS_prepare::ptree &params, bool resuming) {
  ConsoleContext<LOG_DEBUG> ctx("setupLikelihoodInfo");

  // Physical box extents packed as [x0, x1, y0, y1, z0, z1].
  Likelihood::GridLengths gridLength(boost::extents[6]);
  gridLength[0] = state.getScalar<double>("corner0");
  gridLength[1] = gridLength[0] + state.getScalar<double>("L0");
  gridLength[2] = state.getScalar<double>("corner1");
  gridLength[3] = gridLength[2] + state.getScalar<double>("L1");
  gridLength[4] = state.getScalar<double>("corner2");
  gridLength[5] = gridLength[4] + state.getScalar<double>("L2");

  info[Likelihood::GRID_LENGTH] = gridLength;

  // Mesh sizes: "N" is the analysis grid, "Ndata" the data grid. The same
  // temporary is reused for both reads.
  Likelihood::GridSize grid(boost::extents[3]);
  state.getScalarArray<long, 3>("N", grid);
  info[Likelihood::GRID] = grid;
  state.getScalarArray<long, 3>("Ndata", grid);
  info[Likelihood::DATA_GRID] = grid;

  info[Likelihood::MPI] = comm;

  // Everything below is optional and keyed on the [likelihood] section.
  auto like_params = params.get_child_optional("likelihood");
  if (!like_params) {
    ctx.print2<LOG_WARNING>("No [likelihood] section in params tree");
    return;
  }

  ctx.print("Inspecting likelihood options");
  if (auto eft_lambda =
          like_params->template get_optional<double>("EFT_Lambda")) {
    info["EFT_Lambda"] = *eft_lambda;
  }

  if (auto manypower_prior =
          like_params->template get_optional<double>("ManyPower_prior_width")) {
    info["ManyPower_prior_width"] = *manypower_prior;
  }

  // === sigma8 sampler-specific part ===
  if (auto val =
          like_params->template get_optional<double>("sigma8_step")) {
    info["sigma8_step"] = *val;
  }
  if (auto val =
          like_params->template get_optional<double>("sigma8_min")) {
    info["sigma8_min"] = *val;
  }
  if (auto val =
          like_params->template get_optional<double>("sigma8_max")) {
    info["sigma8_max"] = *val;
  }

  // ==================================

  if (auto robust_map =
          like_params->template get_optional<std::string>("colormap_3d")) {

    // Precomputed 3d color map: read dataset "map" directly from HDF5.
    ctx.print2<LOG_INFO_SINGLE>("Robust MAP provided: " + (*robust_map));
    H5::H5File f(*robust_map, H5F_ACC_RDONLY);

    long N0 = state.getScalar<long>("N0");
    long N1 = state.getScalar<long>("N1");
    long N2 = state.getScalar<long>("N2");

    auto cmap_like = std::shared_ptr<boost::multi_array<long, 3>>(
        new boost::multi_array<long, 3>(boost::extents[N0][N1][N2]));
    CosmoTool::hdf5_read_array(f, "map", *cmap_like);

    info[Likelihood::COLOR_MAP] = cmap_like;
  } else if (
      auto robust_map =
          like_params->template get_optional<std::string>("colormap_sky")) {
    // Sky map + radial slicing: build the 3d color map by majority vote.
    ctx.print2<LOG_INFO_SINGLE>("Robust SKYMAP provided: " + (*robust_map));

    double rmax = like_params->template get<double>("rmax");
    int numSlices = like_params->template get<int>("slices");

    auto &rng = state.get<RandomGen>("random_generator")->get();
    // NOTE(review): startN0/localN0 are read but never used below — the
    // slab decomposition is taken from "localNdata" instead; confirm these
    // reads can be dropped.
    long startN0 = state.getScalar<long>("startN0");
    long localN0 = state.getScalar<long>("localN0");
    double L[3];            // box side lengths
    size_t Ndata[3];        // global data-grid size
    double xmin[3];         // box corner
    size_t localNdata[6];   // local slab extents [start0,end0,...]

    state.getScalarArray<double, 3>("L", L);
    state.getScalarArray<double, 3>("corner", xmin);
    state.getScalarArray<long, 3>("Ndata", Ndata);
    state.getScalarArray<long, 6>("localNdata", localNdata);

    // Voxel size per axis.
    double delta[3];
    std::transform(
        L, L + 3, Ndata, delta, [](double l, size_t n) { return l / n; });

    // rmin is fixed to 0: slices span [0, rmax].
    ColorAdaptor colormap(0, rmax, numSlices);
    // Slab-distributed 3d color array, registered in the Markov state so it
    // is saved/restored with the chain.
    auto colormapElement =
        new ArrayStateElement<long, 3, track_allocator<long>, true>(
            boost::extents[range(localNdata[0], localNdata[1])][Ndata[1]]
                          [Ndata[2]]);
    colormapElement->setRealDims(ArrayDimension(Ndata[0], Ndata[1], Ndata[2]));

    state.newElement("colormap3d", colormapElement);

    colormap.loadSky(*robust_map);

    // The color map is published as a promise: consumers wait until it is
    // either freshly computed or restored from a checkpoint.
    std::shared_ptr<boost::multi_array_ref<long, 3>> cmap =
        colormapElement->array;
    auto promise = make_promise_pointer(cmap);
    info[Likelihood::COLOR_MAP] = promise;

    if (!resuming) {
      // Fresh run: compute the majority-vote window now (1000 samples per
      // voxel) and mark the promise ready.
      computeMajorityVoteWindow3d(
          comm, rng, colormap, *colormapElement->array, L, delta, xmin, Ndata, 1000);
      promise.defer.submit_ready();
    } else {
      // Resuming: fulfill the promise once the array is loaded from the
      // restart file.
      colormapElement->deferLoad.ready(
          [promise]() mutable { promise.defer.submit_ready(); });
    }

    // NOTE(review): unconditional per-rank debug dump of the color map and
    // sky; looks like leftover diagnostics — consider gating it behind a
    // debug option.
    {
      std::string fname = "dump_colormap.h5_" + std::to_string(comm->rank());
      H5::H5File f(fname.c_str(), H5F_ACC_TRUNC);
      CosmoTool::hdf5_write_array(f, "cmap", *colormapElement->array);
      CosmoTool::hdf5_write_array(
          f, "sky",
          boost::multi_array_ref<double, 1>(
              (double *)&colormap.sky.Map()[0],
              boost::extents[colormap.sky.Npix()]));
    }
  }
}
|
||||
|
||||
// ARES TAG: authors_num = 4
|
||||
// ARES TAG: name(0) = Natalia Porqueres
|
||||
// ARES TAG: email(0) = natalia_porqueres@hotmail.com
|
||||
// ARES TAG: year(0) = 2018
|
||||
// ARES TAG: name(1) = Doogesh Kodi Ramanah
|
||||
// ARES TAG: email(1) = ramanah@iap.fr
|
||||
// ARES TAG: year(1) = 2018
|
||||
// ARES TAG: name(2) = Guilhem Lavaux
|
||||
// ARES TAG: email(2) = guilhem.lavaux@iap.fr
|
||||
// ARES TAG: year(2) = 2018
|
||||
// ARES TAG: name(3) = Jens Jasche
|
||||
// ARES TAG: email(3) = jens.jasche@fysik.su.se
|
||||
// ARES TAG: year(3) = 2018
|
16
extra/hades/src/likelihood_info.hpp
Normal file
16
extra/hades/src/likelihood_info.hpp
Normal file
|
@ -0,0 +1,16 @@
|
|||
#ifndef __LIBLSS_PREPARE_LIKELIHOOD_INFO_HPP
#define __LIBLSS_PREPARE_LIKELIHOOD_INFO_HPP

#include "common/preparation_types.hpp"
#include "libLSS/mpi/generic_mpi.hpp"
#include "libLSS/physics/likelihoods/base.hpp"
#include "libLSS/mcmc/global_state.hpp"

namespace LibLSS_prepare {

  /**
   * @brief Fill @a info with likelihood metadata derived from the Markov
   *        state and the run configuration.
   *
   * @param comm     MPI communicator stored into the info dictionary
   * @param state    Markov state providing grid sizes/extents
   * @param info     output likelihood-info dictionary
   * @param params   configuration tree; optional keys under [likelihood]
   * @param resuming true when restarting from a checkpoint (affects how the
   *                 optional robust color map is produced)
   */
  void setupLikelihoodInfo(
      MPI_Communication *comm, LibLSS::MarkovState &state,
      LibLSS::LikelihoodInfo &info, ptree &params, bool resuming);
}

#endif
|
176
extra/hades/src/setup_models.cpp
Normal file
176
extra/hades/src/setup_models.cpp
Normal file
|
@ -0,0 +1,176 @@
|
|||
/*+
|
||||
ARES/HADES/BORG Package -- ./extra/hades/src/setup_models.cpp
|
||||
Copyright (C) 2018 Guilhem Lavaux <guilhem.lavaux@iap.fr>
|
||||
|
||||
Additional contributions from:
|
||||
Guilhem Lavaux <guilhem.lavaux@iap.fr> (2023)
|
||||
|
||||
+*/
|
||||
#include <tuple>
|
||||
#include <map>
|
||||
#include <string>
|
||||
#include <H5Cpp.h>
|
||||
#include <boost/format.hpp>
|
||||
#include "libLSS/tools/hdf5_error.hpp"
|
||||
#include "libLSS/mpi/generic_mpi.hpp"
|
||||
#include "libLSS/physics/forward_model.hpp"
|
||||
#include "libLSS/tools/console.hpp"
|
||||
#include <functional>
|
||||
#include "libLSS/tools/errors.hpp"
|
||||
|
||||
#include "libLSS/mcmc/global_state.hpp"
|
||||
#include "libLSS/physics/forwards/borg_lpt.hpp"
|
||||
#include "libLSS/physics/forwards/borg_2lpt.hpp"
|
||||
#include "libLSS/physics/forwards/borg_multi_pm.hpp"
|
||||
#include "libLSS/physics/hades_pt.hpp"
|
||||
#include "libLSS/tools/static_init.hpp"
|
||||
|
||||
#include "common/preparation.hpp"
|
||||
|
||||
#include "model_generator.hpp"
|
||||
|
||||
using namespace LibLSS;
|
||||
using boost::format;
|
||||
|
||||
// Write the light-cone timing array of @a model into the HDF5 group @a fg
// under the dataset name "timing". Requires Model to expose
// lightConeTiming().
template <typename Model>
void borgForwardSaveTiming(CosmoTool::H5_CommonFileGroup &fg, Model &model) {
  auto &lc = model.lightConeTiming();

  CosmoTool::hdf5_write_array(fg, "timing", lc);
}
|
||||
|
||||
template <typename Model>
|
||||
std::shared_ptr<BORGForwardModel> setup_LPT_model(
|
||||
MPI_Communication *comm, BoxModel const &box, PropertyProxy const ¶ms,
|
||||
ParticleSaver_t &save_particles, TimingSaver_t &save_timing, int &nstep) {
|
||||
namespace ph = std::placeholders;
|
||||
int ss_factor = params.get<int>("supersampling");
|
||||
int f_factor = params.get<int>("forcesampling");
|
||||
double ai = params.get<double>("a_initial");
|
||||
double af = params.get<double>("a_final");
|
||||
double p_factor = params.get<double>("part_factor", 1.2);
|
||||
bool rsd = params.get<bool>("do_rsd", false);
|
||||
bool lightcone = params.get<bool>("lightcone", false);
|
||||
double lightcone_boost = params.get<double>(
|
||||
"lightcone_boost",
|
||||
1.0); // This is an artificial factor just to make cool plots.
|
||||
|
||||
auto model = std::make_shared<Model>(
|
||||
comm, box, box, rsd, ss_factor, p_factor, ai, af, lightcone,
|
||||
lightcone_boost);
|
||||
|
||||
save_particles = std::bind(
|
||||
borgSaveParticles<Model>, ph::_1, std::ref(*model), ph::_2, ph::_3,
|
||||
ph::_4);
|
||||
save_timing =
|
||||
std::bind(borgForwardSaveTiming<Model>, ph::_1, std::ref(*model));
|
||||
|
||||
nstep = 1;
|
||||
return model;
|
||||
}
|
||||
|
||||
template <typename Model>
|
||||
std::shared_ptr<BORGForwardModel> setup_2LPT_model(
|
||||
MPI_Communication *comm, BoxModel const &box, PropertyProxy const ¶ms,
|
||||
ParticleSaver_t &save_particles, TimingSaver_t &save_timing, int &nstep) {
|
||||
namespace ph = std::placeholders;
|
||||
int ss_factor = params.get<int>("supersampling");
|
||||
int f_factor = params.get<int>("forcesampling");
|
||||
double ai = params.get<double>("a_initial");
|
||||
double af = params.get<double>("a_final");
|
||||
double p_factor = params.get<double>("part_factor", 1.2);
|
||||
bool rsd = params.get<bool>("do_rsd", false);
|
||||
bool lightcone = params.get<bool>("lightcone", false);
|
||||
double lightcone_boost = params.get<double>(
|
||||
"lightcone_boost",
|
||||
1.0); // This is an artificial factor just to make cool plots.
|
||||
|
||||
auto model = std::make_shared<Model>(
|
||||
comm, box, rsd, ss_factor, p_factor, ai, af, lightcone);
|
||||
|
||||
save_particles = std::bind(
|
||||
borgSaveParticles<Model>, ph::_1, std::ref(*model), ph::_2, ph::_3,
|
||||
ph::_4);
|
||||
//save_timing =
|
||||
// std::bind(borgForwardSaveTiming<Model>, ph::_1, std::ref(*model));
|
||||
|
||||
nstep = 1;
|
||||
return model;
|
||||
}
|
||||
|
||||
template <typename Model>
|
||||
std::shared_ptr<BORGForwardModel> setup_PM_model(
|
||||
MPI_Communication *comm, BoxModel const &box, PropertyProxy const ¶ms,
|
||||
ParticleSaver_t &save_particles, TimingSaver_t &save_timing, int &nstep) {
|
||||
namespace ph = std::placeholders;
|
||||
int ss_factor = params.get<int>("supersampling");
|
||||
int f_factor = params.get<int>("forcesampling");
|
||||
double ai = params.get<double>("a_initial");
|
||||
double af = params.get<double>("a_final");
|
||||
double p_factor = params.get<double>("part_factor", 1.2);
|
||||
bool rsd = params.get<bool>("do_rsd", false);
|
||||
bool lightcone = params.get<bool>("lightcone", false);
|
||||
int pm_nsteps = params.get<int>("pm_nsteps", 30);
|
||||
double z_start = params.get<double>("pm_start_z", 69.);
|
||||
bool tcola = params.get<bool>("tCOLA", false);
|
||||
|
||||
Model *model = new Model(
|
||||
comm, box, ss_factor, f_factor, pm_nsteps, p_factor, rsd, ai, af, z_start,
|
||||
tcola);
|
||||
|
||||
save_particles = std::bind(
|
||||
borgSaveParticles<Model>, ph::_1, std::ref(*model), ph::_2, ph::_3,
|
||||
ph::_4);
|
||||
nstep = pm_nsteps;
|
||||
model->setAdjointRequired(false);
|
||||
return model;
|
||||
}
|
||||
|
||||
// Trivial linear-scaling forward model: no particles, no timing, one "step".
// Only "a_initial" is consulted; the saver callbacks are left untouched.
std::shared_ptr<BORGForwardModel> setup_Linear_model(
    MPI_Communication *comm, BoxModel const &box, PropertyProxy const &params,
    ParticleSaver_t &save_particles, TimingSaver_t &save_timing, int &nstep) {
  nstep = 1;
  double const a_start = params.get<double>("a_initial");
  auto model = std::make_shared<HadesLinear>(comm, box, box, a_start);
  return model;
}
|
||||
|
||||
/**
 * @brief Look up the factory function for a named BORG forward model.
 *
 * @param model_name configuration name (e.g. "LPT_CIC", "PM_CIC")
 * @return the model factory; aborts via error_helper on an unknown name
 */
ModelSetup_t LibLSS::setup_forward_model(std::string const &model_name) {
  // Built once (static): maps the configuration name to
  // {human-readable description, factory}.
  static std::map<std::string, std::tuple<std::string, ModelSetup_t>> const
      models = {
          {"HADES_LINEAR", {"Linear scaling model", setup_Linear_model}},
          {"LPT",
           {"LPT model with Quad projection",
            setup_LPT_model<BorgLptModel<ModifiedNGP<double, NGPGrid::Quad>>>}},
          {"LPT_CIC",
           {"LPT model with CIC projection",
            setup_LPT_model<BorgLptModel<ClassicCloudInCell<double>>>}},
          {"LPT_DBL",
           {"LPT model with Double projection",
            setup_LPT_model<BorgLptModel<ModifiedNGP<double, NGPGrid::Double>>>}},
          {"2LPT",
           {"2LPT model with Quad projection",
            setup_2LPT_model<Borg2LPTModel<ModifiedNGP<double, NGPGrid::Quad>>>}},
          {"2LPT_CIC",
           {"2LPT model with CIC projection",
            setup_2LPT_model<Borg2LPTModel<ClassicCloudInCell<double>>>}},
          {"2LPT_DBL",
           {"2LPT model with Double projection",
            setup_2LPT_model<Borg2LPTModel<ModifiedNGP<double, NGPGrid::Double>>>}},
          {"PM_CIC",
           {"Particle mesh model with CIC projection",
            setup_PM_model<MetaBorgPMModel<ClassicCloudInCell<double>>>}}};

  // FIX: single find() instead of find() followed by two operator[] lookups;
  // operator[] would also default-insert on a miss, which a const map forbids.
  auto it = models.find(model_name);
  if (it == models.end()) {
    error_helper<ErrorParams>("Unknown BORG model '" + model_name + "'");
  }

  Console::instance().print<LOG_INFO_SINGLE>(
      format("Selecting model %s: %s") % model_name % std::get<0>(it->second));

  return std::get<1>(it->second);
}
|
||||
|
||||
// ARES TAG: authors_num = 1
|
||||
// ARES TAG: name(0) = Guilhem Lavaux
|
||||
// ARES TAG: email(0) = guilhem.lavaux@iap.fr
|
||||
// ARES TAG: year(0) = 2018
|
102
extra/hades/src/setup_models.hpp
Normal file
102
extra/hades/src/setup_models.hpp
Normal file
|
@ -0,0 +1,102 @@
|
|||
/*+
|
||||
ARES/HADES/BORG Package -- ./extra/hades/src/setup_models.hpp
|
||||
Copyright (C) 2020 Guilhem Lavaux <guilhem.lavaux@iap.fr>
|
||||
Copyright (C) 2009-2020 Jens Jasche <jens.jasche@fysik.su.se>
|
||||
|
||||
Additional contributions from:
|
||||
Guilhem Lavaux <guilhem.lavaux@iap.fr> (2023)
|
||||
|
||||
+*/
|
||||
#pragma once
|
||||
#ifndef __BORG_FORWARD_MODELS_HPP
|
||||
# define __BORG_FORWARD_MODELS_HPP
|
||||
|
||||
# include <memory>
|
||||
# include "libLSS/tools/console.hpp"
|
||||
# include "libLSS/physics/forward_model.hpp"
|
||||
# include "libLSS/tools/ptree_vectors.hpp"
|
||||
# include "libLSS/tools/string_tools.hpp"
|
||||
# include "libLSS/tools/itertools.hpp"
|
||||
# include "libLSS/physics/chain_forward_model.hpp"
|
||||
# include "libLSS/physics/forwards/registry.hpp"
|
||||
# include "libLSS/physics/forwards/primordial.hpp"
|
||||
# include "libLSS/physics/forwards/transfer_ehu.hpp"
|
||||
# include "libLSS/physics/hermitic.hpp"
|
||||
|
||||
namespace LibLSS {
|
||||
|
||||
  // Callback writing the model's particle data into an HDF5 group.
  // Signature: (group, flag, flag, int) — the two bools and the int are
  // forwarded to borgSaveParticles; assumed to control velocity output and
  // subsampling — TODO confirm against borgSaveParticles.
  typedef std::function<void(CosmoTool::H5_CommonFileGroup &, bool, bool, int)>
      ParticleSaver_t;
  // Callback writing light-cone timing information into an HDF5 group.
  typedef std::function<void(CosmoTool::H5_CommonFileGroup &)> TimingSaver_t;
|
||||
|
||||
/**
|
||||
* @brief Build a new complete forward model from configuration.
|
||||
*
|
||||
* This function construct a new complete forward model from
|
||||
* the provided property tree holding configuration.
|
||||
*
|
||||
* @tparam ptree Property tree type
|
||||
* @param comm MPI communicator
|
||||
* @param box Basic box size
|
||||
* @param params Property tree root parameter
|
||||
* @param current Current sub-property-tree
|
||||
* @return std::shared_ptr<BORGForwardModel> the forward model
|
||||
*/
|
||||
template <typename ptree>
|
||||
std::shared_ptr<ChainForwardModel> buildModel(
|
||||
MPI_Communication *comm, MarkovState &state, BoxModel box, ptree ¶ms,
|
||||
ptree ¤t) {
|
||||
std::string model_type = current.template get<std::string>("model");
|
||||
double ai = adapt<double>(
|
||||
state, current, "a_initial", 0.001, RESTORE, "borg_a_initial");
|
||||
double af =
|
||||
adapt<double>(state, current, "a_final", 1.0, RESTORE, "borg_a_final");
|
||||
|
||||
Console::instance().print<LOG_VERBOSE>("Init borg model: " + model_type);
|
||||
|
||||
ParticleSaver_t save_particles;
|
||||
TimingSaver_t save_timing;
|
||||
if (model_type == "CHAIN") {
|
||||
auto model = std::make_shared<ChainForwardModel>(comm, box);
|
||||
auto split_models = string_as_vector<std::string>(
|
||||
current.template get<std::string>("models"), ",");
|
||||
model->addModel(std::make_shared<ForwardHermiticOperation>(comm, box));
|
||||
for (auto this_model : itertools::enumerate(split_models)) {
|
||||
Console::instance().print<LOG_VERBOSE>(
|
||||
"Chaining with " + this_model.template get<1>());
|
||||
auto setup = setup_forward_model(this_model.template get<1>());
|
||||
std::string sub_name = std::string("gravity_chain_") +
|
||||
to_string(this_model.template get<0>());
|
||||
auto new_model = setup(
|
||||
comm, box,
|
||||
make_proxy_property_tree(params.get_child_optional(sub_name)));
|
||||
|
||||
if (auto name = params.template get_optional<std::string>(sub_name + ".name")) {
|
||||
model->addModel(new_model, *name);
|
||||
} else {
|
||||
model->addModel(new_model);
|
||||
}
|
||||
box = new_model->get_box_model_output();
|
||||
}
|
||||
return model;
|
||||
} else {
|
||||
auto setup = setup_forward_model(model_type);
|
||||
|
||||
auto model = std::make_shared<ChainForwardModel>(comm, box);
|
||||
auto real_model = setup(comm, box, make_proxy_property_tree(current));
|
||||
|
||||
model->addModel(std::make_shared<ForwardHermiticOperation>(comm, box));
|
||||
model->addModel(std::make_shared<ForwardPrimordial>(comm, box, ai));
|
||||
model->addModel(std::make_shared<ForwardEisensteinHu>(comm, box));
|
||||
model->addModel(real_model, "dynamics");
|
||||
return model;
|
||||
;
|
||||
}
|
||||
}
|
||||
|
||||
} // namespace LibLSS
|
||||
#endif
|
||||
// ARES TAG: num_authors = 1
|
||||
// ARES TAG: name(0) = Guilhem Lavaux
|
||||
// ARES TAG: year(0) = 2020
|
||||
// ARES TAG: email(0) = guilhem.lavaux@iap.fr
|
38
extra/hades/src/tools.cmake
Normal file
38
extra/hades/src/tools.cmake
Normal file
|
@ -0,0 +1,38 @@
|
|||
check_ares_module(BORG_PRESENT borg)

set(extra_hades ${CMAKE_SOURCE_DIR}/extra/hades/src)

# Preprocessor defines accumulated here are written to hades_option.hpp below.
set(HADES_OPTION)

set(BORG_CODE)
if(BORG_PRESENT)
  cmessage(STATUS "HADES found BORG")
  set(HADES_OPTION "${HADES_OPTION}#define HADES_SUPPORT_BORG 1\n")

  if(BUILD_JULIA)
    cmessage(STATUS "HADES will support JULIA likelihood")
    # FIX: was SET(HADE_OPTION ...) — the typo meant HADES_SUPPORT_JULIA was
    # never appended, so the generated header never enabled Julia support.
    set(HADES_OPTION "${HADES_OPTION}#define HADES_SUPPORT_JULIA 1\n")
  else()
    cmessage(CWARNING "HADES did not find JULIA")
  endif()
else()
  cmessage(CWARNING "HADES did not find BORG")
endif()

file(WRITE ${CMAKE_BINARY_DIR}/src/hades_option.hpp ${HADES_OPTION})
include_directories(${extra_hades})

add_library(hades ${extra_hades}/likelihood_info.cpp ${BORG_CODE})
add_dependencies(hades ${ares_DEPS})

add_executable(hades3 ${extra_hades}/hades3.cpp ${extra_hades}/hades_mock_gen.hpp)

target_link_libraries(hades3 hades LSS ${DEP_LIBS})
add_dependencies(hades3 ${ares_DEPS})
# Rebuild hades3 when any of these headers change (they are included, not
# compiled separately).
set_property(SOURCE ${extra_hades}/hades3.cpp APPEND PROPERTY OBJECT_DEPENDS
  ${extra_hades}/hades_mock_gen.hpp
  ${extra_hades}/hades_bundle.hpp
  ${extra_hades}/hades_bundle_init.hpp
  ${CMAKE_BINARY_DIR}/libLSS/physics/forwards/all_models.hpp
  ${CMAKE_SOURCE_DIR}/src/ares_init.hpp)
|
||||
|
Loading…
Add table
Add a link
Reference in a new issue