preparing public version

This commit is contained in:
P.M. Sutter 2014-04-21 23:58:17 -04:00
parent c57359197c
commit ad3e5d1577
25 changed files with 934 additions and 1500 deletions

89
CMakeLists.txt Normal file
View file

@ -0,0 +1,89 @@
# Top-level build for the ZOBOV-based void identification pipeline.
cmake_minimum_required(VERSION 2.8)
project(zobovPerso)
include(CheckCCompilerFlag)
include(ExternalProject)
# Probe (but do not force) -march=native support; result in SUPPORT_ARCH_NATIVE.
check_c_compiler_flag(-march=native SUPPORT_ARCH_NATIVE )
# Math library handle used by subdirectories (e.g. hod, voz1b1).
find_library(MATH_LIB m)
# add_genopt(<sourcelist> <ggofile> <basefile> [STRUCTNAME <name>] [FUNCNAME <name>])
#
# Runs gengetopt on <ggofile> to generate a command-line parser,
# <basefile>.c/.h, in the current binary directory, and prepends the .c file
# to the caller's <sourcelist> variable.  STRUCTNAME and FUNCNAME override
# the generated struct/function names; both default to <basefile>.
# Implemented as a macro (not a function) so the updated ${_sourcelist}
# is visible in the caller's scope.
macro(add_genopt _sourcelist _ggofile _basefile)
  unset(_structname)
  unset(_funcname)
  # Reset the keyword-parsing state: macros have no scope of their own, so a
  # leftover "doing" from a previous invocation would corrupt the parse.
  set(doing "")

  # Prefer a .ggo already present in the binary dir (e.g. a configured copy),
  # otherwise use the one from the source tree.
  if(NOT EXISTS ${CMAKE_CURRENT_BINARY_DIR}/${_ggofile})
    set(_ggofile2 ${CMAKE_CURRENT_SOURCE_DIR}/${_ggofile})
  else()
    set(_ggofile2 ${CMAKE_CURRENT_BINARY_DIR}/${_ggofile})
  endif()

  # Minimal keyword-argument parsing for STRUCTNAME / FUNCNAME.
  foreach(arg ${ARGN})
    if("x${arg}" MATCHES "^x(STRUCTNAME|FUNCNAME)$")
      set(doing "${arg}")
    elseif(doing STREQUAL "STRUCTNAME")
      set(_structname ${arg})
    elseif(doing STREQUAL "FUNCNAME")
      set(_funcname ${arg})
    endif()
  endforeach()
  if(NOT DEFINED _structname)
    set(_structname ${_basefile})
  endif()
  if(NOT DEFINED _funcname)
    set(_funcname ${_basefile})
  endif()

  set(_cfile ${CMAKE_CURRENT_BINARY_DIR}/${_basefile}.c)
  set(_hfile ${CMAKE_CURRENT_BINARY_DIR}/${_basefile}.h)
  # FIX(review): the original command carried a stray "zobovConf_info" token
  # after "-a ${_structname}"; gengetopt takes no positional argument there
  # and it defeated the STRUCTNAME parameterization, so it was dropped.
  add_custom_command(
    OUTPUT ${_cfile} ${_hfile}
    COMMAND ${GENGETOPT} -i ${_ggofile2} -f ${_funcname} -a ${_structname} -F ${_basefile} -C
    DEPENDS ${_ggofile2}
  )

  # Prepend so the generated parser is compiled alongside the tool's sources.
  set(${_sourcelist} ${_cfile} ${${_sourcelist}})
endmacro()
# configure_exec(<template> <destdir> <destfile>)
# Expands @VAR@ references in <template> (configure_file @ONLY), then copies
# the result to <destdir>/<destfile> with executable permission bits set —
# used for the run_python.sh / pipeline wrapper scripts.
macro(configure_exec _source _destdir _destfile)
  set(TMP_CONFIGURE_DIR ${CMAKE_BINARY_DIR}${CMAKE_FILES_DIRECTORY})
  configure_file(${_source} ${TMP_CONFIGURE_DIR}/${_destfile} @ONLY)
  file(COPY ${TMP_CONFIGURE_DIR}/${_destfile}
       DESTINATION ${_destdir}
       FILE_PERMISSIONS
         OWNER_READ OWNER_WRITE OWNER_EXECUTE
         GROUP_READ GROUP_EXECUTE
         WORLD_READ WORLD_EXECUTE)
endmacro()
# External C/C++ and Python dependency superbuilds.
include(${CMAKE_SOURCE_DIR}/external/external_build.cmake)
include(${CMAKE_SOURCE_DIR}/external/external_python_build.cmake)

SET(PYTHONPATH ${PYTHON_LOCAL_SITE_PACKAGE})

# Environment wrapper scripts installed into the build tree.
configure_exec(${CMAKE_SOURCE_DIR}/run_python.sh.in ${CMAKE_BINARY_DIR} run_python.sh)
configure_exec(${CMAKE_SOURCE_DIR}/run_env.sh.in ${CMAKE_BINARY_DIR} run_env.sh)
configure_exec(${CMAKE_SOURCE_DIR}/python_tools/pipeline_source/prepareCatalogs.in.py
               ${CMAKE_BINARY_DIR}/pipeline prepareCatalogs.py)

# FIX(review): the continuation lines of these two disabled calls were left
# uncommented, which is a CMake syntax error; they are now fully commented out.
#configure_exec(${CMAKE_SOURCE_DIR}/python_tools/pipeline_source/applyMaskToMock.in.py
#               ${CMAKE_BINARY_DIR}/pipeline applyMaskToMock.py)
#configure_exec(${CMAKE_SOURCE_DIR}/python_tools/pipeline_source/buildSkyProjections.in.py
#               ${CMAKE_BINARY_DIR}/pipeline/miscTools buildSkyProjections.py)

# Command prefix used to run the python build/install helper scripts with a
# consistent environment (local site-packages, bundled GSL, target python).
SET(python_build_environment
    ${CMAKE_COMMAND} -DPYTHON_LOCAL_SITE_PACKAGE=${PYTHON_LOCAL_SITE_PACKAGE} -DVOID_GSL=${CMAKE_BINARY_DIR}/ext_build/gsl -DPYTHON_EXECUTABLE=${PYTHON_EXECUTABLE} -DTARGET_PATH=${CMAKE_BINARY_DIR}/ext_build/python -P)

# Always-run target: builds/installs the python tool chain once the external
# dependencies (gsl, cython, netcdf4-python, ...) have been built.
add_custom_target(python_pipeline ALL
  COMMAND ${python_build_environment} ${CMAKE_SOURCE_DIR}/external/python_build.cmake
  COMMAND ${python_build_environment} ${CMAKE_SOURCE_DIR}/external/python_install.cmake
  DEPENDS gsl cython netcdf4-python ${PYTHON_AUXILIARY_DEPEND}
  WORKING_DIRECTORY ${CMAKE_SOURCE_DIR}/python_tools
)

subdirs(zobov c_tools)

50
c_tools/CMakeLists.txt Normal file
View file

@ -0,0 +1,50 @@
include(FindPkgConfig)

# Optional cairomm stack (used by the visualization tools).
pkg_check_modules(CAIROMM cairomm-1.0)
pkg_check_modules(SIGC sigc++-2.0)
pkg_check_modules(CAIRO cairo)
pkg_check_modules(FREETYPE freetype2)

find_path(CAIROMMCONFIG_INCLUDE_PATH NAMES cairommconfig.h HINTS ${CAIROMM_INCLUDE_DIRS})
find_path(CAIROMM_INCLUDE_PATH NAMES cairomm/cairomm.h HINTS ${CAIROMM_INCLUDE_DIRS})
find_path(SIGC_INCLUDE_PATH NAMES sigc++/slot.h HINTS ${SIGC_INCLUDE_DIRS})
find_path(SIGCCONFIG_INCLUDE_PATH NAMES sigc++config.h HINTS ${SIGC_INCLUDE_DIRS})
find_path(CAIRO_INCLUDE_PATH NAMES cairo.h HINTS ${CAIRO_INCLUDE_DIRS})
find_path(FREETYPE_INCLUDE_PATH NAMES freetype/config/ftheader.h HINTS ${FREETYPE_INCLUDE_DIRS})
# FIX(review): HINTS was given the literal string CAIROMM_LIBRARY_DIRS instead
# of the variable's value, so the hint never took effect; expand it.
find_library(CAIROMM_LIB NAMES ${CAIROMM_LIBRARIES} HINTS ${CAIROMM_LIBRARY_DIRS})

if(CAIROMMCONFIG_INCLUDE_PATH AND CAIROMM_INCLUDE_PATH AND SIGC_INCLUDE_PATH AND SIGCCONFIG_INCLUDE_PATH AND CAIRO_INCLUDE_PATH AND FREETYPE_INCLUDE_PATH AND CAIROMM_LIB)
  set(CAIRO_FOUND 1)
  set(ALL_CAIROMM_LIBS ${CAIROMM_LIB})
  set(CAIRO_HEADERS ${CAIROMM_INCLUDE_PATH}
      ${CAIROMMCONFIG_INCLUDE_PATH} ${SIGC_INCLUDE_PATH}
      ${SIGCCONFIG_INCLUDE_PATH} ${CAIRO_INCLUDE_PATH}
      ${FREETYPE_INCLUDE_PATH})
else()
  set(CAIRO_FOUND 0)
endif()

# Library set shared by most tools in this directory tree.
set(ZOB_LIBS zobovTool
    ${COSMOTOOL_LIBRARY} ${GSL_LIBRARIES}
    ${NETCDF_LIBRARIES})

include_directories(
  ${QHULL_INCLUDES}
  ${HEALPIX_INCLUDE_PATH}
  ${CMAKE_CURRENT_BINARY_DIR}
  ${CMAKE_CURRENT_SOURCE_DIR}/libzobov)

set(computeAverageDistortion_SRCS computeAverageDistortion.cpp)
add_genopt(computeAverageDistortion_SRCS computeAverageDistortion.ggo computeAverageDistortion_conf STRUCTNAME Params)
add_executable(computeAverageDistortion ${computeAverageDistortion_SRCS})
target_link_libraries(computeAverageDistortion ${ZOB_LIBS})

subdirs(libzobov test mock stacking visualization analysis zobov2 hod)

View file

@ -0,0 +1,6 @@
# voidOverlap tool; the binary dir is on the include path because add_genopt
# writes the generated gengetopt header there.
include_directories(${CMAKE_CURRENT_BINARY_DIR})
SET(voidOverlap_SRCS voidOverlap.cpp)
add_genopt(voidOverlap_SRCS voidOverlap.ggo voidOverlap_conf STRUCTNAME voidOverlap_info)
add_executable(voidOverlap ${voidOverlap_SRCS})
target_link_libraries(voidOverlap ${ZOB_LIBS})

View file

@ -0,0 +1,21 @@
# HOD (halo occupation distribution) population code: a single C executable
# built from many small numerical-recipes-style sources.
include_directories(${CMAKE_CURRENT_BINARY_DIR})
SET(hod_SRCS header.c main.c utility.c sigmac.c transfnc.c transfunc_file.c
nonlinear_power_spectrum.c least_squares.c hod_functions.c
xi_matter.c one_halo_rspace.c two_halo_rspace.c
input_params.c dFdx.c mstar.c halo_concentration.c growthfactor.c
halo_mass_conversion.c nfw_transform.c
pair_density.c tasks.c wp_minimization.c
kaiser_distortions.c
tf_eisenstein_hu.c jeans.c
populate_simulation.c test.c
dark_matter_statistics.c
meshlink2.c nbrsfind2.c i3tensor_2.c
mcmc.c halo_mass_function.c halo_bias.c
nrutil.c qromo.c midpnt.c midinf.c polint.c splint.c spline.c
zbrent.c qtrap.c trapzd.c cisi.c complex.c amoeba.c amotry.c
gaussj.c powell.c linmin.c f1dim.c mnbrak.c brent.c gasdev.c
ran1.c jacobi.c splin2.c splie2.c ran2.c sort2.c
)
add_executable(hod ${hod_SRCS})
# FIX(review): link the math library via MATH_LIB (set by
# find_library(MATH_LIB m) in the top-level lists and inherited here)
# instead of passing the raw "-lm" linker flag.
target_link_libraries(hod ${MATH_LIB})

Binary file not shown.

View file

@ -0,0 +1,8 @@
# Helper library shared by the c_tools executables.
add_library(zobovTool loadZobov.cpp particleInfo.cpp contour_pixels.cpp)
# Quote the flags so an empty OpenMP_CXX_FLAGS does not break the call.
set_target_properties(zobovTool PROPERTIES COMPILE_FLAGS "${OpenMP_CXX_FLAGS}" LINK_FLAGS "${OpenMP_CXX_FLAGS}")
SET(ZOB_LIBS zobovTool ${COSMOTOOL_LIBRARY} ${GSL_LIBRARY} ${GSLCBLAS_LIBRARY} ${NETCDFCPP_LIBRARY} ${NETCDF_LIBRARY})
# FIX(review): the original wrote "SET(ZOB_LIBS PARENT ${ZOB_LIBS})", which
# prepends the literal word PARENT to the list and never exports it; use
# PARENT_SCOPE so the parent directory actually receives ZOB_LIBS.
set(ZOB_LIBS ${ZOB_LIBS} PARENT_SCOPE)

View file

@ -0,0 +1,23 @@
# Mock-catalog generation tools.
include_directories(${CMAKE_CURRENT_BINARY_DIR} ${CMAKE_CURRENT_SOURCE_DIR}/loaders)
IF(SDF_SUPPORT)
add_definitions(-DSDF_SUPPORT)
ENDIF(SDF_SUPPORT)
# generateMock: builds mock particle catalogs from simulation snapshots.
SET(generateMock_SRCS generateMock.cpp )
add_genopt(generateMock_SRCS generateMock.ggo generateMock_conf STRUCTNAME generateMock_info)
add_executable(generateMock ${generateMock_SRCS})
target_link_libraries(generateMock simu_loaders ${ZOB_LIBS} ${HDF5HL_CPP_LIBRARY} ${HDF5_CPP_LIBRARY} ${HDF5_LIBRARY} ${LIBSDF_LIBRARY})
# generateTestMock: self-contained test catalog generator (no loaders needed).
add_executable(generateTestMock generateTestMock.cpp)
target_link_libraries(generateTestMock ${ZOB_LIBS})
# generateFromCatalog: builds input from an observational catalog (HEALPix mask).
SET(generateFromCatalog_SRCS generateFromCatalog.cpp)
add_genopt(generateFromCatalog_SRCS generateFromCatalog.ggo generateFromCatalog_conf STRUCTNAME generateFromCatalog_info)
add_executable(generateFromCatalog ${generateFromCatalog_SRCS})
target_link_libraries(generateFromCatalog ${ZOB_LIBS} ${HDF5HL_CPP_LIBRARY} ${HDF5_CPP_LIBRARY} ${HEALPIX_LIBRARIES})
# NOTE(review): ${OpenMP_CXX_FLAGS} is unquoted here — this call fails if
# OpenMP was not detected; confirm OpenMP is mandatory for this tool.
set_target_properties(generateFromCatalog PROPERTIES COMPILE_FLAGS ${OpenMP_CXX_FLAGS} LINK_FLAGS ${OpenMP_CXX_FLAGS})
subdirs(loaders)

View file

@ -0,0 +1,11 @@
# Sources for the simulation snapshot loader library.
set(SIMU_LOADERS_SRCS
    simulation_loader.cpp
    ramses_loader.cpp
    gadget_loader.cpp
    flash_loader.cpp)

# SDF support adds two loaders and the matching compile definition.
if(SDF_SUPPORT)
  add_definitions(-DSDF_SUPPORT)
  list(APPEND SIMU_LOADERS_SRCS sdf_loader.cpp multidark_loader.cpp)
endif()

add_library(simu_loaders ${SIMU_LOADERS_SRCS})

View file

@ -0,0 +1,39 @@
# Void stacking / pruning tools.  The binary dir holds the gengetopt headers.
include_directories(${CMAKE_CURRENT_BINARY_DIR})

SET(stackVoids_SRCS stackVoids.cpp)
add_genopt(stackVoids_SRCS stackVoids.ggo stackVoids_conf STRUCTNAME stackVoids_info)
add_executable(stackVoids ${stackVoids_SRCS})
target_link_libraries(stackVoids ${ZOB_LIBS})

SET(stackVoidsZero_SRCS stackVoidsZero.cpp)
add_genopt(stackVoidsZero_SRCS stackVoidsZero.ggo stackVoidsZero_conf STRUCTNAME stackVoidsZero_info)
add_executable(stackVoidsZero ${stackVoidsZero_SRCS})
target_link_libraries(stackVoidsZero ${ZOB_LIBS})

# FIX(review): the next three add_genopt calls used the keyword "STRUCT",
# which add_genopt does not recognize (it only parses STRUCTNAME/FUNCNAME),
# so the requested struct names were silently ignored and the default
# (<basefile>) was generated instead.  Changed to STRUCTNAME — confirm the
# corresponding .cpp sources reference these struct names.
set(computeVelocityProfile_SRCS computeVelocityProfile.cpp)
add_genopt(computeVelocityProfile_SRCS computeVelocityProfile.ggo computeVelocityProfile_conf STRUCTNAME Params)
add_executable(computeVelocityProfile ${computeVelocityProfile_SRCS})
target_link_libraries(computeVelocityProfile ${ZOB_LIBS})

set(stackDensityField_SRCS stackDensityField.cpp)
add_genopt(stackDensityField_SRCS stackDensityField.ggo stackDensityField_conf STRUCTNAME PARAMS)
add_executable(stackDensityField ${stackDensityField_SRCS})
target_link_libraries(stackDensityField ${ZOB_LIBS})

set(stackVelocityField_SRCS stackVelocityField.cpp)
add_genopt(stackVelocityField_SRCS stackVelocityField.ggo stackVelocityField_conf STRUCTNAME PARAMS)
add_executable(stackVelocityField ${stackVelocityField_SRCS})
target_link_libraries(stackVelocityField ${ZOB_LIBS})

SET(pruneVoids_SRCS pruneVoids.cpp)
add_genopt(pruneVoids_SRCS pruneVoids.ggo pruneVoids_conf STRUCTNAME pruneVoids_info)
add_executable(pruneVoids ${pruneVoids_SRCS})
target_link_libraries(pruneVoids ${ZOB_LIBS})

SET(makeAHFOutput_SRCS makeAHFOutput.cpp)
add_genopt(makeAHFOutput_SRCS makeAHFOutput.ggo makeAHFOutput_conf STRUCTNAME makeAHFOutput_info)
add_executable(makeAHFOutput ${makeAHFOutput_SRCS})
target_link_libraries(makeAHFOutput ${ZOB_LIBS})

View file

@ -0,0 +1 @@
# Descend into the two ZOBOV components: jozov2 and voz1b1.
subdirs(jozov2 voz1b1)

View file

@ -0,0 +1,24 @@
# omp_add_flags(TARGET LANG)
# Appends the OpenMP compile and link flags for LANG (C or CXX) to TARGET.
# If LANG is empty it is deduced from the target's LINKER_LANGUAGE.
# Silently does nothing when OpenMP was not found; warns for other languages.
function(omp_add_flags TARGET LANG)
if(OPENMP_FOUND)
if(NOT LANG)
get_target_property(LANG ${TARGET} LINKER_LANGUAGE)
endif()
if(LANG MATCHES "C(XX)?")
set_property(TARGET ${TARGET} APPEND
PROPERTY COMPILE_FLAGS ${OpenMP_${LANG}_FLAGS})
set_property(TARGET ${TARGET} APPEND
PROPERTY LINK_FLAGS ${OpenMP_${LANG}_FLAGS})
else()
message(WARNING "omp_add_flags: target '${TARGET}'"
" link language '${LANG}' is not supported")
endif()
endif()
endfunction()
# The jozov2 watershed zone finder; OpenMP parallelism is opt-in.
add_executable(jozov2 jozov2.cpp jozov2_io.cpp jozov2_zones.cpp jozov2_watershed.cpp findrtop.c)

if(ENABLE_OPENMP)
  find_package(OpenMP)
  omp_add_flags(jozov2 CXX)
  add_definitions(-DOPENMP)
endif()

View file

@ -0,0 +1,4 @@
# Tessellation tool built on qhull and CosmoTool.  (Include directories are a
# directory property evaluated at generate time, so declaring them first is
# equivalent and reads more conventionally.)
include_directories(${COSMOTOOL_INCLUDE_PATH} ${QHULL_INCLUDE_PATH})
add_executable(voz1b1_2 voz1b1.cpp vozutil.c voz_io.cpp)
target_link_libraries(voz1b1_2 ${QHULL_LIBRARY} ${COSMOTOOL_LIBRARY} ${MATH_LIB})

86
external/cosmotool/CMakeLists.txt vendored Normal file
View file

@ -0,0 +1,86 @@
# CosmoToolbox: bundled support library of cosmology utilities.
cmake_minimum_required(VERSION 2.6)
list(APPEND CMAKE_MODULE_PATH "${CMAKE_CURRENT_SOURCE_DIR}")
project(CosmoToolbox)
include(GetGitRevisionDescription)
include(ExternalProject)
include(FindOpenMP)
# Record the git revision; used below in the CPack patch version.
get_git_head_revision(HEAD GIT_VER)
option(BUILD_SHARED_LIBS "Build shared libraries." OFF)
option(BUILD_STATIC_LIBS "Build static libraries." ON)
option(ENABLE_OPENMP "Enable OpenMP support." OFF)
option(ENABLE_SHARP "Enable SPHT support." ON)
find_path(NETCDF_INCLUDE_PATH NAMES netcdf.h)
find_path(NETCDFCPP_INCLUDE_PATH NAMES netcdfcpp.h netcdf)
find_path(GSL_INCLUDE_PATH NAMES gsl/gsl_blas.h)
# Detect whether the C++ netCDF headers are the v4 ("netcdf") or the legacy
# v3 ("netcdfcpp.h") flavour, and record the answer in a generated header.
IF(EXISTS ${NETCDFCPP_INCLUDE_PATH}/netcdf)
SET(FOUND_NETCDF4 1)
FILE(WRITE ${CMAKE_BINARY_DIR}/src/ctool_netcdf_ver.hpp "#define NETCDFCPP4 1")
ELSE(EXISTS ${NETCDFCPP_INCLUDE_PATH}/netcdf)
SET(FOUND_NETCDF3 1)
FILE(WRITE ${CMAKE_BINARY_DIR}/src/ctool_netcdf_ver.hpp "#undef NETCDFCPP4")
ENDIF(EXISTS ${NETCDFCPP_INCLUDE_PATH}/netcdf)
find_library(NETCDF_LIBRARY netcdf)
find_library(NETCDFCPP_LIBRARY NAMES netcdf_c++ netcdf_c++4)
find_library(GSL_LIBRARY gsl)
find_library(GSLCBLAS_LIBRARY gslcblas)
# Optionally build the bundled libsharp (spherical harmonic transforms)
# in-source via ExternalProject and point SHARP_* at its install tree.
if (ENABLE_SHARP)
SET(SHARP_SOURCE ${CMAKE_SOURCE_DIR}/external/sharp)
SET(DEP_BUILD ${CMAKE_SOURCE_DIR}/external/sharp/auto)
ExternalProject_Add(sharp
SOURCE_DIR ${SHARP_SOURCE}
BUILD_IN_SOURCE 1
CONFIGURE_COMMAND ${SHARP_SOURCE}/configure --prefix=${DEP_BUILD}
BUILD_COMMAND ${CMAKE_MAKE_PROGRAM}
INSTALL_COMMAND echo "No install"
)
SET(SHARP_LIBRARY ${DEP_BUILD}/lib/libsharp.a)
SET(SHARP_LIBRARIES ${SHARP_LIBRARY})
SET(SHARP_INCLUDE_PATH ${DEP_BUILD}/include)
endif (ENABLE_SHARP)
# HDF5 with high-level and C++ components; honour an optional user root dir.
set(HDF5_FIND_COMPONENTS HL CXX)
if(HDF5_ROOTDIR)
SET(ENV{HDF5_ROOT} ${HDF5_ROOTDIR})
endif(HDF5_ROOTDIR)
include(FindHDF5)
include(FindPkgConfig)
pkg_check_modules(FFTW3 fftw3>=3.3)
pkg_check_modules(FFTW3F fftw3f>=3.3)
pkg_check_modules(EIGEN3 eigen3)
# netCDF and GSL are hard requirements: fail the configure step if missing.
include(FindPackageHandleStandardArgs)
set(NETCDF_FIND_REQUIRED TRUE)
set(GSL_FIND_REQUIRED TRUE)
FIND_PACKAGE_HANDLE_STANDARD_ARGS(NetCDF DEFAULT_MSG NETCDF_LIBRARY NETCDFCPP_LIBRARY NETCDF_INCLUDE_PATH)
FIND_PACKAGE_HANDLE_STANDARD_ARGS(GSL DEFAULT_MSG GSL_LIBRARY GSLCBLAS_LIBRARY GSL_INCLUDE_PATH)
set(GSL_LIBRARIES ${GSL_LIBRARY} ${GSLCBLAS_LIBRARY})
# CPACK Configuration
SET(CPACK_PACKAGE_DESCRIPTION_SUMMARY "A toolbox for impatient cosmologists")
SET(CPACK_PACKAGE_VENDOR "Guilhem Lavaux")
SET(CPACK_RESOURCE_FILE_LICENSE "${CMAKE_CURRENT_SOURCE_DIR}/LICENCE_CeCILL_V2")
SET(CPACK_PACKAGE_VERSION_MAJOR "1")
SET(CPACK_PACKAGE_VERSION_MINOR "0")
SET(CPACK_PACKAGE_VERSION_PATCH "0-${GIT_VER}")
# FIX(review): the install directory referenced GalaxExplorer_VERSION_MAJOR/
# _MINOR — variables never defined in this project (leftover from another
# project), which expanded to "CosmoToolbox-." — use the versions set above.
SET(CPACK_PACKAGE_INSTALL_DIRECTORY "CosmoToolbox-${CPACK_PACKAGE_VERSION_MAJOR}.${CPACK_PACKAGE_VERSION_MINOR}")
SET(CPACK_STRIP_FILES "lib/libCosmoTool.so")
SET(CPACK_SOURCE_IGNORE_FILES
"/CVS/;/\\\\.git/;/\\\\.svn/;\\\\.swp$;\\\\.#;/#;.*~;cscope.*;/CMakeFiles/;.*\\\\.cmake;Makefile")
add_subdirectory(src)
add_subdirectory(sample)
include(CPack)

View file

@ -0,0 +1,65 @@
# Sample/test programs for CosmoTool.
SET(tolink ${GSL_LIBRARIES} CosmoTool ${CosmoTool_LIBS})
include_directories(${CMAKE_SOURCE_DIR}/src ${FFTW3_INCLUDE_DIRS} ${EIGEN3_INCLUDE_DIRS} ${NETCDF_INCLUDE_PATH} ${GSL_INCLUDE_PATH})
IF(SHARP_INCLUDE_PATH)
include_directories(BEFORE ${SHARP_INCLUDE_PATH})
ENDIF(SHARP_INCLUDE_PATH)

# Each of these programs is a single <name>.cpp linked against the common
# library set; declare them in one loop instead of repeating the
# add_executable/target_link_libraries pair a dozen times.
foreach(prog
    testBQueue testInterpolate testSmooth
    testkd testkd2 testkd3
    testDelaunay testNewton testPool
    testEskow testAlgo testBSP
    test_cosmopower)
  add_executable(${prog} ${prog}.cpp)
  target_link_libraries(${prog} ${tolink})
endforeach()

# Programs with extra requirements keep explicit declarations.
if (HDF5_FOUND)
add_executable(testReadFlash testReadFlash.cpp)
target_link_libraries(testReadFlash ${tolink})
endif (HDF5_FOUND)

if (FFTW3_FOUND AND FFTW3F_FOUND AND EIGEN3_FOUND)
add_executable(test_fft_calls test_fft_calls.cpp)
target_link_libraries(test_fft_calls ${tolink} ${FFTW3_LIBRARIES} ${FFTW3F_LIBRARIES})
endif (FFTW3_FOUND AND FFTW3F_FOUND AND EIGEN3_FOUND)

if (ENABLE_SHARP AND SHARP_LIBRARY AND SHARP_INCLUDE_PATH AND EIGEN3_FOUND)
include_directories(${SHARP_INCLUDE_PATH})
add_executable(test_healpix_calls test_healpix_calls.cpp)
target_link_libraries(test_healpix_calls ${tolink} ${SHARP_LIBRARIES})
set_target_properties(test_healpix_calls PROPERTIES COMPILE_FLAGS ${OpenMP_CXX_FLAGS} LINK_FLAGS ${OpenMP_CXX_FLAGS})
add_dependencies(test_healpix_calls sharp)
endif (ENABLE_SHARP AND SHARP_LIBRARY AND SHARP_INCLUDE_PATH AND EIGEN3_FOUND)

92
external/cosmotool/src/CMakeLists.txt vendored Normal file
View file

@ -0,0 +1,92 @@
# Core CosmoTool sources.
SET(CosmoTool_SRCS
fortran.cpp
interpolate.cpp
load_data.cpp
loadGadget.cpp
loadRamses.cpp
octTree.cpp
powerSpectrum.cpp
miniargs.cpp
growthFactor.cpp
cosmopower.cpp
)
# Pick the yorick I/O backend matching the detected netCDF C++ flavour.
IF(FOUND_NETCDF3)
SET(CosmoTool_SRCS ${CosmoTool_SRCS} yorick_nc3.cpp)
ELSE(FOUND_NETCDF3)
IF(FOUND_NETCDF4)
SET(CosmoTool_SRCS ${CosmoTool_SRCS} yorick_nc4.cpp)
ENDIF(FOUND_NETCDF4)
ENDIF(FOUND_NETCDF3)
# The FLASH loader requires HDF5; otherwise build a stub with the same symbols.
if (HDF5_FOUND)
set(CosmoTool_SRCS ${CosmoTool_SRCS}
h5_readFlash.cpp
loadFlash.cpp
)
else(HDF5_FOUND)
set(CosmoTool_SRCS ${CosmoTool_SRCS}
loadFlash_dummy.cpp
)
endif (HDF5_FOUND)
# Headers listed with the sources (so IDE generators show them; harmless).
SET(CosmoTool_SRCS ${CosmoTool_SRCS}
bqueue.hpp
config.hpp
dinterpolate.hpp
field.hpp
fixArray.hpp
fortran.hpp
interpolate3d.hpp
interpolate.hpp
kdtree_leaf.hpp
load_data.hpp
loadGadget.hpp
loadRamses.hpp
loadSimu.hpp
miniargs.hpp
mykdtree.hpp
octTree.hpp
powerSpectrum.hpp
sparseGrid.hpp
sphSmooth.hpp
yorick.hpp
growthFactor.hpp
)
include_directories(${GSL_INCLUDE_PATH} ${NETCDF_INCLUDE_PATH} ${NETCDFCPP_INCLUDE_PATH} ${CMAKE_BINARY_DIR}/src)

# Libraries consumers of CosmoTool must link against; exported to the parent.
set(CosmoTool_LIBS ${NETCDFCPP_LIBRARY} ${NETCDF_LIBRARY} ${GSL_LIBRARIES})
if (HDF5_FOUND)
  set(CosmoTool_LIBS ${CosmoTool_LIBS} ${HDF5_CXX_LIBRARIES} ${HDF5_LIBRARIES})
  include_directories(${HDF5_INCLUDE_DIRS})
endif (HDF5_FOUND)
set(CosmoTool_LIBS ${CosmoTool_LIBS} PARENT_SCOPE)

# Build shared, static, or both, as requested by the options.
if (BUILD_SHARED_LIBS)
  add_library(CosmoTool SHARED ${CosmoTool_SRCS})
  target_link_libraries(CosmoTool ${CosmoTool_LIBS})
  if (BUILD_STATIC_LIBS)
    add_library(CosmoTool_static STATIC ${CosmoTool_SRCS})
  endif(BUILD_STATIC_LIBS)
else (BUILD_SHARED_LIBS)
  add_library(CosmoTool STATIC ${CosmoTool_SRCS})
endif (BUILD_SHARED_LIBS)

install(TARGETS CosmoTool
        LIBRARY DESTINATION lib
        ARCHIVE DESTINATION lib)
# FIX(review): CosmoTool_static only exists when BOTH options are ON; the
# original guard tested BUILD_SHARED_LIBS alone, making configuration fail
# whenever static libraries were disabled alongside shared ones.
if (BUILD_SHARED_LIBS AND BUILD_STATIC_LIBS)
  install(TARGETS CosmoTool_static
          LIBRARY DESTINATION lib
          ARCHIVE DESTINATION lib)
endif ()

install(DIRECTORY . DESTINATION include/CosmoTool
        FILES_MATCHING PATTERN "*.hpp")
install(DIRECTORY . DESTINATION include/CosmoTool
        FILES_MATCHING PATTERN "*.tcc")

View file

@ -0,0 +1,250 @@
#+
# VIDE -- Void IDentification and Examination -- ./pipeline/sdss/sdss_dr7LCDM.py
# Copyright (C) 2010-2013 Guilhem Lavaux
# Copyright (C) 2011-2013 P. M. Sutter
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; version 2 of the License.
#
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#+
#!/usr/bin/env python
# does analysis in real space assuming LCDM cosmology
import os
import numpy as np
from void_python_tools.backend.classes import *
# does full void analysis. Also generates 2d/1d stacked plots and hubble diagram
# to combine multiple samples:
# For example, combining dim from 0.0-0.1 and bright from 0.1-0.2:
# 1) add dim+bright to sampleList - "+" is required to detect combo!
# if True, will scan log files for last known completed state and run from there
continueRun = False
# stages:
# 1 : extract redshift slices from data
# 2 : void extraction using zobov
# 3 : removal of small voids and voids near the edge
# 4 : void stacking and combining
# 5 : 1-d and 2-d profile generation
# 6 : fitting and plotting stacked voids
# 7 : determination of error bars (if requested)
# 8 : hubble diagram generation
# 9 : likelihood determination
startCatalogStage = 1
endCatalogStage = 1
# label used for output directories/files of this catalog run
catalogName = "lcdm"
# where main data files are present/will go
workDir = os.getenv("HOME")+"/workspace/Voids/sdss_dr7LCDM/"
inputDataDir = os.getenv("HOME")+"/workspace/Voids/catalogs/nyuvagc/"
logDir = os.getenv("PWD")+"/../logs/sdss_dr7LCDM"
figDir = os.getenv("PWD")+"/../figs/sdss_dr7LCDM"
ZOBOV_PATH = os.getenv("PWD")+"/../zobov/"
CTOOLS_PATH = os.getenv("PWD")+"/../c_tools/"
freshStack = True # "no" to continue building onto existing stacked voids
errorBars = "CALCULATED" # "ESTIMATED" for error bars from multiple null tests
#errorBars = "ESTIMATED" # "ESTIMATED" for error bars from multiple null tests
# "CALCULATED" to use fitting procedure
numIncoherentRuns = 100
ranSeed = 101010
useComoving = True # if true, convert to real space using LCDM cosmology
# try different portions of data. available options: "all, central, edge"
dataPortions = ["central", "all"]
# zobov parallelization: sub-volumes per dimension / concurrent threads
numZobovDivisions = 2
numZobovThreads = 2
# list of Sample objects, filled in below
dataSampleList = []
# SDSS DR7 "dim1" volume-limited slice (z 0.0-0.05); part of the dim1+dim2 combo.
newSample = Sample(fullName = "lss.dr72dim1.dat",
nickName = "dim1",
dataType = "observation",
maskFile = inputDataDir+"/healpix/rast_window_512.fits",
selFunFile = inputDataDir+"/czselfunc.all.dr72dim1.dat",
zBoundary = (0.0, 0.05),
zRange = (0.0, 0.05),
skyFraction = 0.19,
minVoidRadius = 3.479,
fakeDensity = 0.01,
volumeLimited = True,
includeInHubble = False,
partOfCombo = True,
isCombo = False,
comboList= None)
# addStack args appear to be (zMin, zMax, rMin, rMax, flag, flag) — the radius
# bounds presumably in Mpc/h; TODO confirm against Sample.addStack signature.
newSample.addStack(0.0, 0.1, 8, 12, False, True)
newSample.addStack(0.0, 0.1, 12, 16, False, True)
newSample.addStack(0.0, 0.1, 16, 20, False, True)
newSample.addStack(0.0, 0.1, 20, 28, False, True)
dataSampleList.append(newSample)
# "dim2" slice (z 0.05-0.1); also part of the combo.
newSample = Sample(fullName = "lss.dr72dim2.dat",
nickName = "dim2",
dataType = "observation",
maskFile = inputDataDir+"/healpix/rast_window_512.fits",
selFunFile = inputDataDir+"/czselfunc.all.dr72dim2.dat",
zBoundary = (0.0, 0.1),
zRange = (0.05, 0.1),
skyFraction = 0.19,
minVoidRadius = 4.723,
fakeDensity = 0.01,
volumeLimited = True,
includeInHubble = False,
partOfCombo = True,
isCombo = False,
comboList= None)
newSample.addStack(0.0, 0.1, 8, 12, False, True)
newSample.addStack(0.0, 0.1, 12, 16, False, True)
newSample.addStack(0.0, 0.1, 16, 20, False, True)
newSample.addStack(0.0, 0.1, 20, 28, False, True)
dataSampleList.append(newSample)
# Combined dim1+dim2 sample (the "+" in the name marks it as a combo);
# this one enters the hubble diagram.
newSample = Sample(fullName = "lss.dr72dim1+dim2.dat",
nickName = "dim1+dim2",
dataType = "observation",
maskFile = inputDataDir+"/healpix/rast_window_512.fits",
selFunFile = inputDataDir+"/czselfunc.all.dr72dim.dat",
zBoundary = (0.0, 0.1),
zRange = (0.0, 0.1),
skyFraction = 0.19,
minVoidRadius = 4.723,
fakeDensity = 0.01,
profileBinSize = "auto",
volumeLimited = True,
includeInHubble = True,
partOfCombo = False,
isCombo = True,
comboList= ("lss.dr72dim1.dat", "lss.dr72dim2.dat"))
newSample.addStack(0.0, 0.1, 8, 12, True, False)
newSample.addStack(0.0, 0.1, 12, 16, True, False)
newSample.addStack(0.0, 0.1, 16, 20, True, False)
newSample.addStack(0.0, 0.1, 20, 28, True, False)
dataSampleList.append(newSample)
# "bright1" slice (z 0.1-0.15); part of the bright1+bright2 combo.
newSample = Sample(fullName = "lss.dr72bright1.dat",
nickName = "bright1",
dataType = "observation",
maskFile = inputDataDir+"/healpix/rast_window_512.fits",
selFunFile = inputDataDir+"/czselfunc.all.dr72bright1.dat",
zBoundary = (0.0, 0.15),
zRange = (0.1, 0.15),
skyFraction = 0.19,
minVoidRadius = 6.763,
fakeDensity = 0.01,
volumeLimited = True,
includeInHubble = False,
partOfCombo = True,
isCombo = False,
comboList= None)
newSample.addStack(0.1, 0.2, 13, 20, False, True)
newSample.addStack(0.1, 0.2, 20, 28, False, True)
newSample.addStack(0.1, 0.2, 28, 36, False, True)
newSample.addStack(0.1, 0.2, 36, 44, False, True)
dataSampleList.append(newSample)
# "bright2" slice (z 0.15-0.2); note the lower fakeDensity than the dim samples.
newSample = Sample(fullName = "lss.dr72bright2.dat",
nickName = "bright2",
dataType = "observation",
maskFile = inputDataDir+"/healpix/rast_window_512.fits",
selFunFile = inputDataDir+"/czselfunc.all.dr72bright2.dat",
zBoundary = (0.0, 0.2),
zRange = (0.15, 0.2),
skyFraction = 0.19,
minVoidRadius = 10.844,
fakeDensity = 0.001,
volumeLimited = True,
includeInHubble = False,
partOfCombo = True,
isCombo = False,
comboList= None)
newSample.addStack(0.1, 0.2, 13, 20, False, True)
newSample.addStack(0.1, 0.2, 20, 28, False, True)
newSample.addStack(0.1, 0.2, 28, 36, False, True)
newSample.addStack(0.1, 0.2, 36, 44, False, True)
dataSampleList.append(newSample)
# Combined bright1+bright2 sample; no selFunFile and sentinel values (-1)
# for minVoidRadius/fakeDensity — presumably because the combo inherits
# these from its members; TODO confirm in the backend.
newSample = Sample(fullName = "lss.dr72bright1+bright2.dat",
nickName = "bright1+bright2",
dataType = "observation",
maskFile = inputDataDir+"/healpix/rast_window_512.fits",
zBoundary = (0.0, 0.2),
zRange = (0.1, 0.2),
skyFraction = 0.19,
minVoidRadius = -1,
fakeDensity = -1,
profileBinSize = "auto",
volumeLimited = True,
includeInHubble = True,
partOfCombo = False,
isCombo = True,
comboList= ("lss.dr72bright1.dat", "lss.dr72bright2.dat"))
newSample.addStack(0.1, 0.2, 13, 20, True, False)
newSample.addStack(0.1, 0.2, 20, 28, True, False)
newSample.addStack(0.1, 0.2, 28, 36, True, False)
newSample.addStack(0.1, 0.2, 36, 44, True, False)
dataSampleList.append(newSample)
# Dim LRG sample (z 0.16-0.36); included in the hubble diagram.
newSample = Sample(fullName = "lss.dr72lrgdim.dat",
nickName = "lrgdim",
dataType = "observation",
maskFile = inputDataDir+"/healpix/rast_window_512.fits",
selFunFile = inputDataDir+"/czselfunc.all.dr72lrgdim.dat",
zBoundary = (0.16, 0.36),
zRange = (0.16, 0.36),
skyFraction = 0.19,
minVoidRadius = 24,
fakeDensity = 0.001,
profileBinSize = 2, # KEEP
volumeLimited = True,
includeInHubble = True,
partOfCombo = False,
isCombo = False,
comboList= None)
newSample.addStack(0.16, 0.36, 54, 70, True, False, zMinPlot=0.2)
newSample.addStack(0.16, 0.36, 70, 106, True, False, zMinPlot=0.2)
dataSampleList.append(newSample)
# Bright LRG sample (z 0.36-0.44); excluded from the hubble diagram.
newSample = Sample(fullName = "lss.dr72lrgbright.dat",
nickName = "lrgbright",
dataType = "observation",
maskFile = inputDataDir+"/healpix/rast_window_512.fits",
selFunFile = inputDataDir+"/czselfunc.all.dr72lrgbright.dat",
zBoundary = (0.36, 0.44),
zRange = (0.36, 0.44),
skyFraction = 0.19,
minVoidRadius = 38,
fakeDensity = 0.001,
profileBinSize = "auto",
volumeLimited = True,
includeInHubble = False,
partOfCombo = False,
isCombo = False,
comboList= None)
newSample.addStack(0.36, 0.44, 76, 92, False, False)
newSample.addStack(0.36, 0.44, 92, 108, False, False)
dataSampleList.append(newSample)

View file

@ -0,0 +1,144 @@
#+
# VIDE -- Void IDEntification pipeline -- ./pipeline/datasets/mergertree.py
# Copyright (C) 2010-2013 Guilhem Lavaux
# Copyright (C) 2011-2013 P. M. Sutter
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; version 2 of the License.
#
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#+
import os
# -----------------------------------------------------------------------------
# -----------------------------------------------------------------------------
# CONFIGURATION
# NOTE(review): catalog stages 0/0 disable the catalog pipeline here while the
# AP stages below run 1-2 — confirm this is the intended default.
startCatalogStage = 0
endCatalogStage = 0
continueRun = False
startAPStage = 1
endAPStage = 2
# directory for the input simulation/observational particle files
catalogDir = os.getenv("HOME")+"/workspace/Voids/catalogs/mergertree1024/"
# path to HOD code
hodPath = os.getenv("HOME")+"/projects/Voids/hod/HOD.x"
# where to put the final void catalog, figures, and output logs
voidOutputDir = os.getenv("HOME")+"/workspace/Voids/mergertree1024/"
figDir = os.getenv("PWD")+"/../figs/mergertree1024/"
logDir = os.getenv("PWD")+"/../logs/mergertree1024/"
# where to place the pipeline scripts
scriptDir = os.getenv("PWD")+"/mergertree1024/"
# simulation or observation?
dataType = "simulation"
# available formats for simulation: gadget, mergertree
# NOTE(review): "sdf" is not in the list above — confirm supported formats.
dataFormat = "sdf"
dataUnit = 1 # as multiple of Mpc/h
# place particles on the lightcone?
useLightCone = False
# also do peculiar velocities?
doPecVel = False
# common filename of particle files
particleFileBase = "mf_4s_1G_1k_NNNNN"
particleFileDummy = 'NNNNN'
# list of file numbers for the particle files
# to get particle file name, we take particleFileBase+fileNum
fileNums = ["1.000"]
# redshift of each file in the above list
redshifts = ["0.0"]
# how many independent slices along the z-axis?
numSlices = 1
#numSlices = 4
numAPSlices = 1
# how many subdivisions along the x- and y- axis?
# ( = 2 will make 4 subvolumes for each slice, = 3 will make 9, etc.)
numSubvolumes = 1
# prefix to give all outputs
prefix = "mt_"
# list of desired subsamples - these are in units of h Mpc^-3!
subSamples = [0.1, 0.05, 0.02, 0.01, 0.004, 0.002, 0.001, 0.0003, 0.0001]
#doSubSampling = False # do the subsampling in preparation script?
doSubSampling = True # do the subsampling in preparation script?
# common filename of halo files, leave blank to ignore halos
haloFileBase = "mf_4s_1G_1k_bgc2_NNNNN.sdf"
haloFileDummy = 'NNNNN'
# minimum halo mass cuts to apply for the halo catalog
# use "none" to get all halos
minHaloMasses = ["none", 1.2e13]
#minHaloMasses = [7.0e11, 1.2e13]
# locations of data in the halo catalog
haloFileMCol = 6
haloFileXCol = 0
haloFileYCol = 1
haloFileZCol = 2
haloFileVXCol = 3
haloFileVYCol = 4
haloFileVZCol = 5
haloFileColSep = ','
haloFileNumComLines = 0
# adjust these two parameters given the memory constraints on your system:
# numZobovDivisions: how many sub-volumes per dimension will zobov process
# numZobovThreads: how many sub-volumes to process at once?
numZobovDivisions = 4
numZobovThreads = 4
# simulation information
numPart = 1024*1024*1024
lbox = 999.983 # Mpc/h
omegaM = 0.2847979853038958
hubble = 0.6962
#galDens = 0.000225
# HOD parameter sets used to populate halos with galaxies.
hodParmList = [
{'name' : "LowRes", #BOSS: Manera et al. 2012, eq. 26
'Mmin' : 0.0,
'M1' : 1.e14,
'sigma_logM' : 0.596,
'alpha' : 1.0127,
'Mcut' : 1.19399e13,
'galDens' : 0.0002,
},
{'name' : "HighRes",
'Mmin' : 0.0,
#'Mmin' : 1.99525e12,
'M1' : 3.80189e13,
'sigma_logM' : 0.21,
'alpha' : 1.12,
'Mcut' : 6.91831e11,
'galDens' : 0.02,
}
]
# END CONFIGURATION
# -----------------------------------------------------------------------------
# -----------------------------------------------------------------------------

View file

@ -17,6 +17,4 @@
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#+
from velocityProfileFitNative import *
from likelihood import *
from cosmologyTools import *

View file

@ -24,7 +24,7 @@ import numpy as np
import scipy.integrate as integrate
from void_python_tools.backend import *
__all__=['expansion', 'angularDiameter', 'expectedStretch', 'aveStretch', 'aveExpansion', 'aveStretchCone', 'aveWeightedStretch']
__all__=['expansion', 'angularDiameter', 'aveExpansion']
# returns 1/E(z) for the given cosmology
def expansion(z, Om = 0.27, Ot = 1.0, w0 = -1.0, wa = 0.0):
@ -42,113 +42,6 @@ def angularDiameter(z, Om = 0.27, Ot = 1.0, w0 = -1.0, wa = 0.0):
da = integrate.quad(expansion, 0.0, z, args=(Om, Ot, w0, wa))[0]
return da
# Expected void stretch at redshift z for the given cosmology:
# (1/E(z)) * D_A(z) / z, built from the expansion() and angularDiameter()
# helpers defined above.  (Indentation reconstructed; the rendered diff
# stripped it.)
def expectedStretch(z, Om = 0.27, Ot = 1.0, w0 = -1.0, wa = 0.0):
  inv_E = 1./expansion(z, Om=Om, Ot=Ot, w0=w0, wa=wa)
  d_ang = angularDiameter(z, Om=Om, Ot=Ot, w0=w0, wa=wa)
  return inv_E*d_ang/z
# -----------------------------------------------------------------------------
# Average expected void stretch over [zStart, zEnd] for an observational cone
# of the given sky fraction: expectedStretch weighted by the surface area of
# slices through the cone, normalized by the enclosed cone volume.
# (Indentation reconstructed; the rendered diff stripped it.)
def aveStretchCone(zStart, zEnd, skyFrac = 0.19, Om = 0.27, Ot = 1.0,
                   w0 = -1.0, wa = 0.0):
  #print "assuming observation!", skyFrac
  # avoid the z=0 singularity of expectedStretch
  if zStart == 0.0: zStart = 1.e-6

  h1 = zStart
  h2 = zEnd
  r1 = skyFrac * 4* np.pi * zStart**2
  r2 = skyFrac * 4 * np.pi * zEnd**2

  # surface area of a slice within a cone
  def coneSlice(x, h, r):
    return np.pi * (r/h*x)**2

  # area-weighted integrand
  def coneFunc(z, h, r, Om = 0.27, Ot = 1.0, w0 = -1.0, wa = 0.0):
    return coneSlice(z, h, r) * expectedStretch(z, Om, Ot, w0, wa)

  aveHigh = integrate.quad(coneFunc, 0.0, zEnd, args=(h2, r2, Om, Ot, w0, wa), full_output=1)[0]
  aveLow = integrate.quad(coneFunc, 0.0, zStart, args=(h1, r1, Om, Ot, w0, wa), full_output=1)[0]
  volumeHigh = integrate.quad(coneSlice, 0.0, zEnd, args=(h2, r2))[0]
  volumeLow = integrate.quad(coneSlice, 0.0, zStart, args=(h1, r1))[0]

  return (aveHigh-aveLow)/(volumeHigh-volumeLow)
# returns average expected void stretch for a given redshift range;
# for observational samples the average is cone-weighted, otherwise it
# is a plain mean over redshift
def aveStretch(sample, zStart, zEnd,
               Om = 0.27, Ot = 1.0, w0 = -1.0, wa = 0.0):
  if zStart == 0.0: zStart = 1.e-6

  isObservation = (sample.dataType == "observation")

  # mean stretch over [zStart, zEnd] for a given matter density
  def meanStretch(omegaMatter):
    if isObservation:
      return aveStretchCone(zStart, zEnd,
                            skyFrac=sample.skyFraction, Om=omegaMatter, Ot=Ot,
                            w0=w0, wa=wa)
    total = integrate.quad(expectedStretch, zStart, zEnd,
                           args=(omegaMatter, Ot, w0, wa))[0]
    return total / (zEnd-zStart)

  stretch = meanStretch(Om)

  # if in comoving space, calculate stretch for fiducial cosmology
  # and take relative amount
  if not sample.useLightCone or sample.useComoving:
    stretch = meanStretch(sample.omegaM) / stretch

  return stretch
# -----------------------------------------------------------------------------
# returns average expected void stretch for a given redshift range,
# weighted by an actual void distribution (dist sampled at bins[:-1])
def aveWeightedStretch(zStart, zEnd, skyFrac = 0.19, Om = 0.27, Ot = 1.0,
                       w0 = -1.0, wa = 0.0, dist=None, bins=None,
                       useComoving=True, OmFid=None):
  if zStart == 0.0: zStart = 1.e-6

  # interpolated void distribution at redshift x
  def weightedSlice(x):
    return np.interp(x, bins[:-1], dist)

  # stretch weighted by the void distribution
  def weightedFunc(z, Om = 0.27, Ot = 1.0, w0 = -1.0, wa = 0.0):
    return expectedStretch(z, Om, Ot, w0, wa) * weightedSlice(z)

  # distribution-weighted mean stretch for a given matter density
  def normalizedMean(omegaMatter):
    numerator = integrate.quad(weightedFunc, zStart, zEnd,
                               args=(omegaMatter, Ot, w0, wa),
                               full_output=1)[0]
    norm = integrate.quad(weightedSlice, zStart, zEnd, full_output=1)[0]
    if norm == 0.0: norm = 1.0
    return numerator / norm

  stretch = normalizedMean(Om)

  # if in comoving space, calculate stretch for fiducial cosmology
  # and take relative amount
  if useComoving:
    stretchFid = normalizedMean(OmFid)
    if stretchFid != 0.0:
      stretch = stretchFid / stretch

  return stretch
# -----------------------------------------------------------------------------
# returns average expected expansion for a given redshift range
def aveExpansion(zStart, zEnd, Om = 0.27, Ot = 1.0, w0 = -1.0, wa = 0.0):

View file

@ -17,10 +17,4 @@
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#+
from build import *
from draw import *
from fit import *
from inertia import *
from mcmc import *
from generateExpFigure import *
from getSurveyProps import *

View file

@ -19,4 +19,3 @@
#+
from classes import *
from launchers import *
from catalogPrep import *

File diff suppressed because it is too large Load diff

View file

@ -34,7 +34,7 @@ ncFloat = 'f8'
# -----------------------------------------------------------------------------
def loadPart(sampleDir):
#print " Loading particle data..."
print " Loading particle data..."
sys.stdout.flush()
with open(sampleDir+"/sample_info.dat", 'rb') as input:

View file

@ -2,72 +2,12 @@ import numpy as np
def cic( x, Lbox, Lboxcut = 0, Nmesh = 128, weights = None ):
  # Cloud-in-cell (CIC) density assignment of particle positions x
  # (N x 3 array) onto an Nmesh^3 grid spanning a box of side Lbox
  # (with an optional cut margin Lboxcut on each side).
  # Returns (rho, wm, ws): the density contrast field rho = n/<n> - 1,
  # the mean weight wm, and the mean squared weight ws.
  # Fix: use 'is None' -- 'weights == None' is elementwise for array
  # weights and raises on truth-testing in modern numpy.
  if weights is None: weights = 1
  wm = np.mean(weights)
  ws = np.mean(weights**2)

  # fractional offset of each particle within its cell
  d = np.mod(x/(Lbox+2*Lboxcut)*Nmesh,1)

  box = ([Lboxcut,Lbox+Lboxcut],[Lboxcut,Lbox+Lboxcut],[Lboxcut,Lbox+Lboxcut])

  # distribute each particle's weight over its 8 neighboring cells;
  # np.roll shifts the contribution to the adjacent cell (periodic wrap)
  rho = np.histogramdd(x, range = box, bins = Nmesh, weights = weights*(1-d[:,0])*(1-d[:,1])*(1-d[:,2]))[0] \
      + np.roll(np.histogramdd(x, range = box, bins = Nmesh, weights = weights*d[:,0]*(1-d[:,1])*(1-d[:,2]))[0],1,0) \
      + np.roll(np.histogramdd(x, range = box, bins = Nmesh, weights = weights*(1-d[:,0])*d[:,1]*(1-d[:,2]))[0],1,1) \
      + np.roll(np.histogramdd(x, range = box, bins = Nmesh, weights = weights*(1-d[:,0])*(1-d[:,1])*d[:,2])[0],1,2) \
      + np.roll(np.roll(np.histogramdd(x, range = box, bins = Nmesh, weights = weights*d[:,0]*d[:,1]*(1-d[:,2]))[0],1,0),1,1) \
      + np.roll(np.roll(np.histogramdd(x, range = box, bins = Nmesh, weights = weights*d[:,0]*(1-d[:,1])*d[:,2])[0],1,0),1,2) \
      + np.roll(np.roll(np.histogramdd(x, range = box, bins = Nmesh, weights = weights*(1-d[:,0])*d[:,1]*d[:,2])[0],1,1),1,2) \
      + np.roll(np.roll(np.roll(np.histogramdd(x, range = box, bins = Nmesh, weights = weights*d[:,0]*d[:,1]*d[:,2])[0],1,0),1,1),1,2)

  rho /= wm

  # convert counts to density contrast
  rho = rho/rho.mean() - 1.

  return (rho, wm, ws)
def powcor( d1, d2, Lbox, Nmesh = 128, Nbin = 100, scale = 'lin', cor = False ):
  # Cross power spectrum of two CIC density fields d1, d2 on an Nmesh^3
  # grid in a box of side Lbox, shell-averaged into Nbin bins ('lin' or
  # 'log' spacing). Returns (Nm, km, Pkm, SPkm); if cor is true, also
  # computes and returns the shell-averaged correlation function as
  # ((Nm, km, Pkm, SPkm), (Nmx, rm, Xrm, SXrm)).
  # Fixes: removed unreachable trailing 'return'; replaced the
  # non-idiomatic 'cor == True' comparison with a truthiness test.

  # Fourier transform
  d1 = np.fft.fftn(d1)
  d2 = np.fft.fftn(d2)

  # CIC deconvolution window (1e-100 avoids 0/0 at the center)
  wid = np.indices(np.shape(d1)) - Nmesh/2
  #wid[np.where(wid >= Nmesh/2)] -= Nmesh
  wid = wid*np.pi/Nmesh + 1e-100
  wcic = np.prod(np.sin(wid)/wid,0)**2

  # Shell average power spectrum
  dk = 2*np.pi/Lbox
  Pk = np.conj(d1)*d2*(Lbox/Nmesh**2)**3
  Pk = np.fft.fftshift(Pk)/wcic**2

  (Nm, km, Pkm, SPkm) = shellavg(np.real(Pk), dk, Nmesh, Nbin = Nbin, xmin = 0., xmax = Nmesh*dk/2, scale = scale)

  # Inverse Fourier transform and shell average correlation function
  if cor:
    dx = Lbox/Nmesh
    Xr = np.fft.ifftshift(np.fft.ifftn(np.fft.ifftshift(Pk)))*(Nmesh/Lbox)**3
    (Nmx, rm, Xrm, SXrm) = shellavg(np.real(Xr), dx, Nmesh, Nbin = Nbin, xmin = dx, xmax = 140., scale = scale)
    return ((Nm, km, Pkm, SPkm),(Nmx, rm, Xrm, SXrm))

  return (Nm, km, Pkm, SPkm)
def shellavg( f, dx, Nmesh, Nbin = 100, xmin = 0., xmax = 1., scale = 'lin' ):
  # Radially (shell-) average a cubic field f about the mesh center.
  # dx is the grid spacing, so radii run over dx*|i - Nmesh/2|.
  # Returns (Nm, xm, fm, fs): per-bin counts, mean radius, mean value,
  # and standard error of the mean.
  # NOTE(review): bins with Nm == 0 or Nm == 1 yield nan/inf entries via
  # division by zero -- preserved from the original behavior.
  # Fix: removed the unreachable 'return' that followed the real return.
  x = np.indices(np.shape(f)) - Nmesh/2
  #x[np.where(x >= Nmesh/2)] -= Nmesh
  x = dx*np.sqrt(np.sum(x**2,0))

  # bin edges, linear or logarithmic in radius
  if scale == 'lin': bins = xmin+(xmax-xmin)* (np.arange(Nbin+1)/float(Nbin))
  if scale == 'log': bins = xmin*(xmax/xmin)**(np.arange(Nbin+1)/float(Nbin))

  Nm = np.histogram(x, bins = bins)[0]
  xm = np.histogram(x, bins = bins, weights = x)[0]/Nm
  fm = np.histogram(x, bins = bins, weights = f)[0]/Nm
  fs = np.sqrt((np.histogram(x, bins = bins, weights = f**2)[0]/Nm - fm**2)/(Nm-1))
  return (Nm, xm, fm, fs)

16
zobov/CMakeLists.txt Normal file
View file

@ -0,0 +1,16 @@
# ZOBOV void-finder executables. MATH_LIB (libm) is located in the
# top-level CMakeLists; QHULL_LIBRARY presumably comes from the external
# build in external/external_build.cmake -- verify it is set before this
# directory is added. Listing voz.h among the sources only attaches the
# header to the target; it is harmless.
add_executable(voz1b1 voz1b1.c readfiles.c vozutil.c voz.h)
# voz1b1 is the only target needing qhull (Voronoi tessellation).
target_link_libraries(voz1b1 ${QHULL_LIBRARY} ${MATH_LIB})

add_executable(jozov jozov.c findrtop.c)
target_link_libraries(jozov ${MATH_LIB})

add_executable(vozinit vozinit.c readfiles.c)
target_link_libraries(vozinit ${MATH_LIB})

add_executable(voztie voztie.c readfiles.c)
target_link_libraries(voztie ${MATH_LIB})