diff --git a/CMakeLists.txt b/CMakeLists.txt
new file mode 100644
index 0000000..2dfc7d4
--- /dev/null
+++ b/CMakeLists.txt
@@ -0,0 +1,89 @@
+cmake_minimum_required(VERSION 2.8)
+
+project(zobovPerso)
+
+include(CheckCCompilerFlag)
+include(ExternalProject)
+
+check_c_compiler_flag(-march=native SUPPORT_ARCH_NATIVE)
+
+
+
+find_library(MATH_LIB m)
+
+# add_genopt(<sourcelist> <ggofile> <basefile> [STRUCTNAME name] [FUNCNAME name])
+# Generates <basefile>.c/.h from a gengetopt .ggo file and prepends the .c
+# file to <sourcelist>.  NOTE: the stray positional argument "zobovConf_info"
+# after "-a ${_structname}" was removed -- gengetopt takes no positional
+# arguments and would abort on it.
+macro(add_genopt _sourcelist _ggofile _basefile)
+
+  unset(_structname)
+  unset(_funcname)
+
+  if(NOT EXISTS ${CMAKE_CURRENT_BINARY_DIR}/${_ggofile})
+    set(_ggofile2 ${CMAKE_CURRENT_SOURCE_DIR}/${_ggofile})
+  else(NOT EXISTS ${CMAKE_CURRENT_BINARY_DIR}/${_ggofile})
+    set(_ggofile2 ${CMAKE_CURRENT_BINARY_DIR}/${_ggofile})
+  endif(NOT EXISTS ${CMAKE_CURRENT_BINARY_DIR}/${_ggofile})
+
+  foreach(arg ${ARGN})
+    if ("x${arg}" MATCHES "^x(STRUCTNAME|FUNCNAME)$")
+      SET(doing "${arg}")
+    elseif(doing STREQUAL "STRUCTNAME")
+      SET(_structname ${arg})
+    elseif(doing STREQUAL "FUNCNAME")
+      SET(_funcname ${arg})
+    endif()
+  endforeach(arg ${ARGN})
+
+  if(NOT DEFINED _structname)
+    set(_structname ${_basefile})
+  endif(NOT DEFINED _structname)
+
+  if(NOT DEFINED _funcname)
+    set(_funcname ${_basefile})
+  endif(NOT DEFINED _funcname)
+
+  set(_cfile ${CMAKE_CURRENT_BINARY_DIR}/${_basefile}.c)
+  set(_hfile ${CMAKE_CURRENT_BINARY_DIR}/${_basefile}.h)
+
+  add_custom_command(
+    OUTPUT ${_cfile} ${_hfile}
+    COMMAND ${GENGETOPT} -i ${_ggofile2} -f ${_funcname} -a ${_structname} -F ${_basefile} -C
+    DEPENDS ${_ggofile2}
+  )
+
+  set(${_sourcelist} ${_cfile} ${${_sourcelist}})
+
+endmacro(add_genopt)
+
+# configure_exec(<source> <destdir> <destfile>)
+# configure_file() the template into a scratch dir, then copy it to <destdir>
+# with executable permissions (configure_file cannot set permissions itself).
+macro(configure_exec _source _destdir _destfile)
+
+  SET(TMP_CONFIGURE_DIR ${CMAKE_BINARY_DIR}${CMAKE_FILES_DIRECTORY})
+  configure_file(${_source} ${TMP_CONFIGURE_DIR}/${_destfile} @ONLY)
+  file(COPY ${TMP_CONFIGURE_DIR}/${_destfile} DESTINATION ${_destdir}
+       FILE_PERMISSIONS OWNER_READ OWNER_WRITE OWNER_EXECUTE GROUP_READ GROUP_EXECUTE WORLD_READ WORLD_EXECUTE)
+
+endmacro(configure_exec)
+
+include(${CMAKE_SOURCE_DIR}/external/external_build.cmake)
+include(${CMAKE_SOURCE_DIR}/external/external_python_build.cmake)
+
+SET(PYTHONPATH ${PYTHON_LOCAL_SITE_PACKAGE})
+configure_exec(${CMAKE_SOURCE_DIR}/run_python.sh.in ${CMAKE_BINARY_DIR} run_python.sh)
+configure_exec(${CMAKE_SOURCE_DIR}/run_env.sh.in ${CMAKE_BINARY_DIR} run_env.sh)
+configure_exec(${CMAKE_SOURCE_DIR}/python_tools/pipeline_source/prepareCatalogs.in.py
+               ${CMAKE_BINARY_DIR}/pipeline prepareCatalogs.py)
+# Disabled calls: the continuation lines below must be commented out as well,
+# otherwise they are parsed as stray tokens and abort the configure step.
+#configure_exec(${CMAKE_SOURCE_DIR}/python_tools/pipeline_source/applyMaskToMock.in.py
+#               ${CMAKE_BINARY_DIR}/pipeline applyMaskToMock.py)
+#configure_exec(${CMAKE_SOURCE_DIR}/python_tools/pipeline_source/buildSkyProjections.in.py
+#               ${CMAKE_BINARY_DIR}/pipeline/miscTools buildSkyProjections.py)
+
+SET(python_build_environment
+    ${CMAKE_COMMAND} -DPYTHON_LOCAL_SITE_PACKAGE=${PYTHON_LOCAL_SITE_PACKAGE} -DVOID_GSL=${CMAKE_BINARY_DIR}/ext_build/gsl -DPYTHON_EXECUTABLE=${PYTHON_EXECUTABLE} -DTARGET_PATH=${CMAKE_BINARY_DIR}/ext_build/python -P)
+
+add_custom_target(python_pipeline ALL
+  COMMAND ${python_build_environment} ${CMAKE_SOURCE_DIR}/external/python_build.cmake
+  COMMAND ${python_build_environment} ${CMAKE_SOURCE_DIR}/external/python_install.cmake
+  DEPENDS gsl cython netcdf4-python ${PYTHON_AUXILIARY_DEPEND}
+  WORKING_DIRECTORY ${CMAKE_SOURCE_DIR}/python_tools
+)
+
+
+subdirs(zobov c_tools)
diff --git a/c_tools/CMakeLists.txt b/c_tools/CMakeLists.txt
new file mode 100644
index 0000000..863d264
--- /dev/null
+++ b/c_tools/CMakeLists.txt
@@ -0,0 +1,50 @@
+
+include(FindPkgConfig)
+
+pkg_check_modules(CAIROMM cairomm-1.0)
+pkg_check_modules(SIGC sigc++-2.0)
+pkg_check_modules(CAIRO cairo)
+pkg_check_modules(FREETYPE freetype2)
+
+find_path(CAIROMMCONFIG_INCLUDE_PATH NAMES cairommconfig.h HINTS ${CAIROMM_INCLUDE_DIRS})
+find_path(CAIROMM_INCLUDE_PATH NAMES cairomm/cairomm.h HINTS ${CAIROMM_INCLUDE_DIRS})
+find_path(SIGC_INCLUDE_PATH NAMES sigc++/slot.h HINTS ${SIGC_INCLUDE_DIRS})
+find_path(SIGCCONFIG_INCLUDE_PATH NAMES sigc++config.h HINTS ${SIGC_INCLUDE_DIRS})
+find_path(CAIRO_INCLUDE_PATH NAMES cairo.h HINTS ${CAIRO_INCLUDE_DIRS})
+find_path(FREETYPE_INCLUDE_PATH NAMES freetype/config/ftheader.h HINTS ${FREETYPE_INCLUDE_DIRS})
+
+# HINTS must receive the expanded variable, not the literal name
+# "CAIROMM_LIBRARY_DIRS", or the pkg-config library dirs are never searched.
+find_library(CAIROMM_LIB NAMES ${CAIROMM_LIBRARIES} HINTS ${CAIROMM_LIBRARY_DIRS})
+
+IF (CAIROMMCONFIG_INCLUDE_PATH AND CAIROMM_INCLUDE_PATH AND SIGC_INCLUDE_PATH AND SIGCCONFIG_INCLUDE_PATH AND CAIRO_INCLUDE_PATH AND FREETYPE_INCLUDE_PATH AND CAIROMM_LIB)
+
+  SET(CAIRO_FOUND 1)
+  SET(ALL_CAIROMM_LIBS ${CAIROMM_LIB})
+  SET(CAIRO_HEADERS ${CAIROMM_INCLUDE_PATH}
+      ${CAIROMMCONFIG_INCLUDE_PATH} ${SIGC_INCLUDE_PATH}
+      ${SIGCCONFIG_INCLUDE_PATH} ${CAIRO_INCLUDE_PATH}
+      ${FREETYPE_INCLUDE_PATH})
+
+ELSE (CAIROMMCONFIG_INCLUDE_PATH AND CAIROMM_INCLUDE_PATH AND SIGC_INCLUDE_PATH AND SIGCCONFIG_INCLUDE_PATH AND CAIRO_INCLUDE_PATH AND FREETYPE_INCLUDE_PATH AND CAIROMM_LIB)
+
+  SET(CAIRO_FOUND 0)
+
+ENDIF (CAIROMMCONFIG_INCLUDE_PATH AND CAIROMM_INCLUDE_PATH AND SIGC_INCLUDE_PATH AND SIGCCONFIG_INCLUDE_PATH AND CAIRO_INCLUDE_PATH AND FREETYPE_INCLUDE_PATH AND CAIROMM_LIB)
+
+SET(ZOB_LIBS zobovTool
+    ${COSMOTOOL_LIBRARY} ${GSL_LIBRARIES}
+    ${NETCDF_LIBRARIES})
+
+
+include_directories(
+    ${QHULL_INCLUDES}
+    ${HEALPIX_INCLUDE_PATH}
+    ${CMAKE_CURRENT_BINARY_DIR}
+    ${CMAKE_CURRENT_SOURCE_DIR}/libzobov)
+
+
+set(computeAverageDistortion_SRCS computeAverageDistortion.cpp)
+add_genopt(computeAverageDistortion_SRCS computeAverageDistortion.ggo computeAverageDistortion_conf STRUCTNAME Params)
+add_executable(computeAverageDistortion ${computeAverageDistortion_SRCS})
+target_link_libraries(computeAverageDistortion ${ZOB_LIBS})
+
+subdirs(libzobov test mock stacking visualization analysis zobov2 hod)
diff --git a/c_tools/analysis/CMakeLists.txt b/c_tools/analysis/CMakeLists.txt
new file mode 100644
index 0000000..4fd2153
--- /dev/null
+++ b/c_tools/analysis/CMakeLists.txt
@@ -0,0 +1,6 @@
+include_directories(${CMAKE_CURRENT_BINARY_DIR})
+
+SET(voidOverlap_SRCS voidOverlap.cpp)
+add_genopt(voidOverlap_SRCS voidOverlap.ggo voidOverlap_conf STRUCTNAME voidOverlap_info)
+add_executable(voidOverlap ${voidOverlap_SRCS})
+target_link_libraries(voidOverlap ${ZOB_LIBS})
diff --git a/c_tools/hod/CMakeLists.txt b/c_tools/hod/CMakeLists.txt
new file mode 100644
index 0000000..ef85f62
--- /dev/null
+++ b/c_tools/hod/CMakeLists.txt
@@ -0,0 +1,21 @@
+include_directories(${CMAKE_CURRENT_BINARY_DIR})
+
+SET(hod_SRCS header.c main.c utility.c sigmac.c transfnc.c transfunc_file.c
+    nonlinear_power_spectrum.c least_squares.c hod_functions.c
+    xi_matter.c one_halo_rspace.c two_halo_rspace.c
+    input_params.c dFdx.c mstar.c halo_concentration.c growthfactor.c
+    halo_mass_conversion.c nfw_transform.c
+    pair_density.c tasks.c wp_minimization.c
+    kaiser_distortions.c
+    tf_eisenstein_hu.c jeans.c
+    populate_simulation.c test.c
+    dark_matter_statistics.c
+    meshlink2.c nbrsfind2.c i3tensor_2.c
+    mcmc.c halo_mass_function.c halo_bias.c
+    nrutil.c qromo.c midpnt.c midinf.c polint.c splint.c spline.c
+    zbrent.c qtrap.c trapzd.c cisi.c complex.c amoeba.c amotry.c
+    gaussj.c powell.c linmin.c f1dim.c mnbrak.c brent.c gasdev.c
+    ran1.c jacobi.c splin2.c splie2.c ran2.c sort2.c
+)
+add_executable(hod ${hod_SRCS})
+# Use the m library located by the top-level find_library(MATH_LIB m)
+# instead of a hardcoded -lm flag.
+target_link_libraries(hod ${MATH_LIB})
diff --git a/c_tools/hod/users_manual.pdf b/c_tools/hod/users_manual.pdf
new file mode 100644
index 0000000..f9bd01d
Binary files /dev/null and b/c_tools/hod/users_manual.pdf differ
diff --git a/c_tools/libzobov/CMakeLists.txt b/c_tools/libzobov/CMakeLists.txt
new file mode 100644
index 0000000..ead0a51
--- /dev/null
+++ b/c_tools/libzobov/CMakeLists.txt
@@ -0,0 +1,8 @@
+
+add_library(zobovTool loadZobov.cpp particleInfo.cpp contour_pixels.cpp)
+
+set_target_properties(zobovTool PROPERTIES COMPILE_FLAGS ${OpenMP_CXX_FLAGS} LINK_FLAGS ${OpenMP_CXX_FLAGS})
+
+SET(ZOB_LIBS zobovTool ${COSMOTOOL_LIBRARY} ${GSL_LIBRARY} ${GSLCBLAS_LIBRARY} ${NETCDFCPP_LIBRARY} ${NETCDF_LIBRARY})
+# Export ZOB_LIBS to the parent directory scope; PARENT_SCOPE is the keyword
+# set() understands, and it must come last ("PARENT" alone is just a value).
+SET(ZOB_LIBS ${ZOB_LIBS} PARENT_SCOPE)
+
diff --git a/c_tools/mock/CMakeLists.txt b/c_tools/mock/CMakeLists.txt
new file mode 100644
index 0000000..8761175
--- /dev/null
+++ b/c_tools/mock/CMakeLists.txt
@@ -0,0 +1,23 @@
+include_directories(${CMAKE_CURRENT_BINARY_DIR} ${CMAKE_CURRENT_SOURCE_DIR}/loaders)
+
+IF(SDF_SUPPORT)
+  add_definitions(-DSDF_SUPPORT)
+ENDIF(SDF_SUPPORT)
+
+SET(generateMock_SRCS generateMock.cpp )
+add_genopt(generateMock_SRCS generateMock.ggo generateMock_conf STRUCTNAME generateMock_info)
+add_executable(generateMock ${generateMock_SRCS})
+target_link_libraries(generateMock simu_loaders ${ZOB_LIBS} ${HDF5HL_CPP_LIBRARY} ${HDF5_CPP_LIBRARY} ${HDF5_LIBRARY} ${LIBSDF_LIBRARY})
+
+add_executable(generateTestMock generateTestMock.cpp)
+target_link_libraries(generateTestMock ${ZOB_LIBS})
+
+
+SET(generateFromCatalog_SRCS generateFromCatalog.cpp)
+add_genopt(generateFromCatalog_SRCS generateFromCatalog.ggo generateFromCatalog_conf STRUCTNAME generateFromCatalog_info)
+add_executable(generateFromCatalog ${generateFromCatalog_SRCS})
+target_link_libraries(generateFromCatalog ${ZOB_LIBS} ${HDF5HL_CPP_LIBRARY} ${HDF5_CPP_LIBRARY} ${HEALPIX_LIBRARIES})
+
+set_target_properties(generateFromCatalog PROPERTIES COMPILE_FLAGS ${OpenMP_CXX_FLAGS} LINK_FLAGS ${OpenMP_CXX_FLAGS})
+
+subdirs(loaders)
diff --git a/c_tools/mock/loaders/CMakeLists.txt b/c_tools/mock/loaders/CMakeLists.txt
new file mode 100644
index 0000000..55818a3
--- /dev/null
+++ b/c_tools/mock/loaders/CMakeLists.txt
@@ -0,0 +1,11 @@
+SET(SIMU_LOADERS_SRCS
+    simulation_loader.cpp ramses_loader.cpp
+    gadget_loader.cpp flash_loader.cpp)
+
+IF (SDF_SUPPORT)
+  add_definitions(-DSDF_SUPPORT)
+  SET(SIMU_LOADERS_SRCS ${SIMU_LOADERS_SRCS} sdf_loader.cpp multidark_loader.cpp)
+ENDIF (SDF_SUPPORT)
+
+
+add_library(simu_loaders ${SIMU_LOADERS_SRCS})
diff --git a/c_tools/stacking/CMakeLists.txt b/c_tools/stacking/CMakeLists.txt
new file mode 100644
index 0000000..fce1f3d
--- /dev/null
+++ b/c_tools/stacking/CMakeLists.txt
@@
-0,0 +1,39 @@
+include_directories(${CMAKE_CURRENT_BINARY_DIR})
+
+SET(stackVoids_SRCS stackVoids.cpp)
+add_genopt(stackVoids_SRCS stackVoids.ggo stackVoids_conf STRUCTNAME stackVoids_info)
+add_executable(stackVoids ${stackVoids_SRCS})
+target_link_libraries(stackVoids ${ZOB_LIBS})
+
+SET(stackVoidsZero_SRCS stackVoidsZero.cpp)
+add_genopt(stackVoidsZero_SRCS stackVoidsZero.ggo stackVoidsZero_conf STRUCTNAME stackVoidsZero_info)
+add_executable(stackVoidsZero ${stackVoidsZero_SRCS})
+target_link_libraries(stackVoidsZero ${ZOB_LIBS})
+
+# NOTE: add_genopt only recognizes the keywords STRUCTNAME and FUNCNAME;
+# the previous "STRUCT" keyword was silently ignored, so the requested
+# struct name never reached gengetopt.
+set(computeVelocityProfile_SRCS computeVelocityProfile.cpp)
+add_genopt(computeVelocityProfile_SRCS computeVelocityProfile.ggo computeVelocityProfile_conf STRUCTNAME Params)
+add_executable(computeVelocityProfile ${computeVelocityProfile_SRCS})
+target_link_libraries(computeVelocityProfile ${ZOB_LIBS})
+
+
+set(stackDensityField_SRCS stackDensityField.cpp)
+add_genopt(stackDensityField_SRCS stackDensityField.ggo stackDensityField_conf STRUCTNAME PARAMS)
+add_executable(stackDensityField ${stackDensityField_SRCS})
+target_link_libraries(stackDensityField ${ZOB_LIBS})
+
+
+
+set(stackVelocityField_SRCS stackVelocityField.cpp)
+add_genopt(stackVelocityField_SRCS stackVelocityField.ggo stackVelocityField_conf STRUCTNAME PARAMS)
+add_executable(stackVelocityField ${stackVelocityField_SRCS})
+target_link_libraries(stackVelocityField ${ZOB_LIBS})
+
+SET(pruneVoids_SRCS pruneVoids.cpp)
+add_genopt(pruneVoids_SRCS pruneVoids.ggo pruneVoids_conf STRUCTNAME pruneVoids_info)
+add_executable(pruneVoids ${pruneVoids_SRCS})
+target_link_libraries(pruneVoids ${ZOB_LIBS})
+
+SET(makeAHFOutput_SRCS makeAHFOutput.cpp)
+add_genopt(makeAHFOutput_SRCS makeAHFOutput.ggo makeAHFOutput_conf STRUCTNAME makeAHFOutput_info)
+add_executable(makeAHFOutput ${makeAHFOutput_SRCS})
+target_link_libraries(makeAHFOutput ${ZOB_LIBS})
diff --git a/c_tools/zobov2/CMakeLists.txt b/c_tools/zobov2/CMakeLists.txt
new file mode 100644
index 0000000..38ad864
--- /dev/null
+++
b/c_tools/zobov2/CMakeLists.txt @@ -0,0 +1 @@ +subdirs(jozov2 voz1b1) diff --git a/c_tools/zobov2/jozov2/CMakeLists.txt b/c_tools/zobov2/jozov2/CMakeLists.txt new file mode 100644 index 0000000..81c1f1d --- /dev/null +++ b/c_tools/zobov2/jozov2/CMakeLists.txt @@ -0,0 +1,24 @@ +function(omp_add_flags TARGET LANG) + if(OPENMP_FOUND) + if(NOT LANG) + get_target_property(LANG ${TARGET} LINKER_LANGUAGE) + endif() + if(LANG MATCHES "C(XX)?") + set_property(TARGET ${TARGET} APPEND + PROPERTY COMPILE_FLAGS ${OpenMP_${LANG}_FLAGS}) + set_property(TARGET ${TARGET} APPEND + PROPERTY LINK_FLAGS ${OpenMP_${LANG}_FLAGS}) + else() + message(WARNING "omp_add_flags: target '${TARGET}'" + " link language '${LANG}' is not supported") + endif() + endif() +endfunction() + +add_executable(jozov2 jozov2.cpp jozov2_io.cpp jozov2_zones.cpp jozov2_watershed.cpp findrtop.c) + +if (ENABLE_OPENMP) + Find_Package(OpenMP) + omp_add_flags(jozov2 CXX) + add_definitions(-DOPENMP) +ENDIF(ENABLE_OPENMP) diff --git a/c_tools/zobov2/voz1b1/CMakeLists.txt b/c_tools/zobov2/voz1b1/CMakeLists.txt new file mode 100644 index 0000000..98fa941 --- /dev/null +++ b/c_tools/zobov2/voz1b1/CMakeLists.txt @@ -0,0 +1,4 @@ +add_executable(voz1b1_2 voz1b1.cpp vozutil.c voz_io.cpp) +target_link_libraries(voz1b1_2 ${QHULL_LIBRARY} ${COSMOTOOL_LIBRARY} ${MATH_LIB}) + +include_directories(${COSMOTOOL_INCLUDE_PATH} ${QHULL_INCLUDE_PATH}) diff --git a/external/cosmotool/CMakeLists.txt b/external/cosmotool/CMakeLists.txt new file mode 100644 index 0000000..7faca8a --- /dev/null +++ b/external/cosmotool/CMakeLists.txt @@ -0,0 +1,86 @@ +cmake_minimum_required(VERSION 2.6) +list(APPEND CMAKE_MODULE_PATH "${CMAKE_CURRENT_SOURCE_DIR}") + +project(CosmoToolbox) + +include(GetGitRevisionDescription) +include(ExternalProject) +include(FindOpenMP) + +get_git_head_revision(HEAD GIT_VER) + +option(BUILD_SHARED_LIBS "Build shared libraries." OFF) +option(BUILD_STATIC_LIBS "Build static libraries." 
ON) +option(ENABLE_OPENMP "Enable OpenMP support." OFF) +option(ENABLE_SHARP "Enable SPHT support." ON) + +find_path(NETCDF_INCLUDE_PATH NAMES netcdf.h) +find_path(NETCDFCPP_INCLUDE_PATH NAMES netcdfcpp.h netcdf) +find_path(GSL_INCLUDE_PATH NAMES gsl/gsl_blas.h) + +IF(EXISTS ${NETCDFCPP_INCLUDE_PATH}/netcdf) + SET(FOUND_NETCDF4 1) + FILE(WRITE ${CMAKE_BINARY_DIR}/src/ctool_netcdf_ver.hpp "#define NETCDFCPP4 1") +ELSE(EXISTS ${NETCDFCPP_INCLUDE_PATH}/netcdf) + SET(FOUND_NETCDF3 1) + FILE(WRITE ${CMAKE_BINARY_DIR}/src/ctool_netcdf_ver.hpp "#undef NETCDFCPP4") +ENDIF(EXISTS ${NETCDFCPP_INCLUDE_PATH}/netcdf) + +find_library(NETCDF_LIBRARY netcdf) +find_library(NETCDFCPP_LIBRARY NAMES netcdf_c++ netcdf_c++4) +find_library(GSL_LIBRARY gsl) +find_library(GSLCBLAS_LIBRARY gslcblas) + +if (ENABLE_SHARP) + SET(SHARP_SOURCE ${CMAKE_SOURCE_DIR}/external/sharp) + SET(DEP_BUILD ${CMAKE_SOURCE_DIR}/external/sharp/auto) + ExternalProject_Add(sharp + SOURCE_DIR ${SHARP_SOURCE} + BUILD_IN_SOURCE 1 + CONFIGURE_COMMAND ${SHARP_SOURCE}/configure --prefix=${DEP_BUILD} + BUILD_COMMAND ${CMAKE_MAKE_PROGRAM} + INSTALL_COMMAND echo "No install" + ) + SET(SHARP_LIBRARY ${DEP_BUILD}/lib/libsharp.a) + SET(SHARP_LIBRARIES ${SHARP_LIBRARY}) + SET(SHARP_INCLUDE_PATH ${DEP_BUILD}/include) +endif (ENABLE_SHARP) + + +set(HDF5_FIND_COMPONENTS HL CXX) +if(HDF5_ROOTDIR) + SET(ENV{HDF5_ROOT} ${HDF5_ROOTDIR}) +endif(HDF5_ROOTDIR) +include(FindHDF5) + +include(FindPkgConfig) + +pkg_check_modules(FFTW3 fftw3>=3.3) +pkg_check_modules(FFTW3F fftw3f>=3.3) +pkg_check_modules(EIGEN3 eigen3) + +include(FindPackageHandleStandardArgs) +set(NETCDF_FIND_REQUIRED TRUE) +set(GSL_FIND_REQUIRED TRUE) +FIND_PACKAGE_HANDLE_STANDARD_ARGS(NetCDF DEFAULT_MSG NETCDF_LIBRARY NETCDFCPP_LIBRARY NETCDF_INCLUDE_PATH) +FIND_PACKAGE_HANDLE_STANDARD_ARGS(GSL DEFAULT_MSG GSL_LIBRARY GSLCBLAS_LIBRARY GSL_INCLUDE_PATH) + + +set(GSL_LIBRARIES ${GSL_LIBRARY} ${GSLCBLAS_LIBRARY}) + +# CPACK Configuration 
+SET(CPACK_PACKAGE_DESCRIPTION_SUMMARY "A toolbox for impatient cosmologists") +SET(CPACK_PACKAGE_VENDOR "Guilhem Lavaux") +SET(CPACK_RESOURCE_FILE_LICENSE "${CMAKE_CURRENT_SOURCE_DIR}/LICENCE_CeCILL_V2") +SET(CPACK_PACKAGE_VERSION_MAJOR "1") +SET(CPACK_PACKAGE_VERSION_MINOR "0") +SET(CPACK_PACKAGE_VERSION_PATCH "0-${GIT_VER}") +SET(CPACK_PACKAGE_INSTALL_DIRECTORY "CosmoToolbox-${GalaxExplorer_VERSION_MAJOR}.${GalaxExplorer_VERSION_MINOR}") +SET(CPACK_STRIP_FILES "lib/libCosmoTool.so") +SET(CPACK_SOURCE_IGNORE_FILES +"/CVS/;/\\\\.git/;/\\\\.svn/;\\\\.swp$;\\\\.#;/#;.*~;cscope.*;/CMakeFiles/;.*\\\\.cmake;Makefile") + +add_subdirectory(src) +add_subdirectory(sample) + +include(CPack) diff --git a/external/cosmotool/sample/CMakeLists.txt b/external/cosmotool/sample/CMakeLists.txt new file mode 100644 index 0000000..969480c --- /dev/null +++ b/external/cosmotool/sample/CMakeLists.txt @@ -0,0 +1,65 @@ +SET(tolink ${GSL_LIBRARIES} CosmoTool ${CosmoTool_LIBS}) +include_directories(${CMAKE_SOURCE_DIR}/src ${FFTW3_INCLUDE_DIRS} ${EIGEN3_INCLUDE_DIRS} ${NETCDF_INCLUDE_PATH} ${GSL_INCLUDE_PATH}) + +IF(SHARP_INCLUDE_PATH) + include_directories(BEFORE ${SHARP_INCLUDE_PATH}) +ENDIF(SHARP_INCLUDE_PATH) + +add_executable(testBQueue testBQueue.cpp) +target_link_libraries(testBQueue ${tolink}) + +add_executable(testInterpolate testInterpolate.cpp) +target_link_libraries(testInterpolate ${tolink}) + +add_executable(testSmooth testSmooth.cpp) +target_link_libraries(testSmooth ${tolink}) + +add_executable(testkd testkd.cpp) +target_link_libraries(testkd ${tolink}) + +add_executable(testkd2 testkd2.cpp) +target_link_libraries(testkd2 ${tolink}) + +add_executable(testkd3 testkd3.cpp) +target_link_libraries(testkd3 ${tolink}) + +add_executable(testDelaunay testDelaunay.cpp) +target_link_libraries(testDelaunay ${tolink}) + +add_executable(testNewton testNewton.cpp) +target_link_libraries(testNewton ${tolink}) + +add_executable(testPool testPool.cpp) +target_link_libraries(testPool 
${tolink}) + +if (HDF5_FOUND) + add_executable(testReadFlash testReadFlash.cpp) + target_link_libraries(testReadFlash ${tolink}) +endif (HDF5_FOUND) + + +add_executable(testEskow testEskow.cpp) +target_link_libraries(testEskow ${tolink}) + +add_executable(testAlgo testAlgo.cpp) +target_link_libraries(testAlgo ${tolink}) + +add_executable(testBSP testBSP.cpp) +target_link_libraries(testBSP ${tolink}) + +if (FFTW3_FOUND AND FFTW3F_FOUND AND EIGEN3_FOUND) + add_executable(test_fft_calls test_fft_calls.cpp) + target_link_libraries(test_fft_calls ${tolink} ${FFTW3_LIBRARIES} ${FFTW3F_LIBRARIES}) +endif (FFTW3_FOUND AND FFTW3F_FOUND AND EIGEN3_FOUND) + +if (ENABLE_SHARP AND SHARP_LIBRARY AND SHARP_INCLUDE_PATH AND EIGEN3_FOUND) + include_directories(${SHARP_INCLUDE_PATH}) + add_executable(test_healpix_calls test_healpix_calls.cpp) + target_link_libraries(test_healpix_calls ${tolink} ${SHARP_LIBRARIES}) + set_target_properties(test_healpix_calls PROPERTIES COMPILE_FLAGS ${OpenMP_CXX_FLAGS} LINK_FLAGS ${OpenMP_CXX_FLAGS}) + add_dependencies(test_healpix_calls sharp) +endif (ENABLE_SHARP AND SHARP_LIBRARY AND SHARP_INCLUDE_PATH AND EIGEN3_FOUND) + +add_executable(test_cosmopower test_cosmopower.cpp) +target_link_libraries(test_cosmopower ${tolink}) + diff --git a/external/cosmotool/src/CMakeLists.txt b/external/cosmotool/src/CMakeLists.txt new file mode 100644 index 0000000..8901027 --- /dev/null +++ b/external/cosmotool/src/CMakeLists.txt @@ -0,0 +1,92 @@ +SET(CosmoTool_SRCS + fortran.cpp + interpolate.cpp + load_data.cpp + loadGadget.cpp + loadRamses.cpp + octTree.cpp + powerSpectrum.cpp + miniargs.cpp + growthFactor.cpp + cosmopower.cpp +) + +IF(FOUND_NETCDF3) + SET(CosmoTool_SRCS ${CosmoTool_SRCS} yorick_nc3.cpp) +ELSE(FOUND_NETCDF3) +IF(FOUND_NETCDF4) + SET(CosmoTool_SRCS ${CosmoTool_SRCS} yorick_nc4.cpp) +ENDIF(FOUND_NETCDF4) +ENDIF(FOUND_NETCDF3) + + +if (HDF5_FOUND) + set(CosmoTool_SRCS ${CosmoTool_SRCS} + h5_readFlash.cpp + loadFlash.cpp + ) +else(HDF5_FOUND) + 
set(CosmoTool_SRCS ${CosmoTool_SRCS} + loadFlash_dummy.cpp + ) +endif (HDF5_FOUND) + +SET(CosmoTool_SRCS ${CosmoTool_SRCS} + bqueue.hpp + config.hpp + dinterpolate.hpp + field.hpp + fixArray.hpp + fortran.hpp + interpolate3d.hpp + interpolate.hpp + kdtree_leaf.hpp + load_data.hpp + loadGadget.hpp + loadRamses.hpp + loadSimu.hpp + miniargs.hpp + mykdtree.hpp + octTree.hpp + powerSpectrum.hpp + sparseGrid.hpp + sphSmooth.hpp + yorick.hpp + growthFactor.hpp +) + +include_directories(${GSL_INCLUDE_PATH} ${NETCDF_INCLUDE_PATH} ${NETCDFCPP_INCLUDE_PATH} ${CMAKE_BINARY_DIR}/src) + +set(CosmoTool_LIBS ${NETCDFCPP_LIBRARY} ${NETCDF_LIBRARY} ${GSL_LIBRARIES}) +if (HDF5_FOUND) + set(CosmoTool_LIBS ${CosmoTool_LIBS} ${HDF5_CXX_LIBRARIES} ${HDF5_LIBRARIES}) + include_directories(${HDF5_INCLUDE_DIRS}) +endif (HDF5_FOUND) + +set(CosmoTool_LIBS ${CosmoTool_LIBS} PARENT_SCOPE) + +if (BUILD_SHARED_LIBS) + add_library(CosmoTool SHARED ${CosmoTool_SRCS}) + target_link_libraries(CosmoTool ${CosmoTool_LIBS}) + + if (BUILD_STATIC_LIBS) + add_library(CosmoTool_static STATIC ${CosmoTool_SRCS}) + endif(BUILD_STATIC_LIBS) +else (BUILD_SHARED_LIBS) + add_library(CosmoTool STATIC ${CosmoTool_SRCS}) +endif (BUILD_SHARED_LIBS) + +install(TARGETS CosmoTool + LIBRARY DESTINATION lib + ARCHIVE DESTINATION lib) + +if (BUILD_SHARED_LIBS) + install(TARGETS CosmoTool_static + LIBRARY DESTINATION lib + ARCHIVE DESTINATION lib) +endif (BUILD_SHARED_LIBS) + +install(DIRECTORY . DESTINATION include/CosmoTool + FILES_MATCHING PATTERN "*.hpp") +install(DIRECTORY . DESTINATION include/CosmoTool + FILES_MATCHING PATTERN "*.tcc") diff --git a/pipeline/datasets/example_observation.py b/pipeline/datasets/example_observation.py new file mode 100644 index 0000000..e7518f9 --- /dev/null +++ b/pipeline/datasets/example_observation.py @@ -0,0 +1,250 @@ +#+ +# VIDE -- Void IDentification and Examination -- ./pipeline/sdss/sdss_dr7LCDM.py +# Copyright (C) 2010-2013 Guilhem Lavaux +# Copyright (C) 2011-2013 P. M. 
Sutter +# +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation; version 2 of the License. +# +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License along +# with this program; if not, write to the Free Software Foundation, Inc., +# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. +#+ +#!/usr/bin/env python + +# does analysis in real space assuming LCDM cosmology + +import os +import numpy as np +from void_python_tools.backend.classes import * + +# does full void analysis. Also generates 2d/1d stacked plots and hubble diagram + +# to combine multiple samples: +# For example, combining dim from 0.0-0.1 and bright from 0.1-0.2: +# 1) add dim+bright to sampleList - "+" is required to detect combo! 
+ +# if True, will scan log files for last known completed state and run from there +continueRun = False + +# stages: +# 1 : extract redshift slices from data +# 2 : void extraction using zobov +# 3 : removal of small voids and voids near the edge +# 4 : void stacking and combining +# 5 : 1-d and 2-d profile generation +# 6 : fitting and plotting stacked voids +# 7 : determination of error bars (if requested) +# 8 : hubble diagram generation +# 9 : liklihood determination +startCatalogStage = 1 +endCatalogStage = 1 + +catalogName = "lcdm" + +# where main data files are present/will go +workDir = os.getenv("HOME")+"/workspace/Voids/sdss_dr7LCDM/" +inputDataDir = os.getenv("HOME")+"/workspace/Voids/catalogs/nyuvagc/" + +logDir = os.getenv("PWD")+"/../logs/sdss_dr7LCDM" +figDir = os.getenv("PWD")+"/../figs/sdss_dr7LCDM" + +ZOBOV_PATH = os.getenv("PWD")+"/../zobov/" +CTOOLS_PATH = os.getenv("PWD")+"/../c_tools/" + +freshStack = True # "no" to continue building onto existing stacked voids + +errorBars = "CALCULATED" # "ESTIMATED" for error bars from multiple null tests +#errorBars = "ESTIMATED" # "ESTIMATED" for error bars from multiple null tests + # "CALCULATED" to use fitting procedure +numIncoherentRuns = 100 + +ranSeed = 101010 + +useComoving = True # if true, convert to real space using LCDM cosmology + +# try different portions of data. 
available options: "all, central, edge" +dataPortions = ["central", "all"] + +numZobovDivisions = 2 +numZobovThreads = 2 + +dataSampleList = [] + + +newSample = Sample(fullName = "lss.dr72dim1.dat", + nickName = "dim1", + dataType = "observation", + maskFile = inputDataDir+"/healpix/rast_window_512.fits", + selFunFile = inputDataDir+"/czselfunc.all.dr72dim1.dat", + zBoundary = (0.0, 0.05), + zRange = (0.0, 0.05), + skyFraction = 0.19, + minVoidRadius = 3.479, + fakeDensity = 0.01, + volumeLimited = True, + includeInHubble = False, + partOfCombo = True, + isCombo = False, + comboList= None) +newSample.addStack(0.0, 0.1, 8, 12, False, True) +newSample.addStack(0.0, 0.1, 12, 16, False, True) +newSample.addStack(0.0, 0.1, 16, 20, False, True) +newSample.addStack(0.0, 0.1, 20, 28, False, True) +dataSampleList.append(newSample) + +newSample = Sample(fullName = "lss.dr72dim2.dat", + nickName = "dim2", + dataType = "observation", + maskFile = inputDataDir+"/healpix/rast_window_512.fits", + selFunFile = inputDataDir+"/czselfunc.all.dr72dim2.dat", + zBoundary = (0.0, 0.1), + zRange = (0.05, 0.1), + skyFraction = 0.19, + minVoidRadius = 4.723, + fakeDensity = 0.01, + volumeLimited = True, + includeInHubble = False, + partOfCombo = True, + isCombo = False, + comboList= None) +newSample.addStack(0.0, 0.1, 8, 12, False, True) +newSample.addStack(0.0, 0.1, 12, 16, False, True) +newSample.addStack(0.0, 0.1, 16, 20, False, True) +newSample.addStack(0.0, 0.1, 20, 28, False, True) +dataSampleList.append(newSample) + +newSample = Sample(fullName = "lss.dr72dim1+dim2.dat", + nickName = "dim1+dim2", + dataType = "observation", + maskFile = inputDataDir+"/healpix/rast_window_512.fits", + selFunFile = inputDataDir+"/czselfunc.all.dr72dim.dat", + zBoundary = (0.0, 0.1), + zRange = (0.0, 0.1), + skyFraction = 0.19, + minVoidRadius = 4.723, + fakeDensity = 0.01, + profileBinSize = "auto", + volumeLimited = True, + includeInHubble = True, + partOfCombo = False, + isCombo = True, + comboList= 
("lss.dr72dim1.dat", "lss.dr72dim2.dat")) +newSample.addStack(0.0, 0.1, 8, 12, True, False) +newSample.addStack(0.0, 0.1, 12, 16, True, False) +newSample.addStack(0.0, 0.1, 16, 20, True, False) +newSample.addStack(0.0, 0.1, 20, 28, True, False) +dataSampleList.append(newSample) + + +newSample = Sample(fullName = "lss.dr72bright1.dat", + nickName = "bright1", + dataType = "observation", + maskFile = inputDataDir+"/healpix/rast_window_512.fits", + selFunFile = inputDataDir+"/czselfunc.all.dr72bright1.dat", + zBoundary = (0.0, 0.15), + zRange = (0.1, 0.15), + skyFraction = 0.19, + minVoidRadius = 6.763, + fakeDensity = 0.01, + volumeLimited = True, + includeInHubble = False, + partOfCombo = True, + isCombo = False, + comboList= None) +newSample.addStack(0.1, 0.2, 13, 20, False, True) +newSample.addStack(0.1, 0.2, 20, 28, False, True) +newSample.addStack(0.1, 0.2, 28, 36, False, True) +newSample.addStack(0.1, 0.2, 36, 44, False, True) +dataSampleList.append(newSample) + +newSample = Sample(fullName = "lss.dr72bright2.dat", + nickName = "bright2", + dataType = "observation", + maskFile = inputDataDir+"/healpix/rast_window_512.fits", + selFunFile = inputDataDir+"/czselfunc.all.dr72bright2.dat", + zBoundary = (0.0, 0.2), + zRange = (0.15, 0.2), + skyFraction = 0.19, + minVoidRadius = 10.844, + fakeDensity = 0.001, + volumeLimited = True, + includeInHubble = False, + partOfCombo = True, + isCombo = False, + comboList= None) +newSample.addStack(0.1, 0.2, 13, 20, False, True) +newSample.addStack(0.1, 0.2, 20, 28, False, True) +newSample.addStack(0.1, 0.2, 28, 36, False, True) +newSample.addStack(0.1, 0.2, 36, 44, False, True) +dataSampleList.append(newSample) + +newSample = Sample(fullName = "lss.dr72bright1+bright2.dat", + nickName = "bright1+bright2", + dataType = "observation", + maskFile = inputDataDir+"/healpix/rast_window_512.fits", + zBoundary = (0.0, 0.2), + zRange = (0.1, 0.2), + skyFraction = 0.19, + minVoidRadius = -1, + fakeDensity = -1, + profileBinSize = 
"auto", + volumeLimited = True, + includeInHubble = True, + partOfCombo = False, + isCombo = True, + comboList= ("lss.dr72bright1.dat", "lss.dr72bright2.dat")) +newSample.addStack(0.1, 0.2, 13, 20, True, False) +newSample.addStack(0.1, 0.2, 20, 28, True, False) +newSample.addStack(0.1, 0.2, 28, 36, True, False) +newSample.addStack(0.1, 0.2, 36, 44, True, False) +dataSampleList.append(newSample) + +newSample = Sample(fullName = "lss.dr72lrgdim.dat", + nickName = "lrgdim", + dataType = "observation", + maskFile = inputDataDir+"/healpix/rast_window_512.fits", + selFunFile = inputDataDir+"/czselfunc.all.dr72lrgdim.dat", + zBoundary = (0.16, 0.36), + zRange = (0.16, 0.36), + skyFraction = 0.19, + minVoidRadius = 24, + fakeDensity = 0.001, + profileBinSize = 2, # KEEP + volumeLimited = True, + includeInHubble = True, + partOfCombo = False, + isCombo = False, + comboList= None) +newSample.addStack(0.16, 0.36, 54, 70, True, False, zMinPlot=0.2) +newSample.addStack(0.16, 0.36, 70, 106, True, False, zMinPlot=0.2) +dataSampleList.append(newSample) + +newSample = Sample(fullName = "lss.dr72lrgbright.dat", + nickName = "lrgbright", + dataType = "observation", + maskFile = inputDataDir+"/healpix/rast_window_512.fits", + selFunFile = inputDataDir+"/czselfunc.all.dr72lrgbright.dat", + zBoundary = (0.36, 0.44), + zRange = (0.36, 0.44), + skyFraction = 0.19, + minVoidRadius = 38, + fakeDensity = 0.001, + profileBinSize = "auto", + volumeLimited = True, + includeInHubble = False, + partOfCombo = False, + isCombo = False, + comboList= None) +newSample.addStack(0.36, 0.44, 76, 92, False, False) +newSample.addStack(0.36, 0.44, 92, 108, False, False) +dataSampleList.append(newSample) + + diff --git a/pipeline/datasets/example_simulation.py b/pipeline/datasets/example_simulation.py new file mode 100644 index 0000000..242cc18 --- /dev/null +++ b/pipeline/datasets/example_simulation.py @@ -0,0 +1,144 @@ +#+ +# VIDE -- Void IDEntification pipeline -- ./pipeline/datasets/mergertree.py +# 
Copyright (C) 2010-2013 Guilhem Lavaux +# Copyright (C) 2011-2013 P. M. Sutter +# +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation; version 2 of the License. +# +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License along +# with this program; if not, write to the Free Software Foundation, Inc., +# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. +#+ +import os + +# ----------------------------------------------------------------------------- +# ----------------------------------------------------------------------------- +# CONFIGURATION + +startCatalogStage = 0 +endCatalogStage = 0 +continueRun = False + +startAPStage = 1 +endAPStage = 2 + +# directory for the input simulation/observational particle files +catalogDir = os.getenv("HOME")+"/workspace/Voids/catalogs/mergertree1024/" + +# path to HOD code +hodPath = os.getenv("HOME")+"/projects/Voids/hod/HOD.x" + +# where to put the final void catalog, figures, and output logs +voidOutputDir = os.getenv("HOME")+"/workspace/Voids/mergertree1024/" +figDir = os.getenv("PWD")+"/../figs/mergertree1024/" +logDir = os.getenv("PWD")+"/../logs/mergertree1024/" + +# where to place the pipeline scripts +scriptDir = os.getenv("PWD")+"/mergertree1024/" + +# simulation or observation? +dataType = "simulation" + +# available formats for simulation: gadget, mergertree +dataFormat = "sdf" +dataUnit = 1 # as multiple of Mpc/h + +# place particles on the lightcone? +useLightCone = False + +# also do peculiar velocities? 
+doPecVel = False + +# common filename of particle files +particleFileBase = "mf_4s_1G_1k_NNNNN" +particleFileDummy = 'NNNNN' + +# list of file numbers for the particle files +# to get particle file name, we take particleFileBase+fileNum +fileNums = ["1.000"] + +# redshift of each file in the above list +redshifts = ["0.0"] + +# how many independent slices along the z-axis? +numSlices = 1 +#numSlices = 4 +numAPSlices = 1 + +# how many subdivisions along the x- and y- axis? +# ( = 2 will make 4 subvolumes for each slice, = 3 will make 9, etc.) +numSubvolumes = 1 + +# prefix to give all outputs +prefix = "mt_" + +# list of desired subsamples - these are in unts of h Mpc^-3! +subSamples = [0.1, 0.05, 0.02, 0.01, 0.004, 0.002, 0.001, 0.0003, 0.0001] +#doSubSampling = False # do the subsampling in preparation script? +doSubSampling = True # do the subsampling in preparation script? + +# common filename of halo files, leave blank to ignore halos +haloFileBase = "mf_4s_1G_1k_bgc2_NNNNN.sdf" +haloFileDummy = 'NNNNN' + +# minimum halo mass cuts to apply for the halo catalog +# use "none" to get all halos +minHaloMasses = ["none", 1.2e13] +#minHaloMasses = [7.0e11, 1.2e13] + +# locations of data in the halo catalog +haloFileMCol = 6 +haloFileXCol = 0 +haloFileYCol = 1 +haloFileZCol = 2 +haloFileVXCol = 3 +haloFileVYCol = 4 +haloFileVZCol = 5 +haloFileColSep = ',' +haloFileNumComLines = 0 + +# adjust these two parameters given the memory contraints on your system: +# numZobovDivisions: how many sub-volumes per dimension will zobov process +# numZobovThreads: how many sub-volumes to process at once? +numZobovDivisions = 4 +numZobovThreads = 4 + +# simulation information +numPart = 1024*1024*1024 +lbox = 999.983 # Mpc/h +omegaM = 0.2847979853038958 +hubble = 0.6962 + +#galDens = 0.000225 +hodParmList = [ + {'name' : "LowRes", #BOSS: Manera et al. 2012, eq. 
26 + 'Mmin' : 0.0, + 'M1' : 1.e14, + 'sigma_logM' : 0.596, + 'alpha' : 1.0127, + 'Mcut' : 1.19399e13, + 'galDens' : 0.0002, + }, + + {'name' : "HighRes", + 'Mmin' : 0.0, + #'Mmin' : 1.99525e12, + 'M1' : 3.80189e13, + 'sigma_logM' : 0.21, + 'alpha' : 1.12, + 'Mcut' : 6.91831e11, + 'galDens' : 0.02, + } +] + +# END CONFIGURATION +# ----------------------------------------------------------------------------- +# ----------------------------------------------------------------------------- diff --git a/python_tools/void_python_tools/apTools/chi2/__init__.py b/python_tools/void_python_tools/apTools/chi2/__init__.py index 5d9f6d2..c85f49f 100644 --- a/python_tools/void_python_tools/apTools/chi2/__init__.py +++ b/python_tools/void_python_tools/apTools/chi2/__init__.py @@ -17,6 +17,4 @@ # with this program; if not, write to the Free Software Foundation, Inc., # 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. #+ -from velocityProfileFitNative import * -from likelihood import * from cosmologyTools import * diff --git a/python_tools/void_python_tools/apTools/chi2/cosmologyTools.py b/python_tools/void_python_tools/apTools/chi2/cosmologyTools.py index 8337a9b..9b01534 100644 --- a/python_tools/void_python_tools/apTools/chi2/cosmologyTools.py +++ b/python_tools/void_python_tools/apTools/chi2/cosmologyTools.py @@ -24,7 +24,7 @@ import numpy as np import scipy.integrate as integrate from void_python_tools.backend import * -__all__=['expansion', 'angularDiameter', 'expectedStretch', 'aveStretch', 'aveExpansion', 'aveStretchCone', 'aveWeightedStretch'] +__all__=['expansion', 'angularDiameter', 'aveExpansion'] # returns 1/E(z) for the given cosmology def expansion(z, Om = 0.27, Ot = 1.0, w0 = -1.0, wa = 0.0): @@ -41,114 +41,7 @@ def eosDE(z, w0 = -1.0, wa = 0.0): def angularDiameter(z, Om = 0.27, Ot = 1.0, w0 = -1.0, wa = 0.0): da = integrate.quad(expansion, 0.0, z, args=(Om, Ot, w0, wa))[0] return da - -# returns expected void stretch for the given cosmology -def 
expectedStretch(z, Om = 0.27, Ot = 1.0, w0 = -1.0, wa = 0.0): - ez = 1./expansion(z, Om=Om, Ot=Ot, w0=w0, wa=wa) - da = angularDiameter(z, Om=Om, Ot=Ot, w0=w0, wa=wa) - return ez*da/z - -# ----------------------------------------------------------------------------- -# returns average expected void stretch for a given redshift range -# assuming a cone -def aveStretchCone(zStart, zEnd, skyFrac = 0.19, Om = 0.27, Ot = 1.0, - w0 = -1.0, wa = 0.0): - #print "assuming observation!", skyFrac - if zStart == 0.0: zStart = 1.e-6 - - h1 = zStart - h2 = zEnd - - r1 = skyFrac * 4* np.pi * zStart**2 - r2 = skyFrac * 4 * np.pi * zEnd**2 - - # surface area of a slice within a cone - def coneSlice(x, h, r): - return np.pi * (r/h*x)**2 - - def coneFunc(z, h, r, Om = 0.27, Ot = 1.0, w0 = -1.0, wa = 0.0): - return coneSlice(z, h, r) * expectedStretch(z, Om, Ot, w0, wa) - - aveHigh = integrate.quad(coneFunc, 0.0, zEnd, args=(h2, r2, Om, Ot, w0, wa), full_output=1)[0] - aveLow = integrate.quad(coneFunc, 0.0, zStart, args=(h1, r1, Om, Ot, w0, wa), full_output=1)[0] - volumeHigh = integrate.quad(coneSlice, 0.0, zEnd, args=(h2, r2))[0] - volumeLow = integrate.quad(coneSlice, 0.0, zStart, args=(h1, r1))[0] - - return (aveHigh-aveLow)/(volumeHigh-volumeLow) - -# returns average expected void stretch for a given redshift range -def aveStretch(sample, zStart, zEnd, - Om = 0.27, Ot = 1.0, w0 = -1.0, wa = 0.0): - if zStart == 0.0: zStart = 1.e-6 - - if sample.dataType == "observation": - stretch = aveStretchCone(zStart, zEnd, - skyFrac=sample.skyFraction, Om=Om, Ot=Ot, - w0=w0, wa=wa) - else: - ave = integrate.quad(expectedStretch, zStart, zEnd, - args=(Om, Ot, w0, wa))[0] - ave /= (zEnd-zStart) - stretch = ave - - # if in comoving space, calculate stretch for fiducial cosmology - # and take relative amount - if not sample.useLightCone or sample.useComoving: - if sample.dataType == "observation": - stretchFid = aveStretchCone(zStart, zEnd, - skyFrac=sample.skyFraction, Om=sample.omegaM, Ot=Ot, 
- w0=w0, wa=wa) - else: - ave = integrate.quad(expectedStretch, zStart, zEnd, - args=(sample.omegaM, Ot, w0, wa))[0] - ave /= (zEnd-zStart) - stretchFid = ave - - stretch = stretchFid/stretch - - return stretch - - -# ----------------------------------------------------------------------------- -# returns average expected void stretch for a given redshift range -# weighted by an actual void distribution -def aveWeightedStretch(zStart, zEnd, skyFrac = 0.19, Om = 0.27, Ot = 1.0, - w0 = -1.0, wa = 0.0, dist=None, bins=None, - useComoving=True, OmFid=None): - if zStart == 0.0: zStart = 1.e-6 - - def weightedSlice(x): - return np.interp(x, bins[:-1], dist) - - def weightedFunc(z, Om = 0.27, Ot = 1.0, w0 = -1.0, wa = 0.0): - return expectedStretch(z, Om, Ot, w0, wa) *\ - weightedSlice(z) - - ave = integrate.quad(weightedFunc, zStart, zEnd, args=(Om, Ot, w0, wa), - full_output=1)[0] - volume = integrate.quad(weightedSlice, zStart, zEnd, full_output=1)[0] - - if volume == 0.0: volume = 1.0 - stretch = ave/volume - - # if in comoving space, calculate stretch for fiducial cosmology - # and take relative amount - if useComoving: - ave = integrate.quad(weightedFunc, zStart, zEnd, args=(OmFid, - Ot, w0, wa), - full_output=1)[0] - volume = integrate.quad(weightedSlice, zStart, zEnd, full_output=1)[0] - - if volume == 0.0: volume = 1.0 - stretchFid = ave/volume - - if stretchFid != 0.0: - stretch = stretchFid/stretch - - return stretch - - # ----------------------------------------------------------------------------- # returns average expected expansion for a given redshift range def aveExpansion(zStart, zEnd, Om = 0.27, Ot = 1.0, w0 = -1.0, wa = 0.0): diff --git a/python_tools/void_python_tools/apTools/profiles/__init__.py b/python_tools/void_python_tools/apTools/profiles/__init__.py index 14c0fa1..6015350 100644 --- a/python_tools/void_python_tools/apTools/profiles/__init__.py +++ b/python_tools/void_python_tools/apTools/profiles/__init__.py @@ -17,10 +17,4 @@ # with this 
program; if not, write to the Free Software Foundation, Inc., # 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. #+ -from build import * -from draw import * -from fit import * -from inertia import * -from mcmc import * -from generateExpFigure import * from getSurveyProps import * diff --git a/python_tools/void_python_tools/backend/__init__.py b/python_tools/void_python_tools/backend/__init__.py index 998fc76..db12e12 100644 --- a/python_tools/void_python_tools/backend/__init__.py +++ b/python_tools/void_python_tools/backend/__init__.py @@ -19,4 +19,3 @@ #+ from classes import * from launchers import * -from catalogPrep import * diff --git a/python_tools/void_python_tools/backend/launchers.py b/python_tools/void_python_tools/backend/launchers.py index f961d65..1a56372 100644 --- a/python_tools/void_python_tools/backend/launchers.py +++ b/python_tools/void_python_tools/backend/launchers.py @@ -95,8 +95,6 @@ def launchGenerate(sample, binPath, workDir=None, inputDataDir=None, log = open(logFile, 'w') subprocess.call([binPath, arg1], stdout=log, stderr=log) log.close() - #cmd = "%s --configFile=%s >& %s" % (binPath,parmFile,logFile) - #os.system(cmd) if jobSuccessful(logFile, "Done!\n"): print "done" else: @@ -118,7 +116,6 @@ def launchGenerate(sample, binPath, workDir=None, inputDataDir=None, if os.access("mask_index.txt", os.F_OK): os.system("mv %s %s" % ("mask_index.txt", zobovDir)) os.system("mv %s %s" % ("total_particles.txt", zobovDir)) - #os.system("mv %s %s" % ("sample_info.txt", zobovDir)) if os.access("galaxies.txt", os.F_OK): os.system("mv %s %s" % ("galaxies.txt", zobovDir)) @@ -214,18 +211,14 @@ def launchGenerate(sample, binPath, workDir=None, inputDataDir=None, file(parmFile, mode="w").write(conf) if (prevSubSample == -1): - #cmd = "%s --configFile=%s &> %s" % (binPath,parmFile,logFile) cmd = "%s --configFile=%s" % (binPath,parmFile) log = open(logFile, 'w') else: - #cmd = "%s --configFile=%s &>> %s" % (binPath,parmFile,logFile) cmd = "%s 
--configFile=%s" % (binPath,parmFile) log = open(logFile, 'a') arg1 = "--configFile=%s" % parmFile subprocess.call(cmd, stdout=log, stderr=log, shell=True) - #subprocess.call([binPath, arg1], stdout=log, stderr=log) log.close() - #os.system(cmd) # remove intermediate files if (prevSubSample != -1): @@ -329,9 +322,6 @@ def launchZobov(sample, binPath, zobovDir=None, logDir=None, continueRun=None, else: maskIndex = -1 maxDen = 0.2 - # TEST - #maxDen = 1.e99 - # END TEST if not (continueRun and jobSuccessful(logFile, "Done!\n")): for fileName in glob.glob(zobovDir+"/part._"+sampleName+".*"): @@ -343,11 +333,6 @@ def launchZobov(sample, binPath, zobovDir=None, logDir=None, continueRun=None, if os.access(zobovDir+"/voidDesc_"+sampleName+".out", os.F_OK): os.unlink(zobovDir+"/voidDesc_"+sampleName+".out") - #cmd = "%s/vozinit %s 0.1 1.0 %g %s %g %s %s %s &> %s" % \ - # (binPath, datafile, numZobovDivisions, \ - # "_"+sampleName, numZobovThreads, \ - # binPath, zobovDir, maskIndex, logFile) - #os.system(cmd) cmd = [binPath+"/vozinit", datafile, "0.1", "1.0", str(numZobovDivisions), \ "_"+sampleName, str(numZobovThreads), \ binPath, zobovDir, str(maskIndex)] @@ -355,23 +340,11 @@ def launchZobov(sample, binPath, zobovDir=None, logDir=None, continueRun=None, subprocess.call(cmd, stdout=log, stderr=log) log.close() - #cmd = "./%s >> %s 2>&1" % (vozScript, logFile) - #os.system(cmd) cmd = ["./%s" % vozScript] log = open(logFile, 'a') subprocess.call(cmd, stdout=log, stderr=log) log.close() -# cmd = "%s/jozov %s %s %s %s %s %g %s >> %s 2>&1" % \ - #cmd = "%s/../c_tools/zobov2/jozov2/jozov2 %s %s %s %s %s %g %s >> %s 2>&1" % \ - # (binPath, \ - # zobovDir+"/adj_"+sampleName+".dat", \ - # zobovDir+"/vol_"+sampleName+".dat", \ - # zobovDir+"/voidPart_"+sampleName+".dat", \ - # zobovDir+"/voidZone_"+sampleName+".dat", \ - # zobovDir+"/voidDesc_"+sampleName+".out", \ - # maxDen, maskIndex, logFile) - #os.system(cmd) cmd = [binPath+"../c_tools/zobov2/jozov2/jozov2", \ 
zobovDir+"/adj_"+sampleName+".dat", \ zobovDir+"/vol_"+sampleName+".dat", \ @@ -420,16 +393,6 @@ def launchPrune(sample, binPath, mockIndex = -1 maxDen = 0.2 observationLine = "" - # TEST - #maxDen = 1.e99 - # END TEST - - #periodicLine = " --periodic='" - #if sample.numSubvolumes == 1: periodicLine += "xy" - #if np.abs(sample.zBoundaryMpc[1] - sample.zBoundaryMpc[0] - \ - # sample.boxLen) <= 1.e-1: - # periodicLine += "z" - #periodicLine += "' " periodicLine = " --periodic='" + getPeriodic(sample) + "'" @@ -474,11 +437,6 @@ def launchPrune(sample, binPath, cmd += " --omegaM=" + str(sample.omegaM) cmd += " --outputDir=" + zobovDir cmd += " --sampleName=" + str(sampleName) - #cmd += " &> " + logFile - #f=file("run_prune.sh",mode="w") - #f.write(cmd) - #f.close() - #os.system(cmd) log = open(logFile, 'w') subprocess.call(cmd, stdout=log, stderr=log, shell=True) log.close() @@ -505,11 +463,6 @@ def launchVoidOverlap(sample1, sample2, sample1Dir, sample2Dir, sampleName2 = sample2.fullName periodicLine = " --periodic='" + getPeriodic(sample1) + "' " - #if sample1.dataType != "observation": - # if sample1.numSubvolumes == 1: periodicLine += "xy" - # if np.abs(sample1.zBoundaryMpc[1] - sample1.zBoundaryMpc[0] - sample1.boxLen) <= 1.e-1: - # periodicLine += "z" - #periodicLine += "' " if strictMatch: matchPrefix = "" @@ -572,1278 +525,6 @@ def launchVoidOverlap(sample1, sample2, sample1Dir, sample2Dir, print "already done!" 
-# ----------------------------------------------------------------------------- -def launchStack(sample, stack, binPath, thisDataPortion=None, logDir=None, - voidDir=None, freshStack=True, runSuffix=None, - zobovDir=None, useLightCone=None, useComoving=None, - omegaM=None, - INCOHERENT=False, ranSeed=None, summaryFile=None, - continueRun=None, dataType=None, prefixRun="", - idList=None, rescaleOverride=None): - - sampleName = sample.fullName - - if idList != None: - customLine = "selected" - else: - customLine = "" - - runSuffix = getStackSuffix(stack.zMin, stack.zMax, stack.rMin, - stack.rMax, thisDataPortion, - customLine=customLine) - - logFile = logDir+("/%sstack_"%prefixRun)+sampleName+"_"+runSuffix+".out" - - treeFile = voidDir+"/tree.data" - - if (freshStack) and os.access(treeFile, os.F_OK): - os.unlink(treeFile) - - centralRadius = stack.rMin * 0.25 - - # restrict to relevant ranges of sample - zMin = max(sample.zRange[0], stack.zMin) - zMax = min(sample.zRange[1], stack.zMax) - if useComoving or not useLightCone: - zMin = LIGHT_SPEED/100.*vp.angularDiameter(zMin, Om=omegaM) - zMax = LIGHT_SPEED/100.*vp.angularDiameter(zMax, Om=omegaM) - else: - zMin *= LIGHT_SPEED/100. - zMax *= LIGHT_SPEED/100. 
- - if dataType == "observation": - obsFlag = "observation" - else: - obsFlag = "" - - #Rextracut = stack.rMin*3 + 1 - #Rcircular = stack.rMin*3 + 2 - Rextracut = stack.rMax*3 + 1 - Rcircular = stack.rMax*3 + 2 - - if dataType == "observation": - maskIndex = open(zobovDir+"/mask_index.txt", "r").read() - totalPart = open(zobovDir+"/total_particles.txt", "r").read() - maxDen = 0.2*float(maskIndex)/float(totalPart) - else: - maskIndex = 999999999 - maxDen = 0.2 - # TEST - #maxDen = 1.e99 - # END TEST - - if INCOHERENT: - nullTestFlag = "INCOHERENT" - else: - nullTestFlag = "" - - if rescaleOverride == None: rescaleOverride = stack.rescaleMode - if rescaleOverride == "rmax": - rescaleFlag = "rescale" - else: - rescaleFlag = "" - - if stack.maxVoids != -1: - maxVoidsFlag = "maxVoids="+str(stack.maxVoids) - else: - maxVoidsFlag = "" - - idListFile = "idlist.temp" - if idList != None: - idListFlag = "idList " + idListFile - idFile = open(idListFile, 'w') - for id in idList: idFile.write(str(id)+"\n") - idFile.close() - rMinToUse = 0. - rMaxToUse = 100000. 
- centralRadius = rMaxToUse - else: - idListFlag = "" - rMinToUse = stack.rMin - rMaxToUse = stack.rMax - - periodicLine = "periodic='" + getPeriodic(sample) + "'" - #if sample.dataType == "observation": - # periodicLine = "periodic=''" - #else: - # periodicLine = "periodic='" - # if sample.numSubvolumes == 1: periodicLine += "xy" - # if np.abs(sample.zBoundaryMpc[1] - sample.zBoundaryMpc[0] - \ - # sample.boxLen) <= 1.e-1: - # periodicLine += "z" - # periodicLine += "' " - - launchDir = os.getcwd() - os.chdir(voidDir) - - conf=""" - desc %s - partzone %s - zonevoid %s - volumefile %s - Rmin %g - Rmax %g - particles %s - extraInfo %s - densityThreshold %g - centralRadius %g - edgeAvoidance %g - Circular %g - Zmin %g - Zmax %g - %s - %s - ranSeed %d - dataPortion %s - barycenters %s - boundaryDistances %s - %s - %s - %s - %s - """ % \ - (zobovDir+"/voidDesc_"+thisDataPortion+"_"+sampleName+".out", - zobovDir+"/voidPart_"+sampleName+".dat", - zobovDir+"/voidZone_"+sampleName+".dat", - zobovDir+"/vol_"+sampleName+".dat", - rMinToUse, - rMaxToUse, - zobovDir+("/%szobov_slice_"%prefixRun)+sampleName, - zobovDir+"/zobov_slice_"+sampleName+".par", - maxDen, - centralRadius, - Rextracut, - Rcircular, - zMin, - zMax, - obsFlag, - nullTestFlag, - ranSeed, - thisDataPortion, - zobovDir+"/barycenters_"+thisDataPortion+"_"+sampleName+".out", - zobovDir+"/boundaryDistances_"+thisDataPortion+"_"+sampleName+".out", - rescaleFlag, - maxVoidsFlag, - idListFlag, - periodicLine - ) - - parmFile = os.getcwd()+("/%sstack_"%prefixRun)+sample.fullName+".par" - fp = file(parmFile, mode="w") - fp.write(conf) - - # continue stacking if requested; pull files to local directory - if os.access(treeFile, os.F_OK): - os.system("mv %s %s" % (treeFile, "./"+tree.data)) - fp.write("loadSearchTree\n") - fp.write("getTree\n") - fp.write("doExtraction\n") - else: - fp.write("dumpSearchTree\n") - fp.write("dumpTree\n") - fp.write("doExtraction\n") - fp.close() - - jobString = " "+runSuffix+":" - - if 
not (continueRun and jobSuccessful(logFile, "Done!\n")): - #cmd = "%s --configFile=%s &> %s" % \ - # (binPath, parmFile, logFile) - #os.system(cmd) - cmd = "%s --configFile=%s" % \ - (binPath, parmFile) - log = open(logFile, 'w') - subprocess.call(cmd, stdout=log, stderr=log, shell=True) - log.close() - - if jobSuccessful(logFile, "Done!\n"): - print jobString, "Stacking voids done" - else: - print jobString, "Stacking voids FAILED!" - exit(-1) - - else: - print jobString, "Stacking voids already done!" - if os.access(parmFile, os.F_OK): os.unlink(parmFile) - return - - minDist = None - maxDist = None - numVoids = None - numParticles = None - for line in open(logFile): - if "Minimum void distance is" in line: - line = line.split() - minDist = float(line[4])/3000 - - if "Maximum void distance is" in line: - line = line.split() - maxDist = float(line[4])/3000 - - if "Selected" in line: - line = line.split() - numVoids = line[1] - - if "Final zobov stack size" in line: - line = line.split() - numParticles = line[4] - - open(voidDir+"/num_voids.txt", "w").write(numVoids) - open(voidDir+"/num_particles.txt", "w").write(numParticles) - - if os.access(voidDir+"/NOVOID", os.F_OK): - os.unlink(voidDir+"/NOVOID") - - if (numVoids == "0"): - print jobString, "No voids found; skipping!" - fp = open(voidDir+"/NOVOID", "w") - fp.write("no voids found\n") - fp.close() - return - - emptyStack = False - for line in open(logFile): - if "EMPTY STACK" in line: - emptyStack = True - if emptyStack: - print jobString, "Stack is empty; skipping!" 
- fp = open(voidDir+"/NOVOID", "w") - fp.write("empty stack\n") - fp.close() - return - - summaryLine = runSuffix + " " + \ - thisDataPortion + " " + \ - str(stack.zMin) + " " + \ - str(stack.zMax) + \ - " " + str(numVoids) + " " + str(minDist) + \ - " " + str(maxDist) - if summaryFile != None: - open(summaryFile, "a").write(summaryLine+"\n") - - # write out individual normalizations for each void - if os.access("tree.data", os.F_OK): - for line in open(zobovDir+"/sample_info.txt"): - if "Number of real" in line: - numParticles = line.split()[-1] - if "Estimated volume" in line: - boxVol = line.split()[-1] - nbar = 1.0 - normalization = float(numParticles)/float(boxVol)/nbar - dataTemp = file("centers.txt", "r").readlines() - fp = file("normalizations.txt", "w") - for iVoid in xrange(len(dataTemp)): - fp.write(str(normalization)+"\n") - fp.close() - - #os.system("mv %s %s" % ("tree.data", treeFile)) - #os.system("mv %s %s" % ("void_indexes.txt", voidDir+"/")) - #os.system("mv %s %s" % ("posx.nc", voidDir+"/")) - #os.system("mv %s %s" % ("posy.nc", voidDir+"/")) - #os.system("mv %s %s" % ("posz.nc", voidDir+"/")) - #os.system("mv %s %s" % ("z_void_indexes.txt", voidDir+"/")) - #os.system("mv %s %s" % ("z_posx.nc", voidDir+"/")) - #os.system("mv %s %s" % ("z_posy.nc", voidDir+"/")) - #os.system("mv %s %s" % ("z_posz.nc", voidDir+"/")) - #os.system("mv %s %s" % ("redshifts.nc", voidDir+"/")) - #os.system("mv %s %s" % ("indexes.nc", voidDir+"/")) - #os.system("mv %s %s" % ("kdtree_stackvoids.dat", voidDir+"/")) - #os.system("mv %s %s" % ("centers.txt", voidDir+"/")) - #os.system("mv %s %s" % ("z_centers.txt", voidDir+"/")) - #os.system("mv %s %s" % ("sky_positions.txt", voidDir+"/")) - #os.system("mv %s %s" % ("check.txt", voidDir+"/")) - #os.system("mv %s %s" % ("tracer.txt", voidDir+"/")) - #os.system("mv %s %s" % ("normalizations.txt", voidDir+"/")) - #os.system("mv %s %s" % ("boundaryDistances.txt", voidDir+"/")) - - if os.access(idListFile, os.F_OK): 
os.unlink(idListFile) - - if os.access(parmFile, os.F_OK): os.unlink(parmFile) - - os.chdir(launchDir) - - return - -# ----------------------------------------------------------------------------- -def launchCombine(sample, stack, voidDir=None, logFile=None, - zobovDir=None, workDir=None, thisDataPortion=None): - - sampleName = sample.fullName - - runSuffix = getStackSuffix(stack.zMin, stack.zMax, stack.rMin, - stack.rMax, thisDataPortion) - jobString = " "+runSuffix+":" - - sys.stdout = open(logFile, 'w') - #sys.stderr = open(logFile, 'a') - - if os.access(voidDir+"/num_voids.txt", os.F_OK): - os.unlink(voidDir+"/num_voids.txt") - - doneGalUpdate = os.access(zobovDir+"donegalupdate", os.F_OK) - - for comboName in sample.comboList: - if not os.access(zobovDir+"/galaxies.txt", os.F_OK): - shutil.copy(workDir+"/sample_"+comboName+"/galaxies.txt", zobovDir) - elif not doneGalUpdate: - dataTemp = file(workDir+"/sample_"+comboName+"/galaxies.txt", - "r").read() - file(zobovDir+"/galaxies.txt", "a").write(dataTemp) - - sourceStackDir = workDir+"/sample_"+comboName+"/stacks_"+\ - runSuffix - - if os.access(sourceStackDir+"/NOVOID", os.F_OK): - continue - - if not os.access(voidDir+"/num_voids.txt", os.F_OK): - # just copy - shutil.copy(sourceStackDir+"/num_voids.txt", voidDir) - shutil.copy(sourceStackDir+"/num_particles.txt", voidDir) - shutil.copy(sourceStackDir+"/posx.nc", voidDir) - shutil.copy(sourceStackDir+"/posy.nc", voidDir) - shutil.copy(sourceStackDir+"/posz.nc", voidDir) - shutil.copy(sourceStackDir+"/z_posx.nc", voidDir) - shutil.copy(sourceStackDir+"/z_posy.nc", voidDir) - shutil.copy(sourceStackDir+"/z_posz.nc", voidDir) - shutil.copy(sourceStackDir+"/indexes.nc", voidDir) - shutil.copy(sourceStackDir+"/redshifts.nc", voidDir) - shutil.copy(sourceStackDir+"/centers.txt", voidDir) - shutil.copy(sourceStackDir+"/void_indexes.txt", voidDir) - shutil.copy(sourceStackDir+"/z_centers.txt", voidDir) - shutil.copy(sourceStackDir+"/z_void_indexes.txt", voidDir) - 
shutil.copy(sourceStackDir+"/sky_positions.txt", voidDir) - shutil.copy(sourceStackDir+"/normalizations.txt", voidDir) - shutil.copy(sourceStackDir+"/boundaryDistances.txt", voidDir) - - else: - # append data - dataTemp = file(voidDir+"/num_voids.txt", "r").readlines() - dataTemp2 = file(sourceStackDir+"/num_voids.txt", "r").\ - readlines() - dataTemp = np.array(dataTemp, dtype='i') - dataTemp2 = np.array(dataTemp2, dtype='i') - dataTemp += dataTemp2 - dataTemp = str(dataTemp[0]) - file(voidDir+"/num_voids.txt", "w").write(str(dataTemp)) - - dataTemp = file(voidDir+"/num_particles.txt", "r").readlines() - dataTemp2 = file(sourceStackDir+"/num_particles.txt", "r").\ - readlines() - dataTemp = np.array(dataTemp, dtype='i') - dataTemp2 = np.array(dataTemp2, dtype='i') - dataTemp += dataTemp2 - dataTemp = str(dataTemp[0]) - file(voidDir+"/num_particles.txt", "w").write(str(dataTemp)) - - dataTemp = file(sourceStackDir+"/z_centers.txt", "r").read() - file(voidDir+"/z_centers.txt", "a").write(dataTemp) - - dataTemp = file(sourceStackDir+"/centers.txt", "r").read() - file(voidDir+"/centers.txt", "a").write(dataTemp) - - dataTemp = file(sourceStackDir+"/normalizations.txt", "r").read() - file(voidDir+"/normalizations.txt", "a").write(dataTemp) - - dataTemp = file(sourceStackDir+"/boundaryDistances.txt","r").read() - file(voidDir+"/boundaryDistances.txt", "a").write(dataTemp) - - dataTemp = file(sourceStackDir+"/sky_positions.txt", "r").\ - read() - file(voidDir+"/sky_positions.txt", "a").write(dataTemp) - - idxTemp = file(sourceStackDir+"/z_void_indexes.txt", "r").\ - readlines() - idxTemp = np.array(idxTemp, dtype='i') - fp = NetCDFFile(voidDir+"/z_posx.nc") - dataTemp = fp.variables['array'][0:] - fp.close() - idxTemp[:] += len(dataTemp) - fp = open(voidDir+"/z_void_indexes.txt", "a") - for idx in idxTemp: - fp.write(str(idx)+"\n") - fp.close() - dataTemp = () - - fp = NetCDFFile(voidDir+"/z_posx.nc") - dataTemp = fp.variables['array'][0:] - fp.close() - fp = 
NetCDFFile(sourceStackDir+"/z_posx.nc") - dataTemp2 = fp.variables['array'][0:] - fp.close() - dataTemp = np.append(dataTemp, dataTemp2) - outFile = NetCDFFile(voidDir+"/z_posx.nc", mode='w') - outFile.createDimension("dim", len(dataTemp)) - v = outFile.createVariable("array", ncFloat, ("dim",)) - v[:] = dataTemp - outFile.close() - - fp = NetCDFFile(voidDir+"/z_posy.nc") - dataTemp = fp.variables['array'][0:] - fp.close() - fp = NetCDFFile(sourceStackDir+"/z_posy.nc") - dataTemp2 = fp.variables['array'][0:] - fp.close() - dataTemp = np.append(dataTemp, dataTemp2) - outFile = NetCDFFile(voidDir+"/z_posy.nc", mode='w') - outFile.createDimension("dim", len(dataTemp)) - v = outFile.createVariable("array", ncFloat, ("dim",)) - v[:] = dataTemp - outFile.close() - - fp = NetCDFFile(voidDir+"/z_posz.nc") - dataTemp = fp.variables['array'][0:] - fp.close() - fp = NetCDFFile(sourceStackDir+"/z_posz.nc") - dataTemp2 = fp.variables['array'][0:] - fp.close() - dataTemp = np.append(dataTemp, dataTemp2) - outFile = NetCDFFile(voidDir+"/z_posz.nc", mode='w') - outFile.createDimension("dim", len(dataTemp)) - v = outFile.createVariable("array", ncFloat, ("dim",)) - v[:] = dataTemp - outFile.close() - - idxTemp = file(sourceStackDir+"/void_indexes.txt", "r").\ - readlines() - idxTemp = np.array(idxTemp, dtype='i') - fp = NetCDFFile(voidDir+"/posx.nc") - dataTemp = fp.variables['array'][0:] - fp.close() - idxTemp[:] += len(dataTemp) - fp = open(voidDir+"/void_indexes.txt", "a") - for idx in idxTemp: - fp.write(str(idx)+"\n") - fp.close() - dataTemp = () - - fp = NetCDFFile(voidDir+"/posx.nc") - dataTemp = fp.variables['array'][0:] - fp.close() - fp = NetCDFFile(sourceStackDir+"/posx.nc") - dataTemp2 = fp.variables['array'][0:] - fp.close() - dataTemp = np.append(dataTemp, dataTemp2) - outFile = NetCDFFile(voidDir+"/posx.nc", mode='w') - outFile.createDimension("dim", len(dataTemp)) - v = outFile.createVariable("array", ncFloat, ("dim",)) - v[:] = dataTemp - outFile.close() - - fp = 
NetCDFFile(voidDir+"/posy.nc") - dataTemp = fp.variables['array'][0:] - fp.close() - fp = NetCDFFile(sourceStackDir+"/posy.nc") - dataTemp2 = fp.variables['array'][0:] - fp.close() - dataTemp = np.append(dataTemp, dataTemp2) - outFile = NetCDFFile(voidDir+"/posy.nc", mode='w') - outFile.createDimension("dim", len(dataTemp)) - v = outFile.createVariable("array", ncFloat, ("dim",)) - v[:] = dataTemp - outFile.close() - - fp = NetCDFFile(voidDir+"/posz.nc") - dataTemp = fp.variables['array'][0:] - fp.close() - fp = NetCDFFile(sourceStackDir+"/posz.nc") - dataTemp2 = fp.variables['array'][0:] - fp.close() - dataTemp = np.append(dataTemp, dataTemp2) - outFile = NetCDFFile(voidDir+"/posz.nc", mode='w') - outFile.createDimension("dim", len(dataTemp)) - v = outFile.createVariable("array", ncFloat, ("dim",)) - v[:] = dataTemp - outFile.close() - - - fp = NetCDFFile(voidDir+"/redshifts.nc") - dataTemp = fp.variables['array'][0:] - fp.close() - fp = NetCDFFile(sourceStackDir+"/redshifts.nc") - dataTemp2 = fp.variables['array'][0:] - fp.close() - dataTemp = np.append(dataTemp, dataTemp2) - outFile = NetCDFFile(voidDir+"/redshifts.nc", mode='w') - outFile.createDimension("dim", len(dataTemp)) - v = outFile.createVariable("array", ncFloat, ("dim",)) - v[:] = dataTemp - outFile.close() - - fp = NetCDFFile(voidDir+"/indexes.nc") - dataTemp = fp.variables['array'][0:] - fp.close() - fp = NetCDFFile(sourceStackDir+"/indexes.nc") - dataTemp2 = fp.variables['array'][0:] - fp.close() - dataTemp = np.append(dataTemp, dataTemp2) - outFile = NetCDFFile(voidDir+"/indexes.nc", mode='w') - outFile.createDimension("dim", len(dataTemp)) - v = outFile.createVariable("array", ncFloat, ("dim",)) - v[:] = dataTemp - outFile.close() - - # add NOVOID if necessary - if not os.access(voidDir+"/centers.txt", os.F_OK): - fp = open(voidDir+"/NOVOID", "w") - fp.write("no voids in combo\n") - fp.close() - - fp = open(zobovDir+"/donegalupdate", "w") - fp.close() - - print "Done!" 
- - sys.stdout = sys.__stdout__ - #sys.stderr = sys.__stderr__ - - if jobSuccessful(logFile, "Done!\n"): - print jobString, "Combining stacks done" - else: - print jobString, "Combining stacks FAILED!" - exit(-1) - - #else: - # print "already done!" - -# ----------------------------------------------------------------------------- -def launchProfile(sample, stack, voidDir=None, logFile=None, continueRun=None, - thisDataPortion=None): - - sampleName = sample.fullName - - runSuffix = getStackSuffix(stack.zMin, stack.zMax, stack.rMin, - stack.rMax, thisDataPortion) - jobString = " "+runSuffix+":" - - if os.access(voidDir+"/NOVOID", os.F_OK): - print jobString, "Profile no stack here; skipping!" - return - - numVoids = open(voidDir+"/num_voids.txt", "r").readline() - numParticles = open(voidDir+"/num_particles.txt", "r").readline() - - #Rextracut = stack.rMin*3 + 1 - #Rcircular = stack.rMin*2 + 2 - Rextracut = stack.rMax*3 + 1 - Rcircular = stack.rMax*3 + 2 - - if not (continueRun and jobSuccessful(logFile, "Done!\n")): - profileParmFile = voidDir+"/params.txt" - fp = open(profileParmFile, "w") - fp.write(str(stack.rMin)+"\n") - fp.write(str(stack.rMax)+"\n") - fp.write(str(Rcircular)+"\n") - fp.write(numVoids+"\n") - fp.close() - - #if sample.zRange[0] > stack.zMax or sample.zRange[1] < stack.zMin: - # print "outside sample redshift range; skipping!" - # fp = open(voidDir+"/NOVOID", "w") - # fp.write("outside z range: profile\n") - # fp.close() - # return - - if sample.profileBinSize == "auto": - #density = 0.5 * 50 / Rcircular / 2 - density = stack.rMin / 5. - #density = stack.rMax / 5. 
- else: - density = sample.profileBinSize - - sys.stdout = open(logFile, 'w') - #sys.stderr = open(logFile, 'a') - vp.build_1d_profile(base_dir=voidDir, density=density, - rescaleMode=stack.rescaleMode) - vp.build_2d_profile(base_dir=voidDir, density=density) - sys.stdout = sys.__stdout__ - #sys.stderr = sys.__stderr__ - - if jobSuccessful(logFile, "Done!\n"): - print jobString, "Profiling stacks done (N_v=%s, N_p=%s)" % \ - (numVoids,numParticles) - else: - print jobString, "Profiling stacks FAILED!" - exit(-1) - - else: - print jobString, "Profiling stacks already done!" - - -# ----------------------------------------------------------------------------- -def launchFit(sample, stack, useComoving=None, - logFile=None, voidDir=None, figDir=None, - continueRun=None, thisDataPortion=None, - fittingMode=None, minVoidsToFit=None, - minPartToFit=None): - - sampleName = sample.fullName - - runSuffix = getStackSuffix(stack.zMin, stack.zMax, stack.rMin, - stack.rMax, thisDataPortion) - - jobString = " "+runSuffix+":" - - if not (continueRun and jobSuccessful(logFile, "Done!\n")): - if os.access(voidDir+"/NOVOID", os.F_OK): - print jobString, "Fitting no voids here; skipping!" - return - - numVoids = int(open(voidDir+"/num_voids.txt", "r").readline()) - if numVoids < minVoidsToFit: - print jobString, "Fitting not enough voids to fit; skipping!" - fp = open(voidDir+"/NOFIT", "w") - fp.write("not enough voids: %d \n" % numVoids) - fp.close() - return - - numPart = int(open(voidDir+"/num_particles.txt", "r").readline()) - if numPart < minPartToFit: - print jobString, "Fitting not enough particles to fit; skipping!" - fp = open(voidDir+"/NOFIT", "w") - fp.write("not enough particles: %d \n" % numPart) - fp.close() - return - - #if stack.zMin < sample.zRange[0] or stack.zMax > sample.zRange[1]: - # print "outside sample redshift range; skipping!" 
- # return - - if sample.partOfCombo or not sample.includeInHubble: - print jobString, "Fitting sample not needed for further analysis; skipping!" - fp = open(voidDir+"/NOFIT", "w") - fp.write("sample not needed for hubble\n") - fp.close() - return - - if not stack.includeInHubble: - print jobString, "Fitting radius not needed for further analysis; skipping!" - return - - if not stack.includeInHubble: - print jobString, "Fitting redshift not needed for further analysis; skipping!" - return - - if os.access(figDir+"/stackedVoid_"+sampleName+"_"+\ - runSuffix+".eps", os.F_OK): - os.unlink(figDir+"/stackedVoid_"+sampleName+"_"+\ - runSuffix+".eps") - - sys.stdout = open(logFile, 'w') - #sys.stderr = open(logFile, 'a') - - badChain = True - ntries = 0 - maxtries = 5 - while badChain: - ntries += 1 - - if fittingMode == "mcmc": - Rexpect = (stack.rMin+stack.rMax)/2 - #Rtruncate = stack.rMax*2. # TEST - Rtruncate = stack.rMax*3. # TEST - #Rtruncate = stack.rMin*3. + 1 # TEST - if sample.dataType == "observation": - ret,fits,args = vp.fit_ellipticity(voidDir,Rbase=Rexpect, - nbar_init=1.0, - Niter=300000, - Nburn=100000, - Rextracut=Rtruncate) - else: - ret,fits,args = vp.fit_ellipticity(voidDir,Rbase=Rexpect, - nbar_init=1.0, - Niter=300000, - Nburn=100000, - Rextracut=Rtruncate) - #print "TEST", stack.rMax, args[0][0], args[0][1], args[0][2] - badChain = (args[0][0] > 0.5 or args[0][1] > 2.*stack.rMax or \ - args[1] > 2.0 or \ - args[0][2] > 2.*stack.rMax) and \ - (ntries < maxtries) - elif fittingMode == "inertia": - ret,fits,args = vp.compute_inertia(voidDir, stack.rMax, mode="2d", nBootstraps=100, rMaxInertia=0.7) - badChain = False - - ## rescale to expected stretch if using comoving coords - #if useComoving or not sample.useLightCone: - # exp = args[1] - # expError = args[2] - - #np.save(voidDir+"/chain.npy", ret) - np.savetxt(voidDir+"/fits.out", fits) - - plotTitle = "Sample: "+sample.nickName+\ - ", z = "+str(stack.zMin)+"-"+\ - str(stack.zMax)+", R = 
"+str(stack.rMin)+"-"+\ - str(stack.rMax)+r" $h^{-1}$ Mpc" - - showRatio = (ntries <= maxtries) - showContours = (ntries <= maxtries) - - if stack.rescaleMode == "rmax": - rescaleFactor = stack.rMax - else: - rescaleFactor = 1.0 - - vp.draw_fit(*args, delta_tick=0.2, vmax=1.4, delta_v=0.01, - nocontour=True, model_max=1.0, delta_model=0.1, - plotTitle=plotTitle, show_ratio=showRatio, - showContours=showContours, - rescaleFactor=rescaleFactor) - figure(1).savefig(figDir+"/stackedVoid_"+sampleName+\ - "_"+runSuffix+".eps") - figure(1).savefig(figDir+"/stackedVoid_"+sampleName+\ - "_"+runSuffix+".pdf") - figure(1).savefig(figDir+"/stackedVoid_"+sampleName+\ - "_"+runSuffix+".png") - # record the measured stretch - exp = args[1] - expError = args[2] - outline = str(exp) + " " + str(expError) - open(voidDir+"/expansion.out", "w").write(outline) - - print "Done!" - sys.stdout = sys.__stdout__ - #sys.stderr = sys.__stderr__ - - if os.access(voidDir+"/NOFIT", os.F_OK): - os.unlink(voidDir+"/NOFIT") - if ntries > maxtries: - print jobString, "No reliable fit found; skipping!" - fp = open(voidDir+"/NOFIT", "w") - fp.write("bad ellipticity fit\n") - fp.close() - #return - - else: - if jobSuccessful(logFile, "Done!\n"): - print jobString, "Fitting done (%d tries)" % ntries - else: - print jobString, "Fitting FAILED!" - exit(-1) - - else: - print jobString, "already done!" 
- - -# ----------------------------------------------------------------------------- -def launchHubble(dataPortions=None, dataSampleList=None, logDir=None, - INCOHERENT=None, workDir=None, figDir=None, errorBars=None, - ZOBOV_PATH=None, continueRun=None, voidDir=None, - doPlot = True, setName=None): - - for thisDataPortion in dataPortions: - print " For data portion", thisDataPortion - sys.stdout.flush() - - # merge the bins from all samples - numSamplesForHubble = 0 - maxRBins = 0 - maxZBins = 0 - allRBins = [] - allZBins = [] - - for sample in dataSampleList: - if sample.includeInHubble: - numSamplesForHubble += 1 - - for stack in sample.getHubbleStacks(): - alreadyHere = False - for stackCheck in allRBins: - if stack.rMin == stackCheck.rMin and stack.rMax==stackCheck.rMax: - alreadyHere = True - break - if not alreadyHere: - allRBins.append(stack) - - alreadyHere = False - for stackCheck in allZBins: - if stack.zMin == stackCheck.zMin and stack.zMax==stackCheck.zMax: - alreadyHere = True - break - if not alreadyHere: - allZBins.append(stack) - - allExpList = np.zeros((numSamplesForHubble,len(allRBins),len(allZBins),5)) - allExpList[:] = -1 - aveDistList = np.zeros((len(allZBins),3)) - - iSample = -1 - for sample in dataSampleList: - if not sample.includeInHubble: - continue - - iSample += 1 - - zobovDir = sample.zobovDir - sampleName = sample.fullName - - print " For data set", sampleName, "...", - sys.stdout.flush() - - logFile = logDir+"/hubble_"+sampleName+"_"+thisDataPortion+".out" - - #AVEDIST_PATH = ZOBOV_PATH+"/mytools/computeAverageDistortionPMS" - - # compute the average stretch in each redshift bin for cleaner-looking - # (not not fully accurate) plots - for (iZBin, stack) in enumerate(sample.getUniqueZBins()): - - aveDist = vp.aveStretch(sample, stack.zMin, stack.zMax, - Om=sample.omegaM) - - aveDistList[iZBin, 0] = stack.zMin - aveDistList[iZBin, 1] = aveDist - aveDistList[iZBin, 2] = 0.00125 - - numFits = 0 - fp = 
file(zobovDir+'/calculatedExpansions_'+thisDataPortion+'.txt', - mode="w") - - expList = np.zeros((1, len(sample.getUniqueRBins()), - len(sample.getUniqueZBins()), 3)) - - for (iZBin, zBin) in enumerate(sample.getUniqueZBins()): - for (iR, rBin) in enumerate(sample.getUniqueRBins()): - - runSuffix = getStackSuffix(zBin.zMin, zBin.zMax, rBin.rMin, - rBin.rMax, thisDataPortion) - - expFile = zobovDir+"/stacks_"+runSuffix+"/expansion.out" - if not (os.access(zobovDir+"/stacks_"+runSuffix+"/NOVOID", \ - os.F_OK) or - os.access(zobovDir+"/stacks_"+runSuffix+"/NOFIT", \ - os.F_OK) or - not os.access(expFile, os.F_OK)): - exp = float(open(expFile, "r").readline().split()[0]) - expError = float(open(expFile, "r").readline().split()[1]) - else: - exp = -1 - expError = 0 - - expList[0, iR, iZBin, 0] = exp - expList[0, iR, iZBin, 1] = expError - - # TODO - # compute the per-stack stretch for liklihood calculation - #runSuffix = getStackSuffix(stack.zMin, stack.zMax, stack.rMin, - # stack.rMax, thisDataPortion) - #voidDir = zobovDir+"/stacks_" + runSuffix - #centersFile = voidDir+"/centers.txt" - #voidRedshifts = np.loadtxt(centersFile)[:,5] - #aveDist = vp.aveWeightedStretchCone(stack.zMin, stack.zMax, - # skyFrac = sample.skyFraction, - # voidRedshifts=voidRedshifts) - - aveDist = vp.aveStretch(sample, zBin.zMin, zBin.zMax, - Om=sample.omegaM) - - expList[0, iR, iZBin, 2] = aveDist - - voidDir = zobovDir+"/stacks_" + runSuffix - numVoids = open(voidDir+"/num_voids.txt", "r").readline() - numParticles = open(voidDir+"/num_particles.txt", "r").readline() - - for (iZCheck,zBinCheck) in enumerate(allZBins): - for (iRCheck,rBinCheck) in enumerate(allRBins): - if zBin.zMin == zBinCheck.zMin and zBin.zMax == zBinCheck.zMax: - if rBin.rMin == rBinCheck.rMin and rBin.rMax == rBinCheck.rMax: - aveDist = vp.aveStretch(sample, zBin.zMin, zBin.zMax, - Om=sample.omegaM) - - allExpList[iSample, iRCheck, iZCheck, 0] = exp - allExpList[iSample, iRCheck, iZCheck, 1] = expError - 
allExpList[iSample, iRCheck, iZCheck, 3] = numVoids - allExpList[iSample, iRCheck, iZCheck, 4] = numParticles - - fp.write(str(exp) + " " + str(expError) + " "+ " "+ \ - str(aveDist) + " -1 " + runSuffix+"\n") - numFits += 1 - - fp.close() - - rlist = np.zeros((len(sample.getUniqueRBins()),2)) - for (iR,rBin) in enumerate(sample.getUniqueRBins()): - rlist[iR, 0] = rBin.rMin - rlist[iR, 1] = rBin.rMax - - zbase = np.zeros((len(sample.getUniqueZBins()),2)) - for (iZ,zBin) in enumerate(sample.getUniqueZBins()): - zbase[iZBin,0] = zBin.zMinPlot - zbase[iZBin,1] = zBin.zMaxPlot - #np.savetxt(zobovDir+'/zbase_'+thisDataPortion+'.txt', zbase) - - if not (continueRun and jobSuccessful(logFile, "Done!\n")): - sys.stdout = open(logFile, 'w') - #sys.stderr = open(logFile, 'a') - plotTitle = "Sample: "+sample.nickName+", "+thisDataPortion+" voids" - #if doPlot: - #vp.do_all_obs(sample, zbase, expList, workDir+"/avedistortion_", - #vp.do_all_obs(sample, zbase, expList, aveDistList, - # rlist, plotTitle=plotTitle, plotAve=True) - #figure(1).savefig(figDir+"/hubble_"+setName+"_"+sampleName+"_"+\ - # thisDataPortion+\ - # ".eps",bbox_inches='tight') - #figure(1).savefig(figDir+"/hubble_"+setName+"_"+sampleName+"_"+\ - # thisDataPortion+\ - # ".pdf",bbox_inches='tight') - #figure(1).savefig(figDir+"/hubble_"+setName+"_"+sampleName+"_"+\ - # thisDataPortion+\ - # ".png",bbox_inches='tight') - #else: - print "Skipping plot" - print "Done!" - sys.stdout = sys.__stdout__ - #sys.stderr = sys.__stderr__ - - if jobSuccessful(logFile, "Done!\n"): - print "done" - else: - print "FAILED!" - exit(-1) - - else: - print "already done!" 
- - # now do all samples in one plot - print " For data set combined...", - sys.stdout.flush() - - logFile = logDir+"/hubble_combined_"+sampleName+"_"+thisDataPortion+".out" - - if not (continueRun and jobSuccessful(logFile, "Done!\n")): - - if errorBars == "ESTIMATED": - # replace calculated errors with estimated ones - errorBarFile = workDir+"/calculatedErrorBars_"+thisDataPortion+".txt" - if os.access(errorBarFile, os.F_OK): - print "REPLACING ERROR BARS!", - sys.stdout.flush() - fp = file(errorBarFile, mode="r") - ignoreLine = fp.readline() - for iZ in xrange(len(allExpList[0,0,:,0])): - ignoreLine = fp.readline() - - for iZ in xrange(len(allExpList[0,0,:,0])): - for iSample in xrange(len(allExpList[:,0,0,0])): - for iR in xrange(len(allExpList[0,:,0,0])): - line = fp.readline().split() - allExpList[iSample,iR,iZ,2] = allExpList[iSample,iR,iZ,1] - allExpList[iSample,iR,iZ,1] = float(line[1]) - allExpList[iSample,iR,iZ,3] = float(line[2]) - allExpList[iSample,iR,iZ,4] = float(line[3]) - - fp.close() - - rlist = np.zeros((len(allRBins),2)) - for (iR,rBin) in enumerate(allRBins): - rlist[iR, 0] = rBin.rMin - rlist[iR, 1] = rBin.rMax - - zbase = np.zeros((len(allZBins),2)) - plotZmax = 0.0 - plotZmin = 1.e99 - for (iZ,zBin) in enumerate(allZBins): - zbase[iZ,0] = zBin.zMinPlot - zbase[iZ,1] = zBin.zMaxPlot - - if zBin.zMaxPlot > plotZmax: plotZmax = zBin.zMaxPlot - if zBin.zMinPlot < plotZmin: plotZmin = zBin.zMinPlot - - aveDist = vp.aveStretch(sample, zBin.zMin, zBin.zMax, - Om=sample.omegaM) - - aveDistList[iZ, 0] = zBin.zMin - aveDistList[iZ, 1] = aveDist - aveDistList[iZ, 2] = 0.00125 - if plotZmax > 1.5: plotZmax = 1.5 - - - shortSampleNames = list() - for sample in dataSampleList: - if sample.includeInHubble: - shortSampleNames.append(sample.nickName) - sys.stdout = open(logFile, 'w') - #sys.stderr = open(logFile, 'a') - if doPlot: - if INCOHERENT: - #plotTitle = "all samples, incoherent "+\ - # thisDataPortion+" voids" - plotTitle = '' - else: - #plotTitle 
= "all samples, "+thisDataPortion+" voids" - plotTitle = setName - #plotTitle = '' - vp.do_all_obs(sample, zbase, allExpList, aveDistList, - rlist, plotTitle=plotTitle, sampleNames=shortSampleNames, - plotAve=True, mulfac = 1.0, biasLine = 1.16, - plotZmin=plotZmin, plotZmax=plotZmax, - plotOm1=True) - figure(1).savefig(figDir+"/hubble_combined_"+setName+"_"+ \ - thisDataPortion+\ - ".eps",bbox_inches='tight') - figure(1).savefig(figDir+"/hubble_combined_"+setName+"_"+ \ - thisDataPortion+\ - ".pdf",bbox_inches='tight') - figure(1).savefig(figDir+"/hubble_combined_"+setName+"_"+ \ - thisDataPortion+\ - ".png",bbox_inches='tight') - - if INCOHERENT: - plotTitle = "all samples, incoherent "+\ - thisDataPortion+" voids (systematics corrected)" - else: - #plotTitle = "all samples, "+thisDataPortion+\ - # " voids (systematics corrected)" - plotTitle = setName + "(systematics corrected)" - vp.do_all_obs(sample, zbase, allExpList, aveDistList, - rlist, plotTitle=plotTitle, sampleNames=shortSampleNames, - plotAve=True, mulfac = 1.16, - plotZmin=plotZmin, plotZmax=plotZmax, - plotOm1=True) - figure(1).savefig(figDir+"/hubble_combined_"+setName+"_"+\ - thisDataPortion+\ - "_debiased.eps",bbox_inches='tight') - figure(1).savefig(figDir+"/hubble_combined_"+setName+"_"+\ - thisDataPortion+\ - "_debiased.pdf",bbox_inches='tight') - figure(1).savefig(figDir+"/hubble_combined_"+setName+"_"+\ - thisDataPortion+\ - "_debiased.png",bbox_inches='tight') - else: - print "Skipping plot" - print "Done!" 
- sys.stdout = sys.__stdout__ - #sys.stderr = sys.__stderr__ - - # save all expansion data to a single file - fp = file(workDir+'/calculatedExpansions_'+thisDataPortion+'.txt', - mode="w") - fp.write(str(numSamplesForHubble) + " " + - str(len(allRBins)) + " " + str(len(allZBins)) + "\n") - for zBin in allZBins: - fp.write(str(zBin.zMin) + " " + str(zBin.zMax) + "\n") - - for iZ in xrange(len(allExpList[0,0,:,0])): - for iSample in xrange(len(allExpList[:,0,0,0])): - for iR in xrange(len(allExpList[0,:,0,0])): - fp.write(str(allExpList[iSample,iR,iZ,0]) + " " +\ - str(allExpList[iSample,iR,iZ,1]) + " " +\ - str(allExpList[iSample,iR,iZ,2]) + " " +\ - str(allExpList[iSample,iR,iZ,3]) + " " +\ - str(allExpList[iSample,iR,iZ,4]) + "\n") - fp.close() - - fp = file(workDir+'/calculatedExpansions_reduced_'+thisDataPortion+'.txt', - mode="w") - for iZ in xrange(len(allExpList[0,0,:,0])): - for iSample in xrange(len(allExpList[:,0,0,0])): - for iR in xrange(len(allExpList[0,:,0,0])): - if (allExpList[iSample,iR,iZ,0] == -1): continue - fp.write(str(allExpList[iSample,iR,iZ,0]) + " " +\ - str(allExpList[iSample,iR,iZ,1]) + " " +\ - str(allExpList[iSample,iR,iZ,2]) + " " +\ - str(allExpList[iSample,iR,iZ,3]) + " " +\ - str(allExpList[iSample,iR,iZ,4]) + "\n") - fp.close() - - - # save all void distribution data to a single file - fp = file(workDir+'/voidDistributions_'+thisDataPortion+'.txt', - mode="w") - fp.write(str(numSamplesForHubble) + " " + - str(len(allRBins)) + " " + str(len(allZBins)) + "\n") - for zBin in allZBins: - fp.write(str(zBin.zMin) + " " + str(zBin.zMax) + "\n") - - for zBin in allZBins: - for sample in dataSampleList: - if not sample.includeInHubble: continue - for rBin in allRBins: - runSuffix = getStackSuffix(zBin.zMin, zBin.zMax, rBin.rMin, - rBin.rMax, thisDataPortion) - voidDir = sample.zobovDir+"/stacks_" + runSuffix - centersFile = voidDir+"/centers.txt" - if os.access(centersFile, os.F_OK): - voidRedshifts = np.loadtxt(centersFile) - if 
voidRedshifts.ndim > 1: - voidRedshifts = voidRedshifts[:,5] - np.savetxt(fp, voidRedshifts[None]) - else: - if (len(voidRedshifts) > 0): - voidRedshifts = voidRedshifts[5] - np.savetxt(fp, voidRedshifts[None]) - else: - fp.write("-1\n") - #fp.write(str(len(voidRedshifts))+" ") - #np.savetxt(fp, voidRedshifts[None]) - else: - fp.write("-1\n") - fp.close() - - - if jobSuccessful(logFile, "Done!\n"): - print "done" - else: - print "FAILED!" - exit(-1) - - else: - print "already done!" - -# ----------------------------------------------------------------------------- -def launchLikelihood(dataSampleList=None, - dataPortions=None, logDir=None, workDir=None, - continueRun=False): - - # check for comoving from first sample - useComoving = not dataSampleList[0].useLightCone or \ - dataSampleList[0].useComoving - - for thisDataPortion in dataPortions: - print " For data portion", thisDataPortion, "...", - sys.stdout.flush() - - logFile = logDir+"/likelihood_"+thisDataPortion+".out" - - if not (continueRun and jobSuccessful(logFile, "Done!\n")): - - #sys.stdout = open(logFile, 'w') - #sys.stderr = open(logFile, 'a') - - if dataSampleList[0].usePecVel: - biasLevel = 1.16 - else: - biasLevel = 1.0 - - vp.build1dLikelihood(dataSampleList[0], - workDir+"/calculatedExpansions_"+\ - thisDataPortion+".txt", - workDir+"/voidDistributions_" + \ - thisDataPortion+".txt", - nbins = 20, - OmStart = 0.0, - OmEnd = 1.0, - #biasStart = 1.0, - #biasEnd = 1.32, - biasStart = biasLevel, - biasEnd = biasLevel, - outputBase = workDir+"/1dlikelihoods_"+thisDataPortion+"_", - useBinAve = False, - useComoving = useComoving, - OmFid=dataSampleList[0].omegaM) - - #sys.stdout = sys.__stdout__ - #sys.stderr = sys.__stderr__ - - if jobSuccessful(logFile, "Done!\n"): - print "done" - else: - print "FAILED!" - exit(-1) - - else: - print "already done!" 
- - -# ----------------------------------------------------------------------------- -def launchEstimateErrorBars(workDir=None, nullDir=None, numNulls=None, - dataPortion=None, nullDirList=None): - - IVALUE = 0 - IERROR = 1 - - # open first null file to get sizes - if nullDirList == None: - nullFile = nullDir + "/" + str(1) + "/calculatedExpansions_" + \ - str(dataPortion) + ".txt" - else: - nullFile = nullDirList[0] + "/calculatedExpansions_" + str(dataPortion) + \ - ".txt" - numNulls = len(nullDirList) - - fp = open(nullFile, "r") - binLine = fp.readline() - numSamples = int(binLine.split(" ")[0]) - numRBins = int(binLine.split(" ")[1]) - numZBins = int(binLine.split(" ")[2]) - numBins = numRBins * numSamples * numZBins - nRTotal = numRBins * numSamples - fp.close() - - # load null data - nullData = np.zeros([numNulls, numBins, 2]) - - for iNull in xrange(numNulls): - if nullDirList == None: - nullFile = nullDir + "/" + str(iNull) + "/calculatedExpansions_" + \ - str(dataPortion) + ".txt" - else: - nullFile = nullDirList[iNull] + "/calculatedExpansions_" + \ - str(dataPortion) + ".txt" - - fp = open(nullFile, "r") - - binLine = fp.readline() - numSamples = int(binLine.split(" ")[0]) - numRBins = int(binLine.split(" ")[1]) - numZBins = int(binLine.split(" ")[2]) - numBins = numRBins * numSamples * numZBins - - zList = np.zeros( [numZBins,2] ) - for iZ in xrange(int(numZBins)): - line = fp.readline().split() - zList[iZ,0] = float(line[0]) - zList[iZ,1] = float(line[1]) - - for iZ in xrange(numZBins): - iRTotal = 0 - for iSample in xrange(numSamples): - for iR in xrange(numRBins): - line = fp.readline().split() - nullData[iNull,iRTotal*numZBins+iZ,IVALUE] = float(line[0]) - nullData[iNull,iRTotal*numZBins+iZ,IERROR] = float(line[1]) - iRTotal += 1 - fp.close() - - # use 68% limits to get error bars - errorBars = np.zeros([numBins, 2]) - for iBin in xrange(numBins): - binData = nullData[:,iBin,IVALUE] - validData = binData[:] > -1 - binData = binData[validData] - 
binData = np.sort(binData) - - if np.size(binData) > 0: - errorBars[iBin, 0] = binData[int(0.16*np.size(binData))] - errorBars[iBin, 1] = binData[int(0.84*np.size(binData))] - - mean = (errorBars[iBin, 1] + errorBars[iBin, 0])/2 - sig = (errorBars[iBin, 1] - errorBars[iBin, 0])/2 - errorBars[iBin, 0] = mean - errorBars[iBin, 1] = sig - else: - errorBars[iBin, :] = -1 - - # writes out error bars - fp = file(workDir+"/calculatedErrorBars_" + \ - str(dataPortion) + ".txt", mode="w") - fp.write(str(numSamples) + " " + str(numRBins) + " " + str(numZBins) + "\n") - for iZ in xrange(numZBins): - fp.write(str(zList[iZ,0]) + " " + str(zList[iZ,1]) + "\n") - - for iZ in xrange(numZBins): - iRTotal = 0 - for iSample in xrange(numSamples): - for iR in xrange(numRBins): - fp.write(str(errorBars[iRTotal*numZBins+iZ,0]) + " " + \ - str(errorBars[iRTotal*numZBins+iZ,1]) + "\n") - iRTotal += 1 - fp.close() - - - - # ----------------------------------------------------------------------------- def launchVelocityStack(sample, stack, binPath, velField_file, diff --git a/python_tools/void_python_tools/partUtil/partUtil.py b/python_tools/void_python_tools/partUtil/partUtil.py index 37309ab..4f695e1 100644 --- a/python_tools/void_python_tools/partUtil/partUtil.py +++ b/python_tools/void_python_tools/partUtil/partUtil.py @@ -34,7 +34,7 @@ ncFloat = 'f8' # ----------------------------------------------------------------------------- def loadPart(sampleDir): - #print " Loading particle data..." + print " Loading particle data..." 
sys.stdout.flush() with open(sampleDir+"/sample_info.dat", 'rb') as input: diff --git a/python_tools/void_python_tools/xcor/xcorlib.py b/python_tools/void_python_tools/xcor/xcorlib.py index d70ebad..535c90d 100644 --- a/python_tools/void_python_tools/xcor/xcorlib.py +++ b/python_tools/void_python_tools/xcor/xcorlib.py @@ -2,72 +2,12 @@ import numpy as np def cic( x, Lbox, Lboxcut = 0, Nmesh = 128, weights = None ): - if weights == None: weights = 1 - wm = np.mean(weights) - ws = np.mean(weights**2) - - d = np.mod(x/(Lbox+2*Lboxcut)*Nmesh,1) - - box = ([Lboxcut,Lbox+Lboxcut],[Lboxcut,Lbox+Lboxcut],[Lboxcut,Lbox+Lboxcut]) - - rho = np.histogramdd(x, range = box, bins = Nmesh, weights = weights*(1-d[:,0])*(1-d[:,1])*(1-d[:,2]))[0] \ - + np.roll(np.histogramdd(x, range = box, bins = Nmesh, weights = weights*d[:,0]*(1-d[:,1])*(1-d[:,2]))[0],1,0) \ - + np.roll(np.histogramdd(x, range = box, bins = Nmesh, weights = weights*(1-d[:,0])*d[:,1]*(1-d[:,2]))[0],1,1) \ - + np.roll(np.histogramdd(x, range = box, bins = Nmesh, weights = weights*(1-d[:,0])*(1-d[:,1])*d[:,2])[0],1,2) \ - + np.roll(np.roll(np.histogramdd(x, range = box, bins = Nmesh, weights = weights*d[:,0]*d[:,1]*(1-d[:,2]))[0],1,0),1,1) \ - + np.roll(np.roll(np.histogramdd(x, range = box, bins = Nmesh, weights = weights*d[:,0]*(1-d[:,1])*d[:,2])[0],1,0),1,2) \ - + np.roll(np.roll(np.histogramdd(x, range = box, bins = Nmesh, weights = weights*(1-d[:,0])*d[:,1]*d[:,2])[0],1,1),1,2) \ - + np.roll(np.roll(np.roll(np.histogramdd(x, range = box, bins = Nmesh, weights = weights*d[:,0]*d[:,1]*d[:,2])[0],1,0),1,1),1,2) - - rho /= wm - - rho = rho/rho.mean() - 1. 
- - return (rho, wm, ws) - + return def powcor( d1, d2, Lbox, Nmesh = 128, Nbin = 100, scale = 'lin', cor = False ): - # Fourier transform - d1 = np.fft.fftn(d1) - d2 = np.fft.fftn(d2) - - # CIC correction - wid = np.indices(np.shape(d1)) - Nmesh/2 - #wid[np.where(wid >= Nmesh/2)] -= Nmesh - wid = wid*np.pi/Nmesh + 1e-100 - wcic = np.prod(np.sin(wid)/wid,0)**2 - - # Shell average power spectrum - dk = 2*np.pi/Lbox - Pk = np.conj(d1)*d2*(Lbox/Nmesh**2)**3 - Pk = np.fft.fftshift(Pk)/wcic**2 - - (Nm, km, Pkm, SPkm) = shellavg(np.real(Pk), dk, Nmesh, Nbin = Nbin, xmin = 0., xmax = Nmesh*dk/2, scale = scale) - - # Inverse Fourier transform and shell average correlation function - if cor == True: - dx = Lbox/Nmesh - Xr = np.fft.ifftshift(np.fft.ifftn(np.fft.ifftshift(Pk)))*(Nmesh/Lbox)**3 - - (Nmx, rm, Xrm, SXrm) = shellavg(np.real(Xr), dx, Nmesh, Nbin = Nbin, xmin = dx, xmax = 140., scale = scale) - - return ((Nm, km, Pkm, SPkm),(Nmx, rm, Xrm, SXrm)) - - else: return (Nm, km, Pkm, SPkm) - + return def shellavg( f, dx, Nmesh, Nbin = 100, xmin = 0., xmax = 1., scale = 'lin' ): - - x = np.indices(np.shape(f)) - Nmesh/2 - #x[np.where(x >= Nmesh/2)] -= Nmesh - x = dx*np.sqrt(np.sum(x**2,0)) - if scale == 'lin': bins = xmin+(xmax-xmin)* (np.arange(Nbin+1)/float(Nbin)) - if scale == 'log': bins = xmin*(xmax/xmin)**(np.arange(Nbin+1)/float(Nbin)) - Nm = np.histogram(x, bins = bins)[0] - xm = np.histogram(x, bins = bins, weights = x)[0]/Nm - fm = np.histogram(x, bins = bins, weights = f)[0]/Nm - fs = np.sqrt((np.histogram(x, bins = bins, weights = f**2)[0]/Nm - fm**2)/(Nm-1)) - - return (Nm, xm, fm, fs) + return diff --git a/zobov/CMakeLists.txt b/zobov/CMakeLists.txt new file mode 100644 index 0000000..9c749b5 --- /dev/null +++ b/zobov/CMakeLists.txt @@ -0,0 +1,16 @@ + +add_executable(voz1b1 voz1b1.c readfiles.c vozutil.c voz.h) +target_link_libraries(voz1b1 ${QHULL_LIBRARY} ${MATH_LIB}) + + +add_executable(jozov jozov.c findrtop.c) +target_link_libraries(jozov ${MATH_LIB}) + 
+add_executable(vozinit vozinit.c readfiles.c) +target_link_libraries(vozinit ${MATH_LIB}) + + +add_executable(voztie voztie.c readfiles.c) +target_link_libraries(voztie ${MATH_LIB}) + +