commit f190968a81
Author: P.M. Sutter
Date:   2015-10-27 10:23:22 -04:00

14 changed files with 556915 additions and 64 deletions

View file

@@ -10,6 +10,7 @@ check_c_compiler_flag(-march=native SUPPORT_ARCH_NATIVE )
 find_library(MATH_LIB m)
+find_library(DL_LIB dl)
 macro(add_genopt _sourcelist _ggofile _basefile)

View file

@@ -32,7 +32,7 @@ ENDIF (CAIROMMCONFIG_INCLUDE_PATH AND CAIROMM_INCLUDE_PATH AND SIGC_INCLUDE_PATH
 SET(ZOB_LIBS zobovTool
   ${COSMOTOOL_LIBRARY} ${GSL_LIBRARIES}
-  ${NETCDF_LIBRARIES})
+  ${NETCDF_LIBRARIES} ${DL_LIB})
 include_directories(

View file

@@ -920,7 +920,22 @@ int main(int argc, char **argv) {
     }
   }
   voids.resize(iGood);
-  printf(" 4th filter: rejected %d below redshift boundaries\n", numNearZ);
+/*
+  iGood = 0;
+  for (iVoid = 0; iVoid < voids.size(); iVoid++) {
+    // just in case
+    if (args.isObservation_flag &&
+        voids[iVoid].redshift > args.zMax_arg) {
+      numNearZ++;
+    } else {
+      voids[iGood++] = voids[iVoid];
+    }
+  }
+  voids.resize(iGood);
+*/
+  printf(" 4th filter: rejected %d outside redshift boundaries\n", numNearZ);
   // take only top-level voids
   numAreParents = 0;
@@ -1155,12 +1170,19 @@ void outputVoids(string outputDir, string sampleName, string prefix,
            outVoid.level,
            outVoid.numChildren,
            outVoid.centralDen);
+    double phi = atan2(outVoid.macrocenter[1]-boxLen[1]/2.,
+                       outVoid.macrocenter[0]-boxLen[0]/2.);
+    if (phi < 0) phi += 2.*M_PI;
+    double RA = phi * 180./M_PI;
+    double theta = acos((outVoid.macrocenter[2]-boxLen[2]/2.) /
+                        outVoid.redshiftInMpc);
+    double dec = (M_PI/2. - theta) * 180./M_PI;
    fprintf(fpSkyPositions, "%.2f %.2f %.5f %.2f %d\n",
-           atan((outVoid.macrocenter[1]-boxLen[1]/2.) /
-                (outVoid.macrocenter[0]-boxLen[0]/2.)) * 180/M_PI + 180,
-           asin((outVoid.macrocenter[2]-boxLen[2]/2.) /
-                outVoid.redshiftInMpc) * 180/M_PI,
+           RA,
+           dec,
            outVoid.redshift,
            outVoid.radius,
            outVoid.voidID);
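The block above replaces the old single-quadrant atan()/asin() sky projection with an explicit atan2()/acos() conversion of the box-centered macrocenter into RA and dec, wrapping RA into [0, 360) degrees. A minimal standalone sketch of the same conversion, assuming NumPy and hypothetical variable names (positions and distances in Mpc/h, matching redshiftInMpc):

import numpy as np

def sky_position(macrocenter, box_len, comoving_dist):
    # Shift the void center so the box center is the origin (Mpc/h).
    x = macrocenter[0] - box_len[0] / 2.
    y = macrocenter[1] - box_len[1] / 2.
    z = macrocenter[2] - box_len[2] / 2.

    # atan2 keeps the correct quadrant, unlike atan(y/x); wrap RA into [0, 360).
    phi = np.arctan2(y, x)
    if phi < 0:
        phi += 2. * np.pi
    ra = np.degrees(phi)

    # Polar angle from the +z axis, then convert to declination.
    theta = np.arccos(z / comoving_dist)
    dec = 90. - np.degrees(theta)
    return ra, dec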

examples/example_observation.dat (new executable file, 164647 additions; diff suppressed because it is too large)

Binary file not shown.

examples/example_simulation_z0.0.dat (new executable file, 392171 additions; diff suppressed because it is too large)

View file

@@ -14,7 +14,8 @@ This file has been developped by P. M. Sutter.
 #include <stdio.h>
 #include <stdlib.h>
 #include <stddef.h>
-#include <string.h>
+#include <string>
+#include <cstring>
 #include "hdf5_flash.h"
 #include "H5Cpp.h"

View file

@@ -27,7 +27,7 @@ IF(INTERNAL_GENGETOPT)
 ENDIF(INTERNAL_GENGETOPT)
 IF(INTERNAL_HDF5)
-  SET(HDF5_URL "http://www.hdfgroup.org/ftp/HDF5/releases/hdf5-1.8.9/src/hdf5-1.8.9.tar.gz" CACHE STRING "URL to download HDF5 from")
+  SET(HDF5_URL "http://www.hdfgroup.org/ftp/HDF5/releases/hdf5-1.8.15/src/hdf5-1.8.15.tar.gz" CACHE STRING "URL to download HDF5 from")
   mark_as_advanced(HDF5_URL)
 ENDIF(INTERNAL_HDF5)
@@ -121,6 +121,7 @@ else(INTERNAL_HDF5)
   find_library(HDF5HL_LIBRARY hdf5_hl)
 endif (INTERNAL_HDF5)
 SET(CONFIGURE_CPP_FLAGS "${CONFIGURE_CPP_FLAGS} -I${HDF5_INCLUDE_PATH}")
+SET(CONFIGURE_LIBS_FLAGS "${DL_LIB}")
 mark_as_advanced(HDF5_INCLUDE_PATH HDF5_LIBRARY HDF5_CPP_LIBRARY HDF5HL_LIBRARY HDF5HL_CPP_LIBRARY)
 ###############
@@ -133,7 +134,7 @@ if (INTERNAL_NETCDF)
   SET(NETCDF_BIN_DIR ${CMAKE_BINARY_DIR}/ext_build/netcdf)
   SET(CONFIGURE_CPP_FLAGS "${CONFIGURE_CPP_FLAGS} -I${NETCDF_BIN_DIR}/include")
   SET(CONFIGURE_LDFLAGS "${CONFIGURE_LDFLAGS} -L${NETCDF_BIN_DIR}/lib")
-  SET(EXTRA_NC_FLAGS CPPFLAGS=${CONFIGURE_CPP_FLAGS} LDFLAGS=${CONFIGURE_LDFLAGS})
+  SET(EXTRA_NC_FLAGS CPPFLAGS=${CONFIGURE_CPP_FLAGS} LDFLAGS=${CONFIGURE_LDFLAGS} LIBS=${CONFIGURE_LIBS_FLAGS})
   ExternalProject_Add(netcdf
     DEPENDS ${hdf5_built}
     PREFIX ${BUILD_PREFIX}/netcdf-prefix
@@ -239,7 +240,7 @@ ExternalProject_Add(cosmotool
     -DGSLCBLAS_LIBRARY=${GSLCBLAS_LIBRARY}
     -DNETCDF_LIBRARY=${NETCDF_LIBRARY}
     -DNETCDFCPP_LIBRARY=${NETCDFCPP_LIBRARY}
-    -DENABLE_SHARP=OFF
+    -DENABLE_SHARP=OFF
 )
 SET(COSMOTOOL_LIBRARY ${CMAKE_BINARY_DIR}/ext_build/cosmotool/lib/libCosmoTool.a)
 set(COSMOTOOL_INCLUDE_PATH ${CMAKE_BINARY_DIR}/ext_build/cosmotool/include)

View file

@@ -33,21 +33,17 @@ continueRun = False
 startCatalogStage = 1
 endCatalogStage = 3
-# a global name to give
-#catalogName = "lcdm"
 # directory for input data files
-inputDataDir = os.getenv("HOME")+"/workspace/Voids/catalogs/nyuvagc/"
+inputDataDir = os.getenv("PWD")+"/../examples/"
 # void catalog output directory
-workDir = os.getenv("HOME")+"/workspace/Voids/sdss_dr7LCDM/"
+workDir = os.getenv("PWD")+"/../examples/example_observation/"
 # output directory for log files
-logDir = os.getenv("PWD")+"/../logs/sdss_dr7LCDM"
+logDir = os.getenv("PWD")+"/../logs/example_observation/"
 # output directory for figures
-figDir = os.getenv("PWD")+"/../figs/sdss_dr7LCDM"
+figDir = os.getenv("PWD")+"/../figs/example_observation/"
 # you need to set these manually: point to ZOBOV and C_TOOLS in VIDE directory
 ZOBOV_PATH = os.getenv("PWD")+"/../zobov/"
@@ -68,38 +64,39 @@ dataSampleList = []
 # define your volume-limited samples
 newSample = Sample(
        # path to galaxy file is inputDataDir+dataFile
-       dataFile = "filename.dat"
+       dataFile = "example_observation.dat",
        # full name for this sample
-       fullName = "lss.dr72dim1.dat",
+       fullName = "example_observation",
        # a convenient nickname
-       nickName = "dim1",
+       nickName = "exobs",
        # don't change this
        dataType = "observation",
-       # assume volume-limites?
+       # assume sample is volume-limited?
        volumeLimited = True,
        # HEALpix mask file
-       maskFile = inputDataDir+"/healpix/rast_window_512.fits",
+       maskFile = inputDataDir+"/example_observation_mask.fits",
        # radial selection function (if not volume limited)
        selFunFile = None,
        # max and min redshifts of galaxies in your sample
-       zBoundary = (0.0, 0.05),
+       zBoundary = (0.0, 0.15),
        # max and min redshifts where you want to find voids
-       zRange = (0.0, 0.05),
+       zRange = (0.1, 0.15),
-       # leave this at -1 for mean particle separation, or
-       # specify your own in Mpc/h
+       # leave this at -1 for mean particle separation,
+       # or specify your own in Mpc/h
        minVoidRadius = -1,
        # density of mock particles in cubic Mpc/h
-       fakeDensity = 0.01,
+       # (make this as high as you can afford)
+       fakeDensity = 0.05,
        )
 dataSampleList.append(newSample)
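For a sample definition like the one above, the void-search range zRange has to sit inside zBoundary, and the galaxy file and mask have to exist under inputDataDir. A small, hypothetical pre-flight check (plain Python, not part of the VIDE pipeline), using the values from this example:

import os

# Values copied from the example_observation sample above.
inputDataDir = os.getenv("PWD", os.getcwd()) + "/../examples/"
dataFile = "example_observation.dat"
maskFile = inputDataDir + "/example_observation_mask.fits"
zBoundary = (0.0, 0.15)
zRange = (0.1, 0.15)

# The redshift range searched for voids must lie inside the galaxy range.
assert zBoundary[0] <= zRange[0] <= zRange[1] <= zBoundary[1]

# Both input files should be in place before launching generateCatalog.py.
for path in (inputDataDir + dataFile, maskFile):
    if not os.path.exists(path):
        print("missing input:", path)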

View file

@@ -33,26 +33,26 @@ continueRun = False
 startCatalogStage = 1
 endCatalogStage = 3
-# directory for the input simulation/observational particle files
-catalogDir = os.getenv("HOME")+"/workspace/Voids/catalogs/mergertree1024/"
+# directory for the input simulation files
+catalogDir = os.getenv("PWD")+"/../examples/"
 # void catalog output directory
-voidOutputDir = os.getenv("HOME")+"/workspace/Voids/sim/"
+voidOutputDir = os.getenv("PWD")+"/../examples/example_simulation/"
 # output directory for log files
-logDir = os.getenv("PWD")+"/../logs/sim/"
+logDir = os.getenv("PWD")+"/../logs/example_simulation/"
 # output directory for figures
-figDir = os.getenv("PWD")+"/../figs/sim/"
+figDir = os.getenv("PWD")+"/../figs/example_simulation/"
 # where to place the pipeline scripts
-scriptDir = os.getenv("PWD")+"/sim/"
+scriptDir = os.getenv("PWD")+"/example_simulation/"
 # don't change
 dataType = "simulation"
 # available formats for simulation: gadget, sdf, multidark
-dataFormat = "sdf"
+dataFormat = "multidark"
 # units of position in Mpc/h
 dataUnit = 1
@@ -84,19 +84,19 @@ numSubvolumes = 1
 # Particles
 # common filename of particle files
-particleFileBase = "mf_4s_1G_1k_NNNNN"
+particleFileBase = "example_simulation_NNNN.dat"
 # this flag will be replaced by values in fileNums list below
-particleFileDummy = 'NNNNN'
+particleFileDummy = 'NNNN'
 # list of file numbers for the particle files
-fileNums = ["1.000"]
+fileNums = ["z0.0"]
 # redshift of each file in the above fileNums list
 redshifts = ["0.0"]
 # list of desired subsamples - these are in unts of h Mpc^-3!
-subSamples = [1.0, 0.5]
+subSamples = [1.0]
 # if True, do the subsampling in preparation (available for sdf and multidark)
 doSubSamplingInPrep = False
@@ -112,14 +112,17 @@ shiftSimZ = False
 # Halos
 # common filename of halo files, leave blank to ignore halos
-haloFileBase = "mf_4s_1G_1k_bgc2_NNNNN.sdf"
+haloFileBase = ""
+#haloFileBase = "mf_4s_1G_1k_bgc2_NNNNN.sdf"
 # this flag will be replaced by values in fileNums list above
-haloFileDummy = 'NNNNN'
+haloFileDummy = ''
+#haloFileDummy = 'NNNNN'
 # minimum halo mass cuts to apply for the halo catalog
 # use "none" to get all halos
-minHaloMasses = ["none", 1.2e13]
+minHaloMasses = []
+#minHaloMasses = ["none", 1.2e13]
 # locations of data in the halo catalog
 haloFileMCol = 6 # mass
@@ -147,14 +150,14 @@ hubble = 0.6962 # h_0
 # each of the HOD sets will be applied to each halo catalog defined above
 hodParmList = [
-  {'name' : "LowRes", #BOSS: Manera et al. 2012, eq. 26
-   'Mmin' : 0.0,
-   'M1' : 1.e14,
-   'sigma_logM' : 0.596,
-   'alpha' : 1.0127,
-   'Mcut' : 1.19399e13,
-   'galDens' : 0.0002,
-  },
+  #{'name' : "LowRes", #BOSS: Manera et al. 2012, eq. 26
+  # 'Mmin' : 0.0,
+  # 'M1' : 1.e14,
+  # 'sigma_logM' : 0.596,
+  # 'alpha' : 1.0127,
+  # 'Mcut' : 1.19399e13,
+  # 'galDens' : 0.0002,
+  #},
 ]
 # END CONFIGURATION
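The particle filenames are built by substituting each entry of fileNums for the particleFileDummy flag in particleFileBase; with the values above that resolves to the example_simulation_z0.0.dat file added in this commit. A one-line illustration of the substitution (not the pipeline's own code):

particleFileBase = "example_simulation_NNNN.dat"
particleFileDummy = 'NNNN'
fileNums = ["z0.0"]

# Replace the dummy flag with each file number to get the actual filenames.
fileNames = [particleFileBase.replace(particleFileDummy, num) for num in fileNums]
print(fileNames)  # ['example_simulation_z0.0.dat']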

pipeline/generateCatalog.py (mode changed from Normal file to Executable file; no content changes)

View file

@@ -37,7 +37,7 @@ from netCDF4 import Dataset
 from void_python_tools.backend.classes import *
 import pickle
 import void_python_tools.apTools as vp
-import scipy.interpolate
+import scipy.interpolate as interpolate
 NetCDFFile = Dataset
 ncFloat = 'f8' # Double precision
@@ -374,7 +374,7 @@ def launchZobov(sample, binPath, zobovDir=None, logDir=None, continueRun=None,
       File.close()
     # load redshifts
-    partFile = sampleDir+"/zobov_slice_"+sample.fullName
+    partFile = zobovDir+"/zobov_slice_"+sample.fullName
     File = file(partFile)
     chk = np.fromfile(File, dtype=np.int32,count=1)
     Np = np.fromfile(File, dtype=np.int32,count=1)
@@ -414,16 +414,20 @@ def launchZobov(sample, binPath, zobovDir=None, logDir=None, continueRun=None,
       # build selection function interpolation
       selfuncData = np.genfromtxt(sample.selFunFile)
       selfunc = interpolate.interp1d(selfuncData[:,0], selfuncData[:,1],
-                                     kind='cubic')
+                                     kind='cubic', bounds_error=False,
+                                     fill_value=1.0)
       # re-weight and write
-      vols *= selfunc(redshifts)
+      ## TEST
+      #redshifts /= 10000.
+      for i in xrange(len(vols)):
+        vols[i] *= selfunc(redshifts[i])
-      volFile = zobovDir+"/vol_"+sampleName+".dat"
+      volFile = zobovDir+"/vol_weighted_"+sampleName+".dat"
       File = file(volFile, 'w')
-      chk.astype('np.int32').tofile(File)
-      vols.astype('np.float32').tofile(File)
+      numPartTot.astype(np.int32).tofile(File)
+      vols.astype(np.float32).tofile(File)
-      volFileToUse = zobovDir+"/vol_weighted"+sampleName+".dat"
+      volFileToUse = zobovDir+"/vol_weighted_"+sampleName+".dat"
     else:
       volFileToUse = zobovDir+"/vol_"+sampleName+".dat"
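The selection-function change above builds a cubic interpolator that now clamps out-of-range redshifts to a weight of 1.0 instead of raising a bounds error, then weights each cell volume by the selection function at its particle's redshift before writing vol_weighted_<sample>.dat. A standalone sketch of that reweighting with toy data (file handling omitted; VIDE's own I/O is as in the hunk above):

import numpy as np
from scipy import interpolate

# Toy selection function: columns are (redshift, relative selection density).
selfuncData = np.array([[0.00, 1.0], [0.05, 0.8], [0.10, 0.5], [0.15, 0.3]])

# Cubic interpolation; redshifts outside the table fall back to a weight of 1.0.
selfunc = interpolate.interp1d(selfuncData[:, 0], selfuncData[:, 1],
                               kind='cubic', bounds_error=False,
                               fill_value=1.0)

redshifts = np.array([0.02, 0.12, 0.20])  # the last one is outside the table
vols = np.ones_like(redshifts)            # stand-in for the Voronoi volumes

# Re-weight each volume by the selection function at its redshift.
vols *= selfunc(redshifts)
print(vols)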

View file

@@ -3,8 +3,8 @@ add_executable(voz1b1 voz1b1.c readfiles.c vozutil.c voz.h)
 target_link_libraries(voz1b1 ${QHULL_LIBRARY} ${MATH_LIB})
-add_executable(jozov jozov.c findrtop.c)
-target_link_libraries(jozov ${MATH_LIB})
+#add_executable(jozov jozov.c findrtop.c)
+#target_link_libraries(jozov ${MATH_LIB})
 add_executable(vozinit vozinit.c readfiles.c)
 target_link_libraries(vozinit ${MATH_LIB})

View file

@@ -173,10 +173,14 @@ int main(int argc, char *argv[]) {
   for (b[0]=0;b[0]<numdiv; b[0]++) {
    for (b[1] = 0; b[1] < numdiv; b[1]++) {
     for (b[2] = 0; b[2] < numdiv; b[2]++) {
+      //fprintf(scr,"%s/../c_tools/zobov2/voz1b1/voz1b1_2 %s %f %f %f %f %s %d %d %d %d %d %d %s&\n",
+      //        vobozPath,
+      //        posfile,border,boxsize,boxsize,boxsize,suffix,numdiv,numdiv, numdiv,b[0],b[1],b[2],
+      //        outDir);
       fprintf(scr,"%s/voz1b1 %s %f %f %s %d %d %d %d %s&\n",
-          vobozPath,
-          posfile,border,boxsize,suffix,numdiv,b[0],b[1],b[2],
-          outDir);
+              vobozPath,
+              posfile,border,boxsize,suffix,numdiv,b[0],b[1],b[2],
+              outDir);
       p++;
       if ((p == numThreads)) { fprintf(scr, "wait\n"); p = 0; }
     }