Mirror of https://bitbucket.org/cosmicvoids/vide_public.git (synced 2025-07-04 15:21:11 +00:00)

Commit f190968a81
Merge branch 'master' of https://bitbucket.org/cosmicvoids/vide_public

14 changed files with 556915 additions and 64 deletions
@@ -10,6 +10,7 @@ check_c_compiler_flag(-march=native SUPPORT_ARCH_NATIVE )
 find_library(MATH_LIB m)
+find_library(DL_LIB dl)

 macro(add_genopt _sourcelist _ggofile _basefile)

@@ -32,7 +32,7 @@ ENDIF (CAIROMMCONFIG_INCLUDE_PATH AND CAIROMM_INCLUDE_PATH AND SIGC_INCLUDE_PATH
 SET(ZOB_LIBS zobovTool
     ${COSMOTOOL_LIBRARY} ${GSL_LIBRARIES}
-    ${NETCDF_LIBRARIES})
+    ${NETCDF_LIBRARIES} ${DL_LIB})

 include_directories(
@@ -920,7 +920,22 @@ int main(int argc, char **argv) {
     }
   }
   voids.resize(iGood);
-  printf(" 4th filter: rejected %d below redshift boundaries\n", numNearZ);
+/*
+  iGood = 0;
+  for (iVoid = 0; iVoid < voids.size(); iVoid++) {
+    // just in case
+    if (args.isObservation_flag &&
+        voids[iVoid].redshift > args.zMax_arg) {
+      numNearZ++;
+    } else {
+      voids[iGood++] = voids[iVoid];
+    }
+  }
+  voids.resize(iGood);
+*/
+
+  printf(" 4th filter: rejected %d outside redshift boundaries\n", numNearZ);
+

   // take only top-level voids
   numAreParents = 0;
@@ -1156,11 +1171,18 @@ void outputVoids(string outputDir, string sampleName, string prefix,
             outVoid.numChildren,
             outVoid.centralDen);

+    double phi = atan2(outVoid.macrocenter[1]-boxLen[1]/2.,
+                       outVoid.macrocenter[0]-boxLen[0]/2.);
+    if (phi < 0) phi += 2.*M_PI;
+    double RA = phi * 180./M_PI;
+
+    double theta = acos((outVoid.macrocenter[2]-boxLen[2]/2.) /
+                        outVoid.redshiftInMpc);
+    double dec = (M_PI/2. - theta) * 180./M_PI;
+
     fprintf(fpSkyPositions, "%.2f %.2f %.5f %.2f %d\n",
-            atan((outVoid.macrocenter[1]-boxLen[1]/2.) /
-                 (outVoid.macrocenter[0]-boxLen[0]/2.)) * 180/M_PI + 180,
-            asin((outVoid.macrocenter[2]-boxLen[2]/2.) /
-                 outVoid.redshiftInMpc) * 180/M_PI,
+            RA,
+            dec,
             outVoid.redshift,
             outVoid.radius,
             outVoid.voidID);
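For reference, the same box-centre-to-sky conversion as a minimal Python sketch (variable names follow the hunk above; the helper function itself is illustrative and not part of VIDE). The switch to atan2/acos keeps the correct quadrant for RA, and Dec follows from the polar angle.

import math

def sky_position(macrocenter, boxLen, redshiftInMpc):
    # azimuthal angle about the box centre, wrapped into [0, 2*pi)
    phi = math.atan2(macrocenter[1] - boxLen[1] / 2.0,
                     macrocenter[0] - boxLen[0] / 2.0)
    if phi < 0:
        phi += 2.0 * math.pi
    RA = phi * 180.0 / math.pi

    # polar angle from the z-axis; redshiftInMpc is used as the radial distance
    theta = math.acos((macrocenter[2] - boxLen[2] / 2.0) / redshiftInMpc)
    dec = (math.pi / 2.0 - theta) * 180.0 / math.pi
    return RA, dec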
examples/example_observation.dat (executable file, 164647 additions; file diff suppressed because it is too large)
examples/example_observation_mask.fits (executable binary file, not shown)
examples/example_simulation_z0.0.dat (executable file, 392171 additions; file diff suppressed because it is too large)
external/cosmotool/src/h5_readFlash.hpp (vendored, 3 changed lines)
@@ -14,7 +14,8 @@ This file has been developped by P. M. Sutter.
 #include <stdio.h>
 #include <stdlib.h>
 #include <stddef.h>
-#include <string.h>
+#include <string>
+#include <cstring>
 #include "hdf5_flash.h"
 #include "H5Cpp.h"

external/external_build.cmake (vendored, 7 changed lines)
@@ -27,7 +27,7 @@ IF(INTERNAL_GENGETOPT)
 ENDIF(INTERNAL_GENGETOPT)

 IF(INTERNAL_HDF5)
-SET(HDF5_URL "http://www.hdfgroup.org/ftp/HDF5/releases/hdf5-1.8.9/src/hdf5-1.8.9.tar.gz" CACHE STRING "URL to download HDF5 from")
+SET(HDF5_URL "http://www.hdfgroup.org/ftp/HDF5/releases/hdf5-1.8.15/src/hdf5-1.8.15.tar.gz" CACHE STRING "URL to download HDF5 from")
 mark_as_advanced(HDF5_URL)
 ENDIF(INTERNAL_HDF5)

@@ -121,6 +121,7 @@ else(INTERNAL_HDF5)
 find_library(HDF5HL_LIBRARY hdf5_hl)
 endif (INTERNAL_HDF5)
 SET(CONFIGURE_CPP_FLAGS "${CONFIGURE_CPP_FLAGS} -I${HDF5_INCLUDE_PATH}")
+SET(CONFIGURE_LIBS_FLAGS "${DL_LIB}")
 mark_as_advanced(HDF5_INCLUDE_PATH HDF5_LIBRARY HDF5_CPP_LIBRARY HDF5HL_LIBRARY HDF5HL_CPP_LIBRARY)

 ###############
@@ -133,7 +134,7 @@ if (INTERNAL_NETCDF)
 SET(NETCDF_BIN_DIR ${CMAKE_BINARY_DIR}/ext_build/netcdf)
 SET(CONFIGURE_CPP_FLAGS "${CONFIGURE_CPP_FLAGS} -I${NETCDF_BIN_DIR}/include")
 SET(CONFIGURE_LDFLAGS "${CONFIGURE_LDFLAGS} -L${NETCDF_BIN_DIR}/lib")
-SET(EXTRA_NC_FLAGS CPPFLAGS=${CONFIGURE_CPP_FLAGS} LDFLAGS=${CONFIGURE_LDFLAGS})
+SET(EXTRA_NC_FLAGS CPPFLAGS=${CONFIGURE_CPP_FLAGS} LDFLAGS=${CONFIGURE_LDFLAGS} LIBS=${CONFIGURE_LIBS_FLAGS})
 ExternalProject_Add(netcdf
   DEPENDS ${hdf5_built}
   PREFIX ${BUILD_PREFIX}/netcdf-prefix
@@ -239,7 +240,7 @@ ExternalProject_Add(cosmotool
   -DGSLCBLAS_LIBRARY=${GSLCBLAS_LIBRARY}
   -DNETCDF_LIBRARY=${NETCDF_LIBRARY}
   -DNETCDFCPP_LIBRARY=${NETCDFCPP_LIBRARY}
   -DENABLE_SHARP=OFF
 )
 SET(COSMOTOOL_LIBRARY ${CMAKE_BINARY_DIR}/ext_build/cosmotool/lib/libCosmoTool.a)
 set(COSMOTOOL_INCLUDE_PATH ${CMAKE_BINARY_DIR}/ext_build/cosmotool/include)

@@ -33,21 +33,17 @@ continueRun = False
 startCatalogStage = 1
 endCatalogStage = 3

-# a global name to give
-#catalogName = "lcdm"
-
-
 # directory for input data files
-inputDataDir = os.getenv("HOME")+"/workspace/Voids/catalogs/nyuvagc/"
+inputDataDir = os.getenv("PWD")+"/../examples/"

 # void catalog output directory
-workDir = os.getenv("HOME")+"/workspace/Voids/sdss_dr7LCDM/"
+workDir = os.getenv("PWD")+"/../examples/example_observation/"

 # output directory for log files
-logDir = os.getenv("PWD")+"/../logs/sdss_dr7LCDM"
+logDir = os.getenv("PWD")+"/../logs/example_observation/"

 # output directory for figures
-figDir = os.getenv("PWD")+"/../figs/sdss_dr7LCDM"
+figDir = os.getenv("PWD")+"/../figs/example_observation/"

 # you need to set these manually: point to ZOBOV and C_TOOLS in VIDE directory
 ZOBOV_PATH = os.getenv("PWD")+"/../zobov/"
@@ -68,38 +64,39 @@ dataSampleList = []
 # define your volume-limited samples
 newSample = Sample(
 # path to galaxy file is inputDataDir+dataFile
-dataFile = "filename.dat"
+dataFile = "example_observation.dat",

 # full name for this sample
-fullName = "lss.dr72dim1.dat",
+fullName = "example_observation",

 # a convenient nickname
-nickName = "dim1",
+nickName = "exobs",

 # don't change this
 dataType = "observation",

-# assume volume-limites?
+# assume sample is volume-limited?
 volumeLimited = True,

 # HEALpix mask file
-maskFile = inputDataDir+"/healpix/rast_window_512.fits",
+maskFile = inputDataDir+"/example_observation_mask.fits",

 # radial selection function (if not volume limited)
 selFunFile = None,

 # max and min redshifts of galaxies in your sample
-zBoundary = (0.0, 0.05),
+zBoundary = (0.0, 0.15),

 # max and min redshifts where you want to find voids
-zRange = (0.0, 0.05),
+zRange = (0.1, 0.15),

-# leave this at -1 for mean particle separation, or
-# specify your own in Mpc/h
+# leave this at -1 for mean particle separation,
+# or specify your own in Mpc/h
 minVoidRadius = -1,

 # density of mock particles in cubic Mpc/h
-fakeDensity = 0.01,
+# (make this as high as you can afford)
+fakeDensity = 0.05,

 )
 dataSampleList.append(newSample)

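Collecting the new-side lines of this hunk, the example sample definition now reads roughly as below. This is a sketch assembled from the diff, assuming the usual preamble of the configuration file (imports, inputDataDir, dataSampleList and the Sample class) is unchanged.

newSample = Sample(
    # path to galaxy file is inputDataDir+dataFile
    dataFile = "example_observation.dat",
    fullName = "example_observation",
    nickName = "exobs",
    dataType = "observation",        # don't change this
    volumeLimited = True,
    maskFile = inputDataDir + "/example_observation_mask.fits",
    selFunFile = None,               # radial selection function (if not volume limited)
    zBoundary = (0.0, 0.15),         # max and min redshifts of galaxies in the sample
    zRange = (0.1, 0.15),            # max and min redshifts where voids are found
    minVoidRadius = -1,              # -1 = mean particle separation
    fakeDensity = 0.05,              # density of mock particles in cubic Mpc/h
)
dataSampleList.append(newSample)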
@@ -33,26 +33,26 @@ continueRun = False
 startCatalogStage = 1
 endCatalogStage = 3

-# directory for the input simulation/observational particle files
-catalogDir = os.getenv("HOME")+"/workspace/Voids/catalogs/mergertree1024/"
+# directory for the input simulation files
+catalogDir = os.getenv("PWD")+"/../examples/"

 # void catalog output directory
-voidOutputDir = os.getenv("HOME")+"/workspace/Voids/sim/"
+voidOutputDir = os.getenv("PWD")+"/../examples/example_simulation/"

 # output directory for log files
-logDir = os.getenv("PWD")+"/../logs/sim/"
+logDir = os.getenv("PWD")+"/../logs/example_simulation/"

 # output directory for figures
-figDir = os.getenv("PWD")+"/../figs/sim/"
+figDir = os.getenv("PWD")+"/../figs/example_simulation/"

 # where to place the pipeline scripts
-scriptDir = os.getenv("PWD")+"/sim/"
+scriptDir = os.getenv("PWD")+"/example_simulation/"

 # don't change
 dataType = "simulation"

 # available formats for simulation: gadget, sdf, multidark
-dataFormat = "sdf"
+dataFormat = "multidark"

 # units of position in Mpc/h
 dataUnit = 1
@@ -84,19 +84,19 @@ numSubvolumes = 1
 # Particles

 # common filename of particle files
-particleFileBase = "mf_4s_1G_1k_NNNNN"
+particleFileBase = "example_simulation_NNNN.dat"

 # this flag will be replaced by values in fileNums list below
-particleFileDummy = 'NNNNN'
+particleFileDummy = 'NNNN'

 # list of file numbers for the particle files
-fileNums = ["1.000"]
+fileNums = ["z0.0"]

 # redshift of each file in the above fileNums list
 redshifts = ["0.0"]

 # list of desired subsamples - these are in unts of h Mpc^-3!
-subSamples = [1.0, 0.5]
+subSamples = [1.0]

 # if True, do the subsampling in preparation (available for sdf and multidark)
 doSubSamplingInPrep = False
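The new particle file name follows from the dummy-substitution convention described in the comments above: particleFileDummy is replaced by each entry of fileNums. A short illustrative Python sketch (the substitution line is an assumption about how the pipeline expands the name; the values come from the hunk):

particleFileBase = "example_simulation_NNNN.dat"
particleFileDummy = 'NNNN'
fileNums = ["z0.0"]

for num in fileNums:
    # dummy replaced by the file number, matching examples/example_simulation_z0.0.dat
    print(particleFileBase.replace(particleFileDummy, num))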
@@ -112,14 +112,17 @@ shiftSimZ = False
 # Halos

 # common filename of halo files, leave blank to ignore halos
-haloFileBase = "mf_4s_1G_1k_bgc2_NNNNN.sdf"
+haloFileBase = ""
+#haloFileBase = "mf_4s_1G_1k_bgc2_NNNNN.sdf"

 # this flag will be replaced by values in fileNums list above
-haloFileDummy = 'NNNNN'
+haloFileDummy = ''
+#haloFileDummy = 'NNNNN'

 # minimum halo mass cuts to apply for the halo catalog
 # use "none" to get all halos
-minHaloMasses = ["none", 1.2e13]
+minHaloMasses = []
+#minHaloMasses = ["none", 1.2e13]

 # locations of data in the halo catalog
 haloFileMCol = 6 # mass
@@ -147,14 +150,14 @@ hubble = 0.6962 # h_0

 # each of the HOD sets will be applied to each halo catalog defined above
 hodParmList = [
-  {'name' : "LowRes", #BOSS: Manera et al. 2012, eq. 26
-   'Mmin' : 0.0,
-   'M1' : 1.e14,
-   'sigma_logM' : 0.596,
-   'alpha' : 1.0127,
-   'Mcut' : 1.19399e13,
-   'galDens' : 0.0002,
-  },
+  #{'name' : "LowRes", #BOSS: Manera et al. 2012, eq. 26
+  # 'Mmin' : 0.0,
+  # 'M1' : 1.e14,
+  # 'sigma_logM' : 0.596,
+  # 'alpha' : 1.0127,
+  # 'Mcut' : 1.19399e13,
+  # 'galDens' : 0.0002,
+  #},
 ]

 # END CONFIGURATION
pipeline/generateCatalog.py (0 added/removed lines, mode changed: Normal file → Executable file)
@@ -37,7 +37,7 @@ from netCDF4 import Dataset
 from void_python_tools.backend.classes import *
 import pickle
 import void_python_tools.apTools as vp
-import scipy.interpolate
+import scipy.interpolate as interpolate

 NetCDFFile = Dataset
 ncFloat = 'f8' # Double precision
@@ -374,7 +374,7 @@ def launchZobov(sample, binPath, zobovDir=None, logDir=None, continueRun=None,
   File.close()

   # load redshifts
-  partFile = sampleDir+"/zobov_slice_"+sample.fullName
+  partFile = zobovDir+"/zobov_slice_"+sample.fullName
   File = file(partFile)
   chk = np.fromfile(File, dtype=np.int32,count=1)
   Np = np.fromfile(File, dtype=np.int32,count=1)
@@ -414,16 +414,20 @@
     # build selection function interpolation
     selfuncData = np.genfromtxt(sample.selFunFile)
     selfunc = interpolate.interp1d(selfuncData[:,0], selfuncData[:,1],
-                                   kind='cubic')
+                                   kind='cubic', bounds_error=False,
+                                   fill_value=1.0)
     # re-weight and write
-    vols *= selfunc(redshifts)
-    volFile = zobovDir+"/vol_"+sampleName+".dat"
+    ## TEST
+    #redshifts /= 10000.
+    for i in xrange(len(vols)):
+      vols[i] *= selfunc(redshifts[i])
+
+    volFile = zobovDir+"/vol_weighted_"+sampleName+".dat"
     File = file(volFile, 'w')
-    chk.astype('np.int32').tofile(File)
-    vols.astype('np.float32').tofile(File)
+    numPartTot.astype(np.int32).tofile(File)
+    vols.astype(np.float32).tofile(File)

-    volFileToUse = zobovDir+"/vol_weighted"+sampleName+".dat"
+    volFileToUse = zobovDir+"/vol_weighted_"+sampleName+".dat"
   else:
     volFileToUse = zobovDir+"/vol_"+sampleName+".dat"

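The interpolation change above is easiest to read in isolation: bounds_error=False with fill_value=1.0 leaves volumes unweighted outside the tabulated selection-function range, and each particle volume is scaled by the selection function at its redshift (the explicit loop in the diff is equivalent to the vectorised form below). A minimal Python sketch, assuming selFunFile is a two-column table of redshift and selection value:

import numpy as np
from scipy import interpolate

def reweight_volumes(vols, redshifts, selFunFile):
    # tabulated selection function: column 0 = redshift, column 1 = value
    selfuncData = np.genfromtxt(selFunFile)
    selfunc = interpolate.interp1d(selfuncData[:, 0], selfuncData[:, 1],
                                   kind='cubic', bounds_error=False,
                                   fill_value=1.0)
    # weight each volume by the selection function at its redshift
    return vols * selfunc(redshifts)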
@@ -3,8 +3,8 @@ add_executable(voz1b1 voz1b1.c readfiles.c vozutil.c voz.h)
 target_link_libraries(voz1b1 ${QHULL_LIBRARY} ${MATH_LIB})


-add_executable(jozov jozov.c findrtop.c)
-target_link_libraries(jozov ${MATH_LIB})
+#add_executable(jozov jozov.c findrtop.c)
+#target_link_libraries(jozov ${MATH_LIB})

 add_executable(vozinit vozinit.c readfiles.c)
 target_link_libraries(vozinit ${MATH_LIB})
@@ -173,10 +173,14 @@ int main(int argc, char *argv[]) {
   for (b[0]=0;b[0]<numdiv; b[0]++) {
    for (b[1] = 0; b[1] < numdiv; b[1]++) {
     for (b[2] = 0; b[2] < numdiv; b[2]++) {
+      //fprintf(scr,"%s/../c_tools/zobov2/voz1b1/voz1b1_2 %s %f %f %f %f %s %d %d %d %d %d %d %s&\n",
+      //  vobozPath,
+      //  posfile,border,boxsize,boxsize,boxsize,suffix,numdiv,numdiv, numdiv,b[0],b[1],b[2],
+      //  outDir);
      fprintf(scr,"%s/voz1b1 %s %f %f %s %d %d %d %d %s&\n",
              vobozPath,
              posfile,border,boxsize,suffix,numdiv,b[0],b[1],b[2],
              outDir);
      p++;
      if ((p == numThreads)) { fprintf(scr, "wait\n"); p = 0; }
     }