Mirror of https://bitbucket.org/cosmicvoids/vide_public.git, synced 2025-07-05 07:41:11 +00:00
added a-p analysis script to repo; some minor bug fixes to adjust tools to work with simulation data
commit 21431f9a31 (parent de6dbd3051)
6 changed files with 41 additions and 1500 deletions
@@ -1,18 +0,0 @@
-SET(QHULL_BASE_PATH CACHE PATH "Qhull base path")
-
-find_path(QHULL_INCLUDE_PATH qhull_a.h HINTS ${QHULL_BASE_PATH}/src/libqhull)
-find_path(QHULL_CPP_INCLUDE_PATH Qhull.h HINTS ${QHULL_BASE_PATH}/src/libqhullcpp)
-find_library(QHULL_LIBRARY qhull_p HINTS ${QHULL_BASE_PATH}/lib)
-find_library(QHULL_CPP_LIBRARY qhullcpp HINTS ${QHULL_BASE_PATH}/lib)
-find_library(QHULL_P_LIBRARY qhullstatic_p HINTS ${QHULL_BASE_PATH}/lib)
-
-if ((NOT QHULL_INCLUDE_PATH) OR (NOT QHULL_CPP_LIBRARY))
-  message(SEND_ERROR "Qhull library not found")
-endif((NOT QHULL_INCLUDE_PATH) OR (NOT QHULL_CPP_LIBRARY))
-
-set(QHULL_INCLUDES ${QHULL_INCLUDE_PATH} ${QHULL_INCLUDE_PATH}/.. ${QHULL_CPP_INCLUDE_PATH} ${QHULL_BASE_PATH}/src)
-set(QHULL_LIBRARIES ${QHULL_CPP_LIBRARY} ${QHULL_P_LIBRARY})
-
-add_definitions(-Dqh_QHpointer)
-
-mark_as_advanced(QHULL_INCLUDE_PATH QHULL_CPP_INCLUDE_PATH QHULL_LIBRARY QHULL_CPP_LIBRARY QHULL_P_LIBRARY)
@@ -391,6 +391,7 @@ void makeBox(SimuData *simu, double *efac, SimuData *&boxed, generateMock_info&
   f.add_att("range_y_max", ranges[1][1]);
   f.add_att("range_z_min", ranges[2][0]);
   f.add_att("range_z_max", ranges[2][1]);
+  f.add_att("mask_index", -1);
 
   NcDim *NumPart_dim = f.add_dim("numpart_dim", boxed->NumPart);
   NcVar *v = f.add_var("particle_ids", ncInt, NumPart_dim);
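For reference, a minimal sketch (not part of this commit) of how the new mask_index attribute could be read back alongside the box ranges written above. It assumes the netCDF4 Python bindings and a hypothetical output file name; the attribute and variable names come from the diff itself.

# Hedged sketch: read the global attributes and variables written by makeBox above.
# "boxed_particles.nc" is a hypothetical file name.
from netCDF4 import Dataset

f = Dataset("boxed_particles.nc", "r")
mask_index = f.getncattr("mask_index")        # -1 presumably marks a simulation box with no sky mask
z_min = f.getncattr("range_z_min")
z_max = f.getncattr("range_z_max")
particle_ids = f.variables["particle_ids"][:] # indexed by the "numpart_dim" dimension
f.close()
print(mask_index, z_min, z_max, len(particle_ids))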
@@ -23,9 +23,8 @@ if (len(sys.argv) > 1):
   filename = sys.argv[1]
   print " Loading parameters from", filename
   if not os.access(filename, os.F_OK):
-    print " Cannot find parameter file!"
+    print " Cannot find parameter file %s!" % filename
     exit(-1)
-  #parms = __import__(filename[:-3], globals(), locals(), ['*'])
   parms = imp.load_source("name", filename)
   globals().update(vars(parms))
 else:
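As a side note, a minimal sketch of the parameter-file pattern the script above relies on: the file passed on the command line is plain Python, loaded as a module via imp.load_source (as in the diff) and dumped into globals(). The file name below is hypothetical.

# Hedged sketch of loading a plain-Python parameter file and promoting its
# top-level names to globals, mirroring the pattern in the hunk above.
import imp
import os

filename = "example_params.py"              # hypothetical; the script takes sys.argv[1]
if not os.access(filename, os.F_OK):
    raise SystemExit(" Cannot find parameter file %s!" % filename)
parms = imp.load_source("name", filename)   # the module name is irrelevant here
globals().update(vars(parms))               # every top-level assignment in the file becomes a global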
@@ -196,8 +196,10 @@ newSample.addStack({zMin}, {zMax}, {minRadius}+18, {minRadius}+24, True, False)
 
   mySubvolume = "%d%d" % (iX, iY)
 
-  sampleName = getSampleName(prefix, base, redshift, useVel,
-                             iSlice=iSlice, iVol=mySubvolume)
+  sampleName = getSampleName(prefix, base, sliceMin, useVel,
+                             iSlice=-1, iVol=mySubvolume)
+  #sampleName = getSampleName(prefix, base, redshift, useVel,
+  #                           iSlice=iSlice, iVol=mySubvolume)
 
   scriptFile.write(sampleInfo.format(dataFile=dataFileName,
                                      dataFormat=dataFormat,
(One file's diff is suppressed because it is too large.)
@@ -150,6 +150,9 @@ def launchGenerate(sample, binPath, workDir=None, inputDataDir=None,
   else:
     print "already done!"
 
+  if os.access("comoving_distance.txt", os.F_OK):
+    os.system("mv %s %s" % ("comoving_distance.txt", zobovDir))
+
   if os.access(parmFile, os.F_OK):
     os.unlink(parmFile)
 
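The new comoving_distance.txt handling above shells out to mv. A portable sketch of the same step, with zobovDir as in the diff and everything else an assumption, could use shutil instead; this is an alternative, not what the commit does.

# Hedged sketch: move the comoving-distance table into the output directory
# without relying on a shell, equivalent in effect to the os.system("mv ...") above.
import os
import shutil

zobovDir = "./sample_output"   # hypothetical output directory
if os.access("comoving_distance.txt", os.F_OK):
    shutil.move("comoving_distance.txt", os.path.join(zobovDir, "comoving_distance.txt"))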
@@ -460,10 +463,11 @@ def launchStack(sample, stack, binPath, thisDataPortion=None, logDir=None,
     return
 
   # figure out box volume and average density
+  if sample.dataType == "observation":
     maskFile = sample.maskFile
     sulFunFile = sample.selFunFile
 
-    if not os.access(sample.selFunFile, os.F_OK) and not volumeLimited:
+    if not os.access(sample.selFunFile, os.F_OK) and not sample.volumeLimited:
       print " Cannot find", selFunFile, "!"
       exit(-1)
 
@@ -485,9 +489,11 @@ def launchStack(sample, stack, binPath, thisDataPortion=None, logDir=None,
 
     boxVol = props[0]
     nbar = props[1]
 
     if sample.volumeLimited:
       nbar = 1.0
+  else:
+    nbar = 1.0
+    boxVol = sample.boxLen**3
 
   summaryLine = runSuffix + " " + \
                 thisDataPortion + " " + \
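Taken together, the two launchStack hunks above select the density normalization by data type. A condensed sketch of that logic follows; the function wrapper is ours, not VIDE's API.

# Hedged sketch summarizing the branch added above: observation samples take
# box volume and mean density from the mask/selection-function properties,
# volume-limited samples force nbar to 1, and simulation boxes use the full cube.
def box_properties(sample, props):
    if sample.dataType == "observation":
        boxVol = props[0]
        nbar = props[1]
        if sample.volumeLimited:
            nbar = 1.0
    else:
        nbar = 1.0
        boxVol = sample.boxLen**3
    return boxVol, nbar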
@@ -1173,7 +1179,11 @@ def launchHubble(dataPortions=None, dataSampleList=None, logDir=None,
         voidDir = sample.zobovDir+"/stacks_" + runSuffix
         centersFile = voidDir+"/centers.txt"
         if os.access(centersFile, os.F_OK):
-          voidRedshifts = np.loadtxt(centersFile)[:,5]
+          voidRedshifts = np.loadtxt(centersFile)
+          if voidRedshifts.ndim > 1:
+            voidRedshifts = voidRedshifts[:,5]
+          else:
+            voidRedshifts = voidRedshifts[5]
           #fp.write(str(len(voidRedshifts))+" ")
           np.savetxt(fp, voidRedshifts[None])
         else:
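The ndim check added above guards against a quirk of np.loadtxt: a centers.txt holding a single void comes back as a 1-D array, so indexing column 5 as [:,5] would fail. A small self-contained illustration, with made-up sample values:

# Hedged sketch: np.loadtxt returns shape (ncols,) for a one-line file and
# (nrows, ncols) otherwise, which is exactly what the added branch handles.
import io
import numpy as np

one_void   = np.loadtxt(io.StringIO(u"1 2 3 4 5 0.42"))                  # shape (6,)
many_voids = np.loadtxt(io.StringIO(u"1 2 3 4 5 0.42\n6 7 8 9 10 0.53")) # shape (2, 6)

for table in (one_void, many_voids):
    if table.ndim > 1:
        redshifts = table[:, 5]   # column 5 holds the void redshift
    else:
        redshifts = table[5]      # single void: plain scalar
    print(redshifts)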