Mirror of https://bitbucket.org/cosmicvoids/vide_public.git (synced 2025-07-05 07:41:11 +00:00)

Commit 3f5a767b48 (parent b8ad7f8752): significant improvements and streamlining of a-p analysis

4 changed files with 159 additions and 116 deletions
@@ -230,7 +230,7 @@ newSample = Sample(dataFile = "{dataFile}",
                    profileBinSize = "auto",
                    includeInHubble = True,
                    partOfCombo = False,
-                   {autoStack},
+                   {autoStack}
                    numAPSlices = {numAPSlices},
                    isCombo = False,
                    boxLen = {boxLen},
@@ -360,7 +360,7 @@ newSample.addStack({zMin}, {zMax}, 2*{minRadius}+18, 2*{minRadius}+24, True, Fal
 
 autoStack = ""
 if "autoNumInStack" in stackMode or "autoPartInStack" in stackMode:
-  autoStack = stackMode
+  autoStack = stackMode+","
 scriptFile.write(sampleInfo.format(dataFile=dataFileName,
                                    dataFormat=dataFormat,
                                    dataUnit=dataUnit,
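The trailing comma moves out of the template and into `autoStack` itself: when no auto-stacking mode is requested, `autoStack` stays empty, and the old `{autoStack},` placeholder would leave a stray comma in the generated sample script. A minimal sketch of the effect; the template fragment and the `stackMode` value below are illustrative, not the real `sampleInfo` string:

```python
# Illustrative fragment only; the real sampleInfo template is much longer.
old_tpl = "partOfCombo = False,\n{autoStack},\nnumAPSlices = {numAPSlices},"
new_tpl = "partOfCombo = False,\n{autoStack}\nnumAPSlices = {numAPSlices},"

autoStack = ""  # no auto-stacking requested
print(old_tpl.format(autoStack=autoStack, numAPSlices=1))  # leaves a bare "," line
print(new_tpl.format(autoStack=autoStack, numAPSlices=1))  # leaves only a blank line

stackMode = "autoNumInStack=200"   # hypothetical mode string
autoStack = stackMode + ","        # the comma now travels with the value
print(new_tpl.format(autoStack=autoStack, numAPSlices=1))
```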
@@ -22,6 +22,7 @@
 
 import numpy as np
 import scipy.integrate as integrate
+from void_python_tools.backend import *
 
 __all__=['expansion', 'angularDiameter', 'expectedStretch', 'aveStretch', 'aveExpansion', 'aveStretchCone', 'aveWeightedStretch']
 
@@ -48,19 +49,12 @@ def expectedStretch(z, Om = 0.27, Ot = 1.0, w0 = -1.0, wa = 0.0):
   da = angularDiameter(z, Om=Om, Ot=Ot, w0=w0, wa=wa)
   return ez*da/z
 
-
-# returns average expected void stretch for a given redshift range
-def aveStretch(zStart, zEnd, Om = 0.27, Ot = 1.0, w0 = -1.0, wa = 0.0):
-  if zStart == 0.0: zStart = 1.e-6
-  ave = integrate.quad(expectedStretch, zStart, zEnd, args=(Om, Ot, w0, wa))[0]
-  ave /= (zEnd-zStart)
-  return ave
-
 # -----------------------------------------------------------------------------
 # returns average expected void stretch for a given redshift range
 # assuming a cone
 def aveStretchCone(zStart, zEnd, skyFrac = 0.19, Om = 0.27, Ot = 1.0,
                    w0 = -1.0, wa = 0.0):
+  #print "assuming observation!", skyFrac
   if zStart == 0.0: zStart = 1.e-6
 
   h1 = zStart
@@ -83,6 +77,39 @@ def aveStretchCone(zStart, zEnd, skyFrac = 0.19, Om = 0.27, Ot = 1.0,
 
   return (aveHigh-aveLow)/(volumeHigh-volumeLow)
 
+# returns average expected void stretch for a given redshift range
+def aveStretch(sample, zStart, zEnd,
+               Om = 0.27, Ot = 1.0, w0 = -1.0, wa = 0.0):
+  if zStart == 0.0: zStart = 1.e-6
+
+  if sample.dataType == "observation":
+    stretch = aveStretchCone(zStart, zEnd,
+                             skyFrac=sample.skyFraction, Om=Om, Ot=Ot,
+                             w0=w0, wa=wa)
+  else:
+    ave = integrate.quad(expectedStretch, zStart, zEnd,
+                         args=(Om, Ot, w0, wa))[0]
+    ave /= (zEnd-zStart)
+    stretch = ave
+
+  # if in comoving space, calculate stretch for fiducial cosmology
+  # and take relative amount
+  if not sample.useLightCone or sample.useComoving:
+    if sample.dataType == "observation":
+      stretchFid = aveStretchCone(zStart, zEnd,
+                                  skyFrac=sample.skyFraction, Om=sample.omegaM, Ot=Ot,
+                                  w0=w0, wa=wa)
+    else:
+      ave = integrate.quad(expectedStretch, zStart, zEnd,
+                           args=(sample.omegaM, Ot, w0, wa))[0]
+      ave /= (zEnd-zStart)
+      stretchFid = ave
+
+    stretch = stretchFid/stretch
+
+  return stretch
+
+
 # -----------------------------------------------------------------------------
 # returns average expected void stretch for a given redshift range
 # weighted by an actual void distribution
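A hedged usage sketch of the new `aveStretch` signature: it now takes the sample object so it can pick the cone or full-sky integral from `dataType`, and, for comoving samples, it returns the fiducial-to-trial stretch ratio rather than the raw stretch. The stand-in sample below carries only the attributes the function reads, and the snippet assumes the module's functions (`aveStretch`, `expectedStretch`, `angularDiameter`, `aveStretchCone`) are importable:

```python
# Illustrative only: a stand-in object with just the attributes aveStretch consults.
class StubSample:
    dataType = "simulation"   # anything other than "observation" uses the quad() branch
    useLightCone = False      # comoving coordinates -> normalize by the fiducial cosmology
    useComoving = True
    omegaM = 0.27             # fiducial matter density assumed for this sample
    skyFraction = 0.19        # only read on the "observation" branch

# The ratio is 1 when the trial Om equals the sample's fiducial omegaM, and drifts
# away from 1 as the trial cosmology is varied -- the A-P signal being fit for.
s_fid   = aveStretch(StubSample(), 0.0, 0.1, Om=0.27)
s_trial = aveStretch(StubSample(), 0.0, 0.1, Om=0.4)
```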
@@ -71,7 +71,7 @@ class Sample:
   zBoundaryMpc = (0., 300)
   zRange = (0.0, 0.1)
   omegaM = 0.27
-  minVoidRadius = 5
+  minVoidRadius = -1
   fakeDensity = 0.01
   profileBinSize = 2 # Mpc
   autoNumInStack = -1 # set to >0 to automatically generate stacks of size N
@@ -96,7 +96,7 @@ class Sample:
   def __init__(self, dataFile="", fullName="", dataUnit=1,
                nickName="", maskFile="", selFunFile="",
                zBoundary=(), zRange=(), zBoundaryMpc=(),
-               minVoidRadius=0, fakeDensity=0.01, volumeLimited=True,
+               minVoidRadius=-1, fakeDensity=0.01, volumeLimited=True,
                numAPSlices=1,
                includeInHubble=True, partOfCombo=False, isCombo=False,
                comboList=(), profileBinSize=2.0, skyFraction=0.19,
@@ -438,6 +438,18 @@ def launchPrune(sample, binPath,
   else:
     useComovingFlag = ""
 
+  if sample.minVoidRadius == -1:
+    minRadius = -1
+    for line in open(zobovDir+"/sample_info.txt"):
+      if "Estimated mean tracer separation" in line:
+        minRadius = float(line.split()[5])
+        break
+    if minRadius == -1:
+      print "Could not grab mean tracer separation!?"
+      exit(-1)
+  else:
+    minRadius = sample.minVoidRadius
+
   if not (continueRun and (jobSuccessful(logFile, "NetCDF: Not a valid ID\n") \
           or jobSuccessful(logFile, "Done!\n"))):
     cmd = binPath
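With `minVoidRadius = -1` (the new default in the `Sample` class above), `launchPrune` now derives the radius cut from the mean tracer separation recorded in `sample_info.txt`. The parser takes whitespace token 5 of the matching line, so the value must be the sixth field; the exact wording of the line below is an assumption for illustration only:

```python
# Hypothetical sample_info.txt line; only the leading phrase and the token position matter.
line = "Estimated mean tracer separation (Mpc/h): 4.2"

minRadius = -1
if "Estimated mean tracer separation" in line:
    minRadius = float(line.split()[5])   # token 5 -> 4.2
assert minRadius == 4.2
```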
@@ -454,7 +466,7 @@ def launchPrune(sample, binPath,
     cmd += " --maxCentralDen=" + str(maxDen)
     cmd += " --zMin=" + str(sample.zRange[0])
     cmd += " --zMax=" + str(sample.zRange[1])
-    cmd += " --rMin=" + str(sample.minVoidRadius)
+    cmd += " --rMin=" + str(minRadius)
     cmd += " --numVoids=" + str(numVoids)
     cmd += observationLine
     cmd += periodicLine
@@ -595,8 +607,6 @@ def launchStack(sample, stack, binPath, thisDataPortion=None, logDir=None,
   if useComoving or not useLightCone:
     zMin = LIGHT_SPEED/100.*vp.angularDiameter(zMin, Om=omegaM)
     zMax = LIGHT_SPEED/100.*vp.angularDiameter(zMax, Om=omegaM)
-    print min(sample.zRange[1],stack.zMax)*LIGHT_SPEED/100., zMax, sample.zBoundaryMpc[1]
-    print max(sample.zRange[0],stack.zMin)*LIGHT_SPEED/100., zMin, sample.zBoundaryMpc[0]
   else:
     zMin *= LIGHT_SPEED/100.
     zMax *= LIGHT_SPEED/100.
@@ -608,6 +618,8 @@ def launchStack(sample, stack, binPath, thisDataPortion=None, logDir=None,
 
   Rextracut = stack.rMin*3 + 1
   Rcircular = stack.rMin*3 + 2
+  #Rextracut = stack.rMax*3 + 1
+  #Rcircular = stack.rMax*3 + 2
 
   if dataType == "observation":
     maskIndex = open(zobovDir+"/mask_index.txt", "r").read()
@@ -765,7 +777,7 @@ def launchStack(sample, stack, binPath, thisDataPortion=None, logDir=None,
       line = line.split()
       numVoids = line[1]
 
-    if "Number of particles =" in line:
+    if "Final zobov stack size" in line:
       line = line.split()
       numParticles = line[4]
 
@@ -793,51 +805,6 @@ def launchStack(sample, stack, binPath, thisDataPortion=None, logDir=None,
     fp.close()
     return
 
-  # figure out box volume and average density
-  if sample.dataType == "observation":
-    maskFile = sample.maskFile
-    sulFunFile = sample.selFunFile
-
-    if not os.access(sample.selFunFile, os.F_OK) and not sample.volumeLimited:
-      print " Cannot find", selFunFile, "!"
-      exit(-1)
-
-    sys.stdout = open(logFile, 'a')
-    #sys.stderr = open(logFile, 'a')
-    zMin = sample.zRange[0]
-    zMax = sample.zRange[1]
-    if not sample.volumeLimited:
-      props = vp.getSurveyProps(maskFile, stack.zMin,
-                                stack.zMax, zMin, zMax, "all",
-                                selectionFuncFile=sample.selFunFile)
-    else:
-      zMinForVol = sample.zBoundary[0]
-      zMaxForVol = sample.zBoundary[1]
-      props = vp.getSurveyProps(maskFile, zMinForVol,
-                                zMaxForVol, zMin, zMax, "all")
-      props = ((1.0,1.0))
-
-    sys.stdout = sys.__stdout__
-    #sys.stderr = sys.__stderr__
-
-    boxVol = props[0]
-    nbar = props[1]
-    if sample.volumeLimited:
-      nbar = 1.0
-  else:
-    nbar = 1.0
-
-    iX = float(sample.mySubvolume[0])
-    iY = float(sample.mySubvolume[1])
-    xMin = iX/sample.numSubvolumes * sample.boxLen
-    yMin = iY/sample.numSubvolumes * sample.boxLen
-    xMax = (iX+1)/sample.numSubvolumes * sample.boxLen
-    yMax = (iY+1)/sample.numSubvolumes * sample.boxLen
-    zMin = sample.zBoundaryMpc[0]
-    zMax = sample.zBoundaryMpc[1]
-
-    boxVol = (xMax-xMin)*(yMax-yMin)*(zMax-zMin)
-
   summaryLine = runSuffix + " " + \
                 thisDataPortion + " " + \
                 str(stack.zMin) + " " + \
@@ -847,8 +814,14 @@ def launchStack(sample, stack, binPath, thisDataPortion=None, logDir=None,
   if summaryFile != None:
     open(summaryFile, "a").write(summaryLine+"\n")
 
-  # move local outputs to storage directory
+  # write out individual normalizations for each void
   if os.access("tree.data", os.F_OK):
+    for line in open(zobovDir+"/sample_info.txt"):
+      if "Number of real" in line:
+        numParticles = line.split()[-1]
+      if "Estimated volume" in line:
+        boxVol = line.split()[-1]
+    nbar = 1.0
     normalization = float(numParticles)/float(boxVol)/nbar
     dataTemp = file("centers.txt", "r").readlines()
     fp = file("normalizations.txt", "w")
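The per-sample normalization written to `normalizations.txt` is now taken from `sample_info.txt` rather than recomputed from the survey mask or box geometry (the block removed in the previous hunk). A worked example with illustrative numbers, not taken from any real run:

```python
# Illustrative values only.
numParticles = 1.0e6   # from the "Number of real ..." line of sample_info.txt
boxVol = 2.5e7         # from the "Estimated volume ..." line, in (Mpc/h)^3
nbar = 1.0             # fixed to unity in the new code path

normalization = float(numParticles)/float(boxVol)/nbar   # 0.04 tracers per (Mpc/h)^3
```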
@@ -1144,7 +1117,9 @@ def launchProfile(sample, stack, voidDir=None, logFile=None, continueRun=None,
   numParticles = open(voidDir+"/num_particles.txt", "r").readline()
 
   Rextracut = stack.rMin*3 + 1
-  Rcircular = stack.rMin*3 + 2
+  Rcircular = stack.rMin*2 + 2
+  #Rextracut = stack.rMax*3 + 1
+  #Rcircular = stack.rMax*3 + 2
 
   if not (continueRun and jobSuccessful(logFile, "Done!\n")):
     profileParmFile = voidDir+"/params.txt"
@@ -1163,7 +1138,9 @@ def launchProfile(sample, stack, voidDir=None, logFile=None, continueRun=None,
     # return
 
     if sample.profileBinSize == "auto":
-      density = 0.5 * 50 / Rcircular / 2
+      #density = 0.5 * 50 / Rcircular / 2
+      density = stack.rMax / 5.
+      #density = stack.rMax / 10.
     else:
       density = sample.profileBinSize
 
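The automatic choice of the value passed as the profile bin size changes from a fixed, very fine spacing tied to `Rcircular` to one fifth of the stack's maximum void radius. A worked comparison with hypothetical stack radii:

```python
# Hypothetical stack: rMin = 10, rMax = 15 (Mpc/h).
rMin, rMax = 10., 15.
Rcircular = rMin*2 + 2                  # 22, as set earlier in launchProfile
old_binSize = 0.5 * 50 / Rcircular / 2  # ~0.57 Mpc/h
new_binSize = rMax / 5.                 # 3.0 Mpc/h, roughly five bins per stack radius
```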
@@ -1176,7 +1153,8 @@ def launchProfile(sample, stack, voidDir=None, logFile=None, continueRun=None,
     #sys.stderr = sys.__stderr__
 
     if jobSuccessful(logFile, "Done!\n"):
-      print jobString, "Profiling stacks done, (N_v=", numVoids,")"
+      print jobString, "Profiling stacks done (N_v=%s, N_p=%s)" % \
+            (numVoids,numParticles)
     else:
       print jobString, "Profiling stacks FAILED!"
       exit(-1)
@@ -1203,13 +1181,21 @@ def launchFit(sample, stack, useComoving=None,
     return
 
   numVoids = int(open(voidDir+"/num_voids.txt", "r").readline())
-  if numVoids < 10:
+  if numVoids < 1:
     print jobString, "Fitting not enough voids to fit; skipping!"
     fp = open(voidDir+"/NOFIT", "w")
     fp.write("not enough voids: %d \n" % numVoids)
     fp.close()
     return
 
+  numPart = int(open(voidDir+"/num_particles.txt", "r").readline())
+  if numPart < 1:
+    print jobString, "Fitting not enough particles to fit; skipping!"
+    fp = open(voidDir+"/NOFIT", "w")
+    fp.write("not enough particles: %d \n" % numPart)
+    fp.close()
+    return
+
   #if stack.zMin < sample.zRange[0] or stack.zMax > sample.zRange[1]:
   # print "outside sample redshift range; skipping!"
   # return
@@ -1243,23 +1229,28 @@ def launchFit(sample, stack, useComoving=None,
   while badChain:
     ntries += 1
 
-    #Rexpect = (stack.rMin+stack.rMax)/2
-    #Rtruncate = stack.rMin*3. + 1 # TEST
-    #if sample.dataType == "observation":
-    # ret,fits,args = vp.fit_ellipticity(voidDir,Rbase=Rexpect,
-    # Niter=300000,
-    # Nburn=100000,
-    # Rextracut=Rtruncate)
-    #else:
-    # ret,fits,args = vp.fit_ellipticity(voidDir,Rbase=Rexpect,
-    # Niter=300000,
-    # Nburn=100000,
-    # Rextracut=Rtruncate)
-    #badChain = (args[0][0] > 0.5 or args[0][1] > 2.*stack.rMax or \
-    # args[0][2] > 2.*stack.rMax) and \
-    # (ntries < maxtries)
-    ret,fits,args = vp.compute_inertia(voidDir, stack.rMax, mode="2d", nBootstraps=500, rMaxInertia=0.7)
-    badChain = False
+    Rexpect = (stack.rMin+stack.rMax)/2
+    #Rtruncate = stack.rMax*2. # TEST
+    #Rtruncate = stack.rMax*3. # TEST
+    Rtruncate = stack.rMin*3. + 1 # TEST
+    if sample.dataType == "observation":
+      ret,fits,args = vp.fit_ellipticity(voidDir,Rbase=Rexpect,
+                                         nbar_init=1.0,
+                                         Niter=300000,
+                                         Nburn=100000,
+                                         Rextracut=Rtruncate)
+    else:
+      ret,fits,args = vp.fit_ellipticity(voidDir,Rbase=Rexpect,
+                                         nbar_init=1.0,
+                                         Niter=300000,
+                                         Nburn=100000,
+                                         Rextracut=Rtruncate)
+    print "TEST", stack.rMax, args[0][0], args[0][1], args[0][2]
+    badChain = (args[0][0] > 0.5 or args[0][1] > 2.*stack.rMax or \
+                args[0][2] > 2.*stack.rMax) and \
+               (ntries < maxtries)
+    #ret,fits,args = vp.compute_inertia(voidDir, stack.rMax, mode="2d", nBootstraps=100, rMaxInertia=0.7)
+    #badChain = False
 
     ## rescale to expected stretch if using comoving coords
     #if useComoving or not sample.useLightCone:
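A condensed sketch of the fitting loop re-enabled above, not the verbatim `launchFit` body: the MCMC ellipticity fit replaces the inertia-tensor shortcut, and a chain is rejected and rerun, up to `maxtries` attempts, whenever the recovered parameters fall outside the accepted range. `Rexpect`, `Rtruncate`, `maxtries`, `stack` and `voidDir` are set earlier in `launchFit`; since the observation and simulation branches currently call `vp.fit_ellipticity` with identical arguments, the branch is collapsed here:

```python
# Condensed sketch; argument values follow the diff above.
badChain = True
ntries = 0
while badChain:
    ntries += 1
    ret, fits, args = vp.fit_ellipticity(voidDir, Rbase=Rexpect, nbar_init=1.0,
                                         Niter=300000, Nburn=100000,
                                         Rextracut=Rtruncate)
    # reject chains whose best-fit parameters land outside the accepted range,
    # but give up after maxtries attempts
    badChain = (args[0][0] > 0.5 or
                args[0][1] > 2.*stack.rMax or
                args[0][2] > 2.*stack.rMax) and (ntries < maxtries)
```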
@@ -1282,7 +1273,7 @@ def launchFit(sample, stack, useComoving=None,
     else:
       rescaleFactor = 1.0
 
-    vp.draw_fit(*args, delta_tick=0.2, vmax=1.0, delta_v=0.01,
+    vp.draw_fit(*args, delta_tick=0.2, vmax=1.4, delta_v=0.01,
                 nocontour=True, model_max=1.0, delta_model=0.1,
                 plotTitle=plotTitle, show_ratio=showRatio,
                 showContours=showContours,
@@ -1293,30 +1284,31 @@ def launchFit(sample, stack, useComoving=None,
                       "_"+runSuffix+".pdf")
     figure(1).savefig(figDir+"/stackedVoid_"+sampleName+\
                       "_"+runSuffix+".png")
 
-    print "Done!"
-    sys.stdout = sys.__stdout__
-    #sys.stderr = sys.__stderr__
-    if jobSuccessful(logFile, "Done!\n"):
-      print jobString, "Fitting done (", ntries, " tries)"
-    else:
-      print jobString, "Fitting FAILED!"
-      exit(-1)
-
     # record the measured stretch
     exp = args[1]
     expError = args[2]
     outline = str(exp) + " " + str(expError)
     open(voidDir+"/expansion.out", "w").write(outline)
 
+    print "Done!"
+    sys.stdout = sys.__stdout__
+    #sys.stderr = sys.__stderr__
+
     if os.access(voidDir+"/NOFIT", os.F_OK):
       os.unlink(voidDir+"/NOFIT")
-    if ntries > maxtries:
+    if ntries >= maxtries:
       print jobString, "No reliable fit found; skipping!"
       fp = open(voidDir+"/NOFIT", "w")
       fp.write("bad ellipticity fit\n")
       fp.close()
-      return
-
+      #return
+    else:
+      if jobSuccessful(logFile, "Done!\n"):
+        print jobString, "Fitting done (%d tries)" % ntries
+      else:
+        print jobString, "Fitting FAILED!"
+        exit(-1)
+
   else:
     print jobString, "already done!"
@@ -1360,7 +1352,7 @@ def launchHubble(dataPortions=None, dataSampleList=None, logDir=None,
       if not alreadyHere:
         allZBins.append(stack)
 
-  allExpList = np.zeros((numSamplesForHubble,len(allRBins),len(allZBins),3))
+  allExpList = np.zeros((numSamplesForHubble,len(allRBins),len(allZBins),5))
  allExpList[:] = -1
  aveDistList = np.zeros((len(allZBins),3))
 
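The per-bin results array grows from 3 to 5 columns so the void and particle counts travel with each measurement. A short sketch of the assumed column convention, gathered from how the array is filled and written out in the hunks below; the array sizes are placeholders for illustration:

```python
import numpy as np

numSamplesForHubble, nRBins, nZBins = 2, 4, 3   # placeholder sizes

# Assumed column convention (0-2 as in the existing code, 3-4 added by this commit):
#   0: measured stretch (exp)          1: stretch error (expError)
#   2: average distance for the bin    3: number of voids in the stack
#   4: number of particles in the stack
allExpList = np.zeros((numSamplesForHubble, nRBins, nZBins, 5))
allExpList[:] = -1   # -1 marks entries that are never filled; the "reduced" output
                     # file added later in launchHubble skips such rows
```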
@@ -1436,6 +1428,10 @@ def launchHubble(dataPortions=None, dataSampleList=None, logDir=None,
 
       expList[0, iR, iZBin, 2] = aveDist
 
+      voidDir = zobovDir+"/stacks_" + runSuffix
+      numVoids = open(voidDir+"/num_voids.txt", "r").readline()
+      numParticles = open(voidDir+"/num_particles.txt", "r").readline()
+
       for (iZCheck,zBinCheck) in enumerate(allZBins):
         for (iRCheck,rBinCheck) in enumerate(allRBins):
           if zBin.zMin == zBinCheck.zMin and zBin.zMax == zBinCheck.zMax:

@@ -1445,6 +1441,8 @@ def launchHubble(dataPortions=None, dataSampleList=None, logDir=None,
 
             allExpList[iSample, iRCheck, iZCheck, 0] = exp
             allExpList[iSample, iRCheck, iZCheck, 1] = expError
+            allExpList[iSample, iRCheck, iZCheck, 3] = numVoids
+            allExpList[iSample, iRCheck, iZCheck, 4] = numParticles
 
       fp.write(str(exp) + " " + str(expError) + " "+ " "+ \
               str(aveDist) + " -1 " + runSuffix+"\n")

@@ -1467,20 +1465,20 @@ def launchHubble(dataPortions=None, dataSampleList=None, logDir=None,
       sys.stdout = open(logFile, 'w')
       #sys.stderr = open(logFile, 'a')
       plotTitle = "Sample: "+sample.nickName+", "+thisDataPortion+" voids"
-      if doPlot:
+      #if doPlot:
       #vp.do_all_obs(sample, zbase, expList, workDir+"/avedistortion_",
-      vp.do_all_obs(sample, zbase, expList, aveDistList,
-                    rlist, plotTitle=plotTitle, plotAve=True)
-      figure(1).savefig(figDir+"/hubble_"+setName+"_"+sampleName+"_"+\
-                        thisDataPortion+\
-                        ".eps",bbox_inches='tight')
-      figure(1).savefig(figDir+"/hubble_"+setName+"_"+sampleName+"_"+\
-                        thisDataPortion+\
-                        ".pdf",bbox_inches='tight')
-      figure(1).savefig(figDir+"/hubble_"+setName+"_"+sampleName+"_"+\
-                        thisDataPortion+\
-                        ".png",bbox_inches='tight')
-      else:
+      #vp.do_all_obs(sample, zbase, expList, aveDistList,
+      #              rlist, plotTitle=plotTitle, plotAve=True)
+      #figure(1).savefig(figDir+"/hubble_"+setName+"_"+sampleName+"_"+\
+      #                  thisDataPortion+\
+      #                  ".eps",bbox_inches='tight')
+      #figure(1).savefig(figDir+"/hubble_"+setName+"_"+sampleName+"_"+\
+      #                  thisDataPortion+\
+      #                  ".pdf",bbox_inches='tight')
+      #figure(1).savefig(figDir+"/hubble_"+setName+"_"+sampleName+"_"+\
+      #                  thisDataPortion+\
+      #                  ".png",bbox_inches='tight')
+      #else:
       print "Skipping plot"
       print "Done!"
       sys.stdout = sys.__stdout__

@@ -1520,6 +1518,8 @@ def launchHubble(dataPortions=None, dataSampleList=None, logDir=None,
         line = fp.readline().split()
         allExpList[iSample,iR,iZ,2] = allExpList[iSample,iR,iZ,1]
         allExpList[iSample,iR,iZ,1] = float(line[1])
+        allExpList[iSample,iR,iZ,3] = float(line[2])
+        allExpList[iSample,iR,iZ,4] = float(line[3])
 
     fp.close()
 

@@ -1617,9 +1617,25 @@ def launchHubble(dataPortions=None, dataSampleList=None, logDir=None,
       for iR in xrange(len(allExpList[0,:,0,0])):
         fp.write(str(allExpList[iSample,iR,iZ,0]) + " " +\
                  str(allExpList[iSample,iR,iZ,1]) + " " +\
-                 str(allExpList[iSample,iR,iZ,2]) + "\n")
+                 str(allExpList[iSample,iR,iZ,2]) + " " +\
+                 str(allExpList[iSample,iR,iZ,3]) + " " +\
+                 str(allExpList[iSample,iR,iZ,4]) + "\n")
   fp.close()
 
+  fp = file(workDir+'/calculatedExpansions_reduced_'+thisDataPortion+'.txt',
+            mode="w")
+  for iZ in xrange(len(allExpList[0,0,:,0])):
+    for iSample in xrange(len(allExpList[:,0,0,0])):
+      for iR in xrange(len(allExpList[0,:,0,0])):
+        if (allExpList[iSample,iR,iZ,0] == -1): continue
+        fp.write(str(allExpList[iSample,iR,iZ,0]) + " " +\
+                 str(allExpList[iSample,iR,iZ,1]) + " " +\
+                 str(allExpList[iSample,iR,iZ,2]) + " " +\
+                 str(allExpList[iSample,iR,iZ,3]) + " " +\
+                 str(allExpList[iSample,iR,iZ,4]) + "\n")
+  fp.close()
+
+
   # save all void distribution data to a single file
   fp = file(workDir+'/voidDistributions_'+thisDataPortion+'.txt',
             mode="w")

@@ -1823,8 +1839,8 @@ def launchVelocityStack(sample, stack, binPath,
 # Rmax =
 
   centralRadius = stack.rMin * 0.25
-  Rextracut = stack.rMin*3 + 1
-  Rcircular = stack.rMin*3 + 2
+  Rextracut = stack.rMax*3 + 1
+  Rcircular = stack.rMax*3 + 2
 
   parameters="--velocityField=%s --voidCenters=%s --Rmax=%e --L0=%e --numBins=%d" % (velField_file, voidCenters, Rmax, Boxsize, numBins)
 