From 73c6aedfc283ba55a0b1d3403fa74002711bf3e3 Mon Sep 17 00:00:00 2001 From: "P.M. Sutter" Date: Tue, 11 Mar 2014 12:49:02 -0500 Subject: [PATCH] much more flexible for handling different stacking and AP approaches --- python_tools/pipeline_source/defaults.py | 5 + .../pipeline_source/prepareCatalogs.in.py | 87 +++++++------ .../void_python_tools/backend/classes.py | 13 +- .../void_python_tools/backend/launchers.py | 114 ++++++++++-------- 4 files changed, 132 insertions(+), 87 deletions(-) diff --git a/python_tools/pipeline_source/defaults.py b/python_tools/pipeline_source/defaults.py index 422797c..f675ff3 100644 --- a/python_tools/pipeline_source/defaults.py +++ b/python_tools/pipeline_source/defaults.py @@ -36,6 +36,11 @@ dataPortions = ["central"] # auto for AP analysis, "fixed" for catalog comparison stackMode = "fixed" +maxVoidsInStack = -1 +stackRescaleMode = "rv" +fittingMode = "mcmc" +minVoidsToFit = 20 +minPartToFit = 2 # directory for the input simulation/observational particle files catalogDir = os.getenv("HOME")+"/workspace/Voids/catalog/" diff --git a/python_tools/pipeline_source/prepareCatalogs.in.py b/python_tools/pipeline_source/prepareCatalogs.in.py index c9705d6..b718fe3 100644 --- a/python_tools/pipeline_source/prepareCatalogs.in.py +++ b/python_tools/pipeline_source/prepareCatalogs.in.py @@ -188,6 +188,11 @@ bias = 1.16 dataPortions = {dataPortions} dataSampleList = [] + +# for the A-P test +minVoidsToFit = {minVoidsToFit} +minPartToFit = {minPartToFit} +fittingMode = '{fittingMode}' """ scriptFile.write(header.format(startCatalogStage=startCatalogStage, @@ -195,7 +200,11 @@ dataSampleList = [] startAPStage=startAPStage, endAPStage=endAPStage, continueRun=continueRun, - dataPortions=dataPortions)) + dataPortions=dataPortions, + minVoidsToFit=minVoidsToFit, + minPartToFit=minPartToFit, + fittingMode=fittingMode, + )) dataInfo = """ setName = "{setName}" @@ -245,38 +254,38 @@ dataSampleList.append(newSample) if stackMode == "fixed": stackInfo = """ # {zMin}, {zMax}, {minRadius} -newSample.addStack(0.0, 5.0, 5 , 10, False, False, rescaleMode="rv") -newSample.addStack(0.0, 5.0, 10, 15, False, False, rescaleMode="rv") -newSample.addStack(0.0, 5.0, 15, 20, False, False, rescaleMode="rv") -newSample.addStack(0.0, 5.0, 20, 25, False, False, rescaleMode="rv") -newSample.addStack(0.0, 5.0, 25, 30, False, False, rescaleMode="rv") -newSample.addStack(0.0, 5.0, 30, 35, False, False, rescaleMode="rv") -newSample.addStack(0.0, 5.0, 35, 40, False, False, rescaleMode="rv") -newSample.addStack(0.0, 5.0, 40, 45, False, False, rescaleMode="rv") -newSample.addStack(0.0, 5.0, 45, 50, False, False, rescaleMode="rv") -newSample.addStack(0.0, 5.0, 50, 55, False, False, rescaleMode="rv") -newSample.addStack(0.0, 5.0, 55, 60, False, False, rescaleMode="rv") -newSample.addStack(0.0, 5.0, 60, 65, False, False, rescaleMode="rv") -newSample.addStack(0.0, 5.0, 65, 70, False, False, rescaleMode="rv") -newSample.addStack(0.0, 5.0, 70, 75, False, False, rescaleMode="rv") -newSample.addStack(0.0, 5.0, 75, 80, False, False, rescaleMode="rv") -newSample.addStack(0.0, 5.0, 80, 85, False, False, rescaleMode="rv") -newSample.addStack(0.0, 5.0, 85, 90, False, False, rescaleMode="rv") -newSample.addStack(0.0, 5.0, 90, 95, False, False, rescaleMode="rv") -newSample.addStack(0.0, 5.0, 95, 100, False, False, rescaleMode="rv") +newSample.addStack(0.0, 5.0, 5 , 10, False, False, rescaleMode='{rescaleMode}', maxVoids={maxVoids}) +newSample.addStack(0.0, 5.0, 10, 15, False, False, rescaleMode='{rescaleMode}', 
maxVoids={maxVoids}) +newSample.addStack(0.0, 5.0, 15, 20, False, False, rescaleMode='{rescaleMode}', maxVoids={maxVoids}) +newSample.addStack(0.0, 5.0, 20, 25, False, False, rescaleMode='{rescaleMode}', maxVoids={maxVoids}) +newSample.addStack(0.0, 5.0, 25, 30, False, False, rescaleMode='{rescaleMode}', maxVoids={maxVoids}) +newSample.addStack(0.0, 5.0, 30, 35, False, False, rescaleMode='{rescaleMode}', maxVoids={maxVoids}) +newSample.addStack(0.0, 5.0, 35, 40, False, False, rescaleMode='{rescaleMode}', maxVoids={maxVoids}) +newSample.addStack(0.0, 5.0, 40, 45, False, False, rescaleMode='{rescaleMode}', maxVoids={maxVoids}) +newSample.addStack(0.0, 5.0, 45, 50, False, False, rescaleMode='{rescaleMode}', maxVoids={maxVoids}) +newSample.addStack(0.0, 5.0, 50, 55, False, False, rescaleMode='{rescaleMode}', maxVoids={maxVoids}) +newSample.addStack(0.0, 5.0, 55, 60, False, False, rescaleMode='{rescaleMode}', maxVoids={maxVoids}) +newSample.addStack(0.0, 5.0, 60, 65, False, False, rescaleMode='{rescaleMode}', maxVoids={maxVoids}) +newSample.addStack(0.0, 5.0, 65, 70, False, False, rescaleMode='{rescaleMode}', maxVoids={maxVoids}) +newSample.addStack(0.0, 5.0, 70, 75, False, False, rescaleMode='{rescaleMode}', maxVoids={maxVoids}) +newSample.addStack(0.0, 5.0, 75, 80, False, False, rescaleMode='{rescaleMode}', maxVoids={maxVoids}) +newSample.addStack(0.0, 5.0, 80, 85, False, False, rescaleMode='{rescaleMode}', maxVoids={maxVoids}) +newSample.addStack(0.0, 5.0, 85, 90, False, False, rescaleMode='{rescaleMode}', maxVoids={maxVoids}) +newSample.addStack(0.0, 5.0, 90, 95, False, False, rescaleMode='{rescaleMode}', maxVoids={maxVoids}) +newSample.addStack(0.0, 5.0, 95, 100, False, False, rescaleMode='{rescaleMode}', maxVoids={maxVoids}) """ elif stackMode == "sample_thick": stackInfo = """ # {zMin}, {zMax}, {minRadius} -newSample.addStack({zMin}, {zMax}, 10, 20, True, False, rescaleMode="rmax") -newSample.addStack({zMin}, {zMax}, 20, 30, True, False, rescaleMode="rmax") -newSample.addStack({zMin}, {zMax}, 30, 40, True, False, rescaleMode="rmax") -newSample.addStack({zMin}, {zMax}, 40, 50, True, False, rescaleMode="rmax") -newSample.addStack({zMin}, {zMax}, 50, 60, True, False, rescaleMode="rmax") -newSample.addStack({zMin}, {zMax}, 60, 70, True, False, rescaleMode="rmax") -newSample.addStack({zMin}, {zMax}, 70, 80, True, False, rescaleMode="rmax") -newSample.addStack({zMin}, {zMax}, 80, 90, True, False, rescaleMode="rmax") -newSample.addStack({zMin}, {zMax}, 90, 100, True, False, rescaleMode="rmax") +newSample.addStack({zMin}, {zMax}, 10, 20, True, False, rescaleMode='{rescaleMode}', maxVoids={maxVoids}) +newSample.addStack({zMin}, {zMax}, 20, 30, True, False, rescaleMode='{rescaleMode}', maxVoids={maxVoids}) +newSample.addStack({zMin}, {zMax}, 30, 40, True, False, rescaleMode='{rescaleMode}', maxVoids={maxVoids}) +newSample.addStack({zMin}, {zMax}, 40, 50, True, False, rescaleMode='{rescaleMode}', maxVoids={maxVoids}) +newSample.addStack({zMin}, {zMax}, 50, 60, True, False, rescaleMode='{rescaleMode}', maxVoids={maxVoids}) +newSample.addStack({zMin}, {zMax}, 60, 70, True, False, rescaleMode='{rescaleMode}', maxVoids={maxVoids}) +newSample.addStack({zMin}, {zMax}, 70, 80, True, False, rescaleMode='{rescaleMode}', maxVoids={maxVoids}) +newSample.addStack({zMin}, {zMax}, 80, 90, True, False, rescaleMode='{rescaleMode}', maxVoids={maxVoids}) +newSample.addStack({zMin}, {zMax}, 90, 100, True, False, rescaleMode='{rescaleMode}', maxVoids={maxVoids}) """ elif stackMode == "log": @@ -289,18 +298,18 @@ 
newSample.addStack({zMin}, {zMax}, 90, 100, True, False, rescaleMode="rmax") while rEnd < rMax: rEnd = (1+0.5*dlogR)*rStart/(1-0.5*dlogR) - stackInfo += """newSample.addStack({zMin}, {zMax}"""+ ", %g, %g, True, False, rescaleMode='rv')" % (rStart, rEnd) + stackInfo += """newSample.addStack({zMin}, {zMax}"""+ ", %g, %g, True, False, rescaleMode='{rescaleMode}', maxVoids={maxVoids})" % (rStart, rEnd) rStart = rEnd elif stackMode == "auto": stackInfo = """ -newSample.addStack({zMin}, {zMax}, 2*{minRadius} , 2*{minRadius}+2, True, False, rescaleMode="rv") -newSample.addStack({zMin}, {zMax}, 2*{minRadius} , 2*{minRadius}+4, True, False, rescaleMode="rv") -newSample.addStack({zMin}, {zMax}, 2*{minRadius}+2, 2*{minRadius}+6, True, False, rescaleMode="rv") -newSample.addStack({zMin}, {zMax}, 2*{minRadius}+6, 2*{minRadius}+10, True, False, rescaleMode="rv") -newSample.addStack({zMin}, {zMax}, 2*{minRadius}+10, 2*{minRadius}+18, True, False, rescaleMode="rv") -newSample.addStack({zMin}, {zMax}, 2*{minRadius}+18, 2*{minRadius}+24, True, False, rescaleMode="rv") +newSample.addStack({zMin}, {zMax}, 2*{minRadius} , 2*{minRadius}+2, True, False, rescaleMode='{rescaleMode}', maxVoids={maxVoids}) +newSample.addStack({zMin}, {zMax}, 2*{minRadius} , 2*{minRadius}+4, True, False, rescaleMode='{rescaleMode}', maxVoids={maxVoids}) +newSample.addStack({zMin}, {zMax}, 2*{minRadius}+2, 2*{minRadius}+6, True, False, rescaleMode='{rescaleMode}', maxVoids={maxVoids}) +newSample.addStack({zMin}, {zMax}, 2*{minRadius}+6, 2*{minRadius}+10, True, False, rescaleMode='{rescaleMode}', maxVoids={maxVoids}) +newSample.addStack({zMin}, {zMax}, 2*{minRadius}+10, 2*{minRadius}+18, True, False, rescaleMode='{rescaleMode}', maxVoids={maxVoids}) +newSample.addStack({zMin}, {zMax}, 2*{minRadius}+18, 2*{minRadius}+24, True, False, rescaleMode='{rescaleMode}', maxVoids={maxVoids}) """ else: stackInfo = """ @@ -389,7 +398,11 @@ newSample.addStack({zMin}, {zMax}, 2*{minRadius}+18, 2*{minRadius}+24, True, Fal sliceAPMax = "%0.2f" % sliceAPMax scriptFile.write(stackInfo.format(zMin=sliceAPMin, zMax=sliceAPMax, - minRadius=minRadius)) + minRadius=minRadius, + rescaleMode=stackRescaleMode, + maxVoids=maxVoidsInStack, + fittingMode=fittingMode + )) scriptFile.close() diff --git a/python_tools/void_python_tools/backend/classes.py b/python_tools/void_python_tools/backend/classes.py index c5ffa6a..40d5b62 100644 --- a/python_tools/void_python_tools/backend/classes.py +++ b/python_tools/void_python_tools/backend/classes.py @@ -38,9 +38,11 @@ class Stack: needProfile = True rescaleMode = "rmax" # options: "rmax" to scale to largest void in stack # "rv" normalize each void + maxVoids = -1 # maximum number of voids to allow in the stack def __init__(self, zMin, zMax, rMin, rMax, includeInHubble, partOfCombo, - zMinPlot=None, needProfile=True, rescaleMode="rmax"): + zMinPlot=None, needProfile=True, rescaleMode="rmax", + maxVoids=-1, fittingMode="mcmc"): self.zMin = zMin self.zMax = zMax self.rMin = rMin @@ -50,6 +52,8 @@ class Stack: self.partOfCombo = partOfCombo self.needProfile = needProfile self.rescaleMode = rescaleMode + self.maxVoids = maxVoids + self.fittingMode = fittingMode if zMinPlot == None: self.zMinPlot = self.zMin @@ -143,11 +147,14 @@ class Sample: def addStack(self, zMin, zMax, rMin, rMax, includeInHubble, partOfCombo,zMinPlot=None, - needProfile=True, rescaleMode="rmax"): + needProfile=True, rescaleMode="rmax", + maxVoids=-1, fittingMode="mcmc"): self.stacks.append(Stack(zMin, zMax, rMin, rMax, includeInHubble, partOfCombo, 
zMinPlot=zMinPlot, needProfile=needProfile, - rescaleMode=rescaleMode)) + rescaleMode=rescaleMode, + maxVoids=maxVoids, + fittingMode=fittingMode)) def getHubbleStacks(self): stacksForHubble = [] diff --git a/python_tools/void_python_tools/backend/launchers.py b/python_tools/void_python_tools/backend/launchers.py index a113aba..e6b08da 100644 --- a/python_tools/void_python_tools/backend/launchers.py +++ b/python_tools/void_python_tools/backend/launchers.py @@ -616,10 +616,10 @@ def launchStack(sample, stack, binPath, thisDataPortion=None, logDir=None, else: obsFlag = "" - Rextracut = stack.rMin*3 + 1 - Rcircular = stack.rMin*3 + 2 - #Rextracut = stack.rMax*3 + 1 - #Rcircular = stack.rMax*3 + 2 + #Rextracut = stack.rMin*3 + 1 + #Rcircular = stack.rMin*3 + 2 + Rextracut = stack.rMax*3 + 1 + Rcircular = stack.rMax*3 + 2 if dataType == "observation": maskIndex = open(zobovDir+"/mask_index.txt", "r").read() @@ -643,6 +643,11 @@ def launchStack(sample, stack, binPath, thisDataPortion=None, logDir=None, else: rescaleFlag = "" + if stack.maxVoids != -1: + maxVoidsFlag = "maxVoids="+str(stack.maxVoids) + else: + maxVoidsFlag = "" + idListFile = "idlist.temp" if idList != None: idListFlag = "idList " + idListFile @@ -695,6 +700,7 @@ def launchStack(sample, stack, binPath, thisDataPortion=None, logDir=None, %s %s %s + %s """ % \ (zobovDir+"/voidDesc_"+thisDataPortion+"_"+sampleName+".out", zobovDir+"/voidPart_"+sampleName+".dat", @@ -717,6 +723,7 @@ def launchStack(sample, stack, binPath, thisDataPortion=None, logDir=None, zobovDir+"/barycenters_"+thisDataPortion+"_"+sampleName+".out", zobovDir+"/boundaryDistances_"+thisDataPortion+"_"+sampleName+".out", rescaleFlag, + maxVoidsFlag, idListFlag, periodicLine ) @@ -1116,10 +1123,10 @@ def launchProfile(sample, stack, voidDir=None, logFile=None, continueRun=None, numVoids = open(voidDir+"/num_voids.txt", "r").readline() numParticles = open(voidDir+"/num_particles.txt", "r").readline() - Rextracut = stack.rMin*3 + 1 - Rcircular = stack.rMin*2 + 2 - #Rextracut = stack.rMax*3 + 1 - #Rcircular = stack.rMax*3 + 2 + #Rextracut = stack.rMin*3 + 1 + #Rcircular = stack.rMin*2 + 2 + Rextracut = stack.rMax*3 + 1 + Rcircular = stack.rMax*3 + 2 if not (continueRun and jobSuccessful(logFile, "Done!\n")): profileParmFile = voidDir+"/params.txt" @@ -1139,8 +1146,8 @@ def launchProfile(sample, stack, voidDir=None, logFile=None, continueRun=None, if sample.profileBinSize == "auto": #density = 0.5 * 50 / Rcircular / 2 - density = stack.rMax / 5. - #density = stack.rMax / 10. + density = stack.rMin / 5. + #density = stack.rMax / 5. else: density = sample.profileBinSize @@ -1166,7 +1173,9 @@ def launchProfile(sample, stack, voidDir=None, logFile=None, continueRun=None, # ----------------------------------------------------------------------------- def launchFit(sample, stack, useComoving=None, logFile=None, voidDir=None, figDir=None, - continueRun=None, thisDataPortion=None): + continueRun=None, thisDataPortion=None, + fittingMode=None, minVoidsToFit=None, + minPartToFit=None): sampleName = sample.fullName @@ -1181,7 +1190,7 @@ def launchFit(sample, stack, useComoving=None, return numVoids = int(open(voidDir+"/num_voids.txt", "r").readline()) - if numVoids < 1: + if numVoids < minVoidsToFit: print jobString, "Fitting not enough voids to fit; skipping!" 
fp = open(voidDir+"/NOFIT", "w") fp.write("not enough voids: %d \n" % numVoids) @@ -1189,7 +1198,7 @@ def launchFit(sample, stack, useComoving=None, return numPart = int(open(voidDir+"/num_particles.txt", "r").readline()) - if numPart < 1: + if numPart < minPartToFit: print jobString, "Fitting not enough particles to fit; skipping!" fp = open(voidDir+"/NOFIT", "w") fp.write("not enough particles: %d \n" % numPart) @@ -1229,28 +1238,31 @@ def launchFit(sample, stack, useComoving=None, while badChain: ntries += 1 - Rexpect = (stack.rMin+stack.rMax)/2 - #Rtruncate = stack.rMax*2. # TEST - #Rtruncate = stack.rMax*3. # TEST - Rtruncate = stack.rMin*3. + 1 # TEST - if sample.dataType == "observation": - ret,fits,args = vp.fit_ellipticity(voidDir,Rbase=Rexpect, - nbar_init=1.0, - Niter=300000, - Nburn=100000, - Rextracut=Rtruncate) - else: - ret,fits,args = vp.fit_ellipticity(voidDir,Rbase=Rexpect, - nbar_init=1.0, - Niter=300000, - Nburn=100000, - Rextracut=Rtruncate) - print "TEST", stack.rMax, args[0][0], args[0][1], args[0][2] - badChain = (args[0][0] > 0.5 or args[0][1] > 2.*stack.rMax or \ - args[0][2] > 2.*stack.rMax) and \ - (ntries < maxtries) - #ret,fits,args = vp.compute_inertia(voidDir, stack.rMax, mode="2d", nBootstraps=100, rMaxInertia=0.7) - #badChain = False + if fittingMode == "mcmc": + Rexpect = (stack.rMin+stack.rMax)/2 + #Rtruncate = stack.rMax*2. # TEST + Rtruncate = stack.rMax*3. # TEST + #Rtruncate = stack.rMin*3. + 1 # TEST + if sample.dataType == "observation": + ret,fits,args = vp.fit_ellipticity(voidDir,Rbase=Rexpect, + nbar_init=1.0, + Niter=300000, + Nburn=100000, + Rextracut=Rtruncate) + else: + ret,fits,args = vp.fit_ellipticity(voidDir,Rbase=Rexpect, + nbar_init=1.0, + Niter=300000, + Nburn=100000, + Rextracut=Rtruncate) + #print "TEST", stack.rMax, args[0][0], args[0][1], args[0][2] + badChain = (args[0][0] > 0.5 or args[0][1] > 2.*stack.rMax or \ + args[1] > 2.0 or \ + args[0][2] > 2.*stack.rMax) and \ + (ntries < maxtries) + elif fittingMode == "inertia": + ret,fits,args = vp.compute_inertia(voidDir, stack.rMax, mode="2d", nBootstraps=100, rMaxInertia=0.7) + badChain = False ## rescale to expected stretch if using comoving coords #if useComoving or not sample.useLightCone: @@ -1296,7 +1308,7 @@ def launchFit(sample, stack, useComoving=None, if os.access(voidDir+"/NOFIT", os.F_OK): os.unlink(voidDir+"/NOFIT") - if ntries >= maxtries: + if ntries > maxtries: print jobString, "No reliable fit found; skipping!" fp = open(voidDir+"/NOFIT", "w") fp.write("bad ellipticity fit\n") @@ -1664,7 +1676,7 @@ def launchHubble(dataPortions=None, dataSampleList=None, logDir=None, else: fp.write("-1\n") #fp.write(str(len(voidRedshifts))+" ") - np.savetxt(fp, voidRedshifts[None]) + #np.savetxt(fp, voidRedshifts[None]) else: fp.write("-1\n") fp.close() @@ -1680,8 +1692,13 @@ def launchHubble(dataPortions=None, dataSampleList=None, logDir=None, print "already done!" 
# ----------------------------------------------------------------------------- -def launchLikelihood(dataPortions=None, logDir=None, workDir=None, - continueRun=False): +def launchLikelihood(dataSampleList=None, + dataPortions=None, logDir=None, workDir=None, + continueRun=False): + + # check for comoving from first sample + useComoving = not dataSampleList[0].useLightCone or \ + dataSampleList[0].useComoving for thisDataPortion in dataPortions: print " For data portion", thisDataPortion, "...", @@ -1691,24 +1708,27 @@ def launchLikelihood(dataPortions=None, logDir=None, workDir=None, if not (continueRun and jobSuccessful(logFile, "Done!\n")): - sys.stdout = open(logFile, 'w') + #sys.stdout = open(logFile, 'w') #sys.stderr = open(logFile, 'a') - vp.build1dLikelihood(workDir+"/calculatedExpansions_"+\ + vp.build1dLikelihood(dataSampleList[0], + workDir+"/calculatedExpansions_"+\ thisDataPortion+".txt", workDir+"/voidDistributions_" + \ thisDataPortion+".txt", nbins = 20, OmStart = 0.0, OmEnd = 1.0, - biasStart = 1.0, - biasEnd = 1.32, - #biasStart = 1.15, - #biasEnd = 1.17, + #biasStart = 1.0, + #biasEnd = 1.32, + biasStart = 1.16, + biasEnd = 1.16, outputBase = workDir+"/1dlikelihoods_"+thisDataPortion+"_", - useBinAve = False) + useBinAve = False, + useComoving = useComoving, + OmFid=dataSampleList[0].omegaM) - sys.stdout = sys.__stdout__ + #sys.stdout = sys.__stdout__ #sys.stderr = sys.__stderr__ if jobSuccessful(logFile, "Done!\n"):
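
A minimal usage sketch (not taken from the patch itself), showing how the new per-stack and fitting options added here are intended to be driven from the defaults.py entries (stackRescaleMode, maxVoidsInStack, fittingMode, minVoidsToFit, minPartToFit). The bare Sample() construction is a hypothetical placeholder for illustration only; the addStack keywords and the launchFit controls are the ones introduced in this patch:

    # illustration only; values mirror the new defaults.py entries
    from void_python_tools.backend.classes import Sample

    newSample = Sample()                    # placeholder construction (assumed)
    newSample.addStack(0.0, 5.0, 5, 10,     # zMin, zMax, rMin, rMax
                       False, False,        # includeInHubble, partOfCombo
                       rescaleMode='rv',    # previously hard-coded; now set via stackRescaleMode
                       maxVoids=-1)         # new: cap on voids per stack (-1 = no cap)
    # launchFit is likewise expected to receive the new fitting controls, e.g.
    # fittingMode='mcmc' (or 'inertia'), minVoidsToFit=20, minPartToFit=2,
    # matching the thresholds now checked before attempting an ellipticity fit.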