Slight re-organization of C/C++ tools. Significant modifications to support observational data. Python and pipeline scripts added

P.M. Sutter 2012-10-31 10:43:15 -05:00
parent 15496df4ff
commit 14abbc2018
42 changed files with 16252 additions and 557 deletions

python_tools/PKG-INFO Normal file

@@ -0,0 +1,10 @@
Metadata-Version: 1.0
Name: voidProject
Version: 1.0
Summary: UNKNOWN
Home-page: UNKNOWN
Author: UNKNOWN
Author-email: UNKNOWN
License: UNKNOWN
Description: UNKNOWN
Platform: UNKNOWN

python_tools/setup.py Normal file

@@ -0,0 +1,14 @@
from distutils.core import setup
from distutils.extension import Extension
from Cython.Distutils import build_ext
import numpy as np
setup(
    name='void_python_tools',
    version='1.0',
    cmdclass = {'build_ext': build_ext},
    include_dirs = [np.get_include()],
    packages=['void_python_tools', 'void_python_tools.backend',
              'void_python_tools.apTools', 'void_python_tools.apTools.profiles',
              'void_python_tools.apTools.chi2', 'void_python_tools.plotting'],
    #ext_modules = [Extension("void_python_tools.chi2.velocityProfileFitNative", ["void_python_tools/chi2/velocityProfileFitNative.pyx"], libraries=["gsl", "gslcblas"]), Extension("void_python_tools.chi2.likelihood", ["void_python_tools/chi2/likelihood.pyx"], libraries=["gsl", "gslcblas"])]
    ext_modules = [Extension("void_python_tools.apTools.chi2.velocityProfileFitNative",
                             ["void_python_tools/apTools/chi2/velocityProfileFitNative.pyx"],
                             libraries=["gsl", "gslcblas"])]
)
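Building the velocityProfileFitNative extension presumably follows the usual distutils/Cython route (python setup.py build_ext, then python setup.py install) and requires Cython, NumPy headers, and the GSL development libraries it links against.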


@@ -0,0 +1,3 @@
from void_python_tools.backend import *
from void_python_tools.apTools import *
from void_python_tools.plotting import *

Binary file not shown.


@@ -0,0 +1,2 @@
from chi2 import *
from profiles import *


@@ -0,0 +1,3 @@
from velocityProfileFitNative import *
from likelihood import *
from cosmologyTools import *


@@ -0,0 +1,94 @@
# a suite of functions to compute expansion rates, angular diameter
# distances, and expected void stretching

import numpy as np
import scipy.integrate as integrate

__all__=['expansion', 'angularDiameter', 'expectedStretch', 'aveStretch',
         'aveExpansion', 'aveStretchCone', 'aveWeightedStretch']

# returns 1/E(z) for the given cosmology
def expansion(z, Om = 0.27, Ot = 1.0, w0 = -1.0, wa = 0.0):
    wz = w0 + wa*z/(1+z)
    ez = Om * (1+z)**3 + (Ot-Om)# * (1+z)**(3.+3*wz)
    ez = 1./np.sqrt(ez)
    return ez

# returns D_A(z) for the given cosmology
def angularDiameter(z, Om = 0.27, Ot = 1.0, w0 = -1.0, wa = 0.0):
    da = integrate.quad(expansion, 0.0, z, args=(Om, Ot, w0, wa))[0]
    return da

# returns expected void stretch for the given cosmology
def expectedStretch(z, Om = 0.27, Ot = 1.0, w0 = -1.0, wa = 0.0):
    ez = 1./expansion(z, Om=Om, Ot=Ot, w0=w0, wa=wa)
    da = angularDiameter(z, Om=Om, Ot=Ot, w0=w0, wa=wa)
    return ez*da/z

# returns average expected void stretch for a given redshift range
def aveStretch(zStart, zEnd, Om = 0.27, Ot = 1.0, w0 = -1.0, wa = 0.0):
    if zStart == 0.0: zStart = 1.e-6
    ave = integrate.quad(expectedStretch, zStart, zEnd, args=(Om, Ot, w0, wa))[0]
    ave /= (zEnd-zStart)
    return ave

# -----------------------------------------------------------------------------
# returns average expected void stretch for a given redshift range,
# assuming a cone
def aveStretchCone(zStart, zEnd, skyFrac = 0.19, Om = 0.27, Ot = 1.0,
                   w0 = -1.0, wa = 0.0):
    if zStart == 0.0: zStart = 1.e-6

    h1 = zStart
    h2 = zEnd
    r1 = skyFrac * 4 * np.pi * zStart**2
    r2 = skyFrac * 4 * np.pi * zEnd**2

    # surface area of a slice within a cone
    def coneSlice(x, h, r):
        return np.pi * (r/h*x)**2

    def coneFunc(z, h, r, Om = 0.27, Ot = 1.0, w0 = -1.0, wa = 0.0):
        return coneSlice(z, h, r) * expectedStretch(z, Om, Ot, w0, wa)

    aveHigh = integrate.quad(coneFunc, 0.0, zEnd, args=(h2, r2, Om, Ot, w0, wa),
                             full_output=1)[0]
    aveLow = integrate.quad(coneFunc, 0.0, zStart, args=(h1, r1, Om, Ot, w0, wa),
                            full_output=1)[0]
    volumeHigh = integrate.quad(coneSlice, 0.0, zEnd, args=(h2, r2))[0]
    volumeLow = integrate.quad(coneSlice, 0.0, zStart, args=(h1, r1))[0]

    return (aveHigh-aveLow)/(volumeHigh-volumeLow)

# -----------------------------------------------------------------------------
# returns average expected void stretch for a given redshift range,
# weighted by an actual void distribution
def aveWeightedStretch(zStart, zEnd, skyFrac = 0.19, Om = 0.27, Ot = 1.0,
                       w0 = -1.0, wa = 0.0, dist=None, bins=None):
    if zStart == 0.0: zStart = 1.e-6

    def weightedSlice(x):
        return np.interp(x, bins[:-1], dist)

    def weightedFunc(z, Om = 0.27, Ot = 1.0, w0 = -1.0, wa = 0.0):
        return expectedStretch(z, Om, Ot, w0, wa) *\
               weightedSlice(z)

    ave = integrate.quad(weightedFunc, zStart, zEnd, args=(Om, Ot, w0, wa),
                         full_output=1)[0]
    volume = integrate.quad(weightedSlice, zStart, zEnd, full_output=1)[0]

    return ave/volume

# -----------------------------------------------------------------------------
# returns average expected expansion for a given redshift range
def aveExpansion(zStart, zEnd, Om = 0.27, Ot = 1.0, w0 = -1.0, wa = 0.0):
    if zStart == 0.0: zStart = 1.e-6
    ave = integrate.quad(expansion, zStart, zEnd, args=(Om, Ot, w0, wa))[0]
    ave = (zEnd-zStart)/ave
    return ave
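A minimal usage sketch of these helpers; the import path is assumed from the package layout in setup.py, and the redshift range and Om value are illustrative only:

# hypothetical example: average expected void stretch and expansion rate
# over 0 < z < 0.1 for the default flat cosmology with Om = 0.27
from void_python_tools.apTools.chi2.cosmologyTools import aveStretch, aveExpansion
print aveStretch(0.0, 0.1, Om=0.27)
print aveExpansion(0.0, 0.1, Om=0.27)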

File diff suppressed because it is too large


@@ -0,0 +1,6 @@
from build import *
from draw import *
from fit import *
from mcmc import *
from generateExpFigure import *
from getSurveyProps import *


@@ -0,0 +1,53 @@
import numpy as np
import healpy as healpy
import scipy.integrate

__all__=['getSurveyProps']

# returns the volume and galaxy density for a given redshift slice
def getSurveyProps(maskFile, zmin, zmax, selFunMin, selFunMax, portion,
                   selectionFuncFile=None):

    mask = healpy.read_map(maskFile)
    area = (1.*np.size(np.where(mask > 0)) / np.size(mask)) * 4.*np.pi

    # scale redshift limits by c/H0 ~ 3000 to get comoving distances in Mpc/h
    zmin = zmin * 3000
    zmax = zmax * 3000
    volume = area * (zmax**3 - zmin**3) / 3

    if selectionFuncFile != None:
        selfunc = np.genfromtxt(selectionFuncFile)
        selfunc = np.array(selfunc)
        selfunc[:,0] = selfunc[:,0]/100.

        # copy, so that setting the unity column below does not clobber selfunc
        selfuncUnity = np.copy(selfunc)
        selfuncUnity[:,1] = 1.0

        selfuncMin = selfunc[0,0]
        selfuncMax = selfunc[-1,0]
        selfuncDx = selfunc[1,0] - selfunc[0,0]
        selfuncN = np.size(selfunc[:,0])

        selFunMin *= 3000
        selFunMax *= 3000
        selFunMin = max(selFunMin, selfuncMin)
        selFunMax = min(selFunMax, selfuncMax)

        def f(z): return selfunc[int(np.ceil((z-selfuncMin)/selfuncDx)), 1]*z**2
        def fTotal(z): return selfuncUnity[int(np.ceil((z-selfuncMin)/selfuncDx)), 1]*z**2

        zrange = np.linspace(selFunMin, selFunMax)

        nbar = scipy.integrate.quad(f, selFunMin, selFunMax)
        nbar = nbar[0]

        ntotal = scipy.integrate.quad(fTotal, 0.0, max(selfuncUnity[:,0]))
        #ntotal = scipy.integrate.quad(f, 0.0, max(selfunc[:,0]))
        ntotal = ntotal[0]

        nbar = ntotal / area / nbar
    else:
        nbar = 1.0

    #print "PROPERTIES: ", volume, nbar

    return (volume, nbar)
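A hypothetical call; the mask and selection-function file names below are the Sample defaults used elsewhere in this commit, and the portion label "central" is a placeholder:

# hypothetical example; inputs are placeholders for an SDSS-style HEALPix mask
# and radial selection function
volume, nbar = getSurveyProps("rast_window_512.fits", 0.0, 0.1,
                              0.0, 0.1, "central",
                              selectionFuncFile="czselfunc.all.dr72dim.dat")
print volume, nbar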


@@ -0,0 +1,2 @@
from classes import *
from launchers import *

File diff suppressed because it is too large


@@ -0,0 +1,169 @@
# classes and routines used to support scripts

import os

LIGHT_SPEED = 299792.458

class Stack:
    zMin = 0.0
    zMax = 0.1
    rMin = 5
    rMax = 15
    zMinPlot = 0.0
    zMaxPlot = 0.1
    includeInHubble = True
    partOfCombo = False
    needProfile = True
    rescaleMode = "rmax" # options: "rmax" to scale to largest void in stack
                         #          "rv" normalize each void

    def __init__(self, zMin, zMax, rMin, rMax, includeInHubble, partOfCombo,
                 zMinPlot=None, needProfile=True, rescaleMode="rmax"):
        self.zMin = zMin
        self.zMax = zMax
        self.rMin = rMin
        self.rMax = rMax
        self.zMaxPlot = zMax
        self.includeInHubble = includeInHubble
        self.partOfCombo = partOfCombo
        self.needProfile = needProfile
        self.rescaleMode = rescaleMode

        if zMinPlot == None:
            self.zMinPlot = self.zMin
        else:
            self.zMinPlot = zMinPlot

class Sample:
    dataType = "observation"
    dataFormat = "sdss"
    dataFile = "lss.dr72dim.dat"
    fullName = "lss.dr72dim.dat"
    nickName = "dim"
    zobovDir = ""
    maskFile = "rast_window_512.fits"
    selFunFile = "czselfunc.all.dr72dim.dat"
    skyFraction = 0.19
    zBoundary = (0.0, 0.1)
    zBoundaryMpc = (0., 300)
    zRange = (0.0, 0.1)
    omegaM = 0.27
    minVoidRadius = 5
    fakeDensity = 0.01
    profileBinSize = 2 # Mpc
    volumeLimited = True
    includeInHubble = True
    partOfCombo = False
    isCombo = False
    comboList = []

    # applies to simulations only
    boxLen = 1024 # Mpc/h
    usePecVel = False
    subsample = 1.0
    useLightCone = True
    numSubDivisions = 1
    numSubvolumes = 1
    mySubvolume = 1

    stacks = []

    def __init__(self, dataFile="", fullName="",
                 nickName="", maskFile="", selFunFile="",
                 zBoundary=(), zRange=(), zBoundaryMpc=(),
                 minVoidRadius=0, fakeDensity=0.01, volumeLimited=True,
                 includeInHubble=True, partOfCombo=False, isCombo=False,
                 comboList=(), profileBinSize=2.0, skyFraction=0.19,
                 dataType="observation", numSubDivisions=2,
                 boxLen=1024, usePecVel=False, omegaM=0.27,
                 numSubvolumes=1, mySubvolume=1, dataFormat="sdss",
                 subsample="1.0", useLightCone=True):
        self.dataFile = dataFile
        self.fullName = fullName
        self.nickName = nickName
        self.maskFile = maskFile
        self.selFunFile = selFunFile
        self.zBoundary = zBoundary
        self.zBoundaryMpc = zBoundaryMpc
        self.zRange = zRange
        self.minVoidRadius = minVoidRadius
        self.fakeDensity = fakeDensity
        self.volumeLimited = volumeLimited
        self.includeInHubble = includeInHubble
        self.partOfCombo = partOfCombo
        self.isCombo = isCombo
        self.comboList = comboList
        self.zobovDir = None
        self.profileBinSize = profileBinSize
        self.skyFraction = skyFraction
        self.dataType = dataType
        self.numSubDivisions = numSubDivisions
        self.boxLen = boxLen
        self.usePecVel = usePecVel
        self.omegaM = omegaM
        self.numSubvolumes = numSubvolumes
        self.mySubvolume = mySubvolume
        self.dataFormat = dataFormat
        self.subsample = subsample
        self.useLightCone = useLightCone

        self.stacks = []

    def addStack(self, zMin, zMax, rMin, rMax,
                 includeInHubble, partOfCombo, zMinPlot=None,
                 needProfile=True, rescaleMode="rmax"):
        self.stacks.append(Stack(zMin, zMax, rMin, rMax,
                                 includeInHubble, partOfCombo,
                                 zMinPlot=zMinPlot, needProfile=needProfile,
                                 rescaleMode=rescaleMode))

    def getHubbleStacks(self):
        stacksForHubble = []
        for stack in self.stacks:
            if stack.includeInHubble:
                stacksForHubble.append(stack)
        return stacksForHubble

    def getUniqueRBins(self):
        uniqueRStacks = []
        for stack in self.stacks:
            if not stack.includeInHubble:
                continue
            alreadyHere = False
            for stackCheck in uniqueRStacks:
                if stack.rMin == stackCheck.rMin and stack.rMax == stackCheck.rMax:
                    alreadyHere = True
                    break
            if not alreadyHere:
                uniqueRStacks.append(stack)
        return uniqueRStacks

    def getUniqueZBins(self):
        uniqueZStacks = []
        for stack in self.stacks:
            if not stack.includeInHubble:
                continue
            alreadyHere = False
            for stackCheck in uniqueZStacks:
                if stack.zMin == stackCheck.zMin and stack.zMax == stackCheck.zMax:
                    alreadyHere = True
                    break
            if not alreadyHere:
                uniqueZStacks.append(stack)
        return uniqueZStacks

# -----------------------------------------------------------------------------
# -----------------------------------------------------------------------------
def jobSuccessful(logFile, doneString):
    jobDone = False
    checkLine = ""
    if os.access(logFile, os.F_OK):
        filelines = file(logFile, "r").readlines()
        if len(filelines) >= 1:
            checkLine = filelines[-1]
        jobDone = (checkLine == doneString)
    return jobDone

def getStackSuffix(zMin, zMax, rMin, rMax, dataPortion):
    return "z"+str(zMin)+"-"+str(zMax)+"_"+str(rMin)+"-"+str(rMax)+\
           "Mpc"+"_"+dataPortion

File diff suppressed because it is too large


@@ -0,0 +1,2 @@
from plotTools import *
from plotDefs import *


@@ -0,0 +1,13 @@
colorList = ['r', 'b', 'g', 'y', 'c', 'm', 'y',
             'brown', 'grey',
             'darkred', 'orange', 'pink', 'darkblue',
             'lightblue', 'chocolate',
             'indigo', 'lightseagreen', 'maroon', 'olive',
             'royalblue', 'palevioletred', 'seagreen', 'tomato',
             'aquamarine', 'darkslateblue',
             'khaki', 'lawngreen', 'mediumorchid',
             'orangered', 'thistle',
             'yellowgreen']
linewidth = 4
fontsize = 12


@@ -0,0 +1,69 @@
__all__=['plotNumberCounts']

from void_python_tools.backend.classes import *
from plotDefs import *
import numpy as np
import os
import pylab as plt

# -----------------------------------------------------------------------------
def plotNumberCounts(workDir=None, sampleList=None, figDir=None,
                     plotNameBase="numbercount",
                     showPlot=False, dataPortion=None, setName=None):

    plt.clf()
    plt.xlabel("Comoving Distance (Mpc/h)")
    plt.ylabel("Number of Voids")

    plotTitle = setName
    plotName = plotNameBase

    xMin = 1.e00
    xMax = 0

    for (iSample,sample) in enumerate(sampleList):
        sampleName = sample.fullName
        lineTitle = sampleName

        filename = workDir+"/sample_"+sampleName+"/centers_"+dataPortion+"_"+\
                   sampleName+".out"
        if not os.access(filename, os.F_OK):
            print "File not found: ", filename
            continue

        data = np.loadtxt(filename, comments="#")
        if data.ndim == 1:
            print " Too few!"
            continue

        zMin = sample.zRange[0]
        zMax = sample.zRange[1]

        range = (zMin, zMax)
        nbins = np.ceil((zMax-zMin)/0.1)

        thisMax = np.max(data[:,5])
        thisMin = np.min(data[:,5])
        if thisMax > xMax: xMax = thisMax
        if thisMin < xMin: xMin = thisMin

        plt.hist(data[:,5], bins=nbins,
                 label=lineTitle, color=colorList[iSample],
                 histtype = "step", range=range,
                 linewidth=linewidth)

    #plt.legend(title = "Samples", loc = "upper right")
    plt.title(plotTitle)

    plt.xlim(xMin, xMax)
    #plt.xlim(xMin, xMax*1.4) # make room for legend

    plt.savefig(figDir+"/fig_"+plotName+".pdf", bbox_inches="tight")
    plt.savefig(figDir+"/fig_"+plotName+".eps", bbox_inches="tight")
    plt.savefig(figDir+"/fig_"+plotName+".png", bbox_inches="tight")

    if showPlot:
        os.system("display %s" % figDir+"/fig_"+plotName+".png")