mirror of
https://bitbucket.org/cosmicvoids/vide_public.git
synced 2025-07-04 23:31:12 +00:00
Support minimum halo mass cuts. Start of scripts to generate masked mock sets; some files added to support more general preparation scripts later.
This commit is contained in:
parent a53e3bf290
commit 10dfe29a26
9 changed files with 557 additions and 58 deletions
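The common thread in these changes is a minimum-halo-mass cut applied while streaming the ASCII halo catalogs: each comma-separated line is split, the field at index 6 is compared against the cut, and the string "none" disables the cut entirely. A minimal sketch of that filter, with a hypothetical catalog path:

def countHalosAboveCut(haloFileName, minHaloMass):
  # stream the comma-separated halo catalog and count the entries that
  # survive the cut; minHaloMass == "none" keeps every halo, mirroring
  # the minHaloMasses handling in the pipeline scripts below
  numKept = 0
  inFile = open(haloFileName, 'r')
  for line in inFile:
    line = line.split(',')
    if minHaloMass == "none" or float(line[6]) > minHaloMass:
      numKept += 1
  inFile.close()
  return numKept

# e.g. countHalosAboveCut("mdr1_halos_z0.53.dat", 1.23e13)  # hypothetical file name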
321  pipeline/applyMaskToMock.py  Executable file

@@ -0,0 +1,321 @@
#!/usr/bin/env python

# prepares input catalogs based on multidark simulations
# (borrows heavily from generateMock, but doesn't hold much in memory)
# also creates necessary analyzeVoids input files

import numpy as np
import os
import sys
import void_python_tools as vp
import argparse
import imp
import healpy as hp

# ------------------------------------------------------------------------------

def my_import(name):
  mod = __import__(name)
  components = name.split('.')
  for comp in components[1:]:
    mod = getattr(mod, comp)
  return mod

# -----------------------------------------------------------------------------

LIGHT_SPEED = 299792.458

parser = argparse.ArgumentParser(description='options')
parser.add_argument('--scripts', dest='script', action='store_const',
                    const=True, default=False,
                    help='write scripts')
parser.add_argument('--parmFile', dest='parmFile',
                    default="",
                    help='path to parameter file')
args = parser.parse_args()

filename = args.parmFile
print " Loading parameters from", filename
if not os.access(filename, os.F_OK):
  print " Cannot find parameter file %s!" % filename
  exit(-1)
parms = imp.load_source("name", filename)
globals().update(vars(parms))

#------------------------------------------------------------------------------
def getSampleName(setName, redshift, useVel, iSlice=-1, iVol=-1):

  sampleName = setName

  sampleName += "_z" + redshift

  if iVol != -1: sampleName += "_d" + iVol

  return sampleName

#------------------------------------------------------------------------------
# for given dataset parameters, outputs a script for use with analyzeVoids
def writeScript(setName, dataFileNameBase,
                scriptDir, catalogDir, fileNums, redshifts, numSubvolumes,
                numSlices, useVel, lbox, minRadius, omegaM, subsample=1.0,
                suffix=".dat"):

  if useVel: setName += "_pv"

  scriptFileName = scriptDir + "/" + setName + ".py"
  scriptFile = open(scriptFileName, 'w')

  scriptFile.write("""#!/usr/bin/env python
import os
from void_python_tools.backend.classes import *

continueRun = True # set to True to enable restarting aborted jobs
startCatalogStage = 1
endCatalogStage = 4

startAPStage = 1
endAPStage = 7

ZOBOV_PATH = os.getenv("PWD")+"/../zobov/"
CTOOLS_PATH = os.getenv("PWD")+"/../c_tools/"
freshStack = True
errorBars = "CALCULATED"
numIncoherentRuns = 100
ranSeed = 101010
useLCDM = False
bias = 1.16

dataPortions = ["central"]
dataSampleList = []
""")

  dataInfo = """
setName = "{setName}"

workDir = "{voidOutputDir}/{setName}/"
inputDataDir = "{inputDataDir}"
figDir = "{figDir}/{setName}/"
logDir = "{logDir}/{setName}/"

numZobovDivisions = {numZobovDivisions}
numZobovThreads = {numZobovThreads}
"""
  scriptFile.write(dataInfo.format(setName=setName, figDir=figDir,
                                   logDir=logDir, voidOutputDir=voidOutputDir,
                                   inputDataDir=catalogDir,
                                   numZobovDivisions=numZobovDivisions,
                                   numZobovThreads=numZobovThreads))

  sampleInfo = """
newSample = Sample(dataFile = "{dataFile}",
                   dataFormat = "{dataFormat}",
                   dataUnit = {dataUnit},
                   fullName = "{sampleName}",
                   nickName = "{sampleName}",
                   dataType = "simulation",
                   zBoundary = ({zMin}, {zMax}),
                   zRange = ({zMin}, {zMax}),
                   zBoundaryMpc = ({zMinMpc}, {zMaxMpc}),
                   omegaM = {omegaM},
                   minVoidRadius = {minRadius},
                   includeInHubble = True,
                   partOfCombo = False,
                   isCombo = False,
                   boxLen = {boxLen},
                   usePecVel = {usePecVel},
                   numSubvolumes = {numSubvolumes},
                   mySubvolume = "{mySubvolume}",
                   useLightCone = {useLightCone},
                   subsample = {subsample})
dataSampleList.append(newSample)
newSample.addStack({zMin}, {zMax}, {minRadius}   , {minRadius}+2, True, False)
newSample.addStack({zMin}, {zMax}, {minRadius}   , {minRadius}+4, True, False)
newSample.addStack({zMin}, {zMax}, {minRadius}+2 , {minRadius}+6, True, False)
newSample.addStack({zMin}, {zMax}, {minRadius}+6 , {minRadius}+10, True, False)
newSample.addStack({zMin}, {zMax}, {minRadius}+10, {minRadius}+18, True, False)
newSample.addStack({zMin}, {zMax}, {minRadius}+18, {minRadius}+24, True, False)
"""

  for (iFile, redshift) in enumerate(redshifts):
    fileNum = fileNums[iFile]
    zBox = float(redshift)
    Om = float(omegaM)
    zBoxMpc = LIGHT_SPEED/100.*vp.angularDiameter(zBox, Om=Om)
    boxMaxMpc = zBoxMpc + lbox

    # converter from redshift to comoving distance
    zVsDY = np.linspace(0., zBox+8*lbox*100./LIGHT_SPEED, 10000)
    zVsDX = np.zeros(len(zVsDY))
    for i in xrange(len(zVsDY)):
      zVsDX[i] = vp.angularDiameter(zVsDY[i], Om=Om)

    if useLightCone:
      boxWidthZ = np.interp(vp.angularDiameter(zBox,Om=Om)+100. / \
                            LIGHT_SPEED*lbox, zVsDX, zVsDY)-zBox
      dzSafe = 0.03
    else:
      boxWidthZ = lbox*100./LIGHT_SPEED
      dzSafe = 0.0

    for iSlice in xrange(numSlices):
      sliceMin = zBox + dzSafe + iSlice*(boxWidthZ-dzSafe)/numSlices
      sliceMax = zBox + dzSafe + (iSlice+1)*(boxWidthZ-dzSafe)/numSlices

      sliceMinMpc = sliceMin*LIGHT_SPEED/100.
      sliceMaxMpc = sliceMax*LIGHT_SPEED/100.

      sliceMin = "%0.2f" % sliceMin
      sliceMax = "%0.2f" % sliceMax
      sliceMinMpc = "%0.1f" % sliceMinMpc
      sliceMaxMpc = "%0.1f" % sliceMaxMpc

      dataFileName = dataFileNameBase + fileNum + suffix

      for iX in xrange(numSubvolumes):
        for iY in xrange(numSubvolumes):

          mySubvolume = "%d%d" % (iX, iY)

          sampleName = getSampleName(setName, sliceMin, useVel,
                                     iSlice=iSlice, iVol=mySubvolume)

          scriptFile.write(sampleInfo.format(dataFile=dataFileName,
                                             dataFormat=dataFormat,
                                             dataUnit=dataUnit,
                                             sampleName=sampleName,
                                             zMin=sliceMin,
                                             zMax=sliceMax,
                                             zMinMpc=sliceMinMpc,
                                             zMaxMpc=sliceMaxMpc,
                                             omegaM=Om,
                                             boxLen=lbox,
                                             usePecVel=useVel,
                                             minRadius=minRadius,
                                             numSubvolumes=numSubvolumes,
                                             mySubvolume=mySubvolume,
                                             useLightCone=useLightCone,
                                             subsample=subsample))

  scriptFile.close()
  return

#------------------------------------------------------------------------------
#------------------------------------------------------------------------------
if not os.access(scriptDir, os.F_OK): os.mkdir(scriptDir)
if not os.access(catalogDir, os.F_OK): os.mkdir(catalogDir)

# -----------------------------------------------------------------------------
# now the SDSS HOD
parFileText = """
% cosmology
OMEGA_M {omegaM}
HUBBLE {hubble}
OMEGA_B 0.0469
SIGMA_8 0.82
SPECTRAL_INDX 0.95
ITRANS 5
REDSHIFT {redshift}

% halo definition
%DELTA_HALO 200
DELTA_HALO 740.74 % 200/Om_m
M_max 1.00E+16

% fit function types
pdfs 11
pdfc 2
EXCLUSION 4

% hod parameters
M_min {Mmin}
GALAXY_DENSITY 0.0111134 % computed automatically if M_min set, use for sanity
M1 {M1}
sigma_logM {sigma_logM}
alpha {alpha}
M_cut {Mcut}

% simulation info
real_space_xi 1
HOD 1
populate_sim 1
HaloFile {haloFile}
RESOLUTION {numPartPerSide}
BOX_SIZE {boxSize}

% output
root_filename hod
"""

print " Doing DR9 HOD"

# these parameters come from Manera et al 2012, eq. 26
parFileName = "./hod.par"
parFile = open(parFileName, 'w')
haloFile = catalogDir+haloFileBase+fileNums[iRedshift]
parFile.write(parFileText.format(omegaM=omegaM,
                                 hubble=hubble,
                                 redshift=redshift,
                                 Mmin=1.23e13,
                                 M1=1.e14,
                                 sigma_logM=0.596,
                                 alpha=1.0127,
                                 Mcut=1.19399e13,
                                 haloFile=haloFile,
                                 numPartPerSide=numPart**(1/3.),
                                 boxSize=lbox))
parFile.close()

os.system(hodPath+" "+parFileName+">& /dev/null")

# now place these particles on a lightcone, restrict redshift range, apply mask
mask = hp.read_map(maskFile)
nside = hp.get_nside(mask)

inFile = open('hod.mock', 'r')
outFile = open(catalogDir+"/mock.out", 'w')

zBox = float(redshiftRange[0])
Om = float(omegaM)

# converter from redshift to comoving distance
zVsDY = np.linspace(0., zBox+8*lbox*100./LIGHT_SPEED, 10000)
zVsDX = np.zeros(len(zVsDY))
for i in xrange(len(zVsDY)):
  zVsDX[i] = vp.angularDiameter(zVsDY[i], Om=Om)

for line in inFile:
  line = line.split(',')
  x  = float(line[0]) - lbox/2.
  y  = float(line[1]) - lbox/2.
  z  = float(line[2]) - lbox/2.
  vz = float(line[5])

  zBoxInMpc = vp.angularDiameter(zBox, Om=Om)

  redshift = np.sqrt(x*x + y*y + z*z)
  redshift = np.interp(zBoxInMpc+100./LIGHT_SPEED*redshift, zVsDX, zVsDY)

  if redshift < redshiftRange[0] or redshift > redshiftRange[1]: continue

  RA  = np.arctan2(y, x) * 180./np.pi + 180.
  Dec = np.arcsin(z/(redshift*LIGHT_SPEED/100.)) * 180./np.pi

  phi = np.pi/180. * RA
  theta = np.pi/2. - Dec*np.pi/180.
  pos = np.zeros((3))

  pix = hp.ang2pix(nside, theta, phi)
  if mask[pix] <= 0: continue

  print >> outFile, RA, Dec, redshift*LIGHT_SPEED, 0., x, y, z

inFile.close()
outFile.close()

os.system("rm ./hod.*")
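The survey-mask step in applyMaskToMock.py above comes down to one HEALPix pixel lookup per mock galaxy. A standalone sketch of that lookup, assuming healpy is installed and the mask is a map with positive values inside the survey footprint; the coordinates and mask path below are illustrative:

import numpy as np
import healpy as hp

def insideMask(mask, RA, Dec):
  # convert equatorial coordinates in degrees to HEALPix spherical angles
  nside = hp.get_nside(mask)
  phi = np.pi/180. * RA
  theta = np.pi/2. - np.pi/180. * Dec
  pix = hp.ang2pix(nside, theta, phi)
  # keep the point only where the mask is positive, as in the loop above
  return mask[pix] > 0

#mask = hp.read_map("final_boss_mask.fits")  # illustrative path
#print insideMask(mask, 185.0, 25.0)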
62  pipeline/datasets/mock_dr9mid.py  Normal file

@@ -0,0 +1,62 @@
import os

# -----------------------------------------------------------------------------
# -----------------------------------------------------------------------------
# CONFIGURATION

# directory for the input simulation/observational particle files
catalogDir = os.getenv("HOME")+"/workspace/Voids/catalogs/mock_dr9mid/"

# path to HOD code
hodPath = os.getenv("HOME")+"/projects/Voids/hod/HOD.x"

# path to mask
maskFile = os.getenv("HOME")+"/workspace/Voids/catalogs/boss/final_boss_mask.fits"

# where to put the final void catalog, figures, and output logs
voidOutputDir = os.getenv("HOME")+"/workspace/Voids/mock_dr9mid/"
figDir = os.getenv("PWD")+"/../figs/mock_dr9mid/"
logDir = os.getenv("PWD")+"/../logs/mock_dr9mid/"

# where to place the pipeline scripts
scriptDir = os.getenv("PWD")+"/mock_dr9mid/"

# simulation or observation?
dataType = "observation"

# available formats for simulation: gadget, multidark
dataFormat = "multidark"
dataUnit = 1 # as multiple of Mpc/h

# place particles on the lightcone?
useLightCone = True

# list of file numbers for the particle files
# to get particle file name, we take particleFileBase+fileNum
fileNums = (("0.53",))

# redshift range of the mock
redshiftRange = (0.53, 0.6)

# prefix to give all outputs
prefix = "mock_"

# common filename of halo files
haloFileBase = "mdr1_halos_z"

# adjust these two parameters given the memory constraints on your system:
# numZobovDivisions: how many sub-volumes per dimension will zobov process
# numZobovThreads: how many sub-volumes to process at once?
numZobovDivisions = 2
numZobovThreads = 2

# simulation information
numPart = 1024*1024*1024
lbox = 1000 # Mpc/h
omegaM = 0.27
hubble = 0.70

# END CONFIGURATION
# -----------------------------------------------------------------------------
# -----------------------------------------------------------------------------
@@ -52,6 +52,13 @@ prefix = "md_"
 #subSamples = [ 1.0 ]
 subSamples = ((0.1, 0.05, 0.01, 0.002, 0.001, 0.0004, 0.0002))
 
+# common filename of halo files
+haloFileBase = "mdr1_halos_z"
+
+# minimum halo mass cuts to apply to the halo catalog
+# use "none" to get all halos
+minHaloMasses = (("none", 2e12, 1.23e13))
+
 # adjust these two parameters given the memory constraints on your system:
 # numZobovDivisions: how many sub-volumes per dimension will zobov process
 # numZobovThreads: how many sub-volumes to process at once?
@@ -64,6 +64,9 @@ for sample in dataSampleList:
   # save this sample's information
   with open(zobovDir+"/sample_info.dat", 'wb') as output:
     pickle.dump(sample, output, pickle.HIGHEST_PROTOCOL)
+  fp = open(zobovDir+"/sample_info.txt", 'w')
+  fp.write("Redshift range: %f - %f" % (sample.zBoundary[0], sample.zBoundary[1]))
+  fp.close()
 
   # ---------------------------------------------------------------------------
   if (startCatalogStage <= 1) and (endCatalogStage >= 1) and not sample.isCombo:
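The sample_info.dat written here is a plain pickle of the Sample object, which the new plotting scripts read back with pickle.load. A self-contained sketch of that round trip; the Sample class and values below are illustrative stand-ins, not the real void_python_tools.backend.classes.Sample:

import pickle

class Sample(object):
  # stand-in carrying only the fields used in this sketch
  def __init__(self, fullName, zBoundary):
    self.fullName = fullName
    self.zBoundary = zBoundary

sample = Sample("md.halos_min1.23e13_z0.56_d00", (0.56, 0.66))

# write, as the pipeline does for each prepared sample
with open("sample_info.dat", 'wb') as output:
  pickle.dump(sample, output, pickle.HIGHEST_PROTOCOL)

# read back, as the plotting scripts do
with open("sample_info.dat", 'rb') as inFile:
  loaded = pickle.load(inFile)

print "Redshift range: %f - %f" % (loaded.zBoundary[0], loaded.zBoundary[1])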
@@ -60,12 +60,8 @@ def getSampleName(setName, redshift, useVel, iSlice=-1, iVol=-1):
 
   sampleName = setName
 
-  #if useVel: sampleName += "_pv"
-
   sampleName += "_z" + redshift
 
-  #if iSlice != -1: sampleName += "_s" + str(iSlice)
-
   if iVol != -1: sampleName += "_d" + iVol
 
   return sampleName

@@ -87,7 +83,7 @@ def writeScript(setName, dataFileNameBase,
 import os
 from void_python_tools.backend.classes import *
 
-continueRun = False
+continueRun = True # set to True to enable restarting aborted jobs
 startCatalogStage = 1
 endCatalogStage = 4
@@ -326,59 +322,69 @@ for thisSubSample in subSamples:
 
 if (args.script or args.all) and dataFormat == "multidark":
   print " Doing halo scripts"
 
-  dataFile = catalogDir+"/mdr1_halos_z"+fileNums[0]
-  inFile = open(dataFile, 'r')
-  numPart = 0
-  for line in inFile: numPart += 1
-  inFile.close()
+  for minHaloMass in minHaloMasses:
+    dataFile = catalogDir+haloFileBase+fileNums[0]
+    inFile = open(dataFile, 'r')
+    numPart = 0
+    for line in inFile:
+      line = line.split(',')
+      if minHaloMass == "none" or float(line[6]) > minHaloMass:
+        numPart += 1
+    inFile.close()
 
-  minRadius = 2*int(np.ceil(lbox/numPart**(1./3.)))
+    minRadius = 2*int(np.ceil(lbox/numPart**(1./3.)))
 
-  if dataFormat == "multidark":
-    setName = prefix+"halos"
-    writeScript(setName, "md.halos_z", scriptDir, catalogDir, fileNums,
-                redshifts,
-                numSubvolumes, numSlices, False, lbox, minRadius, omegaM)
-    writeScript(setName, "md.halos_z", scriptDir, catalogDir, fileNums,
-                redshifts, numSubvolumes,
-                numSlices, True, lbox, minRadius, omegaM)
+    if dataFormat == "multidark":
+      setName = prefix+"halos_min"+str(minHaloMass)
+      writeScript(setName, "md.halos_min"+str(minHaloMass)+"_z",
+                  scriptDir, catalogDir, fileNums,
+                  redshifts,
+                  numSubvolumes, numSlices, False, lbox, minRadius, omegaM)
+      writeScript(setName, "md.halos_min"+str(minHaloMass)+"_z",
+                  scriptDir, catalogDir, fileNums,
+                  redshifts, numSubvolumes,
+                  numSlices, True, lbox, minRadius, omegaM)
 
 if args.halos or args.all:
   print " Doing halos"
 
-  for (iRedshift, redshift) in enumerate(redshifts):
-    print " z = ", redshift
+  for minHaloMass in minHaloMasses:
+    print " min halo mass = ", minHaloMass
 
-    dataFile = catalogDir+"/mdr1_halos_z"+fileNums[iRedshift]
-    inFile = open(dataFile, 'r')
-    numPart = 0
-    for line in inFile: numPart += 1
-    inFile.close()
+    for (iRedshift, redshift) in enumerate(redshifts):
+      print " z = ", redshift
 
-    sampleName = "md.halos_z"+redshift
-    outFile = open(catalogDir+"/"+sampleName+".dat", 'w')
+      dataFile = catalogDir+haloFileBase+fileNums[iRedshift]
+      inFile = open(dataFile, 'r')
+      numPart = 0
+      for line in inFile:
+        line = line.split(',')
+        if minHaloMass == "none" or float(line[6]) > minHaloMass:
+          numPart += 1
+      inFile.close()
 
-    outFile.write("%f\n" %(lbox))
-    outFile.write("%s\n" %(omegaM))
-    outFile.write("%s\n" %(hubble))
-    outFile.write("%s\n" %(redshift))
-    outFile.write("%d\n" %(numPart))
+      sampleName = "md.halos_min"+str(minHaloMass)+"_z"+redshift
+      outFile = open(catalogDir+"/"+sampleName+".dat", 'w')
 
-    inFile = open(dataFile, 'r')
-    numKept = 0
-    for line in inFile:
-      numKept += 1
-      line = line.split(',')
-      x = float(line[0])
-      y = float(line[1])
-      z = float(line[2])
-      vz = float(line[5])
+      outFile.write("%f\n" %(lbox))
+      outFile.write("%s\n" %(omegaM))
+      outFile.write("%s\n" %(hubble))
+      outFile.write("%s\n" %(redshift))
+      outFile.write("%d\n" %(numPart))
 
-      # write to output file
-      outFile.write("%d %e %e %e %e\n" %(numKept,x,y,z,vz))
+      inFile = open(dataFile, 'r')
+      numKept = 0
+      for line in inFile:
+        line = line.split(',')
+        if minHaloMass == "none" or float(line[6]) > minHaloMass:
+          numKept += 1
+          x = float(line[0])
+          y = float(line[1])
+          z = float(line[2])
+          vz = float(line[5])
 
-    inFile.close()
-    outFile.close()
+          # write to output file
+          outFile.write("%d %e %e %e %e\n" %(numKept,x,y,z,vz))
+
+      inFile.close()
+      outFile.close()
 
 # -----------------------------------------------------------------------------
 # now the SDSS HOD
17  plotting/datasetsToPlot.py  Executable file

@@ -0,0 +1,17 @@

#!/usr/bin/env python

workDir = "/home/psutter2/workspace/Voids/"
figDir = "./figs"

sampleDirList = [ "multidark/md_ss0.1_pv/sample_md_ss0.1_pv_z0.56_d00/",
                  "multidark/md_ss01.0_pv/sample_md_ss1.0_pv_z0.56_d00/",
                  "multidark/md_halos_min1.23e13_pv/sample_md_halos_min1.23e13_pv_z0.56_d00/",
                  "random/ran_ss0.0004/sample_ran_ss0.0004_z0.56_d00/",
                  "random/ran_ss0.1/sample_ran_ss0.1_z0.56_d00/",
                  "multidark/md_hod_dr9mid_pv/sample_md_hod_dr9mid_pv_z0.56_d00/",
                  "multidark/md_ss0.0004_pv/sample_md_ss0.0004_pv_z0.56_d00/",
                  "sdss_dr9/sample_lss.dr9cmassmid.dat/" ]

dataPortion = "central"
93  plotting/plotCompareDensCon.py  Executable file

@@ -0,0 +1,93 @@
#!/usr/bin/env python

# plots cumulative distributions of number counts versus density contrast

from void_python_tools.backend import *
from void_python_tools.plotting import *
import void_python_tools.apTools as vp
import imp
import pickle
import os
import matplotlib.pyplot as plt
import numpy as np
import argparse

# ------------------------------------------------------------------------------

from datasetsToPlot import *

plotNameBase = "compdenscon"

obsFudgeFactor = .66 # what fraction of the volume are we *really* capturing?

parser = argparse.ArgumentParser(description='Plot.')
parser.add_argument('--show', dest='showPlot', action='store_const',
                    const=True, default=False,
                    help='display the plot (default: just write eps)')
args = parser.parse_args()

# ------------------------------------------------------------------------------

if not os.access(figDir, os.F_OK):
  os.makedirs(figDir)

dataSampleList = []

for sampleDir in sampleDirList:
  with open(workDir+sampleDir+"/sample_info.dat", 'rb') as input:
    dataSampleList.append(pickle.load(input))

plt.clf()
plt.xlabel("Void Radius (Mpc/h)")
plt.ylabel(r"N > R [$h^3$ Gpc$^{-3}$]")
plt.yscale('log')
plt.xlim(xmax=80.)

plotName = plotNameBase

for (iSample,sample) in enumerate(dataSampleList):

  sampleName = sample.fullName
  lineTitle = sampleName

  if sample.dataType == "observation":
    boxVol = vp.getSurveyProps(sample.maskFile,
                               sample.zBoundary[0], sample.zBoundary[1],
                               sample.zRange[0], sample.zRange[1], "all",
                               selectionFuncFile=sample.selFunFile)[0]
    boxVol *= obsFudgeFactor
  else:
    boxVol = sample.boxLen*sample.boxLen*(sample.zBoundaryMpc[1] -
                                          sample.zBoundaryMpc[0])

  boxVol *= 1.e-9 # Mpc->Gpc

  filename = workDir+"/"+sampleDirList[iSample]+"/centers_"+dataPortion+"_"+\
             sampleName+".out"
  if not os.access(filename, os.F_OK):
    print "File not found: ", filename
    continue

  data = np.loadtxt(filename, comments="#")
  if data.ndim == 1:
    print " Too few!"
    continue
  data = data[:,8]
  indices = np.arange(0, len(data), 1)
  sorted = np.sort(data)

  plt.plot(sorted, indices[::-1]/boxVol, '-',
           label=lineTitle, color=colorList[iSample],
           linewidth=linewidth)

plt.legend(title = "Samples", loc = "upper right", prop={'size':8})
#plt.title(plotTitle)

plt.savefig(figDir+"/fig_"+plotName+".pdf", bbox_inches="tight")
plt.savefig(figDir+"/fig_"+plotName+".eps", bbox_inches="tight")
plt.savefig(figDir+"/fig_"+plotName+".png", bbox_inches="tight")

if args.showPlot:
  os.system("display %s" % figDir+"/fig_"+plotName+".png")
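The cumulative curve plotted above is built by sorting the chosen column and pairing it with a reversed index array divided by the sample volume. A self-contained sketch of the same construction with made-up data and an assumed volume of 1 Gpc^3/h^3; the file name written at the end is illustrative:

import numpy as np
import matplotlib.pyplot as plt

np.random.seed(0)
values = np.random.uniform(5., 60., 1000)  # made-up stand-in for one column of a centers_*.out file
boxVol = 1.0                               # assumed volume in Gpc^3/h^3

sortedValues = np.sort(values)
indices = np.arange(0, len(values), 1)

# indices[::-1]/boxVol gives the number density of entries above each sorted value
plt.clf()
plt.plot(sortedValues, indices[::-1]/boxVol, '-')
plt.yscale('log')
plt.savefig("fig_cumulative_sketch.png", bbox_inches="tight")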
@@ -14,21 +14,10 @@ import argparse
 
 # ------------------------------------------------------------------------------
 
-workDir = "/home/psutter2/workspace/Voids/"
-figDir = "./figs"
-
-sampleDirList = [ "multidark/md_ss0.1_pv/sample_md_ss0.1_pv_z0.56_d00/",
-                  "multidark/md_halos_pv/sample_md_halos_pv_z0.56_d00/",
-                  "random/ran_ss0.0004/sample_ran_ss0.0004_z0.56_d00/",
-                  "random/ran_ss0.1/sample_ran_ss0.1_z0.56_d00/",
-                  "multidark/md_hod_dr9mid_pv/sample_md_hod_dr9mid_pv_z0.56_d00/",
-                  "multidark/md_ss0.0004_pv/sample_md_ss0.0004_pv_z0.56_d00/",
-                  "sdss_dr9/sample_lss.dr9cmassmid.dat/" ]
+from datasetsToPlot import *
 
 plotNameBase = "compdist"
 
-dataPortion = "central"
 
 obsFudgeFactor = .66 # what fraction of the volume are we *really* capturing?
 
 parser = argparse.ArgumentParser(description='Plot.')
@@ -1,2 +1,3 @@
 from classes import *
 from launchers import *
+from catalogPrep import *