Mirror of https://bitbucket.org/cosmicvoids/vide_public.git, synced 2025-07-04 23:31:12 +00:00
prepareCatalogs now first loads a parameter file full of defaults
parent 870e611b6a
commit 2c2426f662
2 changed files with 154 additions and 8 deletions
python_tools/pipeline_source/defaults.py | 132 (new file)
@@ -0,0 +1,132 @@
#+
# VIDE -- Void IDEntification pipeline -- ./pipeline/datasets/mergertree.py
# Copyright (C) 2010-2013 Guilhem Lavaux
# Copyright (C) 2011-2013 P. M. Sutter
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; version 2 of the License.
#
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#+
import os

# -----------------------------------------------------------------------------
# -----------------------------------------------------------------------------
# DEFAULT CONFIGURATION

startCatalogStage = 1
endCatalogStage = 3

startAPStage = 1
endAPStage = 5

continueRun = True
dataPortions = ["central"]

# directory for the input simulation/observational particle files
catalogDir = os.getenv("HOME")+"/workspace/Voids/catalog/"

# path to HOD code
hodPath = os.getenv("HOME")+"/projects/Voids/hod/HOD.x"

# where to put the final void catalog, figures, and output logs
voidOutputDir = os.getenv("HOME")+"/workspace/Voids//"
figDir = os.getenv("PWD")+"/../figs/"
logDir = os.getenv("PWD")+"/../logs/"

# where to place the pipeline scripts
scriptDir = os.getenv("PWD")+"/scripts//"

# simulation or observation?
dataType = "simulation"

# available formats for simulation: gadget, mergertree
dataFormat = "sdf"
dataUnit = 1 # as multiple of Mpc/h

# place particles on the lightcone?
useLightCone = False

# also do peculiar velocities?
doPecVel = False

# common filename of particle files
particleFileBase = "mf_4s_1G_512_NNNNN"
particleFileDummy = 'NNNNN'

# list of file numbers for the particle files
# to get particle file name, we take particleFileBase+fileNum
fileNums = ["0.667", "0.500"]

# redshift of each file in the above list
redshifts = ["0.5", "1.0"]

# how many independent slices along the z-axis?
numSlices = 1

# how many subdivisions along the x- and y-axis?
# ( = 2 will make 4 subvolumes for each slice, = 3 will make 9, etc.)
numSubvolumes = 1

# prefix to give all outputs
prefix = "mt_"

# list of desired subsamples - these are in units of h Mpc^-3!
subSamples = [1.0]
doSubSampling = True # do the subsampling in preparation script?

# common filename of halo files, leave blank to ignore halos
haloFileBase = "mf_4s_1G_512_bgc2_NNNNN.sdf"
haloFileDummy = 'NNNNN'

# minimum halo mass cuts to apply for the halo catalog
# use "none" to get all halos
minHaloMasses = [1.2e13]

# locations of data in the halo catalog
haloFileMCol = 6
haloFileXCol = 0
haloFileYCol = 1
haloFileZCol = 2
haloFileVXCol = 3
haloFileVYCol = 4
haloFileVZCol = 5
haloFileColSep = ','
haloFileNumComLines = 0

# adjust these two parameters given the memory constraints on your system:
# numZobovDivisions: how many sub-volumes per dimension will zobov process
# numZobovThreads: how many sub-volumes to process at once?
numZobovDivisions = 2
numZobovThreads = 2

# simulation information
numPart = 512*512*512
lbox = 999.983 # Mpc/h
omegaM = 0.2847979853038958
hubble = 0.6962

#galDens = 0.000225
hodParmList = [
  {'name'       : "dr9mid", # BOSS: Manera et al. 2012, eq. 26
   'Mmin'       : 0.0,
   'M1'         : 1.e14,
   'sigma_logM' : 0.596,
   'alpha'      : 1.0127,
   'Mcut'       : 1.19399e13,
   'galDens'    : 0.0002,
  },
]

# END CONFIGURATION
# -----------------------------------------------------------------------------
# -----------------------------------------------------------------------------
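With these defaults in place, the parameter file passed to prepareCatalogs via --parm only needs to restate the values it wants to change; everything else falls back to defaults.py. The snippet below is a hypothetical example of such a file: the name my_run.py and the particular overrides are illustrative, but each variable matches one defined above.

# my_run.py -- hypothetical --parm file; anything not set here keeps the
# value loaded from defaults.py above
import os

continueRun       = False                                # start fresh instead of resuming
endAPStage        = 1                                    # stop the AP analysis after its first stage
catalogDir        = os.getenv("HOME")+"/sims/catalog/"   # illustrative input location
numZobovDivisions = 4                                    # split the box further if memory is tight
numZobovThreads   = 4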
@@ -56,6 +56,10 @@ parser.add_argument('--parm', dest='parm',
 args = parser.parse_args()
 
+defaultsFile = "@CMAKE_BINARY_DIR@/python_tools/pipeline_source/defaults.py"
+parms = imp.load_source("name", defaultsFile)
+globals().update(vars(parms))
+
 filename = args.parm
 print " Loading parameters from", filename
 if not os.access(filename, os.F_OK):
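This hunk is where the new behaviour is wired in: defaults.py is loaded and its variables injected into the script's globals before the user's --parm file is read. Below is a minimal standalone sketch of that load order; the file names and the assumption that the user file is merged with a second globals().update() call are illustrative, since the handling of args.parm falls outside the visible context.

# Standalone sketch of the defaults-then-override load order (assumed names).
import imp
import os

defaultsFile = "defaults.py"                       # assumed path for this sketch
parms = imp.load_source("defaults", defaultsFile)
globals().update(vars(parms))                      # every default becomes a global

userParmFile = "my_run.py"                         # hypothetical --parm argument
if os.access(userParmFile, os.F_OK):
    parms = imp.load_source("userParms", userParmFile)
    globals().update(vars(parms))                  # user settings override the defaults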
@@ -109,16 +113,16 @@ def writeScript(setName, dataFileNameBase, dataFormat,
 scriptFileName = scriptDir + "/" + setName + ".py"
 scriptFile = open(scriptFileName, 'w')
 
-scriptFile.write("""#!/usr/bin/env/python
+header = """#!/usr/bin/env/python
 import os
 from void_python_tools.backend.classes import *
 
-continueRun = False # set to True to enable restarting aborted jobs
+continueRun = {continueRun} # set to True to enable restarting aborted jobs
-startCatalogStage = 1
+startCatalogStage = {startCatalogStage}
-endCatalogStage = 3
+endCatalogStage = {endCatalogStage}
 
-startAPStage = 1
+startAPStage = {startAPStage}
-endAPStage = 1
+endAPStage = {endAPStage}
 
 regenerateFlag = False
 ZOBOV_PATH = "@CMAKE_BINARY_DIR@/zobov/"
@@ -130,10 +134,16 @@ ranSeed = 101010
 useLCDM = False
 bias = 1.16
 
-dataPortions = ["central"]
+dataPortions = {dataPortions}
 dataSampleList = []
-""")
+"""
+
+scriptFile.write(header.format(startCatalogStage=startCatalogStage,
+                               endCatalogStage=endCatalogStage,
+                               startAPStage=startAPStage,
+                               endAPStage=endAPStage,
+                               continueRun=continueRun,
+                               dataPortions=dataPortions))
 
 dataInfo = """
 setName = "{setName}"
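Together with the previous hunk, this change turns the hard-coded header that writeScript used to emit into a str.format() template whose {placeholders} are filled from the loaded defaults. A small self-contained illustration of that templating technique follows; the output file name and the values are made up.

# Minimal illustration of the header-templating technique: a triple-quoted
# template with {placeholders}, filled by str.format() and written out.
header = """#!/usr/bin/env python
continueRun = {continueRun}
startCatalogStage = {startCatalogStage}
endCatalogStage = {endCatalogStage}
"""

with open("example_script.py", "w") as scriptFile:        # hypothetical output path
    scriptFile.write(header.format(continueRun=True,
                                   startCatalogStage=1,
                                   endCatalogStage=3))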
@@ -175,6 +185,8 @@ newSample = Sample(dataFile = "{dataFile}",
 useLightCone = {useLightCone},
 subsample = "{subsample}")
 dataSampleList.append(newSample)
+newSample.addStack(0.0, 5.0, 10, 15, False, False)
+newSample.addStack(0.0, 5.0, 15, 20, False, False)
 newSample.addStack(0.0, 5.0, 20, 25, False, False)
 newSample.addStack(0.0, 5.0, 30, 35, False, False)
 newSample.addStack(0.0, 5.0, 40, 45, False, False)
@@ -693,6 +705,8 @@ if (args.hod or args.all) and haloFileBase != "":
 outFileName = catalogDir+"/"+sampleName+".dat"
 os.system("mv %s/hod.mock %s" % (catalogDir, outFileName))
 os.system("rm %s/hod.*" % catalogDir)
+os.system("rm ./hod.par")
+os.system("rm ./hod-usedvalues")
 
 if dataFormat == "sdf": os.system("rm %s" % haloFile)