placed mergingThreshold parameter in defaults file

This commit is contained in:
Paul M. Sutter 2024-06-04 11:19:07 +02:00
parent d48d740b78
commit ddcb971eae
5 changed files with 30 additions and 15 deletions

View file

@@ -44,10 +44,10 @@ workDir = basePath
#workDir = os.path.join(basePath,"example_observation") #workDir = os.path.join(basePath,"example_observation")
# output directory for log files # output directory for log files
logDir = os.path.join(basePath,"logs","example_observation") logDir = os.path.join(workDir,"logs","example_observation")
# output directory for figures # output directory for figures
figDir = os.path.join(basePath,"figs","example_observation") figDir = os.path.join(workDir,"figs","example_observation")
# optimization: maximum number of parallel threads to use # optimization: maximum number of parallel threads to use
numZobovThreads = 2 numZobovThreads = 2

View file

@@ -74,7 +74,7 @@ numZobovDivisions = 2
# maximum density for merging voids # maximum density for merging voids
# 0 (equivalent to infinitely large value) -> Merge everything (no threshold) # 0 (equivalent to infinitely large value) -> Merge everything (no threshold)
# 1e-9 (or smaller != 0) -> Do not merge anything # 1e-9 (or smaller != 0) -> Do not merge anything
mergingThreshold = 1e-9 mergingThreshold = 0.2
# prefix to give all outputs # prefix to give all outputs
prefix = "sim_" prefix = "sim_"

View file

@@ -47,7 +47,7 @@ LIGHT_SPEED = 299792.458
# ----------------------------------------------------------------------------- # -----------------------------------------------------------------------------
def launchPrep(sample, binPath, workDir=None, inputDataDir=None, def launchPrep(sample, binPath, workDir=None, inputDataDir=None,
zobovDir=None, figDir=None, logFile=None, useComoving=False, zobovDir=None, figDir=None, logFile=None, useComoving=False,
continueRun=None,regenerate=False): continueRun=None, regenerate=False):
if sample.dataType == "observation": if sample.dataType == "observation":
sampleName = sample.fullName sampleName = sample.fullName
@@ -323,7 +323,8 @@ def launchPrep(sample, binPath, workDir=None, inputDataDir=None,
# ----------------------------------------------------------------------------- # -----------------------------------------------------------------------------
def launchZobov(sample, binPath, zobovDir=None, logDir=None, continueRun=None, def launchZobov(sample, binPath, zobovDir=None, logDir=None, continueRun=None,
numZobovDivisions=None, numZobovThreads=None, mergingThreshold=0.2): numZobovDivisions=None, numZobovThreads=None,
mergingThreshold=0.2):
sampleName = sample.fullName sampleName = sample.fullName

View file

@@ -23,12 +23,23 @@ import os
# ----------------------------------------------------------------------------- # -----------------------------------------------------------------------------
# DEFAULT CONFIGURATION # DEFAULT CONFIGURATION
# the overall name for your dataset
datasetName = "" datasetName = ""
# where to start and end the VIDE pipeline
# stages:
# 1 : extract redshift slices from data
# 2 : void extraction using zobov
# 3 : removal of small voids and voids near the edge
startCatalogStage = 1 startCatalogStage = 1
endCatalogStage = 3 endCatalogStage = 3
# if True, will scan log files for last known completed state and run from there
continueRun = True continueRun = True
# re-build the inputs?
regenerateFlag = False
# directory for the input simulation/observational particle files # directory for the input simulation/observational particle files
catalogDir = os.getenv("HOME")+"/workspace/Voids/catalog/" catalogDir = os.getenv("HOME")+"/workspace/Voids/catalog/"
@@ -36,9 +47,9 @@ catalogDir = os.getenv("HOME")+"/workspace/Voids/catalog/"
hodPath = os.getenv("HOME")+"/projects/Voids/hod/HOD.x" hodPath = os.getenv("HOME")+"/projects/Voids/hod/HOD.x"
# where to put the final void catalog, figures, and output logs # where to put the final void catalog, figures, and output logs
voidOutputDir = os.getenv("HOME")+"/workspace/Voids//" workDir = os.getenv("PWD")+"/output/"
figDir = os.getenv("PWD")+"/../figs/" figDir = os.getenv("PWD")+"/figs/"
logDir = os.getenv("PWD")+"/../logs/" logDir = os.getenv("PWD")+"/logs/"
# where to place the pipeline scripts # where to place the pipeline scripts
scriptDir = os.getenv("PWD")+"/scripts//" scriptDir = os.getenv("PWD")+"/scripts//"
@@ -127,6 +138,11 @@ haloFilePosRescale = 1.0 # rescaling necessary to get Mpc/h
numZobovDivisions = 2 numZobovDivisions = 2
numZobovThreads = 2 numZobovThreads = 2
# Maximum density for merging voids
# 0 (equivalent to infinitely large value) -> Merge everything (no threshold)
# 1e-9 (or smaller != 0) -> Do not merge anything
mergingThreshold = 1.e-9
# simulation information # simulation information
numPart = 512*512*512 numPart = 512*512*512
lbox = 999.983 # Mpc/h lbox = 999.983 # Mpc/h

View file

@@ -53,7 +53,7 @@ parser.add_argument('--parm', dest='parm',
args = parser.parse_args() args = parser.parse_args()
defaultsFile = "@CMAKE_SOURCE_DIR@/python_source/pipeline/defaults.py" defaultsFile = "@CMAKE_SOURCE_DIR@/python_source/vide_pipeline/defaults.py"
parms = imp.load_source("name", defaultsFile) parms = imp.load_source("name", defaultsFile)
globals().update(vars(parms)) globals().update(vars(parms))
@@ -148,7 +148,7 @@ def getNickName(setName, sampleName):
#------------------------------------------------------------------------------ #------------------------------------------------------------------------------
# for given dataset parameters, outputs a script for use with analyzeVoids # for given dataset parameters, outputs a script for use with VIDE pipeline
def writeScript(setName, dataFileNameBase, dataFormat, def writeScript(setName, dataFileNameBase, dataFormat,
scriptDir, catalogDir, fileNums, redshifts, numSubvolumes, scriptDir, catalogDir, fileNums, redshifts, numSubvolumes,
numSlices, useVel, lbox, minRadius, omegaM, subsample=1.0, numSlices, useVel, lbox, minRadius, omegaM, subsample=1.0,
@@ -167,10 +167,6 @@ from backend.classes import *
continueRun = {continueRun} # set to True to enable restarting aborted jobs continueRun = {continueRun} # set to True to enable restarting aborted jobs
startCatalogStage = {startCatalogStage} startCatalogStage = {startCatalogStage}
endCatalogStage = {endCatalogStage} endCatalogStage = {endCatalogStage}
regenerateFlag = False
mergingThreshold = 1e-9
dataSampleList = [] dataSampleList = []
""" """
@@ -190,12 +186,14 @@ logDir = "{logDir}/{setName}/"
numZobovDivisions = {numZobovDivisions} numZobovDivisions = {numZobovDivisions}
numZobovThreads = {numZobovThreads} numZobovThreads = {numZobovThreads}
mergingThreshold = {mergingThreshold}
""" """
scriptFile.write(dataInfo.format(setName=setName, figDir=figDir, scriptFile.write(dataInfo.format(setName=setName, figDir=figDir,
logDir=logDir, voidOutputDir=voidOutputDir, logDir=logDir, voidOutputDir=voidOutputDir,
inputDataDir=catalogDir, inputDataDir=catalogDir,
numZobovDivisions=numZobovDivisions, numZobovDivisions=numZobovDivisions,
numZobovThreads=numZobovThreads)) numZobovThreads=numZobovThreads,
mergingThreshold=mergingThreshold))
sampleInfo = """ sampleInfo = """