removal of some cross-comparison scripts

P.M. Sutter 2013-04-29 21:44:18 -05:00
parent 4d6e653bda
commit 41e179da64
5 changed files with 0 additions and 495 deletions

apAnalysis.py

@@ -1,129 +0,0 @@
#!/usr/bin/env python
#+
# VIDE -- Void IDEntification pipeline -- ./pipeline/apAnalysis.py
# Copyright (C) 2010-2013 Guilhem Lavaux
# Copyright (C) 2011-2013 P. M. Sutter
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; version 2 of the License.
#
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#+
# takes a set of voids with given radii, and stacks and plots matches in
# other catalogs
from void_python_tools.backend import *
import imp
import pickle
import os
import sys
import numpy as np
import argparse
# ------------------------------------------------------------------------------
mergerNameBase = "mergerTree"
parser = argparse.ArgumentParser(description='Analyze.')
parser.add_argument('--parm', dest='parm', default='datasetsToAnalyze.py', help='path to parameter file')
args = parser.parse_args()
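# hypothetical example invocation (paths illustrative only), using the
# default parameter file named above:
#   python apAnalysis.py --parm datasetsToAnalyze.py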
# ------------------------------------------------------------------------------
filename = args.parm
print " Loading parameters from", filename
if not os.access(filename, os.F_OK):
  print " Cannot find parameter file %s!" % filename
  exit(-1)
parms = imp.load_source("name", filename)
globals().update(vars(parms))

if not os.access(dataDir, os.F_OK):
  os.makedirs(dataDir)
if not os.access(logDir, os.F_OK):
  os.makedirs(logDir)

mergerFileBase = dataDir + "/" + mergerNameBase

# get list of base voids
with open(workDir+baseSampleDir+"/sample_info.dat", 'rb') as input:
  baseSample = pickle.load(input)
baseSampleName = baseSample.fullName
baseVoidList = np.loadtxt(workDir+baseSampleDir+"/untrimmed_centers_central_"+\
                          baseSampleName+".out")
for stack in baseSample.stacks:
  print " Stack:", stack.rMin
  accepted = (baseVoidList[:,4] > stack.rMin) & (baseVoidList[:,4] < stack.rMax)
  baseIDList = baseVoidList[:,7][accepted]

  for (iSample, sampleDir) in enumerate(sampleDirList):
    with open(workDir+sampleDir+"/sample_info.dat", 'rb') as input:
      sample = pickle.load(input)

    print " Working with", sample.fullName, "..."
    sys.stdout.flush()
    sampleName = sample.fullName

    # get list of appropriate voids
    if sample.fullName == baseSample.fullName:
      idList = baseIDList
    else:
      matchList = np.loadtxt(mergerFileBase+"_"+baseSampleName+"_"+sampleName+\
                             "_summary.out")
      idList = []
      for i,testID in enumerate(matchList[:,8]):
        if np.any(testID == baseIDList):
          idList.append(matchList[i,0])
      idList = np.array(idList)
    idList = idList.astype(int)
    print " Found", len(idList), "voids to work with"

    voidBaseDir = workDir+"/"+sampleDir+"stacks"
    runSuffix = getStackSuffix(stack.zMin, stack.zMax, stack.rMin,
                               stack.rMax, dataPortion,
                               customLine="selected")
    voidDir = voidBaseDir+"_"+runSuffix
    if not os.access(voidDir,os.F_OK): os.makedirs(voidDir)

    if len(idList) == 0:
      print " No voids here anyway, skipping..."
      continue

    print " Stacking voids...",
    sys.stdout.flush()
    STACK_PATH = CTOOLS_PATH+"/stacking/stackVoidsZero"
    launchStack(sample, stack, STACK_PATH,
                thisDataPortion=dataPortion,
                logDir=logDir, voidDir=voidDir,
                zobovDir=workDir+"/"+sampleDir,
                freshStack=True, INCOHERENT=False,
                ranSeed=101010, summaryFile=None,
                continueRun=False,
                dataType=sample.dataType,
                idList=idList, rescaleOverride="")

    print " Profiling stacks...",
    sys.stdout.flush()
    logFile = logDir+"/profile_"+sampleName+"_"+runSuffix+".out"
    launchProfile(sample, stack, voidDir=voidDir,
                  logFile=logFile, continueRun=False)

print " Done!"

datasetsToAnalyze.py

@@ -1,44 +0,0 @@
#!/usr/bin/env python
#+
# VIDE -- Void IDEntification pipeline -- ./crossCompare/analysis/datasetsToAnalyze.py
# Copyright (C) 2010-2013 Guilhem Lavaux
# Copyright (C) 2011-2013 P. M. Sutter
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; version 2 of the License.
#
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#+
workDir = "/home/psutter2/workspace/Voids/"
dataDir = "/home/psutter2/workspace/Voids/crossCompare/mergerTree/"
CTOOLS_PATH = "../../c_tools/"
baseSampleDir = "mergertree512/mt_ss0.1/sample_md_ss0.1_z0.00_d00/"
sampleDirList = [
"mergertree512/mt_ss1e-05/sample_md_ss1e-05_z0.00_d00/",
#"mergertree512/mt_ss0.000175/sample_md_ss0.000175_z0.00_d00/",
#"mergertree512/mt_ss0.0004/sample_md_ss0.0004_z0.00_d00/",
#"mergertree512/mt_ss0.001/sample_md_ss0.001_z0.00_d00/",
#"mergertree512/mt_ss0.002/sample_md_ss0.002_z0.00_d00/",
"mergertree512/mt_ss0.01/sample_md_ss0.01_z0.00_d00/",
"mergertree512/mt_hod_dr72dim2/sample_md_hod_dr72dim2_z0.00_d00/",
"mergertree512/mt_hod_dr9mid/sample_md_hod_dr9mid_z0.00_d00/",
"mergertree512/mt_halos_min1.2e+13/sample_md_halos_min1.2e+13_z0.00_d00/",
]
dataPortion = "central"

mergerTree.py

@@ -1,104 +0,0 @@
#!/usr/bin/env python
#+
# VIDE -- Void IDEntification pipeline -- ./crossCompare/analysis/mergerTree.py
# Copyright (C) 2010-2013 Guilhem Lavaux
# Copyright (C) 2011-2013 P. M. Sutter
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; version 2 of the License.
#
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#+
from void_python_tools.backend import *
from void_python_tools.plotting import *
import imp
import pickle
import os
import sys
import matplotlib.pyplot as plt
import numpy as np
import argparse
# ------------------------------------------------------------------------------
dataNameBase = "mergerTree"
parser = argparse.ArgumentParser(description='Analyze.')
parser.add_argument('--parm', dest='parm', default='datasetsToAnalyze.py',
help='path to parameter file')
args = parser.parse_args()
# ------------------------------------------------------------------------------
filename = args.parm
print " Loading parameters from", filename
if not os.access(filename, os.F_OK):
  print " Cannot find parameter file %s!" % filename
  exit(-1)
parms = imp.load_source("name", filename)
globals().update(vars(parms))

if not os.access(dataDir, os.F_OK):
  os.makedirs(dataDir)

outFileName = dataDir + "/" + dataNameBase #+ ".dat"

with open(workDir+baseSampleDir+"/sample_info.dat", 'rb') as input:
  baseSample = pickle.load(input)
for (iSample, sampleDir) in enumerate(sampleDirList):
  with open(workDir+sampleDir+"/sample_info.dat", 'rb') as input:
    sample = pickle.load(input)

  print " Working with", sample.fullName, "...",
  sys.stdout.flush()
  sampleName = sample.fullName

  binPath = CTOOLS_PATH+"/analysis/voidOverlap"
  logFile = logDir+"/mergertree_"+baseSample.fullName+"_"+sampleName+".out"
  stepOutputFileName = outFileName + "_" + baseSample.fullName + "_" + \
                       sampleName + "_"
  #stepOutputFileName = os.getcwd()+"/data/overlap_"

  launchVoidOverlap(sample, baseSample, workDir+sampleDir,
                    workDir+baseSampleDir, binPath,
                    thisDataPortion="central", logFile=logFile,
                    continueRun=False, workDir=workDir,
                    outputFile=stepOutputFileName,
                    matchMethod="useID")
                    #matchMethod="prox")
# attach columns to summary file
#if iSample == 1:
# os.system("cp %s %s" % (stepOutputFileName, outFileName))
#else:
# outFile = open("temp.out", "w")
# stepOutFile1 = open(outFileName, "r")
# stepOutFile2 = open(stepOutputFileName, "r")
#
# for line1 in stepOutFile1:
# line1 = line1.rstrip()
# line2 = stepOutFile2.readline()
# outFile.write(line1+" "+line2)
#
# os.system("cp %s %s" % ("temp.out", outFileName))
# outFile.close()
# stepOutFile1.close()
# stepOutFile2.close()
#if os.access("mergerTree.log", os.F_OK): os.unlink("mergerTree.log")
#if os.access("temp.out", os.F_OK): os.unlink("temp.out")
#if os.access("thisStep.out", os.F_OK): os.unlink("thisStep.out")
print " Done!"

mock_dr9mid.py

@@ -1,81 +0,0 @@
#+
# VIDE -- Void IDEntification pipeline -- ./pipeline/datasets/mock_dr9mid.py
# Copyright (C) 2010-2013 Guilhem Lavaux
# Copyright (C) 2011-2013 P. M. Sutter
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; version 2 of the License.
#
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#+
import os
# -----------------------------------------------------------------------------
# -----------------------------------------------------------------------------
# CONFIGURATION
# directory for the input simulation/observational particle files
catalogDir = os.getenv("HOME")+"/workspace/Voids/catalogs/mock_dr9mid/"
# path to HOD code
hodPath = os.getenv("HOME")+"/projects/Voids/hod/HOD.x"
# path to mask
maskFile = os.getenv("HOME")+"/workspace/Voids/catalogs/boss/final_boss_mask.fits"
# where to put the final void catalog, figures, and output logs
voidOutputDir = os.getenv("HOME")+"/workspace/Voids/mock_dr9mid/"
figDir = os.getenv("PWD")+"/../figs/mock_dr9mid/"
logDir = os.getenv("PWD")+"/../logs/mock_dr9mid/"
# where to place the pipeline scripts
scriptDir = os.getenv("PWD")+"/mock_dr9mid/"
# simulation or observation?
dataType = "observation"
# available formats for simulation: gadget, multidark
dataFormat = "multidark"
dataUnit = 1 # as multiple of Mpc/h
# place particles on the lightcone?
useLightCone = True
# list of file numbers for the particle files
# to get particle file name, we take particleFileBase+fileNum
fileNums = ["0.53"]
# redshift range of the mock
redshiftRange = (0.53, 0.6)
# prefix to give all outputs
prefix = "mock_"
# common filename of halo files
haloFileBase = "mdr1_halos_z"
# adjust these two parameters given the memory constraints on your system:
# numZobovDivisions: how many sub-volumes per dimension will zobov process
# numZobovThreads: how many sub-volumes to process at once?
numZobovDivisions = 2
numZobovThreads = 2
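# illustrative note: with numZobovDivisions = 2 the volume is presumably split
# into 2**3 = 8 sub-volumes in 3D, of which numZobovThreads are run at once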
# simulation information
numPart = 1024*1024*1024
lbox = 1000 # Mpc/h
omegaM = 0.27
hubble = 0.70
# END CONFIGURATION
# -----------------------------------------------------------------------------
# -----------------------------------------------------------------------------

multidark.py

@@ -1,137 +0,0 @@
#+
# VIDE -- Void IDEntification pipeline -- ./pipeline/datasets/multidark.py
# Copyright (C) 2010-2013 Guilhem Lavaux
# Copyright (C) 2011-2013 P. M. Sutter
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; version 2 of the License.
#
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#+
import os
# -----------------------------------------------------------------------------
# -----------------------------------------------------------------------------
# CONFIGURATION
# directory for the input simulation/observational particle files
catalogDir = os.getenv("HOME")+"/workspace/Voids/catalogs/multidark/"
# path to HOD code
hodPath = os.getenv("HOME")+"/projects/Voids/hod/HOD.x"
# where to put the final void catalog, figures, and output logs
voidOutputDir = os.getenv("HOME")+"/workspace/Voids/multidark/"
figDir = os.getenv("PWD")+"/../figs/multidark/"
logDir = os.getenv("PWD")+"/../logs/multidark/"
# where to place the pipeline scripts
scriptDir = os.getenv("PWD")+"/multidark/"
# simulation or observation?
dataType = "simulation"
# available formats for simulation: gadget, multidark
dataFormat = "multidark"
dataUnit = 1 # as multiple of Mpc/h
# place particles on the lightcone?
useLightCone = False
# common filename of particle files
particleFileBase = "mdr1_particles_z"
particleFileDummy = ''
# list of file numbers for the particle files
# to get particle file name, we take particleFileBase+fileNum
#fileNums = ["0.53"]
fileNums = ["0.0"]
#fileNums = ["0.0", "0.53", "1.0"]
# redshift of each file in the above list
#redshifts = ["0.53"]
redshifts = ["0.0"]
#redshifts = ["0.0", "0.53"]
#redshifts = ["0.0", "0.53", "1.0"]
# how many independent slices along the z-axis?
numSlices = 1
#numSlices = 4
# how many subdivisions along the x- and y- axis?
# ( = 2 will make 4 subvolumes for each slice, = 3 will make 9, etc.)
numSubvolumes = 1
# prefix to give all outputs
prefix = "md_"
# list of desired subsamples - these are number densities in units of h^3 Mpc^-3!
#subSamples = [0.01]
subSamples = [0.1, 0.05, 0.01, 0.002, 0.001, 0.0004, 0.000175, 0.00001]
# common filename of halo files, leave blank to ignore halos
haloFileBase = "mdr1_halos_z"
haloFileDummy = ''
# minimum halo mass cuts to apply for the halo catalog
# use "none" to get all halos
minHaloMasses = [1.2e13]
#minHaloMasses = ["none", 1.2e13]
# locations of data in the halo catalog
haloFileMCol = 6
haloFileXCol = 0
haloFileYCol = 1
haloFileZCol = 2
haloFileVXCol = 3
haloFileVYCol = 4
haloFileVZCol = 5
haloFileColSep = ','
haloFileNumComLines = 0
# adjust these two parameters given the memory constraints on your system:
# numZobovDivisions: how many sub-volumes per dimension will zobov process
# numZobovThreads: how many sub-volumes to process at once?
numZobovDivisions = 2
numZobovThreads = 2
# simulation information
numPart = 100000000
#numPart = 2048*2048*2048
lbox = 1000 # Mpc/h
omegaM = 0.27
hubble = 0.70
#galDens = 0.000225
hodParmList = [
{'name' : "dr9mid", #BOSS: Manera et al. 2012, eq. 26
'Mmin' : 0.0,
'M1' : 1.e14,
'sigma_logM' : 0.596,
'alpha' : 1.0127,
'Mcut' : 1.19399e13,
'galDens' : 0.00016,
},
{'name' : "dr7dim2",
'Mmin' : 1.99525e12,
'M1' : 3.80189e13,
'sigma_logM' : 0.21,
'alpha' : 1.12,
'Mcut' : 6.91831e11,
'galDens' : 0.02,
}
]
# END CONFIGURATION
# -----------------------------------------------------------------------------
# -----------------------------------------------------------------------------