Mirror of https://bitbucket.org/cosmicvoids/vide_public.git
Synced 2025-07-04 15:21:11 +00:00

Commit 0a16653e9b (parent 694692cb8a): cleaned up catalog utilities

2 changed files with 68 additions and 73 deletions
@@ -144,10 +144,11 @@ def loadPartVel(sampleDir):
   return partVel


 # -----------------------------------------------------------------------------
-def getPartTree(sampleDir, partData, boxLen):
+def getPartTree(catalog):

-  with open(sampleDir+"/sample_info.dat", 'rb') as input:
-    sample = pickle.load(input)
+  sample = catalog.sampleInfo
+  partData = catalog.partData
+  boxLen = catalog.boxLen

   periodicLine = getPeriodic(sample)
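After this commit getPartTree no longer takes the sample directory, particle array, and box length separately; it pulls all three from the catalog object built by loadVoidCatalog. A minimal usage sketch, assuming the utilities are importable from a path like void_python_tools.voidUtil.catalogUtil (the import path and the sample directory are placeholders, not taken from this diff):

    # Hypothetical import path -- adjust to your void_python_tools installation.
    from void_python_tools.voidUtil.catalogUtil import loadVoidCatalog, getPartTree

    # getPartTree reads catalog.sampleInfo, catalog.partData, and catalog.boxLen,
    # so the catalog must be built with particle loading enabled.
    catalog = loadVoidCatalog("/path/to/vide_output/sample_mySim",
                              dataPortion="central", loadPart=True)
    partTree = getPartTree(catalog)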
@@ -222,7 +223,12 @@ class Catalog:
   sampleInfo = None

 # -----------------------------------------------------------------------------
-def loadVoidCatalog(sampleDir, dataPortion="central"):
+def loadVoidCatalog(sampleDir, dataPortion="central", loadPart=True):
+# loads a void catalog
+# sampleDir: path to VIDE output directory
+# dataPortion: "central" or "all"
+# loadPart: if True, also load particle information

   #print " Loading particle data..."
   sys.stdout.flush()
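The new loadPart flag makes particle loading optional, so a void-only catalog can be read quickly when per-particle information is not needed. A sketch of both call patterns, using the same assumed import path and placeholder paths as above:

    from void_python_tools.voidUtil.catalogUtil import loadVoidCatalog

    sampleDir = "/path/to/vide_output/sample_mySim"   # placeholder VIDE output directory

    # Full catalog: voids plus particles, Voronoi volumes, and membership tables.
    fullCatalog = loadVoidCatalog(sampleDir, dataPortion="central", loadPart=True)

    # Lightweight catalog: only the void properties are read, which is faster
    # and uses far less memory for large simulations.
    voidCatalog = loadVoidCatalog(sampleDir, dataPortion="central", loadPart=False)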
@@ -246,31 +252,6 @@ def loadVoidCatalog(sampleDir, dataPortion="central"):
   catalog.boxLen[2] = ranges[2][1] - ranges[2][0]
   File.close()

-  print "Loading all particles..."
-  partData, boxLen, volNorm, isObservationData, ranges = loadPart(sampleDir)
-  numPartTot = len(partData)
-  catalog.numPartTot = numPartTot
-  catalog.partPos = partData
-  catalog.part = []
-  for i in xrange(len(partData)):
-    catalog.part.append(Bunch(x = partData[i][0],
-                              y = partData[i][1],
-                              z = partData[i][2],
-                              volume = 0,
-                              ra = 0,
-                              dec = 0,
-                              redshift = 0,
-                              uniqueID = 0))
-
-
-  print "Loading volumes..."
-  volFile = sampleDir+"/vol_"+sample.fullName+".dat"
-  File = file(volFile)
-  chk = np.fromfile(File, dtype=np.int32,count=1)
-  vols = np.fromfile(File, dtype=np.float32,count=numPartTot)
-  for ivol in xrange(len(vols)):
-    catalog.part[ivol].volume = vols[ivol] / volNorm
-
   print "Loading voids..."
   fileName = sampleDir+"/untrimmed_voidDesc_"+dataPortion+"_"+sample.fullName+".out"
   catData = np.loadtxt(fileName, comments="#", skiprows=2)
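The particle and volume loading removed here reappears under if loadPart: in the next hunk. As the removed code shows, vol_<fullName>.dat is a flat binary stream: one int32 header word followed by one float32 Voronoi volume per particle, which loadVoidCatalog divides by volNorm. A standalone sketch of reading such a file with NumPy (the path, particle count, and normalization below are placeholders):

    import numpy as np

    volFile = "/path/to/vide_output/vol_mySample.dat"   # placeholder path
    numPartTot = 1000000                                # particle count for this sample
    volNorm = 1.0                                       # normalization returned by loadPart()

    with open(volFile, "rb") as f:
      chk = np.fromfile(f, dtype=np.int32, count=1)              # header word
      vols = np.fromfile(f, dtype=np.float32, count=numPartTot)  # one volume per particle

    vols = vols / volNorm   # same normalization applied in loadVoidCatalog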
@@ -342,40 +323,65 @@ def loadVoidCatalog(sampleDir, dataPortion="central"):

     iLine += 1

-  print "Loading zone-void membership info..."
-  zoneFile = sampleDir+"/voidZone_"+sample.fullName+".dat"
-  catalog.void2Zones = []
-  File = file(zoneFile)
-  numZonesTot = np.fromfile(File, dtype=np.int32,count=1)
-  catalog.numZonesTot = numZonesTot
-  for iZ in xrange(numZonesTot):
-    numZones = np.fromfile(File, dtype=np.int32,count=1)
-    catalog.void2Zones.append(Bunch(numZones = numZones,
-                                    zoneIDs = []))
+  if loadPart:
+    print "Loading all particles..."
+    partData, boxLen, volNorm, isObservationData, ranges = loadPart(sampleDir)
+    numPartTot = len(partData)
+    catalog.numPartTot = numPartTot
+    catalog.partPos = partData
+    catalog.part = []
+    for i in xrange(len(partData)):
+      catalog.part.append(Bunch(x = partData[i][0],
+                                y = partData[i][1],
+                                z = partData[i][2],
+                                volume = 0,
+                                ra = 0,
+                                dec = 0,
+                                redshift = 0,
+                                uniqueID = 0))
+
+
+    print "Loading volumes..."
+    volFile = sampleDir+"/vol_"+sample.fullName+".dat"
+    File = file(volFile)
+    chk = np.fromfile(File, dtype=np.int32,count=1)
+    vols = np.fromfile(File, dtype=np.float32,count=numPartTot)
+    for ivol in xrange(len(vols)):
+      catalog.part[ivol].volume = vols[ivol] / volNorm
+
-    for p in xrange(numZones):
-      zoneID = np.fromfile(File, dtype=np.int32,count=1)
-      catalog.void2Zones[iZ].zoneIDs.append(zoneID)
+    print "Loading zone-void membership info..."
+    zoneFile = sampleDir+"/voidZone_"+sample.fullName+".dat"
+    catalog.void2Zones = []
+    File = file(zoneFile)
+    numZonesTot = np.fromfile(File, dtype=np.int32,count=1)
+    catalog.numZonesTot = numZonesTot
+    for iZ in xrange(numZonesTot):
+      numZones = np.fromfile(File, dtype=np.int32,count=1)
+      catalog.void2Zones.append(Bunch(numZones = numZones,
+                                      zoneIDs = []))
+
+      for p in xrange(numZones):
+        zoneID = np.fromfile(File, dtype=np.int32,count=1)
+        catalog.void2Zones[iZ].zoneIDs.append(zoneID)

-  print "Loading particle-zone membership info..."
-  zonePartFile = sampleDir+"/voidPart_"+sample.fullName+".dat"
-  catalog.zones2Parts = []
-  File = file(zonePartFile)
-  chk = np.fromfile(File, dtype=np.int32,count=1)
-  numZonesTot = np.fromfile(File, dtype=np.int32,count=1)
-  for iZ in xrange(numZonesTot):
-    numPart = np.fromfile(File, dtype=np.int32,count=1)
-    catalog.zones2Parts.append(Bunch(numPart = numPart,
-                                     partIDs = []))
+    print "Loading particle-zone membership info..."
+    zonePartFile = sampleDir+"/voidPart_"+sample.fullName+".dat"
+    catalog.zones2Parts = []
+    File = file(zonePartFile)
+    chk = np.fromfile(File, dtype=np.int32,count=1)
+    numZonesTot = np.fromfile(File, dtype=np.int32,count=1)
+    for iZ in xrange(numZonesTot):
+      numPart = np.fromfile(File, dtype=np.int32,count=1)
+      catalog.zones2Parts.append(Bunch(numPart = numPart,
+                                       partIDs = []))

-    for p in xrange(numPart):
-      partID = np.fromfile(File, dtype=np.int32,count=1)
-      catalog.zones2Parts[iZ].partIDs.append(partID)
+      for p in xrange(numPart):
+        partID = np.fromfile(File, dtype=np.int32,count=1)
+        catalog.zones2Parts[iZ].partIDs.append(partID)

   return catalog


 # -----------------------------------------------------------------------------
 def getVoidPart(catalog, voidID):
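Because the zone-void and particle-zone membership tables are now read only when loadPart=True, downstream code should guard access to them. A defensive sketch using only attributes built in this function (it assumes zone IDs index directly into zones2Parts; the helper name is hypothetical):

    def countVoidParticles(catalog, voidID=0):
      # void2Zones and zones2Parts are attached only when loadPart=True.
      if not hasattr(catalog, "void2Zones") or not catalog.void2Zones:
        print("membership info not loaded; rerun loadVoidCatalog with loadPart=True")
        return 0
      total = 0
      for zoneID in catalog.void2Zones[voidID].zoneIDs:
        # np.fromfile(..., count=1) returns length-1 arrays, hence the [0]
        total += int(catalog.zones2Parts[int(zoneID[0])].numPart[0])
      return total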
@@ -17,8 +17,7 @@
 # with this program; if not, write to the Free Software Foundation, Inc.,
 # 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
 #+
-__all__=['plotRedshiftDistribution', 'plotSizeDistribution', 'plot1dProfiles',
-         'plotMarg1d', 'plotNumberDistribution', 'plotVoidDistribution']
+__all__=['plotNumberFunction',]

 from void_python_tools.backend.classes import *
 from plotDefs import *
@@ -40,7 +39,7 @@ def fill_between(x, y1, y2=0, ax=None, **kwargs):
   ax.add_patch(p)

 # -----------------------------------------------------------------------------
-def plotNumberFunction(sampleDirList=None, figDir="./",
+def plotNumberFunction(catalogList, figDir="./",
                        plotName="numberfunc",
                        dataPortion="central"):

@@ -50,22 +49,12 @@ plt.clf()
   plt.xlabel("$R_{eff}$ [$h^{-1}Mpc$]", fontsize=14)
   plt.ylabel(r"log ($n$ (> R) [$h^3$ Gpc$^{-3}$])", fontsize=14)

-  for (iSample,sampleDir) in enumerate(sampleDirList):
-    with open(workDir+sampleDir+"/sample_info.dat", 'rb') as input:
-      sample = pickle.load(input)
-    filename = sampleDir+"/centers_"+dataPortion+"_"+sample.fullName+".out"
-    if not os.access(filename, os.F_OK):
-      print "File not found: ", filename
-    else:
-      data = np.loadtxt(filename, comments="#")[:,4]
+  for (iSample,catalog) in enumerate(catalogList):
+    sample = catalog.sampleInfo
+    data = catalog.voids[:].radius

     if sample.dataType == "observation":
       # look for the mask file
-      if os.access(sample.maskFile, os.F_OK):
-        maskFile = sample.maskFile
-      else:
-        maskFile = sampleDir+"/"+os.path.basename(sample.maskFile)
-        print "Using maskfile found in:", maskFile
+      maskFile = sample.maskFile

       boxVol = vp.getSurveyProps(maskFile,
                                  sample.zBoundary[0], sample.zBoundary[1],
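With this change plotNumberFunction consumes catalog objects instead of raw sample directories, so it pairs naturally with loadVoidCatalog. An end-to-end sketch; the import locations are assumptions and the directories are placeholders, while the keyword defaults come from this diff:

    from void_python_tools.voidUtil.catalogUtil import loadVoidCatalog     # assumed path
    from void_python_tools.plotting.plotTools import plotNumberFunction    # assumed path

    sampleDirs = ["/path/to/vide_output/sample_simA",
                  "/path/to/vide_output/sample_simB"]

    # The number function only needs void radii and sample info,
    # so particle loading can be skipped.
    catalogList = [loadVoidCatalog(d, dataPortion="central", loadPart=False)
                   for d in sampleDirs]

    plotNumberFunction(catalogList, figDir="./figs",
                       plotName="numberfunc", dataPortion="central")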