diff --git a/c_tools/hod/populate_simulation.c b/c_tools/hod/populate_simulation.c
index 20438a7..43aa51b 100644
--- a/c_tools/hod/populate_simulation.c
+++ b/c_tools/hod/populate_simulation.c
@@ -63,7 +63,7 @@ void populate_simulation()
   FILE *fp,*fpa[9],*fp2,*fpb[9],*fpc[9],*fps[9],*fpt;
   int i,j,k,n,imass,n1,j_start=0,i1,galcnt[1000],halocnt[1000],imag;
   double mass,xg[3],vg[3],nsat,nc[10],ncen,mlo,mag,err1,err2,r,fac,sigv;
-  char aa[1000];
+  char aa[5000];
   float x1,xh[3],vh[3],vgf[3];
   long IDUM3 = -445;
 
@@ -74,7 +74,7 @@ void populate_simulation()
   int ngal,nsati[9],ALL_FILES=0,TRACK_GALAXIES=0,WARREN_MASS_CORRECTION=0,haloid;
   int nadded;
   FILE *fpBuh;
-  char buhFile[1000];
+  char buhFile[5000];
 
   nadded = 0;
 
@@ -565,7 +565,7 @@ void calc_nbody_two_halo(float **gal, int *id, int ngal)
   static float *rad, *xi;
   static int nr, flag = 0;
-  char fname[100];
+  char fname[5000];
   FILE *fp;
 
   sprintf(fname,"%s.nbody_2halo",Task.root_filename);
diff --git a/c_tools/mock/generateMock.cpp b/c_tools/mock/generateMock.cpp
index 8192f42..7f44b99 100644
--- a/c_tools/mock/generateMock.cpp
+++ b/c_tools/mock/generateMock.cpp
@@ -354,7 +354,8 @@ void createBox(SimuData *simu, vector& targets, vector& snapshot_spl
   for (int j = 0; j < 3; j++)
     {
       boxed->Pos[j] = new float[boxed->NumPart];
-      boxed->Vel[j] = 0;
+      boxed->Vel[j] = new float[boxed->NumPart];
+      //boxed->Vel[j] = 0;
       mul[j] = 1.0/(ranges[2*j+1] - ranges[2*j+0]);
     }
   cout << "Min range = " << ranges[0] << " " << ranges[2] << " " << ranges[4] << endl;
@@ -364,11 +365,11 @@
   // PMS
   FILE *fp = fopen("mask_index.txt", "w");
-  fprintf(fp, "%d", boxed->NumPart);
+  fprintf(fp, "%ld", boxed->NumPart);
   fclose(fp);
   fp = fopen("total_particles.txt", "w");
-  fprintf(fp, "%d", boxed->NumPart);
+  fprintf(fp, "%ld", boxed->NumPart);
   fclose(fp);
   printf("Done!\n");
   // END PMS
@@ -413,6 +414,7 @@ void buildBox(SimuData *simu, long num_targets, long loaded,
   for (int j = 0; j < 3; j++)
    {
      boxed->Pos[j][loaded] = max(min((simu->Pos[j][pid]-ranges[j*2])*mul[j], double(1)), double(0));
+     boxed->Vel[j][loaded] = simu->Vel[j][pid];
      assert(boxed->Pos[j][loaded] >= 0);
      assert(boxed->Pos[j][loaded] <= 1);
    }
@@ -430,6 +432,9 @@ void saveBox(SimuData *&boxed, const std::string& outbox, generateMock_info& arg
   long *snapshot_split = boxed->as("snapshot_split");
   int num_snapshots = *boxed->as("num_snapshots");
   long *uniqueID = boxed->as("uniqueID");
+  float *velX = boxed->Vel[0];
+  float *velY = boxed->Vel[1];
+  float *velZ = boxed->Vel[2];
 
   if (!f.is_valid())
     {
@@ -469,6 +474,13 @@ void saveBox(SimuData *&boxed, const std::string& outbox, generateMock_info& arg
       v5->put(tmp_int, boxed->NumPart);
       delete[] tmp_int;
     }
+
+  NcVar *v6 = f.add_var("vel_x", ncFloat, NumPart_dim);
+  NcVar *v7 = f.add_var("vel_y", ncFloat, NumPart_dim);
+  NcVar *v8 = f.add_var("vel_z", ncFloat, NumPart_dim);
+  v6->put(velX, boxed->NumPart);
+  v7->put(velY, boxed->NumPart);
+  v8->put(velZ, boxed->NumPart);
 }
 
 void makeBoxFromParameter(SimuData *simu, SimuData* &boxed, generateMock_info& args_info)
@@ -541,7 +553,8 @@ void makeBoxFromParameter(SimuData *simu, SimuData* &boxed, generateMock_info& a
   for (int j = 0; j < 3; j++)
     {
       boxed->Pos[j] = new float[boxed->NumPart];
-      boxed->Vel[j] = 0;
+      boxed->Vel[j] = new float[boxed->NumPart];
+      //boxed->Vel[j] = 0;
       mul[j] = 1.0/(ranges[2*j+1] - ranges[2*j+0]);
     }
 
diff --git a/c_tools/mock/loaders/multidark_loader.cpp b/c_tools/mock/loaders/multidark_loader.cpp
index 3d9c8a8..b9d0118 100644
--- a/c_tools/mock/loaders/multidark_loader.cpp
+++ b/c_tools/mock/loaders/multidark_loader.cpp
@@ -76,9 +76,10 @@ public:
     long estimated = (preproc == 0) ? simu->NumPart : preproc->getEstimatedPostprocessed(simu->NumPart);
     long allocated = estimated;
 
-    for (int k = 0; k < 3; k++)
+    for (int k = 0; k < 3; k++) {
       simu->Pos[k] = new float[allocated];
-    simu->Vel[2] = new float[allocated];
+      simu->Vel[k] = new float[allocated];
+    }
     simu->Id = new long[allocated];
     long *uniqueID = new long[allocated];
     long *index = new long[allocated];
@@ -98,7 +99,7 @@ public:
       SingleParticle p;
 
       fp >> p.ID >> p.Pos[0] >> p.Pos[1]
-        >> p.Pos[2] >> p.Vel[2] >> tempData >> tempData >> tempData;
+        >> p.Pos[2] >> p.Vel[2] >> p.Vel[1] >> p.Vel[0] >> tempData;
 
       if (p.ID == -99 &&
          p.Pos[0] == -99 && p.Pos[1] == -99 &&
diff --git a/python_tools/pipeline_source/defaults.py b/python_tools/pipeline_source/defaults.py
index 2a3de54..422797c 100644
--- a/python_tools/pipeline_source/defaults.py
+++ b/python_tools/pipeline_source/defaults.py
@@ -109,7 +109,10 @@ haloFileDummy = 'NNNNN'
 
 # minimum halo mass cuts to apply for the halo catalog
 # use "none" to get all halos
-minHaloMasses = [1.2e13]
+minHaloMasses = []
+
+# density threshold for halo catalogs
+haloDenList = []
 
 # locations of data in the halo catalog
 haloFileMCol = 6
@@ -135,7 +138,6 @@ lbox = 999.983 # Mpc/h
 omegaM = 0.2847979853038958
 hubble = 0.6962
 
-#galDens = 0.000225
 hodParmList = [
   {'name' : "dr9mid", #BOSS: Manera et al. 2012, eq. 26
    'Mmin' : 0.0,
@@ -143,7 +145,8 @@ hodParmList = [
    'sigma_logM' : 0.596,
    'alpha' : 1.0127,
    'Mcut' : 1.19399e13,
-   'galDens' : 0.0002,
+   'galDens' : 0.0002, # density passed to HOD code
+   'galDensFinal' : 0.0002, # subsample galaxies to reach this density
   },
 ]
diff --git a/python_tools/pipeline_source/prepareCatalogs.in.py b/python_tools/pipeline_source/prepareCatalogs.in.py
index e5e6c87..ba7cd42 100644
--- a/python_tools/pipeline_source/prepareCatalogs.in.py
+++ b/python_tools/pipeline_source/prepareCatalogs.in.py
@@ -69,6 +69,39 @@ if not os.access(filename, os.F_OK):
 parms = imp.load_source("name", filename)
 globals().update(vars(parms))
 
+#------------------------------------------------------------------------------
+def getSDFTags(dataFile):
+  idTag = "id"
+  for line in open(dataFile):
+    if "int64_t id" in line:
+      idTag = "id"
+      break
+    if "int64_t ident" in line:
+      idTag = "ident"
+      break
+
+  massTag = "mass"
+  for line in open(dataFile):
+    if "float m200b" in line:
+      massTag = "m200b"
+      break
+    if "float mass" in line:
+      massTag = "mass"
+      break
+
+  pidTag = ""
+  iLine = 0
+  for line in open(dataFile):
+    iLine += 1
+    if iLine > 100: break
+    if "parent_id" in line:
+      pidTag = "parent_id"
+      break
+    if "pid" in line:
+      pidTag = "pid"
+      break
+
+  return idTag, massTag, pidTag
 #------------------------------------------------------------------------------
 def getSampleName(setName, redshift, useVel, iSlice=-1, iVol=-1):
 
@@ -465,6 +498,7 @@ for iSubSample in xrange(len(subSamples)):
       rescale_position = hubble/1000./scale
       shift = lbox/2.
      rescale_velocity = 3.08567802e16/3.1558149984e16
+
      command = "%s -a 200000 %s x y z vz vy vx mass | awk '{print $1*%g+%g, $2*%g+%g, $3*%g+%g, $4*%g, $5*%g, $6*%g, $7}' > %s" % (SDFcvt_PATH, dataFile,
                 rescale_position, shift,
@@ -549,7 +583,7 @@ for iSubSample in xrange(len(subSamples)):
 
   prevSubSample = thisSubSample
 
 # -----------------------------------------------------------------------------
-# now halos
+# now halos - filter on mass
 if (args.script or args.all) and haloFileBase != "":
 
   for minHaloMass in minHaloMasses:
@@ -565,10 +599,12 @@ if (args.script or args.all) and haloFileBase != "":
     numPart = 0
     if dataFormat == "sdf":
       SDFcvt_PATH = "@CMAKE_BINARY_DIR@/external/libsdf/apps/SDFcvt/SDFcvt.x86_64"
+      idTag, massTag, pidTag = getSDFTags(dataFile)
+
       if minHaloMass == "none":
-        command = "%s -a 200000 %s mass parent_id | awk '{if ($2==-1) print $1}' | wc" % (SDFcvt_PATH, dataFile)
+        command = "%s -a 200000 %s %s %s | awk '{if ($2==-1) print $1}' | wc" % (SDFcvt_PATH, dataFile, massTag, pidTag)
       else:
-        command = "%s -a 200000 %s mass parent_id | awk '{if ($1>%g && $2==-1) print $1}' | wc" % (SDFcvt_PATH, dataFile, minHaloMass)
+        command = "%s -a 200000 %s %s %s | awk '{if ($1>%g && $2==-1) print $1}' | wc" % (SDFcvt_PATH, dataFile, massTag, pidTag, minHaloMass)
       numPart = subprocess.check_output(command, shell=True)
       numPart = int(numPart.split()[0])
     else:
@@ -612,8 +648,8 @@ if (args.script or args.all) and haloFileBase != "":
                 numSubvolumes, numSlices, True, lbox, minRadius, omegaM,
                 dataFileNameList = fileList)
 
-if (args.halos or args.all) and haloFileBase != "":
-  print " Doing halos"
+if (args.halos or args.all) and haloFileBase != "" and len(minHaloMasses) > 0:
+  print " Doing halos - mass"
   sys.stdout.flush()
 
   for minHaloMass in minHaloMasses:
@@ -664,11 +700,13 @@ if (args.halos or args.all) and haloFileBase != "":
       outFile.close()
 
       if dataFormat == "sdf":
+        idTag, massTag, pidTag = getSDFTags(dataFile)
+
         SDFcvt_PATH = "@CMAKE_BINARY_DIR@/external/libsdf/apps/SDFcvt/SDFcvt.x86_64"
         if minHaloMass == "none":
-          command = "%s -a 200000 %s mass ident x y z vz vy vx parent_id | awk '{if ($9==-1) print $2, $3, $4, $5, $6, $7, $8, $1}'>>%s" % (SDFcvt_PATH, dataFile, outFileName )
+          command = "%s -a 200000 %s %s %s x y z vz vy vx %s | awk '{if ($9==-1) print $2, $3, $4, $5, $6, $7, $8, $1}'>>%s" % (SDFcvt_PATH, dataFile, massTag, idTag, pidTag, outFileName )
         else:
-          command = "%s -a 200000 %s mass ident x y z vz vy vx parent_id | awk '{if ($1>%g && $9==-1) print $2, $3, $4, $5, $6, $7, $8, $1}'>>%s" % (SDFcvt_PATH, dataFile, minHaloMass, outFileName )
+          command = "%s -a 200000 %s %s %s x y z vz vy vx %s | awk '{if ($1>%g && $9==-1) print $2, $3, $4, $5, $6, $7, $8, $1}'>>%s" % (SDFcvt_PATH, dataFile, massTag, idTag, pidTag, minHaloMass, outFileName )
         #os.system(command)
         subprocess.call(command, shell=True)
         outFile = open(outFileName, 'a')
@@ -697,6 +735,152 @@ if (args.halos or args.all) and haloFileBase != "":
       outFile.close()
       inFile.close()
 
+# -----------------------------------------------------------------------------
+# now halos - filter on density
+if (args.script or args.all) and haloFileBase != "":
+
+  for haloDen in haloDenList:
+    print " Doing halo script", haloDen
+    sys.stdout.flush()
+
+    # estimate number of halos to get density
+    if haloFileDummy == '':
+      dataFile = catalogDir+haloFileBase+fileNums[0]
+    else:
+      dataFile = catalogDir+haloFileBase.replace(haloFileDummy,
+                                                 fileNums[0])
+
+    numPart = haloDen * lbox**3
+    meanDen = lbox/numPart**(1./3.)
+    if meanDen < 1:
+      minRadius = 4
+    else:
+      minRadius = int(np.ceil(meanDen))
+
+    setName = prefix+"halos_den"+str(haloDen)
+    fileList = []
+    for (iRedshift, redshift) in enumerate(redshifts):
+      sampleName = getSampleName(setName, redshift, False)
+      outFileName = sampleName+".dat"
+      fileList.append(outFileName)
+
+    writeScript(setName, prefix+"halos_den"+str(haloDen)+"_z", "multidark",
+                scriptDir, catalogDir, fileNums,
+                redshifts,
+                numSubvolumes, numSlices, False, lbox, minRadius, omegaM,
+                dataFileNameList = fileList)
+
+    if doPecVel:
+      writeScript(setName, prefix+"halos_den"+str(haloDen)+"_z",
+                  "multidark",
+                  scriptDir, catalogDir, fileNums,
+                  redshifts,
+                  numSubvolumes, numSlices, True, lbox, minRadius, omegaM,
+                  dataFileNameList = fileList)
+
+if (args.halos or args.all) and haloFileBase != "":
+  print " Doing halos - density"
+  sys.stdout.flush()
+
+  for haloDen in haloDenList:
+    print "  halo density = ", haloDen
+    sys.stdout.flush()
+
+    for (iRedshift, redshift) in enumerate(redshifts):
+      print "   z = ", redshift
+      sys.stdout.flush()
+
+      if haloFileDummy == '':
+        dataFile = catalogDir+haloFileBase+fileNums[iRedshift]
+      else:
+        dataFile = catalogDir+haloFileBase.replace(haloFileDummy,
+                                                   fileNums[iRedshift])
+
+      # check the actual density of halos
+      inFile = open(dataFile, 'r')
+      numPart = 0
+      if dataFormat == "sdf":
+        for line in inFile:
+          if "nhalos" in line:
+            numPart = int(line.split()[3].strip(';'))
+            break
+          if "npart" in line:
+            numPart = int(line.split()[3].strip(';'))
+            break
+        inFile.close()
+      else:
+        for (iLine, line) in enumerate(inFile):
+          if iLine < haloFileNumComLines: continue
+          numPart += 1
+        inFile.close()
+
+      numPartExpect = int(np.ceil(haloDen * lbox**3))
+      if numPart < numPartExpect:
+        print " ERROR: not enough halos to support that density! Maximum is %g" % (1.*numPart / lbox**3)
+        exit(-1)
+
+      sampleName = prefix+"halos_den"+str(haloDen)+"_z"+redshifts[iRedshift]
+      outFileName = catalogDir+"/"+sampleName+".dat"
+      outFile = open(outFileName, 'w')
+      outFile.write("%f\n" %(lbox))
+      outFile.write("%s\n" %(omegaM))
+      outFile.write("%s\n" %(hubble))
+      outFile.write("%s\n" %(redshift))
+      outFile.write("%d\n" %(numPartExpect))
+      outFile.close()
+
+      if dataFormat == "sdf":
+        idTag, massTag, pidTag = getSDFTags(dataFile)
+
+        tempFile = catalogDir+"/temp_"+sampleName+".dat"
+        SDFcvt_PATH = "@CMAKE_BINARY_DIR@/external/libsdf/apps/SDFcvt/SDFcvt.x86_64"
+        command = "%s -a 200000 %s %s %s x y z vz vy vx %s | awk '{if ($9==-1) print $2, $3, $4, $5, $6, $7, $8, $1}'>>%s" % (SDFcvt_PATH, dataFile, massTag, idTag, pidTag, tempFile )
+        #os.system(command)
+        subprocess.call(command, shell=True)
+
+        # do the check again since now we've filtered out subhalos
+        numPart = 0
+        for line in open(tempFile):
+          numPart += 1
+
+        actualDen = 1.*numPart / lbox**3
+        keepFraction = haloDen / actualDen
+        if numPart < numPartExpect:
+          print " ERROR: not enough halos to support that density! Maximum is %g" % (1.*numPart / lbox**3)
+          exit(-1)
+
+        # subsample the filtered temp file down to the requested density
+        numKept = 0
+        inFile = open(tempFile, 'r')
+        outFile = open(outFileName, 'a')
+        for (iLine,line) in enumerate(inFile):
+          if np.random.uniform() > keepFraction: continue
+          outFile.write(line)
+          numKept += 1
+        inFile.close()
+
+        outFile.write("-99 -99 -99 -99 -99 -99 -99 -99\n")
+        outFile.close()
+      else:
+        actualDen = 1.*numPart / lbox**3
+        keepFraction = haloDen / actualDen
+
+        inFile = open(dataFile, 'r')
+        outFile = open(outFileName, 'a')
+        for (iHalo,line) in enumerate(inFile):
+          if iHalo < haloFileNumComLines: continue
+          if np.random.uniform() > keepFraction: continue
+          line = line.split(haloFileColSep)
+          x = float(line[haloFileXCol]) * haloFilePosRescale
+          y = float(line[haloFileYCol]) * haloFilePosRescale
+          z = float(line[haloFileZCol]) * haloFilePosRescale
+          vz = float(line[haloFileVZCol])
+          vy = float(line[haloFileVYCol])
+          vx = float(line[haloFileVXCol])
+          mass = float(line[haloFileMCol])
+
+          outFile.write("%d %e %e %e %e %e %e %e\n" %(iHalo,x,y,z,
+                                                      vz,vy,vx,mass))
+        outFile.write("-99 -99 -99 -99 -99 -99 -99 -99\n")
+        inFile.close()
+        outFile.close()
+
 # -----------------------------------------------------------------------------
 # now the HOD
 parFileText = """
@@ -754,27 +938,13 @@ if (args.script or args.all) and haloFileBase != "":
     print " ", thisHod['name']
 
     # estimate number of halos to get density
-    if haloFileDummy == '':
-      dataFile = catalogDir+haloFileBase+fileNums[0]
+    numPart = thisHod['galDensFinal'] * lbox**3
+    meanDen = lbox/numPart**(1./3.)
+    if meanDen < 1:
+      minRadius = 4
     else:
-      dataFile = catalogDir+haloFileBase.replace(haloFileDummy,
-                                                 fileNums[0])
-    if dataFormat == "sdf":
-      numPart = 0
-      SDFcvt_PATH = "@CMAKE_BINARY_DIR@/external/libsdf/apps/SDFcvt/SDFcvt.x86_64"
-      command = "%s -a 200000 %s mass parent_id | awk '{if ($1>%g && $2==-1) print $1}' | wc" % (SDFcvt_PATH, dataFile, thisHod['Mcut'])
-      numPart = subprocess.check_output(command, shell=True)
-      numPart = int(numPart.split()[0])
-    else:
-      inFile = open(dataFile, 'r')
-      for (iHalo,line) in enumerate(inFile):
-        if iHalo < haloFileNumComLines: continue
-        line = line.split(haloFileColSep)
-        if float(line[haloFileMCol]) > thisHod['Mcut']: numPart += 1
-      inFile.close()
-
-    minRadius = int(np.ceil(float(lbox)/numPart**(1./3.)))
-
+      minRadius = int(np.ceil(meanDen))
+    setName = prefix+"hod_"+thisHod['name']
     writeScript(setName, prefix+"hod_"+thisHod['name']+"_z", "multidark",
                 scriptDir, catalogDir, fileNums, redshifts,
@@ -802,8 +972,11 @@ if (args.hod or args.all) and haloFileBase != "":
       if dataFormat == "sdf":
         inFile = haloFile
         outFile = haloFile+"_temp"
+
+        idTag, massTag, pidTag = getSDFTags(inFile)
+
         SDFcvt_PATH = "@CMAKE_BINARY_DIR@/external/libsdf/apps/SDFcvt/SDFcvt.x86_64"
-        command = "%s -a 200000 %s mass x y z vx vy vz parent_id | awk '{if ($8 ==-1) print $1, $2, $3, $4, $5, $6, $7}'>>%s" % (SDFcvt_PATH, inFile, outFile)
+        command = "%s -a 200000 %s %s x y z vx vy vz %s | awk '{if ($8 ==-1) print $1, $2, $3, $4, $5, $6, $7}'>>%s" % (SDFcvt_PATH, inFile, massTag, pidTag, outFile)
         #os.system(command)
         subprocess.call(command, shell=True)
         haloFile = outFile
@@ -824,6 +997,7 @@ if (args.hod or args.all) and haloFileBase != "":
                      sigma_logM=thisHod['sigma_logM'],
                      alpha=thisHod['alpha'],
                      Mcut=thisHod['Mcut'],
+                     #galden=-1,
                      galden=thisHod['galDens'],
                      haloFile=haloFile,
                      haloFileFormat=dataFormat,
@@ -857,9 +1031,49 @@ if (args.hod or args.all) and haloFileBase != "":
           print line
           exit(-1)
 
-      outFileName = catalogDir+"/"+sampleName+".dat"
-      os.system("mv %s/hod_%s.mock %s" % (catalogDir, sampleName, outFileName))
-      os.system("rm %s/hod.*" % catalogDir)
+      # now randomly subsample the galaxies to get desired density
+      inFile = open(catalogDir+"/hod_"+sampleName+".mock")
+      numPartExpect = thisHod['galDensFinal'] * lbox**3
+      numPartActual = 0
+      actualDen = 0
+      for (iLine,line) in enumerate(inFile):
+        numPartActual += 1
+      inFile.close()
+
+      if numPartActual < numPartExpect:
+        print " ERROR: not enough galaxies to support that density! Maximum is %g" % (1.*numPartActual / lbox**3)
+        exit(-1)
+
+      actualDen = 1.*numPartActual / lbox**3
+      keepFraction = float(thisHod['galDensFinal']) / actualDen
+
+      inFile = open(catalogDir+"/hod_"+sampleName+".mock")
+      outFile = open(catalogDir+"/"+sampleName+".dat", 'w')
+
+      #outFile.write("%f\n" %(lbox))
+      #outFile.write("%s\n" %(omegaM))
+      #outFile.write("%s\n" %(hubble))
+      #outFile.write("%s\n" %(redshift))
+      #outFile.write("%d\n" %(numPartExpect))
+
+      numKept = 0
+      for (iLine,line) in enumerate(inFile):
+        if iLine < 5:
+          outFile.write(line)
+          continue
+        if np.random.uniform() > keepFraction: continue
+        outFile.write(line)
+        numKept += 1
+
+      inFile.close()
+      os.unlink(catalogDir+"/hod_"+sampleName+".mock")
+
+      outFile.write("-99 -99 -99 -99 -99 -99 -99 -99\n")
+      outFile.close()
+
+
+      #os.system("mv %s/hod_%s.mock %s" % (catalogDir, sampleName, outFileName))
+      #os.system("rm %s/hod.*" % catalogDir)
 
       os.system("rm ./hod.par")
       os.system("rm ./hod-usedvalues")
diff --git a/python_tools/void_python_tools/backend/classes.py b/python_tools/void_python_tools/backend/classes.py
index 7f9c84f..cd0af1b 100644
--- a/python_tools/void_python_tools/backend/classes.py
+++ b/python_tools/void_python_tools/backend/classes.py
@@ -21,6 +21,8 @@
 import os
 from numpy import abs
+import matplotlib
+matplotlib.use('Agg')
 
 LIGHT_SPEED = 299792.458
 
diff --git a/python_tools/void_python_tools/backend/launchers.py b/python_tools/void_python_tools/backend/launchers.py
index 07836c5..530fb01 100644
--- a/python_tools/void_python_tools/backend/launchers.py
+++ b/python_tools/void_python_tools/backend/launchers.py
@@ -325,6 +325,9 @@ def launchZobov(sample, binPath, zobovDir=None, logDir=None, continueRun=None,
   else:
     maskIndex = -1
     maxDen = 0.2
+    # TEST
+    #maxDen = 1.e99
+    # END TEST
 
   if not (continueRun and jobSuccessful(logFile, "Done!\n")):
     for fileName in glob.glob(zobovDir+"/part._"+sampleName+".*"):
@@ -413,6 +416,9 @@ def launchPrune(sample, binPath,
     mockIndex = -1
     maxDen = 0.2
     observationLine = ""
+    # TEST
+    #maxDen = 1.e99
+    # END TEST
 
   #periodicLine = " --periodic='"
   #if sample.numSubvolumes == 1: periodicLine += "xy"
@@ -577,7 +583,7 @@ def launchStack(sample, stack, binPath, thisDataPortion=None, logDir=None,
 
   centralRadius = stack.rMin * 0.25
 
-  # restrict to relavent ranges of sample
+  # restrict to relevant ranges of sample
   zMin = max(sample.zRange[0],stack.zMin) * 3000
   zMax = min(sample.zRange[1],stack.zMax) * 3000
 
@@ -596,6 +602,9 @@ def launchStack(sample, stack, binPath, thisDataPortion=None, logDir=None,
   else:
     maskIndex = 999999999
     maxDen = 0.2
+    # TEST
+    #maxDen = 1.e99
+    # END TEST
 
   if INCOHERENT:
     nullTestFlag = "INCOHERENT"
@@ -1219,8 +1228,8 @@ def launchFit(sample, stack, logFile=None, voidDir=None, figDir=None,
   while badChain:
     ntries += 1
 
-    #Rexpect = (stack.rMin+stack.rMax)/2
-    #Rtruncate = stack.rMin*3. + 1 # TEST
+    Rexpect = (stack.rMin+stack.rMax)/2
+    Rtruncate = stack.rMin*3. + 1 # TEST
     #if sample.dataType == "observation":
     #  ret,fits,args = vp.fit_ellipticity(voidDir,Rbase=Rexpect,
     #                                     Niter=300000,
@@ -1231,11 +1240,9 @@ def launchFit(sample, stack, logFile=None, voidDir=None, figDir=None,
     #                                     Niter=300000,
     #                                     Nburn=100000,
     #                                     Rextracut=Rtruncate)
-    #badChain = (args[0][0] > 0.5 or args[0][1] > stack.rMax or \
-    #            args[0][2] > stack.rMax) and \
+    #badChain = (args[0][0] > 0.5 or args[0][1] > 2.*stack.rMax or \
+    #            args[0][2] > 2.*stack.rMax) and \
     #           (ntries < maxtries)
-    #ret,fits,args = vp.compute_radial_inertia(voidDir, stack.rMax, mode="symmetric", nBootstraps=5)
-    #ret,fits,args = vp.compute_inertia(voidDir, stack.rMax, nBootstraps=100, rMaxInertia=1.0)
     ret,fits,args = vp.compute_inertia(voidDir, stack.rMax, mode="2d", nBootstraps=500, rMaxInertia=0.7)
     #ret,fits,args = vp.compute_inertia(voidDir, stack.rMax, mode="symmetric", nBootstraps=500, rMaxInertia=100)
     badChain = False
diff --git a/python_tools/void_python_tools/partUtil/partUtil.py b/python_tools/void_python_tools/partUtil/partUtil.py
index 7444939..0f8a1f7 100644
--- a/python_tools/void_python_tools/partUtil/partUtil.py
+++ b/python_tools/void_python_tools/partUtil/partUtil.py
@@ -25,16 +25,20 @@ from netCDF4 import Dataset
 import sys
 from void_python_tools.backend import *
 import void_python_tools.apTools as vp
+import pickle
 
 NetCDFFile = Dataset
 ncFloat = 'f8'
 
 # -----------------------------------------------------------------------------
-def loadPart(workDir, sampleDir, sample):
+def loadPart(sampleDir):
   #print " Loading particle data..."
   sys.stdout.flush()
 
-  infoFile = workDir+"/"+sampleDir+"/zobov_slice_"+sample.fullName+".par"
+  with open(sampleDir+"/sample_info.dat", 'rb') as input:
+    sample = pickle.load(input)
+
+  infoFile = sampleDir+"/zobov_slice_"+sample.fullName+".par"
   File = NetCDFFile(infoFile, 'r')
   ranges = np.zeros((3,2))
   ranges[0][0] = getattr(File, 'range_x_min')
@@ -49,7 +53,7 @@ def loadPart(workDir, sampleDir, sample):
   mul = np.zeros((3))
   mul[:] = ranges[:,1] - ranges[:,0]
 
-  partFile = workDir+"/"+sampleDir+"/zobov_slice_"+sample.fullName
+  partFile = sampleDir+"/zobov_slice_"+sample.fullName
   iLine = 0
   partData = []
   part = np.zeros((3))
@@ -105,6 +109,33 @@ def loadPart(workDir, sampleDir, sample):
 
   return partData, boxLen, volNorm, isObservationData
 
+# -----------------------------------------------------------------------------
+def loadPartVel(sampleDir):
+  #print " Loading particle velocities..."
+  sys.stdout.flush()
+
+  with open(sampleDir+"/sample_info.dat", 'rb') as input:
+    sample = pickle.load(input)
+
+  infoFile = sampleDir+"/zobov_slice_"+sample.fullName+".par"
+  File = NetCDFFile(infoFile, 'r')
+  isObservation = getattr(File, 'is_observation')
+
+  if isObservation:
+    print "No velocities for observations!"
+    return -1
+
+  vx = File.variables['vel_x'][0:]
+  vy = File.variables['vel_y'][0:]
+  vz = File.variables['vel_z'][0:]
+
+  File.close()
+
+  partVel = np.column_stack((vx,vy,vz))
+
+  return partVel
+
+
 # -----------------------------------------------------------------------------
 def shiftPart(inPart, newCenter, periodicLine, boxLen):
 
@@ -125,3 +156,146 @@ def shiftPart(inPart, newCenter, periodicLine, boxLen):
                        np.copysign(boxLen[2], part[shiftUs,2])
 
   return part
+
+# -----------------------------------------------------------------------------
+
+class Bunch:
+  def __init__(self, **kwds):
+    self.__dict__.update(kwds)
+
+class Catalog:
+  numVoids = 0
+  numPartTot = 0
+  numZonesTot = 0
+  boxLen = np.zeros((3))
+  part = None
+  zones2Parts = None
+  void2Zones = None
+  voids = None
+
+# -----------------------------------------------------------------------------
+def loadVoidCatalog(sampleDir):
+  #print " Loading particle data..."
+  sys.stdout.flush()
+
+  catalog = Catalog()
+  with open(sampleDir+"/sample_info.dat", 'rb') as input:
+    sample = pickle.load(input)
+
+  print "Loading info..."
+  infoFile = sampleDir+"/zobov_slice_"+sample.fullName+".par"
+  File = NetCDFFile(infoFile, 'r')
+  ranges = np.zeros((3,2))
+  ranges[0][0] = getattr(File, 'range_x_min')
+  ranges[0][1] = getattr(File, 'range_x_max')
+  ranges[1][0] = getattr(File, 'range_y_min')
+  ranges[1][1] = getattr(File, 'range_y_max')
+  ranges[2][0] = getattr(File, 'range_z_min')
+  ranges[2][1] = getattr(File, 'range_z_max')
+  catalog.boxLen[0] = ranges[0][1] - ranges[0][0]
+  catalog.boxLen[1] = ranges[1][1] - ranges[1][0]
+  catalog.boxLen[2] = ranges[2][1] - ranges[2][0]
+  File.close()
+
+  print "Loading all particles..."
+  partData, boxLen, volNorm, isObservationData = loadPart(sampleDir)
+  numPartTot = len(partData)
+  catalog.numPartTot = numPartTot
+  catalog.part = []
+  for i in xrange(len(partData)):
+    catalog.part.append(Bunch(x = partData[i][0],
+                              y = partData[i][1],
+                              z = partData[i][2],
+                              volume = 0,
+                              ra = 0,
+                              dec = 0,
+                              redshift = 0,
+                              uniqueID = 0))
+
+  print "Loading volumes..."
+  volFile = sampleDir+"/vol_"+sample.fullName+".dat"
+  File = file(volFile)
+  chk = np.fromfile(File, dtype=np.int32,count=1)
+  vols = np.fromfile(File, dtype=np.float32,count=numPartTot)
+  for ivol in xrange(len(vols)):
+    catalog.part[ivol].volume = vols[ivol] / volNorm
+
+  print "Loading voids..."
+  fileName = sampleDir+"/voidDesc_central_"+sample.fullName+".out"
+  catData = np.loadtxt(fileName, comments="#", skiprows=2)
+  catalog.voids = []
+  for line in catData:
+    catalog.voids.append(Bunch(iVoid = int(line[0]),
+                               voidID = int(line[1]),
+                               coreParticle = line[2],
+                               coreDens = line[3],
+                               zoneVol = line[4],
+                               zoneNumPart = line[5],
+                               numZones = int(line[6]),
+                               voidVol = line[7],
+                               numPart = int(line[8]),
+                               densCon = line[9],
+                               voidProp = line[10],
+                               radius = pow(line[7]/volNorm*3./4./np.pi, 1./3.),
+                               barycenter = np.zeros((3))))
+
+  print "Read %d voids" % len(catalog.voids)
+
+  print "Loading barycenters..."
+  iLine = 0
+  for line in open(sampleDir+"/barycenters_central_"+sample.fullName+".out"):
+    line = line.split()
+    catalog.voids[iLine].barycenter[0] = float(line[1])
+    catalog.voids[iLine].barycenter[1] = float(line[2])
+    catalog.voids[iLine].barycenter[2] = float(line[3])
+    iLine += 1
+
+
+  print "Loading zone-void membership info..."
+  zoneFile = sampleDir+"/voidZone_"+sample.fullName+".dat"
+  catalog.void2Zones = []
+  File = file(zoneFile)
+  numZonesTot = np.fromfile(File, dtype=np.int32,count=1)
+  catalog.numZonesTot = numZonesTot
+  for iZ in xrange(numZonesTot):
+    numZones = np.fromfile(File, dtype=np.int32,count=1)
+    catalog.void2Zones.append(Bunch(numZones = numZones,
+                                    zoneIDs = []))
+
+    for p in xrange(numZones):
+      zoneID = np.fromfile(File, dtype=np.int32,count=1)
+      catalog.void2Zones[iZ].zoneIDs.append(zoneID)
+
+
+  print "Loading particle-zone membership info..."
+  zonePartFile = sampleDir+"/voidPart_"+sample.fullName+".dat"
+  catalog.zones2Parts = []
+  File = file(zonePartFile)
+  chk = np.fromfile(File, dtype=np.int32,count=1)
+  numZonesTot = np.fromfile(File, dtype=np.int32,count=1)
+  for iZ in xrange(numZonesTot):
+    numPart = np.fromfile(File, dtype=np.int32,count=1)
+    catalog.zones2Parts.append(Bunch(numPart = numPart,
+                                     partIDs = []))
+
+    for p in xrange(numPart):
+      partID = np.fromfile(File, dtype=np.int32,count=1)
+      catalog.zones2Parts[iZ].partIDs.append(partID)
+
+  return catalog
+
+
+
+# -----------------------------------------------------------------------------
+def getVoidPart(catalog, voidID):
+
+  partOut = []
+  for iZ in xrange(catalog.void2Zones[voidID].numZones):
+    zoneID = catalog.void2Zones[voidID].zoneIDs[iZ]
+    for p in xrange(catalog.zones2Parts[zoneID].numPart):
+      partID = catalog.zones2Parts[zoneID].partIDs[p]
+      partOut.append(catalog.part[partID])
+
+  return partOut
+
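Not part of the patch: a minimal usage sketch of the catalog helpers this diff adds to partUtil.py (loadVoidCatalog, getVoidPart, loadPartVel). The import path and the sample directory below are assumptions for illustration; the directory must already contain the sample_info.dat pickle, the zobov_slice_* files, and the ZOBOV void/zone outputs these loaders read.

# Hypothetical example only -- adjust the import to wherever partUtil is exposed
# in your install, and point sampleDir at a real pipeline output directory.
from void_python_tools.partUtil.partUtil import loadVoidCatalog, getVoidPart, loadPartVel

sampleDir = "/path/to/sample_dir"         # hypothetical sample output directory

catalog = loadVoidCatalog(sampleDir)      # voids, zones, particles, and volumes
print "box length:", catalog.boxLen
print "largest void radius:", max(v.radius for v in catalog.voids)

voidID = catalog.voids[0].voidID          # member particles of one void
for p in getVoidPart(catalog, voidID)[:5]:
  print p.x, p.y, p.z, p.volume

vel = loadPartVel(sampleDir)              # velocities from the new vel_x/y/z fields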