From 938df0cbb177ad53ed96c3e37e0fddecf738a3ad Mon Sep 17 00:00:00 2001 From: "P.M. Sutter" Date: Thu, 7 Mar 2013 10:43:52 -0600 Subject: [PATCH 01/39] added a temporary debug printout to check the parent/child status of accepted voids --- c_tools/mock/generateMock.cpp | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/c_tools/mock/generateMock.cpp b/c_tools/mock/generateMock.cpp index ffbe1e1..5e126d8 100644 --- a/c_tools/mock/generateMock.cpp +++ b/c_tools/mock/generateMock.cpp @@ -259,7 +259,7 @@ void selectBox(SimuData *simu, std::vector& targets, generateMock_info& ar acceptance = acceptance && (simu->Pos[j][i] > ranges[j][0]) && - (simu->Pos[j][i] < ranges[j][1]); + (simu->Pos[j][i] <= ranges[j][1]); p.Pos[j] = simu->Pos[j][i]; p.Vel[j] = (simu->Vel[j] != 0) ? simu->Vel[j][i] : 0; } From 5e36b11505a5e6108927f20f9f778c5954b635be Mon Sep 17 00:00:00 2001 From: Guilhem Lavaux Date: Tue, 19 Mar 2013 09:29:05 -0400 Subject: [PATCH 02/39] Made SHARP building optional in CosmoTool. Disabled in VIDE building makefiles --- external/external_build.cmake | 1 + 1 file changed, 1 insertion(+) diff --git a/external/external_build.cmake b/external/external_build.cmake index 401abaf..2885053 100644 --- a/external/external_build.cmake +++ b/external/external_build.cmake @@ -234,6 +234,7 @@ ExternalProject_Add(cosmotool -DGSLCBLAS_LIBRARY=${GSLCBLAS_LIBRARY} -DNETCDF_LIBRARY=${NETCDF_LIBRARY} -DNETCDFCPP_LIBRARY=${NETCDFCPP_LIBRARY} + -DENABLE_SHARP=OFF ) SET(COSMOTOOL_LIBRARY ${CMAKE_BINARY_DIR}/ext_build/cosmotool/lib/libCosmoTool.a) set(COSMOTOOL_INCLUDE_PATH ${CMAKE_BINARY_DIR}/ext_build/cosmotool/include) From 0a20f4a36a7825e4022807719d184ed634afbc85 Mon Sep 17 00:00:00 2001 From: Guilhem Lavaux Date: Tue, 19 Mar 2013 09:48:21 -0400 Subject: [PATCH 03/39] Fixed compiler warnings in Zobov (hopefully fixing some potential bugs too) --- zobov/jozov.c | 54 +++++++++++++++++++++++++------------------------- zobov/voz.h | 6 ++++-- zobov/voz1b1.c | 8 ++++---- zobov/voztie.c | 6 +++--- 4 files changed, 38 insertions(+), 36 deletions(-) diff --git a/zobov/jozov.c b/zobov/jozov.c index e5ebf7f..697d039 100644 --- a/zobov/jozov.c +++ b/zobov/jozov.c @@ -85,7 +85,7 @@ int main(int argc,char **argv) { printf("Bad density threshold.\n"); exit(0); } - if (sscanf(argv[7],"%f",&mockIndex) == 0) { + if (sscanf(argv[7],"%d",&mockIndex) == 0) { printf("Bad mock galaxy index.\n"); exit(0); } @@ -108,29 +108,29 @@ int main(int argc,char **argv) { p = (PARTICLE *)malloc(np*sizeof(PARTICLE)); /* Adjacencies*/ for (i=0;i 0) - p[i].adj = (int *)malloc(p[i].nadj*sizeof(int)); + p[i].adj = (pid_t *)malloc(p[i].nadj*sizeof(pid_t)); else p[i].adj = 0; p[i].ncnt = 0; /* Temporarily, it's an adj counter */ } for (i=0;i 0) for (k=0;k 0) - fwrite(adjs[i].adj,adjs[i].nadj,sizeof(int),out); + fwrite(adjs[i].adj,adjs[i].nadj,sizeof(pid_t),out); else printf("0"); } fclose(out); diff --git a/zobov/voztie.c b/zobov/voztie.c index cc8c006..b629e81 100644 --- a/zobov/voztie.c +++ b/zobov/voztie.c @@ -18,6 +18,8 @@ int main(int argc, char *argv[]) { int nvp,npnotdone,nvpmax, nvpsum, *orig; double avgnadj, avgvol; + int numRemoved = 0; + // PMS int mockIndex; // END PMS @@ -157,8 +159,6 @@ int main(int argc, char *argv[]) { adjs[i].nadj = 0; } - int numRemoved = 0; - // unlink particles adjacent to mock galaxies for (i = 0; i < mockIndex; i++) { for (j = 0; j < adjs[i].nadj; j++) { @@ -217,7 +217,7 @@ printf("\n"); avgvol += (double)(vols[p]); } if (npnotdone > 0) - printf("%d particles not done!\n"); + printf("%d particles 
not done!\n", npnotdone); printf("%d particles done more than once.\n",nvpsum-np); avgnadj /= (double)np; avgvol /= (double)np; From 17546f2a1ea7c96f3becd85bbef06ee4adcca40b Mon Sep 17 00:00:00 2001 From: Guilhem Lavaux Date: Tue, 19 Mar 2013 10:00:25 -0400 Subject: [PATCH 04/39] Reenforce periodic boundaries (to deal with round-off error in float) --- c_tools/mock/loaders/sdf_loader.cpp | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/c_tools/mock/loaders/sdf_loader.cpp b/c_tools/mock/loaders/sdf_loader.cpp index b716536..cab171d 100644 --- a/c_tools/mock/loaders/sdf_loader.cpp +++ b/c_tools/mock/loaders/sdf_loader.cpp @@ -190,7 +190,7 @@ public: if (load_flags & (NEED_POSITION | NEED_VELOCITY)) rescaleParticles(d, d->Hubble*1e-5, one_kpc/one_Gyr); -// enforceBoxSize(d); + enforceBoxSize(d); applyTransformations(d); basicPreprocessing(d, preproc); From 61b016c72fc6f677d5b5c99f81f23029177ad0ae Mon Sep 17 00:00:00 2001 From: Guilhem Lavaux Date: Tue, 19 Mar 2013 10:07:15 -0400 Subject: [PATCH 05/39] Use more of the new type 'pid_t' to specify indexes that needs to handle particle indexes/ids --- zobov/voz1b1.c | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/zobov/voz1b1.c b/zobov/voz1b1.c index 17b77ad..93fda36 100644 --- a/zobov/voz1b1.c +++ b/zobov/voz1b1.c @@ -12,7 +12,7 @@ int posread(char *posfile, float ***p, float fact); int main(int argc, char *argv[]) { int exitcode; - int i, j, np; + pid_t i, j, np; float **r; coordT rtemp[3], *parts; coordT deladjs[3*MAXVERVER], points[3*MAXVERVER]; @@ -26,7 +26,8 @@ int main(int argc, char *argv[]) { int isitinbuf; char isitinmain, d; - int numdiv, nvp, nvpall, nvpbuf; + int numdiv; + pid_t nvp, nvpall, nvpbuf; float width, width2, totwidth, totwidth2, bf, s, g; float border, boxsize; float c[3]; From bfe2475b516aba51950caf22aa422465f68fb4d8 Mon Sep 17 00:00:00 2001 From: Guilhem Lavaux Date: Tue, 19 Mar 2013 10:17:27 -0400 Subject: [PATCH 06/39] Use negated isitmain to be sure the selection is done the same way (float comparison). 
More pid_t --- zobov/voz1b1.c | 9 +++++---- zobov/voztie.c | 16 ++++++++-------- 2 files changed, 13 insertions(+), 12 deletions(-) diff --git a/zobov/voz1b1.c b/zobov/voz1b1.c index 93fda36..5f321a8 100644 --- a/zobov/voz1b1.c +++ b/zobov/voz1b1.c @@ -22,7 +22,7 @@ int main(int argc, char *argv[]) { PARTADJ *adjs; float *vols; float predict, xmin,xmax,ymin,ymax,zmin,zmax; - int *orig; + pid_t *orig; int isitinbuf; char isitinmain, d; @@ -183,15 +183,16 @@ int main(int argc, char *argv[]) { nvpbuf = nvp; for (i=0; i 0.5) rtemp[d] --; if (rtemp[d] < -0.5) rtemp[d] ++; isitinbuf = isitinbuf && (fabs(rtemp[d]) 0) && - ((fabs(rtemp[0])>width2)||(fabs(rtemp[1])>width2)||(fabs(rtemp[2])>width2))) { - + if (isitinbuf && !isitinmain) { /*printf("%3.3f ",sqrt(rtemp[0]*rtemp[0] + rtemp[1]*rtemp[1] + rtemp[2]*rtemp[2])); printf("|%2.2f,%2.2f,%2.2f,%f,%f",r[i][0],r[i][1],r[i][2],width2,totwidth2);*/ diff --git a/zobov/voztie.c b/zobov/voztie.c index b629e81..8e40aa3 100644 --- a/zobov/voztie.c +++ b/zobov/voztie.c @@ -14,8 +14,8 @@ int main(int argc, char *argv[]) { int numdiv,np,np2,na; - int i,j,k,p,nout; - int nvp,npnotdone,nvpmax, nvpsum, *orig; + pid_t i,j,k,p,nout; + pid_t nvp,npnotdone,nvpmax, nvpsum, *orig; double avgnadj, avgvol; int numRemoved = 0; @@ -89,7 +89,7 @@ int main(int argc, char *argv[]) { if (adjs == NULL) printf("Couldn't allocate adjs.\n"); vols = (float *)malloc(np*sizeof(float)); if (vols == NULL) printf("Couldn't allocate vols.\n"); - orig = (int *)malloc(nvpmax*sizeof(int)); + orig = (pid_t *)malloc(nvpmax*sizeof(pid_t)); if (orig == NULL) printf("Couldn't allocate orig.\n"); if ((cnt_adj==NULL) || (vols == NULL) || (orig == NULL) || (adjs == NULL)) { printf("Not enough memory to allocate. Exiting.\n"); @@ -113,7 +113,7 @@ int main(int argc, char *argv[]) { nvpsum += nvp; - fread(orig,nvp,sizeof(int),part); + fread(orig,nvp,sizeof(pid_t),part); for (p=0;p -1.) @@ -133,12 +133,12 @@ int main(int argc, char *argv[]) { fread(&na,1,sizeof(int),part); if (na > 0) { adjs[orig[p]].nadj = na; - adjs[orig[p]].adj = (int *)malloc(na*sizeof(int)); + adjs[orig[p]].adj = (pid_t *)malloc(na*sizeof(pid_t)); if (adjs[orig[p]].adj == NULL) { printf("Couldn't allocate adjs[orig[%d]].adj.\n",p); exit(0); } - fread(adjs[orig[p]].adj,na,sizeof(int),part); + fread(adjs[orig[p]].adj,na,sizeof(pid_t),part); } else { printf("0"); fflush(stdout); } @@ -272,9 +272,9 @@ printf("\n"); for (j=0;j i) nout++; fwrite(&nout,1,sizeof(int),adj); for (j=0;j i) - fwrite(&(adjs[i].adj[j]),1,sizeof(int),adj); + fwrite(&id,1,sizeof(pid_t),adj); } } From a0d5b171647d4e373fe420674550c4ff19988bf8 Mon Sep 17 00:00:00 2001 From: Guilhem Lavaux Date: Tue, 19 Mar 2013 10:45:33 -0400 Subject: [PATCH 07/39] Have an output that is more informative. 
--- c_tools/mock/generateMock.cpp | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/c_tools/mock/generateMock.cpp b/c_tools/mock/generateMock.cpp index 628dca1..cd92cce 100644 --- a/c_tools/mock/generateMock.cpp +++ b/c_tools/mock/generateMock.cpp @@ -274,7 +274,7 @@ void selectBox(SimuData *simu, std::vector& targets, generateMock_info& ar numAccepted++; } } - cout << "Num accepted here = " << numAccepted << " / input = " << simu->NumPart << endl; + cout << "SELECTBOX: Num accepted here = " << numAccepted << " / input = " << simu->NumPart << " (after resubsampling)" << endl; } class PreselectParticles: public SimulationPreprocessor From e63a9f5021065d9c9785ace0b50bbbaa8d465561 Mon Sep 17 00:00:00 2001 From: Guilhem Lavaux Date: Tue, 19 Mar 2013 10:47:46 -0400 Subject: [PATCH 08/39] More pid_t --- zobov/voztie.c | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/zobov/voztie.c b/zobov/voztie.c index 8e40aa3..5130449 100644 --- a/zobov/voztie.c +++ b/zobov/voztie.c @@ -107,8 +107,8 @@ int main(int argc, char *argv[]) { printf("Unable to open file %s.\n\n",partfile); exit(0); } - fread(&np2,1,sizeof(int),part); - fread(&nvp,1,sizeof(int),part); + fread(&np2,1,sizeof(pid_t),part); + fread(&nvp,1,sizeof(pid_t),part); /*printf("nvp = %d\n",nvp);fflush(stdout);*/ nvpsum += nvp; From dcce9c115616eb01d2f3b542e32e8149d5c70c11 Mon Sep 17 00:00:00 2001 From: Guilhem Lavaux Date: Tue, 19 Mar 2013 16:20:19 +0100 Subject: [PATCH 09/39] Reset nvpsum --- zobov/voztie.c | 1 + 1 file changed, 1 insertion(+) diff --git a/zobov/voztie.c b/zobov/voztie.c index 8e40aa3..7c334a3 100644 --- a/zobov/voztie.c +++ b/zobov/voztie.c @@ -98,6 +98,7 @@ int main(int argc, char *argv[]) { for (p=0;p Date: Tue, 19 Mar 2013 20:35:42 +0100 Subject: [PATCH 10/39] Added explanatory exception instead of a cryptic error for sample.dataFormat --- python_tools/void_python_tools/backend/launchers.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/python_tools/void_python_tools/backend/launchers.py b/python_tools/void_python_tools/backend/launchers.py index 1a0fb20..e056d84 100644 --- a/python_tools/void_python_tools/backend/launchers.py +++ b/python_tools/void_python_tools/backend/launchers.py @@ -162,6 +162,8 @@ def launchGenerate(sample, binPath, workDir=None, inputDataDir=None, dataFileLine = "gadget " + datafile elif sample.dataFormat == "sdf": dataFileLine = "sdf " + datafile + else: + raise ValueError("unknown dataFormat '%s'" % sample.dataFormat) iX = float(sample.mySubvolume[0]) iY = float(sample.mySubvolume[1]) From 189fa22c6bd968f791d0eaa40ab8192d0c94340f Mon Sep 17 00:00:00 2001 From: Guilhem Lavaux Date: Tue, 19 Mar 2013 16:36:26 -0400 Subject: [PATCH 11/39] Generate a graceful error message instead of a Segmentation Fault --- c_tools/mock/generateMock.cpp | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/c_tools/mock/generateMock.cpp b/c_tools/mock/generateMock.cpp index cd92cce..5bb1a50 100644 --- a/c_tools/mock/generateMock.cpp +++ b/c_tools/mock/generateMock.cpp @@ -395,6 +395,11 @@ void saveBox(SimuData *&boxed, const std::string& outbox) int num_snapshots = *boxed->as("num_snapshots"); long *uniqueID = boxed->as("uniqueID"); + if (!f.is_valid()) + { + cerr << "Could not create parameter file '" << outbox << "'. Aborting." 
<< endl; + exit(1); + } f.add_att("range_x_min", ranges[0]); f.add_att("range_x_max", ranges[1]); f.add_att("range_y_min", ranges[2]); From 5cbf68e0726bdfb117ba12f109765dbaed9af3f6 Mon Sep 17 00:00:00 2001 From: Guilhem Lavaux Date: Wed, 20 Mar 2013 04:55:59 -0400 Subject: [PATCH 12/39] Added a check that the subsampling value is the adequate one. --- c_tools/mock/generateMock.cpp | 12 ++++++++++-- 1 file changed, 10 insertions(+), 2 deletions(-) diff --git a/c_tools/mock/generateMock.cpp b/c_tools/mock/generateMock.cpp index 5bb1a50..bfd2b94 100644 --- a/c_tools/mock/generateMock.cpp +++ b/c_tools/mock/generateMock.cpp @@ -385,7 +385,7 @@ void buildBox(SimuData *simu, long num_targets, long loaded, } } -void saveBox(SimuData *&boxed, const std::string& outbox) +void saveBox(SimuData *&boxed, const std::string& outbox, generateMock_info& args_info) { double *ranges = boxed->as("ranges"); NcFile f(outbox.c_str(), NcFile::Replace, 0, 0, NcFile::Netcdf4); @@ -408,6 +408,7 @@ void saveBox(SimuData *&boxed, const std::string& outbox) f.add_att("range_z_max", ranges[5]); f.add_att("mask_index", -1); f.add_att("is_observation", 0); + f.add_att("data_subsampling", args_info.subsample_arg); NcDim *NumPart_dim = f.add_dim("numpart_dim", boxed->NumPart); NcDim *NumSnap_dim = f.add_dim("numsnap_dim", num_snapshots); @@ -451,6 +452,13 @@ void makeBoxFromParameter(SimuData *simu, SimuData* &boxed, generateMock_info& a boxed->time = simu->time; boxed->BoxSize = simu->BoxSize; + NcAtt *d_sub = f.get_att("data_subsampling"); + if (d_sub == 0 || d_sub->as_double(0) != args_info.subsample_arg) + { + cerr << "Parameter file was not generated with the same simulation subsampling argument. Particles will be different. Stop here." << endl; + exit(1); + } + NcVar *v_id = f.get_var("particle_ids"); NcVar *v_snap = f.get_var("snapshot_split"); long *edges1; @@ -715,7 +723,7 @@ int main(int argc, char **argv) delete[] efac; } - saveBox(simuOut, args_info.outputParameter_arg); + saveBox(simuOut, args_info.outputParameter_arg, args_info); generateOutput(simuOut, args_info.axis_arg, args_info.output_arg); delete preselector; From c6a9a1c875af2d0594420f73d4ef6b49308f599a Mon Sep 17 00:00:00 2001 From: "P.M. 
Sutter" Date: Wed, 20 Mar 2013 07:18:15 -0500 Subject: [PATCH 13/39] reconfigure multidark loader to work with sdf rescaling and shifts --- c_tools/mock/loaders/multidark_loader.cpp | 29 ++++++++++++++++++++++- 1 file changed, 28 insertions(+), 1 deletion(-) diff --git a/c_tools/mock/loaders/multidark_loader.cpp b/c_tools/mock/loaders/multidark_loader.cpp index bd43748..fd944fa 100644 --- a/c_tools/mock/loaders/multidark_loader.cpp +++ b/c_tools/mock/loaders/multidark_loader.cpp @@ -20,13 +20,16 @@ #include +#include #include #include #include #include #include #include "simulation_loader.hpp" +#include +using boost::format; using namespace std; using namespace CosmoTool; @@ -82,6 +85,12 @@ public: double tempData; + double shift = 0.5*simu->BoxSize; + double rescale_position = simu->Hubble*1.e-5*100./simu->time; + double rescale_velocity = one_kpc/one_Gyr; +#define INFINITY std::numeric_limits::max() + float min_pos[3] = {INFINITY,INFINITY, INFINITY}, max_pos[3] = {-INFINITY,-INFINITY,-INFINITY}; + cout << "loading multidark particles" << endl; long actualNumPart = 0; @@ -96,6 +105,17 @@ public: p.Pos[2] == -99 && p.Vel[2] == -99) break; + p.Pos[0] = p.Pos[0]*rescale_position + shift; + p.Pos[1] = p.Pos[1]*rescale_position + shift; + p.Pos[2] = p.Pos[2]*rescale_position + shift; + p.Vel[2] = p.Vel[2]*rescale_velocity; + + // enforce box size in case of roundoff error + for (int k = 0; k < 3; k++) { + //if (p.Pos[k] < 0) p.Pos[k] += simu->BoxSize; + //if (p.Pos[k] >= simu->BoxSize) p.Pos[k] -= simu->BoxSize; + } + if (preproc != 0 && !preproc->accept(p)) continue; @@ -110,7 +130,14 @@ public: reallocArray(uniqueID, allocated, actualNumPart); reallocArray(index, allocated, actualNumPart); } - } + + for (int k = 0; k < 3; k++) { + min_pos[k] = std::min(min_pos[k], p.Pos[k]); + max_pos[k] = std::max(max_pos[k], p.Pos[k]); + } + } + for (int k = 0; k < 3; k++) cout << boost::format("min[%d] = %g, max[%d] = %g") % k % min_pos[k] % k %max_pos[k] << endl; + applyTransformations(simu); simu->NumPart = actualNumPart; simu->TotalNumPart = actualNumPart; From 7c16c9477f0c4bd5965c41b9fdeea7c2898ba73e Mon Sep 17 00:00:00 2001 From: "P.M. 
Sutter" Date: Wed, 20 Mar 2013 07:19:25 -0500 Subject: [PATCH 14/39] more correct handling of multiple subsample levels --- python_tools/void_python_tools/backend/launchers.py | 11 +++++++---- 1 file changed, 7 insertions(+), 4 deletions(-) diff --git a/python_tools/void_python_tools/backend/launchers.py b/python_tools/void_python_tools/backend/launchers.py index 1a0fb20..69e5973 100644 --- a/python_tools/void_python_tools/backend/launchers.py +++ b/python_tools/void_python_tools/backend/launchers.py @@ -132,6 +132,7 @@ def launchGenerate(sample, binPath, workDir=None, inputDataDir=None, return prevSubSample = -1 + prevKeepFraction = -1 for thisSubSample in sample.subsample.split(', '): if prevSubSample == -1: @@ -147,7 +148,7 @@ def launchGenerate(sample, binPath, workDir=None, inputDataDir=None, outputFile = zobovDir+"/zobov_slice_" + sampleName + "_ss" + \ thisSubSample keepFraction = float(thisSubSample)/float(prevSubSample) - subSampleLine = "subsample %g" % keepFraction + subSampleLine = "subsample %s" % prevSubSample resubSampleLine = "resubsample %g" % keepFraction includePecVelString = "" @@ -207,9 +208,10 @@ def launchGenerate(sample, binPath, workDir=None, inputDataDir=None, os.system(cmd) # remove intermediate files - if (prevSubSample != -1): - os.unlink(zobovDir+"/zobov_slice_"+sampleName+"_ss"+prevSubSample+".par") - os.unlink(zobovDir+"/zobov_slice_"+sampleName+"_ss"+prevSubSample) +# TEMP + #if (prevSubSample != -1): + # os.unlink(zobovDir+"/zobov_slice_"+sampleName+"_ss"+prevSubSample+".par") + # os.unlink(zobovDir+"/zobov_slice_"+sampleName+"_ss"+prevSubSample) doneLine = "Done! %5.2e\n" % keepFraction if not jobSuccessful(logFile, doneLine): @@ -217,6 +219,7 @@ def launchGenerate(sample, binPath, workDir=None, inputDataDir=None, exit(-1) prevSubSample = thisSubSample + prevKeepFraction = keepFraction if jobSuccessful(logFile, doneLine): print "done" From 25a6ead2590cab96e48ef011759fb035239ee80c Mon Sep 17 00:00:00 2001 From: "P.M. 
Sutter" Date: Wed, 20 Mar 2013 07:21:02 -0500 Subject: [PATCH 15/39] can now do subsampling of sdf in prepareCatalogs --- .../pipeline_source/prepareCatalogs.in.py | 110 +++++++++++++----- 1 file changed, 84 insertions(+), 26 deletions(-) diff --git a/python_tools/pipeline_source/prepareCatalogs.in.py b/python_tools/pipeline_source/prepareCatalogs.in.py index 372c002..8b66fcc 100644 --- a/python_tools/pipeline_source/prepareCatalogs.in.py +++ b/python_tools/pipeline_source/prepareCatalogs.in.py @@ -285,24 +285,39 @@ for iSubSample in xrange(len(subSamples)): scriptDir, catalogDir, fileNums, redshifts, numSubvolumes, numSlices, False, lbox, minRadius, omegaM, subsample=subSampleToUse, suffix=suffix) - writeScript(setName, fileToUse, dataFormat, - scriptDir, catalogDir, fileNums, redshifts, - numSubvolumes, numSlices, True, lbox, minRadius, omegaM, - subsample=subSampleToUse, suffix=suffix) + if doPecVel: + writeScript(setName, fileToUse, dataFormat, + scriptDir, catalogDir, fileNums, redshifts, + numSubvolumes, numSlices, True, lbox, minRadius, omegaM, + subsample=subSampleToUse, suffix=suffix) else: - subSampleToUse = keepFractionList - suffix = "" - fileToUse = partFileList[0] - writeScript(setName, fileToUse, dataFormat, + if doSubSampling: + # prepare scripts using our own format + dataFormatToUse = "multidark" + subSampleToUse = 1.0 + fileToUse = prefix+"ss"+str(thisSubSample)+"_z" + partFileList = [] + suffix = ".dat" + for (iRedshift, redshift) in enumerate(redshifts): + sampleName = getSampleName(setName, redshift, False) + outFileName = sampleName+".dat" + partFileList.append(outFileName) + else: + dataFormatToUse = dataFormat + subSampleToUse = keepFractionList + suffix = "" + fileToUse = partFileList[0] + writeScript(setName, fileToUse, dataFormatToUse, scriptDir, catalogDir, fileNums, redshifts, numSubvolumes, numSlices, False, lbox, minRadius, omegaM, subsample=subSampleToUse, suffix=suffix, dataFileNameList=partFileList) - writeScript(setName, fileToUse, dataFormat, - scriptDir, catalogDir, fileNums, redshifts, - numSubvolumes, numSlices, True, lbox, minRadius, omegaM, - subsample=subSampleToUse, suffix=suffix, - dataFileNameList=partFileList) + if doPecVel: + writeScript(setName, fileToUse, dataFormatToUse, + scriptDir, catalogDir, fileNums, redshifts, + numSubvolumes, numSlices, True, lbox, minRadius, omegaM, + subsample=subSampleToUse, suffix=suffix, + dataFileNameList=partFileList) if args.subsample or args.all: @@ -313,17 +328,31 @@ for iSubSample in xrange(len(subSamples)): print " redshift", redshift sys.stdout.flush() - if dataFormat == "multidark": + if dataFormat == "multidark" or dataFormat == "sdf": # reuse previous subamples in order to: # - preserve unique IDs across multiple subsamples # - reuse smaller files for faster processing if prevSubSample == -1: - dataFile = catalogDir+"/"+particleFileBase+fileNums[iRedshift] + if particleFileDummy == '': + dataFile = catalogDir+"/"+particleFileBase+fileNums[iRedshift] + else: + dataFile = particleFileBase.replace(particleFileDummy, + fileNums[iRedshift]) + dataFile = catalogDir+"/"+dataFile + keepFraction = float(thisSubSample) / baseResolution else: sampleName = prefix+"ss"+str(prevSubSample)+"_z"+redshift dataFile = catalogDir+"/"+sampleName+".dat" keepFraction = float(thisSubSample) / float(prevSubSample) + if prevSubSample == -1 and dataFormat == "sdf": + convertedFile = dataFile + "_temp" + SDFcvt_PATH = "@CMAKE_BINARY_DIR@/external/libsdf/apps/SDFcvt/SDFcvt.x86_64" + command = "%s %s x y z vz vy vx > %s" % 
(SDFcvt_PATH, dataFile, + convertedFile ) + os.system(command) + dataFile = convertedFile + inFile = open(dataFile, 'r') sampleName = prefix+"ss"+str(thisSubSample)+"_z"+redshift @@ -335,6 +364,11 @@ for iSubSample in xrange(len(subSamples)): outFile.write("%s\n" %(redshift)) outFile.write("%d\n" %(maxKeep)) + if dataFormat == "sdf": + splitter = ' ' + if dataFormat == "multidark": + splitter = ',' + numKept = 0 for (i,line) in enumerate(inFile): if (prevSubSample != -1 and i < 5): continue # skip header @@ -344,7 +378,7 @@ for iSubSample in xrange(len(subSamples)): #if numKept > maxKeep: break if (prevSubSample == -1): - line = line.split(',') + line = line.split(splitter) x = float(line[0]) y = float(line[1]) z = float(line[2]) @@ -360,6 +394,9 @@ for iSubSample in xrange(len(subSamples)): inFile.close() outFile.close() + if prevSubSample == -1 and dataFormat == "sdf": + os.unlink(dataFile) + elif dataFormat == "random": sampleName = "ran.ss"+str(thisSubSample)+"_z"+redshift outFile = open(catalogDir+"/"+sampleName+".dat", 'w') @@ -409,14 +446,25 @@ if (args.script or args.all) and haloFileBase != "": minRadies = 10 setName = prefix+"halos_min"+str(minHaloMass) + fileList = [] + for (iRedshift, redshift) in enumerate(redshifts): + sampleName = getSampleName(setName, redshift, False) + outFileName = sampleName+".dat" + fileList.append(outFileName) + writeScript(setName, prefix+"halos_min"+str(minHaloMass)+"_z", "multidark", scriptDir, catalogDir, fileNums, redshifts, - numSubvolumes, numSlices, False, lbox, minRadius, omegaM) - writeScript(setName, prefix+"halos_min"+str(minHaloMass)+"_z", "multidark", - scriptDir, catalogDir, fileNums, - redshifts, - numSubvolumes, numSlices, True, lbox, minRadius, omegaM) + numSubvolumes, numSlices, False, lbox, minRadius, omegaM, + dataFileNameList = fileList) + + if doPecVel: + writeScript(setName, prefix+"halos_min"+str(minHaloMass)+"_z", + "multidark", + scriptDir, catalogDir, fileNums, + redshifts, + numSubvolumes, numSlices, True, lbox, minRadius, omegaM, + dataFileNameList = fileList) if (args.halos or args.all) and haloFileBase != "": print " Doing halos" @@ -451,7 +499,7 @@ if (args.halos or args.all) and haloFileBase != "": numPart += 1 inFile.close() - sampleName = prefix+"halos_min"+str(minHaloMass)+"_z"+fileNums[iRedshift] + sampleName = prefix+"halos_min"+str(minHaloMass)+"_z"+redshifts[iRedshift] outFileName = catalogDir+"/"+sampleName+".dat" outFile = open(outFileName, 'w') outFile.write("%f\n" %(lbox)) @@ -536,15 +584,25 @@ root_filename {workDir}/hod if (args.script or args.all) and haloFileBase != "": print " Doing HOD scripts" sys.stdout.flush() + for thisHod in hodParmList: + fileList = [] + for (iRedshift, redshift) in enumerate(redshifts): + sampleName = getSampleName(prefix+"hod_"+thisHod['name'], redshift, False) + outFileName = sampleName+".dat" + fileList.append(outFileName) + print " ", thisHod['name'] setName = prefix+"hod_"+thisHod['name'] writeScript(setName, prefix+"hod_"+thisHod['name']+"_z", "multidark", scriptDir, catalogDir, fileNums, redshifts, - numSubvolumes, numSlices, False, lbox, 15, omegaM) - writeScript(setName, prefix+"hod_"+thisHod['name']+"_z", "multidark", - scriptDir, catalogDir, fileNums, redshifts, - numSubvolumes, numSlices, True, lbox, 15, omegaM) + numSubvolumes, numSlices, False, lbox, 15, omegaM, + dataFileNameList = fileList) + if doPecVel: + writeScript(setName, prefix+"hod_"+thisHod['name']+"_z", "multidark", + scriptDir, catalogDir, fileNums, redshifts, + numSubvolumes, numSlices, True, lbox, 
15, omegaM, + dataFileNameList = fileList) if (args.hod or args.all) and haloFileBase != "": print " Doing HOD" From d6344fae43c326880e58732b7b9ed032d1d34698 Mon Sep 17 00:00:00 2001 From: "P.M. Sutter" Date: Wed, 20 Mar 2013 07:22:39 -0500 Subject: [PATCH 16/39] two new parameters in prepareCatalogs: doPecVel and doSubSampling From 1e5e6ca9d9e7bb81c66c11b322e33a2a484933ed Mon Sep 17 00:00:00 2001 From: Guilhem Lavaux Date: Thu, 21 Mar 2013 12:47:42 +0100 Subject: [PATCH 17/39] Use triangulated instead of joggled input to correct for the degenerated facets (fix problems with seemingly large volumes) --- zobov/vozutil.c | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/zobov/vozutil.c b/zobov/vozutil.c index aba81e2..c56da6f 100644 --- a/zobov/vozutil.c +++ b/zobov/vozutil.c @@ -46,7 +46,7 @@ int delaunadj (coordT *points, int nvp, int nvpbuf, int nvpall, PARTADJ **adjs) } /* Delaunay triangulation*/ - sprintf (flags, "qhull s d QJ"); + sprintf (flags, "qhull s d Qt"); exitcode= qh_new_qhull (dim, nvpall, points, ismalloc, flags, outfile, errfile); From 8f24cb3aab48b736998cd54a269f695f9a775e27 Mon Sep 17 00:00:00 2001 From: "P.M. Sutter" Date: Thu, 21 Mar 2013 21:49:15 -0500 Subject: [PATCH 18/39] some extra tools From a30f19f0b09c5ce48a21b57b6d9e18ae7485726b Mon Sep 17 00:00:00 2001 From: "P.M. Sutter" Date: Thu, 21 Mar 2013 21:50:24 -0500 Subject: [PATCH 19/39] added ability to pass a list of acceptable void IDs to stackVoidsZero From b1dc0abc0c8fae1aeb3f875595bfc4e56131c341 Mon Sep 17 00:00:00 2001 From: "P.M. Sutter" Date: Thu, 21 Mar 2013 21:51:29 -0500 Subject: [PATCH 20/39] can now do sdf subsampling (optionally) within prepareCatalogs --- .../pipeline_source/prepareCatalogs.in.py | 19 +++++++++++++++++-- 1 file changed, 17 insertions(+), 2 deletions(-) diff --git a/python_tools/pipeline_source/prepareCatalogs.in.py b/python_tools/pipeline_source/prepareCatalogs.in.py index 8b66fcc..839a544 100644 --- a/python_tools/pipeline_source/prepareCatalogs.in.py +++ b/python_tools/pipeline_source/prepareCatalogs.in.py @@ -348,8 +348,23 @@ for iSubSample in xrange(len(subSamples)): if prevSubSample == -1 and dataFormat == "sdf": convertedFile = dataFile + "_temp" SDFcvt_PATH = "@CMAKE_BINARY_DIR@/external/libsdf/apps/SDFcvt/SDFcvt.x86_64" - command = "%s %s x y z vz vy vx > %s" % (SDFcvt_PATH, dataFile, - convertedFile ) + scale = 1./(1.+float(redshift)) + rescale_position = hubble/1000./scale + shift = lbox/2. + rescale_velocity = 3.08567802e16/3.1558149984e16 + command = "%s %s x y z vz vy vx | awk '{print $1*%g+%g, $2*%g+%g, $3*%g+%g, $4*%g, $5*%g, $6*%g}' > %s" % (SDFcvt_PATH, dataFile, + rescale_position, + shift, + rescale_position, + shift, + rescale_position, + shift, + rescale_velocity, + rescale_velocity, + rescale_velocity, + convertedFile ) + os.system(command) + os.system(command) dataFile = convertedFile From 04eb2a248399021832c9348c57fafc8f678407b3 Mon Sep 17 00:00:00 2001 From: "P.M. Sutter" Date: Thu, 21 Mar 2013 21:52:34 -0500 Subject: [PATCH 21/39] top-level script to build sky projections; should probably move this into generateCatalog From cb2b0de143e75fa897119e4ec6275b4c86e7905e Mon Sep 17 00:00:00 2001 From: "P.M. 
Sutter" Date: Thu, 21 Mar 2013 21:54:02 -0500 Subject: [PATCH 22/39] reset to original multidark loading --- c_tools/mock/generateMock.cpp | 4 ++-- c_tools/mock/loaders/multidark_loader.cpp | 12 ++++++------ 2 files changed, 8 insertions(+), 8 deletions(-) diff --git a/c_tools/mock/generateMock.cpp b/c_tools/mock/generateMock.cpp index 9a72aa5..ff84ace 100644 --- a/c_tools/mock/generateMock.cpp +++ b/c_tools/mock/generateMock.cpp @@ -453,9 +453,9 @@ void makeBoxFromParameter(SimuData *simu, SimuData* &boxed, generateMock_info& a boxed->BoxSize = simu->BoxSize; NcAtt *d_sub = f.get_att("data_subsampling"); - if (d_sub == 0 || d_sub->as_double(0) != args_info.subsample_arg) + if (d_sub == 0 || d_sub->as_double(0)/args_info.subsample_arg-1. > 1.e-5) { - cerr << "Parameter file was not generated with the same simulation subsampling argument. Particles will be different. Stop here." << endl; + cerr << "Parameter file was not generated with the same simulation subsampling argument. Particles will be different. Stop here." << d_sub->as_double(0) << " " << args_info.subsample_arg <BoxSize; - //if (p.Pos[k] >= simu->BoxSize) p.Pos[k] -= simu->BoxSize; + if (p.Pos[k] < 0) p.Pos[k] += simu->BoxSize; + if (p.Pos[k] >= simu->BoxSize) p.Pos[k] -= simu->BoxSize; } if (preproc != 0 && !preproc->accept(p)) From 60965e1fa4e3d370189a31929a8086372fb3d9a2 Mon Sep 17 00:00:00 2001 From: "P.M. Sutter" Date: Thu, 21 Mar 2013 21:54:56 -0500 Subject: [PATCH 23/39] multiple levels of subsampling now correctly handled --- python_tools/void_python_tools/backend/launchers.py | 13 ++++++------- 1 file changed, 6 insertions(+), 7 deletions(-) diff --git a/python_tools/void_python_tools/backend/launchers.py b/python_tools/void_python_tools/backend/launchers.py index 4f834ca..b84f053 100644 --- a/python_tools/void_python_tools/backend/launchers.py +++ b/python_tools/void_python_tools/backend/launchers.py @@ -132,7 +132,7 @@ def launchGenerate(sample, binPath, workDir=None, inputDataDir=None, return prevSubSample = -1 - prevKeepFraction = -1 + firstSubSample = -1 for thisSubSample in sample.subsample.split(', '): if prevSubSample == -1: @@ -142,13 +142,14 @@ def launchGenerate(sample, binPath, workDir=None, inputDataDir=None, keepFraction = float(thisSubSample) subSampleLine = "subsample %g" % keepFraction resubSampleLine = "" + firstSubSample = keepFraction else: inputParameterFlag = "inputParameter " + zobovDir+"/zobov_slice_"+\ sampleName+"_ss"+prevSubSample+".par" outputFile = zobovDir+"/zobov_slice_" + sampleName + "_ss" + \ thisSubSample keepFraction = float(thisSubSample)/float(prevSubSample) - subSampleLine = "subsample %s" % prevSubSample + subSampleLine = "subsample %s" % firstSubSample resubSampleLine = "resubsample %g" % keepFraction includePecVelString = "" @@ -210,10 +211,9 @@ def launchGenerate(sample, binPath, workDir=None, inputDataDir=None, os.system(cmd) # remove intermediate files -# TEMP - #if (prevSubSample != -1): - # os.unlink(zobovDir+"/zobov_slice_"+sampleName+"_ss"+prevSubSample+".par") - # os.unlink(zobovDir+"/zobov_slice_"+sampleName+"_ss"+prevSubSample) + if (prevSubSample != -1): + os.unlink(zobovDir+"/zobov_slice_"+sampleName+"_ss"+prevSubSample+".par") + os.unlink(zobovDir+"/zobov_slice_"+sampleName+"_ss"+prevSubSample) doneLine = "Done! 
%5.2e\n" % keepFraction if not jobSuccessful(logFile, doneLine): @@ -221,7 +221,6 @@ def launchGenerate(sample, binPath, workDir=None, inputDataDir=None, exit(-1) prevSubSample = thisSubSample - prevKeepFraction = keepFraction if jobSuccessful(logFile, doneLine): print "done" From 37f8add9378b35fd73023e6086161f8f59ba3d77 Mon Sep 17 00:00:00 2001 From: "P.M. Sutter" Date: Thu, 21 Mar 2013 21:58:08 -0500 Subject: [PATCH 24/39] fixed output of subsampling fraction --- pipeline/generateCatalog.py | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/pipeline/generateCatalog.py b/pipeline/generateCatalog.py index 0321fa6..a786b99 100755 --- a/pipeline/generateCatalog.py +++ b/pipeline/generateCatalog.py @@ -88,7 +88,10 @@ for sample in dataSampleList: if (sample.dataType == "simulation"): fp.write("Particles placed on lightcone: %g\n" % sample.useLightCone) fp.write("Peculiar velocities included: %g\n" % sample.usePecVel) - fp.write("Additional subsampling fraction: %s\n" % sample.subsample[-1]) + if (len(sample.subsample) == 1): + fp.write("Additional subsampling fraction: %s\n" % sample.subsample) + else: + fp.write("Additional subsampling fraction: %s\n" % sample.subsample[-1]) fp.write("Simulation box length (Mpc/h): %g\n" % sample.boxLen) fp.write("Simulation Omega_M: %g\n" % sample.omegaM) fp.write("Number of simulation subvolumes: %s\n" % sample.numSubvolumes) From 02f756e8a5bc644fc5d059fdde3cc04a47c9e265 Mon Sep 17 00:00:00 2001 From: "P.M. Sutter" Date: Fri, 22 Mar 2013 08:51:45 -0500 Subject: [PATCH 25/39] integrated nico's cross-correlation backend and enabled plotting of the void distribution --- pipeline/generateCatalog.py | 2 + python_tools/setup.py | 2 +- python_tools/void_python_tools/__init__.py | 5 +- .../void_python_tools/plotting/plotTools.py | 50 ++++++++++++- .../void_python_tools/xcor/__init__.py | 20 +++++ .../void_python_tools/xcor/xcorlib.py | 73 +++++++++++++++++++ 6 files changed, 147 insertions(+), 5 deletions(-) create mode 100644 python_tools/void_python_tools/xcor/__init__.py create mode 100644 python_tools/void_python_tools/xcor/xcorlib.py diff --git a/pipeline/generateCatalog.py b/pipeline/generateCatalog.py index a786b99..5ef2771 100755 --- a/pipeline/generateCatalog.py +++ b/pipeline/generateCatalog.py @@ -150,5 +150,7 @@ if (startCatalogStage <= 4) and (endCatalogStage >= 4): dataPortion=thisDataPortion, setName=setName) plotNumberDistribution(workDir, dataSampleList, figDir, showPlot=False, dataPortion=thisDataPortion, setName=setName) + plotVoidDistribution(workDir, dataSampleList, figDir, showPlot=False, + dataPortion=thisDataPortion, setName=setName) print "\n Done!" 
diff --git a/python_tools/setup.py b/python_tools/setup.py index b0bc165..ad0518b 100644 --- a/python_tools/setup.py +++ b/python_tools/setup.py @@ -31,7 +31,7 @@ setup( cmdclass = {'build_ext': build_ext}, include_dirs = [np.get_include()], packages= - ['void_python_tools','void_python_tools.backend','void_python_tools.apTools', + ['void_python_tools','void_python_tools.backend','void_python_tools.apTools', 'void_python_tools.xcor', 'void_python_tools.apTools.profiles','void_python_tools.apTools.chi2', 'void_python_tools.plotting'], #ext_modules = [Extension("void_python_tools.chi2.velocityProfileFitNative", ["void_python_tools/chi2/velocityProfileFitNative.pyx"], libraries=["gsl", "gslcblas"]), Extension("void_python_tools.chi2.likelihoo", ["void_python_tools/chi2/likelihood.pyx"], libraries=["gsl", "gslcblas"])] ext_modules = [ diff --git a/python_tools/void_python_tools/__init__.py b/python_tools/void_python_tools/__init__.py index 5f07ae6..a84eedd 100644 --- a/python_tools/void_python_tools/__init__.py +++ b/python_tools/void_python_tools/__init__.py @@ -17,6 +17,7 @@ # with this program; if not, write to the Free Software Foundation, Inc., # 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. #+ -from void_python_tools.backend import * -from void_python_tools.apTools import * +from void_python_tools.backend import * +from void_python_tools.apTools import * from void_python_tools.plotting import * +from void_python_tools.xcor import * diff --git a/python_tools/void_python_tools/plotting/plotTools.py b/python_tools/void_python_tools/plotting/plotTools.py index 5bc617c..317efef 100644 --- a/python_tools/void_python_tools/plotting/plotTools.py +++ b/python_tools/void_python_tools/plotting/plotTools.py @@ -18,7 +18,7 @@ # 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
#+ __all__=['plotRedshiftDistribution', 'plotSizeDistribution', 'plot1dProfiles', - 'plotMarg1d', 'plotNumberDistribution'] + 'plotMarg1d', 'plotNumberDistribution', 'plotVoidDistribution'] from void_python_tools.backend.classes import * from plotDefs import * @@ -26,6 +26,7 @@ import numpy as np import os import pylab as plt import void_python_tools.apTools as vp +import void_python_tools.xcor as xcor # ----------------------------------------------------------------------------- def plotRedshiftDistribution(workDir=None, sampleList=None, figDir=None, @@ -266,7 +267,7 @@ def plotNumberDistribution(workDir=None, sampleList=None, figDir=None, boxVol *= 1.e-9 - filename = workDir+"/sample_"+sampleName+"/centers_"+dataPortion+"_"+\ + filename = workDir+"/sample_"+sampleName+"/centers_nocut_"+dataPortion+"_"+\ sampleName+".out" if not os.access(filename, os.F_OK): print "File not found: ", filename @@ -294,5 +295,50 @@ def plotNumberDistribution(workDir=None, sampleList=None, figDir=None, if showPlot: os.system("display %s" % figDir+"/fig_"+plotName+".png") +# ----------------------------------------------------------------------------- +def plotVoidDistribution(workDir=None, sampleList=None, figDir=None, + plotNameBase="dv", + showPlot=False, dataPortion=None, setName=None): + Nmesh = 256 + + for (iSample,sample) in enumerate(sampleList): + plt.clf() + plt.xlabel("x (Mpc/h)") + plt.xlabel("y (Mpc/h)") + + sampleName = sample.fullName + + plotName = plotNameBase+"_"+sampleName + + filename = workDir+"/sample_"+sampleName+"/centers_nocut_"+dataPortion+"_"+\ + sampleName+".out" + + if not os.access(filename, os.F_OK): + print "File not found: ", filename + continue + + void_file = open(filename,'r') + void_header1 = void_file.readline().split(",") + void_data1 = np.reshape(void_file.read().split(), + (-1,len(void_header1))).astype(np.float32) + void_file.close() + + xv = void_data1[:,0:3] + rv = void_data1[:,4] + vv = void_data1[:,6] + dv, wm, ws = xcor.cic(xv, sample.boxLen, Lboxcut = 0., Nmesh = Nmesh) + plt.imshow(np.sum(dv+1,2)/Nmesh,extent=[0,sample.boxLen,0,sample.boxLen], + aspect='equal', + cmap='YlGnBu_r',interpolation='gaussian') + plt.colorbar() + + plt.title("Voids: "+sampleName) + + plt.savefig(figDir+"/fig_"+plotName+".pdf", bbox_inches="tight") + plt.savefig(figDir+"/fig_"+plotName+".eps", bbox_inches="tight") + plt.savefig(figDir+"/fig_"+plotName+".png", bbox_inches="tight") + + if showPlot: + os.system("display %s" % figDir+"/fig_"+plotName+".png") diff --git a/python_tools/void_python_tools/xcor/__init__.py b/python_tools/void_python_tools/xcor/__init__.py new file mode 100644 index 0000000..26f152d --- /dev/null +++ b/python_tools/void_python_tools/xcor/__init__.py @@ -0,0 +1,20 @@ +#+ +# VIDE -- Void IDEntification pipeline -- ./python_tools/void_python_tools/plotting/__init__.py +# Copyright (C) 2010-2013 Guilhem Lavaux +# Copyright (C) 2011-2013 P. M. Sutter +# +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation; version 2 of the License. +# +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. 
+# +# You should have received a copy of the GNU General Public License along +# with this program; if not, write to the Free Software Foundation, Inc., +# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. +#+ +from xcorlib import * diff --git a/python_tools/void_python_tools/xcor/xcorlib.py b/python_tools/void_python_tools/xcor/xcorlib.py new file mode 100644 index 0000000..d5b489a --- /dev/null +++ b/python_tools/void_python_tools/xcor/xcorlib.py @@ -0,0 +1,73 @@ +import numpy as np + +def cic( x, Lbox, Lboxcut = 0, Nmesh = 128, weights = None ): + + if weights == None: weights = 1 + wm = np.mean(weights) + ws = np.mean(weights**2) + + d = np.mod(x/(Lbox+2*Lboxcut)*Nmesh,1) + + box = ([Lboxcut,Lbox+Lboxcut],[Lboxcut,Lbox+Lboxcut],[Lboxcut,Lbox+Lboxcut]) + + rho = np.histogramdd(x, range = box, bins = Nmesh, weights = weights*(1-d[:,0])*(1-d[:,1])*(1-d[:,2]))[0] \ + + np.roll(np.histogramdd(x, range = box, bins = Nmesh, weights = weights*d[:,0]*(1-d[:,1])*(1-d[:,2]))[0],1,0) \ + + np.roll(np.histogramdd(x, range = box, bins = Nmesh, weights = weights*(1-d[:,0])*d[:,1]*(1-d[:,2]))[0],1,1) \ + + np.roll(np.histogramdd(x, range = box, bins = Nmesh, weights = weights*(1-d[:,0])*(1-d[:,1])*d[:,2])[0],1,2) \ + + np.roll(np.roll(np.histogramdd(x, range = box, bins = Nmesh, weights = weights*d[:,0]*d[:,1]*(1-d[:,2]))[0],1,0),1,1) \ + + np.roll(np.roll(np.histogramdd(x, range = box, bins = Nmesh, weights = weights*d[:,0]*(1-d[:,1])*d[:,2])[0],1,0),1,2) \ + + np.roll(np.roll(np.histogramdd(x, range = box, bins = Nmesh, weights = weights*(1-d[:,0])*d[:,1]*d[:,2])[0],1,1),1,2) \ + + np.roll(np.roll(np.roll(np.histogramdd(x, range = box, bins = Nmesh, weights = weights*d[:,0]*d[:,1]*d[:,2])[0],1,0),1,1),1,2) + + rho /= wm + + rho = rho/rho.mean() - 1. + + return (rho, wm, ws) + + +def powcor( d1, d2, Lbox, Nmesh = 128, Nbin = 100, scale = 'lin', cor = False ): + + # Fourier transform + d1 = np.fft.fftn(d1) + d2 = np.fft.fftn(d2) + + # CIC correction + wid = np.indices(np.shape(d1)) - Nmesh/2 + #wid[np.where(wid >= Nmesh/2)] -= Nmesh + wid = wid*np.pi/Nmesh + 1e-100 + wcic = np.prod(np.sin(wid)/wid,0)**2 + + # Shell average power spectrum + dk = 2*np.pi/Lbox + Pk = np.conj(d1)*d2*(Lbox/Nmesh**2)**3 + Pk = np.fft.fftshift(Pk)/wcic**2 + + (Nm, km, Pkm, SPkm) = shellavg(np.real(Pk), dk, Nmesh, Nbin = Nbin, xmin = 0., xmax = Nmesh*dk/2, scale = scale) + + # Inverse Fourier transform and shell average correlation function + if cor == True: + dx = Lbox/Nmesh + Xr = np.fft.ifftshift(np.fft.ifftn(np.fft.ifftshift(Pk)))*(Nmesh/Lbox)**3 + + (Nmx, rm, Xrm, SXrm) = shellavg(np.real(Xr), dx, Nmesh, Nbin = Nbin, xmin = dx, xmax = 140., scale = scale) + + return ((Nm, km, Pkm, SPkm),(Nmx, rm, Xrm, SXrm)) + + else: return (Nm, km, Pkm, SPkm) + + +def shellavg( f, dx, Nmesh, Nbin = 100, xmin = 0., xmax = 1., scale = 'lin' ): + + x = np.indices(np.shape(f)) - Nmesh/2 + #x[np.where(x >= Nmesh/2)] -= Nmesh + x = dx*np.sqrt(np.sum(x**2,0)) + if scale == 'lin': bins = xmin+(xmax-xmin)* (np.arange(Nbin+1)/float(Nbin)) + if scale == 'log': bins = xmin*(xmax/xmin)**(np.arange(Nbin+1)/float(Nbin)) + + Nm = np.histogram(x, bins = bins)[0] + xm = np.histogram(x, bins = bins, weights = x)[0]/Nm + fm = np.histogram(x, bins = bins, weights = f)[0]/Nm + fs = np.sqrt((np.histogram(x, bins = bins, weights = f**2)[0]/Nm - fm**2)/(Nm-1)) + + return (Nm, xm, fm, fs) \ No newline at end of file From f699ed372f6791fabf250454a45ec974774f59f8 Mon Sep 17 00:00:00 2001 From: "P.M. 
Sutter" Date: Fri, 22 Mar 2013 11:46:20 -0500 Subject: [PATCH 26/39] added nico's cross-correlation script to a new directory, analysis/ --- analysis/datasetsToAnalyze.py | 51 +++ analysis/xcor.py | 352 ++++++++++++++++++ pipeline/generateCatalog.py | 2 +- .../void_python_tools/plotting/plotTools.py | 2 +- 4 files changed, 405 insertions(+), 2 deletions(-) create mode 100644 analysis/datasetsToAnalyze.py create mode 100755 analysis/xcor.py diff --git a/analysis/datasetsToAnalyze.py b/analysis/datasetsToAnalyze.py new file mode 100644 index 0000000..412de5c --- /dev/null +++ b/analysis/datasetsToAnalyze.py @@ -0,0 +1,51 @@ +#+ +# VIDE -- Void IDEntification pipeline -- ./crossCompare/analysis/datasetsToAnalyze.py +# Copyright (C) 2010-2013 Guilhem Lavaux +# Copyright (C) 2011-2013 P. M. Sutter +# +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation; version 2 of the License. +# +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License along +# with this program; if not, write to the Free Software Foundation, Inc., +# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. +#+ +#!/usr/bin/env python + + +outputDir = "/home/psutter2/workspace/Voids/analysis/xcor/" + +# Sim parameters +Ni = 1000237 # Number of dark matter particles per dimension in simulation +ss = 0.1 # Subsampling fraction of dark matter particles to read +Mpart = 8.721e9 # Particle mass [M_sol] +Npart = int(ss*Ni) # Particle number of subsample +Lboxcut = 0. # Size of optional margin to be cut from the box [h^(-1)Mpc] +Nmesh = 256 # Interpolation meshlength +Nbin = 70 # Number of bins for power spectrum and correlation function +r_H = 3000. # Hubble scale [h^(-1)Mpc] +ns = 0.95 # Spectral index +sigma_8 = 0.82 # Sigma_8 +h = 0.7 # Dimensionless Hubble parameter + +# Input files +matterDir = '/home/psutter2/workspace/Voids/catalogs/mergertree512/' +haloDir = '/home/psutter2/workspace/Voids/catalogs/mergertree512/' +matterFilename = 'mf_4s_1G_512_1.000' +haloFilename = 'mf_4s_1G_512_bgc2_1.000.sdf' +voidBaseDir = "/home/psutter2/workspace/Voids/" + + +sampleDirList = [ + "mergertree512/mt_ss0.01/sample_mt_ss0.01_z0.00_d00/", + ] + +dataPortion = "central" + diff --git a/analysis/xcor.py b/analysis/xcor.py new file mode 100755 index 0000000..2343443 --- /dev/null +++ b/analysis/xcor.py @@ -0,0 +1,352 @@ +#!/usr/bin/env python +#+ +# VIDE -- Void IDEntification pipeline -- ./crossCompare/analysis/mergerTree.py +# Copyright (C) 2010-2013 Guilhem Lavaux +# Copyright (C) 2011-2013 P. M. Sutter +# +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation; version 2 of the License. +# +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. 
+# +# You should have received a copy of the GNU General Public License along +# with this program; if not, write to the Free Software Foundation, Inc., +# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. +#+ + +from void_python_tools.backend import * +from void_python_tools.plotting import * +import void_python_tools.xcor as xcorlib +import imp +import pickle +import argparse +import os +import string +import numpy as np +import matplotlib as mpl +mpl.use('Agg') +import matplotlib.pyplot as plt +import matplotlib.cm as cm +from matplotlib import rc +from matplotlib.ticker import NullFormatter +import random +import sys + +# ------------------------------------------------------------------------------ + +dataNameBase = "xcor" + +parser = argparse.ArgumentParser(description='Analyze.') +parser.add_argument('--parmFile', dest='parmFile', default='datasetsToAnalyze.py', + help='path to parameter file') +args = parser.parse_args() + +# ------------------------------------------------------------------------------ + +filename = args.parmFile +print " Loading parameters from", filename +if not os.access(filename, os.F_OK): + print " Cannot find parameter file %s!" % filename + exit(-1) +parms = imp.load_source("name", filename) +globals().update(vars(parms)) + +if not os.access(outputDir, os.F_OK): + os.makedirs(outputDir) + +for (iSample, sampleDir) in enumerate(sampleDirList): + + with open(voidBaseDir+sampleDir+"/sample_info.dat", 'rb') as input: + sample = pickle.load(input) + + print " Working with", sample.fullName, "...", + sys.stdout.flush() + + sampleName = sample.fullName + + # Sim parameters + Lbox = sample.boxLen # Boxlength [h^(-1)Mpc] + Lbox -= 2*Lboxcut # Reduced boxlength [h^(-1)Mpc] + Om = sample.omegaM # Omega_m + Ol = 1.-Om # Omega_l + z = sample.zRange[0] # Redshift + a = 1./(1.+z) # Scale factor + rho_m = Mpart*(Ni/Lbox)**3 # Background density [(h/Mpc)^3] + + # Input files + voidDir = voidBaseDir+'/'+sampleDir+'/' + voidFilename1 = 'centers_central_'+sample.fullName+'.out' + voidFilename2 = 'voidDesc_central_'+sample.fullName+'.out' + + + # Read files + matter_file = open(matterDir+matterFilename,'r') + matter_data = [] + count = 0 + sep = 1e8 + for (i,line) in enumerate(matter_file): + if i/int(sep) == i/sep: print str(round(i*100./Ni**3,1))+' percent of all particles read' + if random.random() > ss: continue + count += 1 + matter_data.append(line.split(',')[0:3]) + + print str(count)+' particles read in total' + matter_data = np.asarray(matter_data,dtype=np.float32) + matter_file.close() + + + halo_file = open(haloDir+haloFilename,'r') + halo_data = np.reshape(halo_file.read().replace('\n',',').split(',')[0:-1],(-1,12)).astype(np.float32) + halo_file.close() + + void_file = open(voidDir+voidFilename1,'r') + void_header1 = void_file.readline().split(",") + void_data1 = np.reshape(void_file.read().split(),(-1,len(void_header))).astype(np.float32) + void_file.close() + + void_file = open(voidDir+voidFilename2,'r') + void_header2 = void_file.readline().split(",")+void_file.readline().split(",") + void_data2 = np.reshape(void_file.read().split(),(-1,11)).astype(np.float32) + void_file.close() + + + # Define arrays + xm = matter_data[:,0:3] + + xh = halo_data[:,0:3] + mh = halo_data[:,6] + + xv = void_data1[:,0:3] + rv = void_data1[:,4] + vv = void_data1[:,6] + mv = void_data2[:,8]*Mpart + + + # Interpolate to mesh + dm, wm, ws = xcorlib.cic(xm, Lbox, Lboxcut = Lboxcut, Nmesh = Nmesh) + dh, wm, ws = xcorlib.cic(xh, Lbox, Lboxcut = Lboxcut, Nmesh = Nmesh) + dv, wm, 
ws = xcorlib.cic(xv, Lbox, Lboxcut = Lboxcut, Nmesh = Nmesh) + + # Load dark matter grid + #output = open('dm_'+str(Nmesh)+'_ss'+str(ss)+'_z'+str(z)+'.dat', 'rb') + #dm = pickle.load(output) + #output.close() + + # Save dark matter grid + #output = open('dm_'+str(Nmesh)+'_ss'+str(ss)+'_z'+str(z)+'.dat', 'wb') + #pickle.dump(dm,output) + #output.close() + + + # Power spectra & correlation functions + ((Nm, km, Pmm, SPmm),(Nmx, rm, Xmm, SXmm)) = xcorlib.powcor(dm, dm, Lbox, Nmesh = Nmesh, Nbin = Nbin, scale = 'lin', cor = True) + ((Nm, km, Pvm, SPvm),(Nmx, rm, Xvm, SXvm)) = xcorlib.powcor(dv, dm, Lbox, Nmesh = Nmesh, Nbin = Nbin, scale = 'lin', cor = True) + ((Nm, km, Phm, SPhm),(Nmx, rm, Xhm, SXhm)) = xcorlib.powcor(dh, dm, Lbox, Nmesh = Nmesh, Nbin = Nbin, scale = 'lin', cor = True) + ((Nm, km, Pvv, SPvv),(Nmx, rm, Xvv, SXvv)) = xcorlib.powcor(dv, dv, Lbox, Nmesh = Nmesh, Nbin = Nbin, scale = 'lin', cor = True) + ((Nm, km, Pvh, SPvh),(Nmx, rm, Xvh, SXvh)) = xcorlib.powcor(dv, dh, Lbox, Nmesh = Nmesh, Nbin = Nbin, scale = 'lin', cor = True) + ((Nm, km, Phh, SPhh),(Nmx, rm, Xhh, SXhh)) = xcorlib.powcor(dh, dh, Lbox, Nmesh = Nmesh, Nbin = Nbin, scale = 'lin', cor = True) + + + # Number densities + nm = np.empty(len(km)) + nh = np.empty(len(km)) + nv = np.empty(len(km)) + nm[:] = Npart/Lbox**3 + nh[:] = len(xh)/Lbox**3 + nv[:] = len(xv)/Lbox**3 + + # Number functions + Mbin = 40 + Vbin = 40 + Nh, Mh = np.histogram(mh, bins = mh.min()*(mh.max()/mh.min())**(np.arange(Mbin+1)/float(Mbin))) + Nvm, Mv = np.histogram(mv, bins = mv.min()*(mv.max()/mv.min())**(np.arange(Vbin+1)/float(Vbin))) + Nvv, Vv = np.histogram(vv, bins = vv.min()*(vv.max()/vv.min())**(np.arange(Vbin+1)/float(Vbin))) + + # Bias + b_hh = np.sqrt(Phh/Pmm) + b_vv = np.sqrt(Pvv/Pmm) + b_hm = Phm/Pmm + b_vm = Pvm/Pmm + b_vh = Pvh/Phh + + knl = 0.04 # Wavenumber above which nonlinearities kick in [h/Mpc] + idls = np.where(km <= knl)[0] + bm_hm = np.average(b_hm[idls],weights=Nm[idls]) + bm_vm = np.average(b_vm[idls],weights=Nm[idls]) + bm_vh = np.average(b_vh[idls],weights=Nm[idls]) + + # Shot Noise + sn_hh = Phh - Phm**2/Pmm + sn_vh = Pvh - Pvm*Phm/Pmm + sn_vv = Pvv - Pvm**2/Pmm + + + + # Plots + ms = 4 + mew = 0.2 + margin = 1.2 + kmin = km.min()/margin + kmax = km.max()*margin + rmin = rm.min()/margin + rmax = rm.max()*margin + + plt.imshow(np.sum(dm+1,2)/Nmesh,extent=[0,Lbox,0,Lbox],aspect='equal',cmap='YlGnBu_r',interpolation='gaussian') + plt.xlabel(r'$x \;[h^{-1}\mathrm{Mpc}]$') + plt.ylabel(r'$y \;[h^{-1}\mathrm{Mpc}]$') + plt.title(r'Dark matter') + plt.colorbar() + plt.savefig(outputDir+'/dm_'+sample.fullName+'.pdf', format='pdf', bbox_inches="tight") + plt.clf() + + plt.imshow(np.sum(dv+1,2)/Nmesh,extent=[0,Lbox,0,Lbox],aspect='equal',cmap='YlGnBu_r',interpolation='gaussian') + plt.xlabel(r'$x \;[h^{-1}\mathrm{Mpc}]$') + plt.ylabel(r'$y \;[h^{-1}\mathrm{Mpc}]$') + plt.title(r'Voids') + plt.colorbar() + plt.savefig(outputDir+'/dv_'+sample.fullName+'.pdf', format='pdf', bbox_inches="tight") + plt.clf() + + plt.imshow(np.sum(dh+1,2)/Nmesh,extent=[0,Lbox,0,Lbox],aspect='equal',cmap='YlGnBu_r',interpolation='gaussian') + plt.xlabel(r'$x \;[h^{-1}\mathrm{Mpc}]$') + plt.ylabel(r'$y \;[h^{-1}\mathrm{Mpc}]$') + plt.title(r'Halos') + plt.colorbar() + plt.savefig(outputDir+'/dh_'+sample.fullName+'.pdf', format='pdf', bbox_inches="tight") + plt.clf() + + + pa, = plt.plot(Mh[:-1], Nh, 'ro-', ms=ms, mew=mew) + pb, = plt.plot(Vv[:-1]*1e9, Nvv, 'bo-', ms=ms, mew=mew) + plt.xlabel(r'$M \;[h^{-1}M_{\odot}]$ , $V \;[h^{-3}\mathrm{kpc}^3]$') + 
plt.ylabel(r'$N(M,V)$') + plt.title(r'Number of halos and voids') + plt.xscale('log') + plt.yscale('log') + plt.xlim(min(10**np.floor(np.log10(Vv.min()))*1e9,10**np.floor(np.log10(Mh.min()))), max(10**np.ceil(np.log10(Mh.max())),10**np.ceil(np.log10(Vv.max()))*1e8)) + plt.ylim(10**np.floor(np.log10(Nh.min())), 10**np.ceil(np.log10(Nh.max()))) + plt.annotate(r'$\frac{4\pi}{3}\langle r_\mathrm{v}\rangle^3$', xy=(4./3.*np.pi*rv.mean()**3*1e9,1.1), xytext=(-50,235),textcoords='offset points',arrowprops=dict(fc='k',arrowstyle="->",connectionstyle="angle,angleA=0,angleB=90,rad=10")) + plt.legend([pa,pb],['halos','voids'],'best' ) + plt.savefig(outputDir+'/number_'+sample.fullName+'.pdf', format='pdf', bbox_inches="tight") + plt.clf() + + + plt.subplot(211) + plt.subplots_adjust(wspace=0,hspace=0) + plt.plot(np.sort(vv), mv[np.argsort(vv)], 'r-', ms=ms, mew=mew, lw=0.01) + plt.plot(np.sort(vv), np.sort(mv), 'k-', ms=ms, mew=mew) + plt.plot(np.sort(vv), np.sort(vv)*1e9, 'k--', ms=ms, mew=mew) + plt.title(r'Mass-volume relation of voids') + plt.ylabel(r'$M \;[h^{-1}M_\odot]$') + plt.xscale('log') + plt.yscale('log') + plt.subplot(211).xaxis.set_major_formatter(NullFormatter()) + plt.subplot(212) + plt.plot(np.sort(vv), mv[np.argsort(vv)]/np.sort(vv)/rho_m-1., 'b-', ms=ms, mew=mew, lw=0.01) + plt.plot(np.sort(vv), np.sort(mv)/np.sort(vv)/rho_m-1., 'k-', ms=ms, mew=mew) + plt.xlabel(r'$V \;[h^{-3}\mathrm{Mpc}^3]$') + plt.ylabel(r'$\delta$') + plt.xscale('log') + plt.yscale('linear') + plt.ylim(-1.01,-0.861) + plt.savefig(outputDir+'/massvol_'+sample.fullName+'.pdf', format='pdf', bbox_inches="tight") + plt.clf() + + + pa ,= plt.plot(km, Phh, 'r-', ms=ms, mew=mew) + plt.plot(km, Phh-sn_hh, 'r:', ms=ms, mew=mew) + plt.fill_between(km, Phh+SPhh, abs(Phh-SPhh), color='r', alpha=0.2) + pb ,= plt.plot(km, Phm, 'y-', ms=ms, mew=mew) + plt.fill_between(km, Phm+SPhm, abs(Phm-SPhm), color='y', alpha=0.2) + pc ,= plt.plot(km, Pmm, 'k-', ms=ms, mew=mew) + plt.plot(km, Pmm-1./nm, 'k:', ms=ms, mew=mew) + plt.fill_between(km, Pmm+SPmm, abs(Pmm-SPmm), color='k', alpha=0.2) + pd ,= plt.plot(km, Pvh, 'g-', ms=ms, mew=mew) + plt.plot(km, -Pvh, 'g--', ms=ms, mew=mew) + plt.plot(km, abs(Pvh-sn_vh), 'g:', ms=ms, mew=mew) + plt.fill_between(km, abs(Pvh+SPvh), abs(Pvh-SPvh), color='g', alpha=0.2) + pe ,= plt.plot(km, Pvm, 'm-', ms=ms, mew=mew) + plt.plot(km, -Pvm, 'm--', ms=ms, mew=mew) + plt.fill_between(km, abs(Pvm+SPvm), abs(Pvm-SPvm), color='m', alpha=0.2) + pf ,= plt.plot(km, Pvv, 'b-', ms=ms, mew=mew) + plt.plot(km, Pvv-sn_vv, 'b:', ms=ms, mew=mew) + plt.fill_between(km, Pvv+SPvv, abs(Pvv-SPvv), color='b', alpha=0.2) + plt.annotate(r'$\frac{\pi}{\langle r_\mathrm{v}\rangle}$', xy=(np.pi/(rv.mean()),1.01*10**np.floor(np.log10(abs(Pvh).min()))/margin), xytext=(10,280),textcoords='offset points',arrowprops=dict(fc='k',arrowstyle="->",connectionstyle="angle,angleA=0,angleB=90,rad=10")) + plt.xlabel(r'$k \;[h\mathrm{Mpc}^{-1}]$') + plt.ylabel(r'$P(k) \;[h^{-3}\mathrm{Mpc}^3]$') + plt.title(r'Power spectra') + plt.xscale('log') + plt.yscale('log') + plt.xlim(kmin,kmax) + plt.ylim(10**np.floor(np.log10(abs(Pvh).min()))/margin, max(10**np.ceil(np.log10(Phh.max())),10**np.ceil(np.log10(Pvv.max())))*margin) + plt.legend([pa, pb, pc, pd, pe, pf],['hh', 'hm', 'mm', 'vh', 'vm', 'vv'],'lower left' ) + plt.savefig(outputDir+'/power_'+sample.fullName+'.pdf', format='pdf', bbox_inches="tight") + plt.clf() + + + pa ,= plt.plot(rm, Xhh, 'r-', ms=ms, mew=mew) + plt.fill_between(rm, abs(Xhh+SXhh), abs(Xhh-SXhh), color='r', alpha=0.2) + pb 
,= plt.plot(rm, Xhm, 'y-', ms=ms, mew=mew) + plt.fill_between(rm, abs(Xhm+SXhm), abs(Xhm-SXhm), color='y', alpha=0.2) + pc ,= plt.plot(rm, Xmm, 'k-', ms=ms, mew=mew) + plt.fill_between(rm, abs(Xmm+SXmm), abs(Xmm-SXmm), color='k', alpha=0.2) + pd ,= plt.plot(rm, Xvh, 'g-', ms=ms, mew=mew) + plt.plot(rm, -Xvh, 'g--', ms=ms, mew=mew) + plt.fill_between(rm, abs(Xvh+SXvh), abs(Xvh-SXvh), color='g', alpha=0.2) + pe ,= plt.plot(rm, Xvm, 'm-', ms=ms, mew=mew) + plt.plot(rm, -Xvm, 'm--', ms=ms, mew=mew) + plt.fill_between(rm, abs(Xvm+SXvm), abs(Xvm-SXvm), color='m', alpha=0.2) + pf ,= plt.plot(rm, Xvv, 'b-', ms=ms, mew=mew) + plt.fill_between(rm, abs(Xvv+SXvv), abs(Xvv-SXvv), color='b', alpha=0.2) + plt.annotate(r'$\langle r_\mathrm{v}\rangle$', xy=(rv.mean(),1.01*10**np.floor(np.log10(abs(Xvh).min()))/margin), xytext=(10,300),textcoords='offset points',arrowprops=dict(fc='k',arrowstyle="->",connectionstyle="angle,angleA=0,angleB=90,rad=10")) + plt.xlabel(r'$r \;[h^{-1}\mathrm{Mpc}]$') + plt.ylabel(r'$\xi(r)$') + plt.title(r'Correlation functions') + plt.xscale('log') + plt.yscale('log') + plt.xlim(rmin,rmax) + plt.ylim(10**np.floor(np.log10(abs(Xvh).min()))/margin, max(10**np.ceil(np.log10(Xhh.max())),10**np.ceil(np.log10(Xvv.max())))*margin) + plt.legend([pa, pb, pc, pd, pe, pf],['hh', 'hm', 'mm', 'vh', 'vm', 'vv'],'lower left' ) + plt.savefig(outputDir+'/correlation_'+sample.fullName+'.pdf', format='pdf', bbox_inches="tight") + plt.clf() + + + pa, = plt.plot(km, b_hh, 'r-', ms=ms, mew=mew) + pb, = plt.plot(km, b_hm, 'r--', ms=ms, mew=mew) + pc, = plt.plot(km, b_vv, 'b-', ms=ms, mew=mew) + pd, = plt.plot(km, b_vm, 'b--', ms=ms, mew=mew) + pe, = plt.plot(km, b_vh/bm_vh, 'g-', ms=ms, mew=mew) + plt.plot(km, np.sin(km*rv.mean())/(km*rv.mean()), 'k:', ms=ms, mew=mew) + plt.xlabel(r'$k \;[h\mathrm{Mpc}^{-1}]$') + plt.ylabel(r'$b(k)$') + plt.title(r'Bias') + plt.xscale('log') + plt.yscale('linear') + plt.xlim(kmin,kmax) + plt.ylim(np.floor(b_vm.min()),np.ceil(max(b_hh.max(),b_vv.max()))) + plt.legend([pa,pb,pc,pd,pe],['hh', 'hm', 'vv', 'vm', r'$\bar{u}_\mathrm{v}(k)$'],'best' ) + plt.savefig(outputDir+'/bias_'+sample.fullName+'.pdf', format='pdf', bbox_inches="tight") + plt.clf() + + + pa, = plt.plot(km, sn_hh, 'r-', ms=ms, mew=mew) + pb, = plt.plot(km, sn_vh, 'g-', ms=ms, mew=mew) + pc, = plt.plot(km, sn_vv, 'b-', ms=ms, mew=mew) + plt.plot(km, abs(sn_vh), 'g:') + pd, = plt.plot(km, 1/nh, 'r--') + pe, = plt.plot(km, 1/nv, 'b-.') + plt.xlabel(r'$k \;[h\mathrm{Mpc}^{-1}]$') + plt.ylabel(r'$\sigma^2(k)$') + plt.title(r'Shotnoise') + plt.xscale('log') + plt.yscale('log') + plt.xlim(kmin,kmax) + plt.ylim(10**np.floor(np.log10(abs(sn_vh).min())), 10**np.ceil(np.log10(sn_vv.max()))) + plt.legend([pa,pb,pc,pd,pe],['hh', 'vh', 'vv', r'$\bar{n}_\mathrm{h}^{-1}$', r'$\bar{n}_\mathrm{v}^{-1}$'],'best' ) + plt.savefig(outputDir+'/shotnoise_'+sample.fullName+'.pdf', format='pdf', bbox_inches="tight") + plt.clf() diff --git a/pipeline/generateCatalog.py b/pipeline/generateCatalog.py index 5ef2771..937a628 100755 --- a/pipeline/generateCatalog.py +++ b/pipeline/generateCatalog.py @@ -29,7 +29,7 @@ import pickle # ------------------------------------------------------------------------------ if (len(sys.argv) == 1): - print "Usage: ./analyzeVoids.py parameter_file.py" + print "Usage: ./generateCatalog.py parameter_file.py" exit(-1) if (len(sys.argv) > 1): diff --git a/python_tools/void_python_tools/plotting/plotTools.py b/python_tools/void_python_tools/plotting/plotTools.py index 317efef..b7d3efa 100644 --- 
a/python_tools/void_python_tools/plotting/plotTools.py +++ b/python_tools/void_python_tools/plotting/plotTools.py @@ -305,7 +305,7 @@ def plotVoidDistribution(workDir=None, sampleList=None, figDir=None, for (iSample,sample) in enumerate(sampleList): plt.clf() plt.xlabel("x (Mpc/h)") - plt.xlabel("y (Mpc/h)") + plt.ylabel("y (Mpc/h)") sampleName = sample.fullName From bdb6ff2ebd1ac1015337a7f3aad018307fb8a041 Mon Sep 17 00:00:00 2001 From: Guilhem Lavaux Date: Fri, 22 Mar 2013 21:18:27 +0100 Subject: [PATCH 27/39] Removed buggy code that double counted adjacencies --- zobov/voztie.c | 11 +---------- 1 file changed, 1 insertion(+), 10 deletions(-) diff --git a/zobov/voztie.c b/zobov/voztie.c index 6c67519..d9737ec 100644 --- a/zobov/voztie.c +++ b/zobov/voztie.c @@ -247,19 +247,10 @@ printf("\n"); // Recount the number of adjacencies after merge for(i=0;i Date: Fri, 22 Mar 2013 21:19:04 +0100 Subject: [PATCH 28/39] Have mockIndex set to np if negative in jozov --- zobov/jozov.c | 2 ++ 1 file changed, 2 insertions(+) diff --git a/zobov/jozov.c b/zobov/jozov.c index 697d039..db6909f 100644 --- a/zobov/jozov.c +++ b/zobov/jozov.c @@ -101,6 +101,8 @@ int main(int argc,char **argv) { exit(0); } fread(&np,1, sizeof(int),adj); + if (mockIndex < 0) + mockIndex = np; printf("adj: %d particles\n", np); FF; From 8043742123092d5b9ffd9f2c95b31948ac95aec5 Mon Sep 17 00:00:00 2001 From: "P.M. Sutter" Date: Sat, 23 Mar 2013 11:50:17 -0500 Subject: [PATCH 29/39] added script to compare stacks and radial profiles of matched voids --- c_tools/analysis/voidOverlap.cpp | 7 +- crossCompare/analysis/compareProfiles.py | 121 ++++++++++++++++++ crossCompare/analysis/mergerTree.py | 12 +- crossCompare/plotting/plotNumberFunc.py | 47 ++++--- .../void_python_tools/backend/classes.py | 4 +- .../void_python_tools/backend/launchers.py | 30 ++++- 6 files changed, 184 insertions(+), 37 deletions(-) create mode 100755 crossCompare/analysis/compareProfiles.py mode change 100644 => 100755 crossCompare/analysis/mergerTree.py mode change 100644 => 100755 crossCompare/plotting/plotNumberFunc.py diff --git a/c_tools/analysis/voidOverlap.cpp b/c_tools/analysis/voidOverlap.cpp index 6fb3b87..f746e39 100644 --- a/c_tools/analysis/voidOverlap.cpp +++ b/c_tools/analysis/voidOverlap.cpp @@ -271,7 +271,7 @@ int main(int argc, char **argv) { filename = string(args.outfile_arg); filename = filename.append("summary.out"); fp = fopen(filename.c_str(), "w"); - fprintf(fp, "# void ID, radius, radius ratio, common volume ratio, common volume ratio 2, relative dist, num matches, num significant matches\n"); + fprintf(fp, "# void ID, radius, radius ratio, common volume ratio, common volume ratio 2, relative dist, num matches, num significant matches, match ID\n"); for (iVoid1 = 0; iVoid1 < catalog1.numVoids; iVoid1++) { int voidID = catalog1.voids[iVoid1].voidID; if (catalog1.voids[iVoid1].numMatches > 0) { @@ -286,14 +286,15 @@ int main(int argc, char **argv) { rdist = catalog1.voids[iVoid1].matches[0].dist; rdist /= catalog1.voids[iVoid1].radius; - fprintf(fp, "%d %.4f %.4f %.4f %.4f %.4f %d %d\n", voidID, + fprintf(fp, "%d %.4f %.4f %.4f %.4f %.4f %d %d %d\n", voidID, catalog1.voids[iVoid1].radiusMpc, rRatio, commonVolRatio, volRatio, rdist, catalog1.voids[iVoid1].numMatches, - catalog1.voids[iVoid1].numBigMatches); + catalog1.voids[iVoid1].numBigMatches, + catalog2.voids[iVoid2].voidID); } else { fprintf(fp, "%d %.2f 0.0 0.0 0.0 0.0 0 0\n", voidID, diff --git a/crossCompare/analysis/compareProfiles.py b/crossCompare/analysis/compareProfiles.py 
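The reference curves in the shot-noise and bias panels plotted above follow directly from quantities already in the script: the Poisson shot-noise level of a tracer sample is the inverse of its mean number density (the 1/nh and 1/nv lines), and the dotted curve labelled $\bar{u}_\mathrm{v}(k)$ is sin(k<r_v>)/(k<r_v>) built from the mean void radius. A minimal sketch of both, with helper names chosen here purely for illustration:

    import numpy as np

    def shot_noise(numTracers, boxVol):
        # Poisson shot-noise level 1/nbar, with nbar = numTracers / boxVol
        return boxVol / float(numTracers)

    def mean_void_profile(km, meanRadius):
        # dotted reference curve of the bias panel: sin(k <r_v>)/(k <r_v>), for k > 0
        x = km * meanRadius
        return np.sin(x) / x

With km and rv as in the plotting code above, mean_void_profile(km, rv.mean()) reproduces the dotted reference curve.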
new file mode 100755 index 0000000..a2afd0c --- /dev/null +++ b/crossCompare/analysis/compareProfiles.py @@ -0,0 +1,121 @@ +#!/usr/bin/env python +#+ +# VIDE -- Void IDEntification pipeline -- ./pipeline/apAnalysis.py +# Copyright (C) 2010-2013 Guilhem Lavaux +# Copyright (C) 2011-2013 P. M. Sutter +# +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation; version 2 of the License. +# +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License along +# with this program; if not, write to the Free Software Foundation, Inc., +# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. +#+ + +# takes a set of voids with given radii, and stacks and plots matches in +# other catalogs + +from void_python_tools.backend import * +import imp +import pickle +import os +import numpy as np +import argparse + +# ------------------------------------------------------------------------------ + +mergerNameBase = "mergerTree" + +parser = argparse.ArgumentParser(description='Analyze.') +parser.add_argument('--parm', dest='parm', default='datasetsToAnalyze.py', help='path to parameter file') +args = parser.parse_args() + +# ------------------------------------------------------------------------------ + +filename = args.parm +print " Loading parameters from", filename +if not os.access(filename, os.F_OK): + print " Cannot find parameter file %s!" % filename + exit(-1) +parms = imp.load_source("name", filename) +globals().update(vars(parms)) + +if not os.access(dataDir, os.F_OK): + os.makedirs(dataDir) + +mergerFileBase = dataDir + "/" + mergerNameBase + +baseIDList = [] + +# get list of base voids +with open(workDir+baseSampleDir+"/sample_info.dat", 'rb') as input: + baseSample = pickle.load(input) + baseSampleName = baseSample.fullName + baseVoidList = np.loadtxt(workDir+baseSampleDir+"/centers_nocut_central_"+\ + baseSampleName+".out") + +for stack in baseSample.stacks: + print " Stack:", stack.rMin + + accepted = (baseVoidList[:,4] > stack.rMin) & (baseVoidList[:,4] < stack.rMax) + baseIDList = baseVoidList[accepted][:,7] + + for (iSample, sampleDir) in enumerate(sampleDirList): + with open(workDir+sampleDir+"/sample_info.dat", 'rb') as input: + sample = pickle.load(input) + + print " Working with", sample.fullName, "...", + sys.stdout.flush() + sampleName = sample.fullName + + # get list of appropriate voids + if sample == baseSample: + idList = baseIDList + else: + idList = 2 + matchList = np.loadtxt(mergerFileBase+"_"+baseSampleName+"_"+sampleName+\ + "_summary.out") + accepted = (matchList[:,0] == baseIDList).any() + idList = matchList[accepted][:,8] + + voidBaseDir = workDir+"/"+sampleDir+"stacks" + + runSuffix = getStackSuffix(stack.zMin, stack.zMax, stack.rMin, + stack.rMax, thisDataPortion, + customLine="selected") + stack.rMin = 0. + stack.rMax = 1000. 
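The idList assembled just above comes out of the voidOverlap summary file, whose column layout is fixed by the header that voidOverlap.cpp now writes (void ID first, best-match ID in the new last column). A sketch of that lookup, reusing the script's own mergerFileBase, baseSampleName, sampleName and baseIDList; the np.in1d call is only an illustrative way to express the membership test, the script itself compares matchList[:,0] against baseIDList directly:

    import numpy as np

    # columns of the *_summary.out file, per the header written by voidOverlap.cpp:
    # 0 void ID, 1 radius, 2 radius ratio, 3 common volume ratio,
    # 4 common volume ratio 2, 5 relative dist, 6 num matches,
    # 7 num significant matches, 8 match ID
    matchList = np.loadtxt(mergerFileBase+"_"+baseSampleName+"_"+sampleName+"_summary.out")
    inBase = np.in1d(matchList[:, 0], baseIDList)  # rows whose base void was selected
    idList = matchList[inBase][:, 8]               # best-match IDs in the other catalog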
+ + voidDir = voidBaseDir+"_"+runSuffix + + if not os.access(voidDir,os.F_OK): os.makedirs(voidDir) + + print " Stacking voids...", + sys.stdout.flush() + + STACK_PATH = CTOOLS_PATH+"/stacking/stackVoidsZero" + + launchStack(sample, stack, STACK_PATH, + thisDataPortion=thisDataPortion, + logDir=logDir, voidDir=voidDir, + zobovDir=workDir+"/"+sampleDir, + freshStack=freshStack, INCOHERENT=False, + ranSeed=101010, summaryFile=None, + continueRun=continueRun, + dataType=sample.dataType, + idList=idList) + + print " Profiling stacks...", + sys.stdout.flush() + logFile = logDir+"/profile_"+sampleName+"_"+runSuffix+".out" + launchProfile(sample, stack, voidDir=voidDir, + logFile=logFile, continueRun=False) + +print " Done!" diff --git a/crossCompare/analysis/mergerTree.py b/crossCompare/analysis/mergerTree.py old mode 100644 new mode 100755 index 4868858..5e4b12b --- a/crossCompare/analysis/mergerTree.py +++ b/crossCompare/analysis/mergerTree.py @@ -1,3 +1,4 @@ +#!/usr/bin/env python #+ # VIDE -- Void IDEntification pipeline -- ./crossCompare/analysis/mergerTree.py # Copyright (C) 2010-2013 Guilhem Lavaux @@ -17,9 +18,6 @@ # with this program; if not, write to the Free Software Foundation, Inc., # 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. #+ -#!/usr/bin/env python - -# plots cumulative distributions of number counts from void_python_tools.backend import * from void_python_tools.plotting import * @@ -35,13 +33,13 @@ import argparse dataNameBase = "mergerTree" parser = argparse.ArgumentParser(description='Analyze.') -parser.add_argument('--parmFile', dest='parmFile', default='datasetsToAnalyze.py', +parser.add_argument('--parm', dest='parm', default='datasetsToAnalyze.py', help='path to parameter file') args = parser.parse_args() # ------------------------------------------------------------------------------ -filename = args.parmFile +filename = args.parm print " Loading parameters from", filename if not os.access(filename, os.F_OK): print " Cannot find parameter file %s!" % filename @@ -78,7 +76,7 @@ for (iSample, sampleDir) in enumerate(sampleDirList): thisDataPortion="central", logFile=logFile, continueRun=False, workDir=workDir, outputFile=stepOutputFileName, - matchMethod="proximity") + matchMethod="useID") # attach columns to summary file #if iSample == 1: @@ -101,3 +99,5 @@ for (iSample, sampleDir) in enumerate(sampleDirList): #if os.access("mergerTree.log", os.F_OK): os.unlink("mergerTree.log") #if os.access("temp.out", os.F_OK): os.unlink("temp.out") #if os.access("thisStep.out", os.F_OK): os.unlink("thisStep.out") + +print " Done!" diff --git a/crossCompare/plotting/plotNumberFunc.py b/crossCompare/plotting/plotNumberFunc.py old mode 100644 new mode 100755 index 72a5616..2f918ff --- a/crossCompare/plotting/plotNumberFunc.py +++ b/crossCompare/plotting/plotNumberFunc.py @@ -1,3 +1,4 @@ +#!/usr/bin/env python #+ # VIDE -- Void IDEntification pipeline -- ./crossCompare/plotting/plotNumberFunc.py # Copyright (C) 2010-2013 Guilhem Lavaux @@ -17,7 +18,6 @@ # with this program; if not, write to the Free Software Foundation, Inc., # 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. #+ -#!/usr/bin/env python # plots cumulative distributions of number counts @@ -36,8 +36,8 @@ from scipy.stats import ks_2samp plotNameBase = "compdist" -obsFudgeFactor = 1.0 # what fraction of the volume are we *reall* capturing? -#obsFudgeFactor = .66 # what fraction of the volume are we *reall* capturing? +#obsFudgeFactor = 1.0 # what fraction of the volume are we *reall* capturing? 
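plotNumberFunc.py is meant to plot cumulative distributions of number counts, and obsFudgeFactor above estimates what fraction of the nominal volume is really captured. One way to build such a cumulative count per unit volume is sketched below; the helper name and the way the fudge factor enters the normalization are assumptions for illustration, not the script's own code:

    import numpy as np

    def cumulativeNumberFunc(radii, boxVolGpc, fudgeFactor=1.0, bins=40, rMax=100):
        # radii in Mpc/h; boxVolGpc in h^-3 Gpc^3; fudgeFactor plays the role of
        # obsFudgeFactor, i.e. the fraction of the nominal volume actually captured
        hist, bin_edges = np.histogram(radii, bins=bins, range=(0, rMax))
        cum = np.cumsum(hist[::-1])[::-1]   # number of voids larger than each bin edge
        return bin_edges[:-1], cum / (boxVolGpc * fudgeFactor)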
+obsFudgeFactor = .15 # what fraction of the volume are we *reall* capturing? linewidth = 1 @@ -45,7 +45,7 @@ parser = argparse.ArgumentParser(description='Plot.') parser.add_argument('--show', dest='showPlot', action='store_const', const=True, default=False, help='display the plot (default: just write eps)') -parser.add_argument('--parmFile', dest='parmFile', default='datasetsToPlot.py', +parser.add_argument('--parm', dest='parm', default='datasetsToPlot.py', help='path to parameter file') args = parser.parse_args() @@ -55,7 +55,7 @@ errorBarsX = np.linspace(0, plotMax, num=nErrorBars) # ------------------------------------------------------------------------------ -filename = args.parmFile +filename = args.parm print " Loading parameters from", filename if not os.access(filename, os.F_OK): print " Cannot find parameter file %s!" % filename @@ -102,7 +102,7 @@ for dataPortion in dataPortions: boxVol *= 1.e-9 # Mpc->Gpc - filename = workDir+"/"+sampleDirList[iSample]+"/centers_"+dataPortion+"_"+\ + filename = workDir+"/"+sampleDirList[iSample]+"/centers_nocut_"+dataPortion+"_"+\ sampleName+".out" if not os.access(filename, os.F_OK): print "File not found: ", filename @@ -137,8 +137,13 @@ for dataPortion in dataPortions: ecolor=colorList[iSample], fmt=None, label='_nolegend_', capsize=0) - hist, bin_edges = np.histogram(data, bins=100, range=(0,100)) - allData.append(hist) + hist, bin_edges = np.histogram(data, bins=40, range=(0,100)) + #allData.append(hist) + + binCenters = 0.5*(bin_edges[1:] + bin_edges[:-1]) + #plt.plot(binCenters, hist, '-', + # label=lineTitle, color=colorList[iSample], + # linewidth=linewidth) plt.legend(title = "Samples", loc = "upper right", prop={'size':8}) #plt.title(plotTitle) @@ -155,19 +160,19 @@ plt.savefig(figDir+"/fig_"+plotName+".pdf", bbox_inches="tight") plt.savefig(figDir+"/fig_"+plotName+".eps", bbox_inches="tight") plt.savefig(figDir+"/fig_"+plotName+".png", bbox_inches="tight") -dataFile = figDir+"/data_"+plotName+".dat" -fp = open(dataFile, 'w') -fp.write("# R [Mpc/h], N [h^3 Gpc^-3]\n") -fp.write("# ") -for sample in dataSampleList: - fp.write(sample.fullName+" ") -fp.write("\n") -for i in xrange(100): - fp.write(str(bin_edges[i]) + " ") - for iSample in xrange(len(dataSampleList)): - fp.write(str(allData[iSample][i])+" ") - fp.write("\n") -fp.close() +#dataFile = figDir+"/data_"+plotName+".dat" +#fp = open(dataFile, 'w') +#fp.write("# R [Mpc/h], N [h^3 Gpc^-3]\n") +#fp.write("# ") +#for sample in dataSampleList: +# fp.write(sample.fullName+" ") +#fp.write("\n") +#for i in xrange(100): +# fp.write(str(bin_edges[i]) + " ") +# for iSample in xrange(len(dataSampleList)): +# fp.write(str(allData[iSample][i])+" ") +# fp.write("\n") +#fp.close() if args.showPlot: os.system("display %s" % figDir+"/fig_"+plotName+".png") diff --git a/python_tools/void_python_tools/backend/classes.py b/python_tools/void_python_tools/backend/classes.py index 0170199..7736a47 100644 --- a/python_tools/void_python_tools/backend/classes.py +++ b/python_tools/void_python_tools/backend/classes.py @@ -184,9 +184,9 @@ def jobSuccessful(logFile, doneString): if doneString in line: jobDone = True return jobDone -def getStackSuffix(zMin, zMax, rMin, rMax, dataPortion): +def getStackSuffix(zMin, zMax, rMin, rMax, dataPortion, customLine=""): return "z"+str(zMin)+"-"+str(zMax)+"_"+str(rMin)+"-"+str(rMax)+\ - "Mpc"+"_"+dataPortion + "Mpc"+customLine+"_"+dataPortion def my_import(name): mod = __import__(name) diff --git a/python_tools/void_python_tools/backend/launchers.py 
b/python_tools/void_python_tools/backend/launchers.py index b84f053..6571c05 100644 --- a/python_tools/void_python_tools/backend/launchers.py +++ b/python_tools/void_python_tools/backend/launchers.py @@ -437,12 +437,19 @@ def launchStack(sample, stack, binPath, thisDataPortion=None, logDir=None, voidDir=None, freshStack=True, runSuffix=None, zobovDir=None, INCOHERENT=False, ranSeed=None, summaryFile=None, - continueRun=None, dataType=None, prefixRun=""): + continueRun=None, dataType=None, prefixRun="", + idList=None): sampleName = sample.fullName + if idList != None: + customLine = "selected" + else: + customLine = "" + runSuffix = getStackSuffix(stack.zMin, stack.zMax, stack.rMin, - stack.rMax, thisDataPortion) + stack.rMax, thisDataPortion, + customLine=customLine) logFile = logDir+("/%sstack_"%prefixRun)+sampleName+"_"+runSuffix+".out" @@ -483,6 +490,16 @@ def launchStack(sample, stack, binPath, thisDataPortion=None, logDir=None, else: rescaleFlag = "" + if idList != None: + idListFlag = "idList '" + str(idList).strip('[]') + "'" + rMinToUse = 0. + rMaxToUse = 100000. + centralRadius = rMaxToUse + else: + idListFlag = "" + rMinToUse = stack.rMin + rMaxToUse = stack.rMax + conf=""" desc %s partzone %s @@ -505,13 +522,14 @@ def launchStack(sample, stack, binPath, thisDataPortion=None, logDir=None, barycenters %s boundaryDistances %s %s + %s """ % \ (zobovDir+"/voidDesc_"+thisDataPortion+"_"+sampleName+".out", zobovDir+"/voidPart_"+sampleName+".dat", zobovDir+"/voidZone_"+sampleName+".dat", zobovDir+"/vol_"+sampleName+".dat", - stack.rMin, - stack.rMax, + rMinToUse, + rMaxToUse, zobovDir+("/%szobov_slice_"%prefixRun)+sampleName, zobovDir+"/zobov_slice_"+sampleName+".par", maxDen, @@ -526,7 +544,9 @@ def launchStack(sample, stack, binPath, thisDataPortion=None, logDir=None, thisDataPortion, zobovDir+"/barycenters_"+thisDataPortion+"_"+sampleName+".out", zobovDir+"/boundaryDistances_"+thisDataPortion+"_"+sampleName+".out", - rescaleFlag) + rescaleFlag, + idListFlag + ) parmFile = os.getcwd()+("/%sstack_"%prefixRun)+sample.fullName+".par" From afb562d419201b8f83195fef1b804a71427ea3c7 Mon Sep 17 00:00:00 2001 From: "P.M. Sutter" Date: Sun, 24 Mar 2013 16:06:01 -0500 Subject: [PATCH 30/39] once again, fixed bug in periodic boundary detection... 
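In the launchStack changes of the previous patch, passing an explicit idList switches the stack over to pure ID selection: the radius window actually applied is opened wide (rMinToUse = 0, rMaxToUse = 100000) and the tag "selected" is spliced into the output suffix through the new customLine argument of getStackSuffix. Given the one-line body of getStackSuffix shown earlier, the resulting suffix looks like this (the stack bounds here are illustrative):

    from void_python_tools.backend import *   # as the pipeline scripts do

    suffix = getStackSuffix(0.0, 5.0, 10, 15, "central", customLine="selected")
    # suffix == "z0.0-5.0_10-15Mpcselected_central"

This is presumably why compareProfiles.py resets stack.rMin and stack.rMax only after computing the suffix: the directory name keeps the original radius range while the stacking itself accepts any matched void.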
--- python_tools/void_python_tools/backend/launchers.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/python_tools/void_python_tools/backend/launchers.py b/python_tools/void_python_tools/backend/launchers.py index 6571c05..0589ed8 100644 --- a/python_tools/void_python_tools/backend/launchers.py +++ b/python_tools/void_python_tools/backend/launchers.py @@ -324,8 +324,8 @@ def launchPrune(sample, binPath, periodicLine = " --periodic='" if sample.numSubvolumes == 1: periodicLine += "xy" - if sample.zBoundaryMpc[1] - sample.zBoundaryMpc[0] - \ - sample.boxLen <= 1.e-1: + if np.abs(sample.zBoundaryMpc[1] - sample.zBoundaryMpc[0] - \ + sample.boxLen) <= 1.e-1: periodicLine += "z" periodicLine += "' " @@ -380,7 +380,7 @@ def launchVoidOverlap(sample1, sample2, sample1Dir, sample2Dir, periodicLine = " --periodic='" if sample1.dataType != "observation": if sample1.numSubvolumes == 1: periodicLine += "xy" - if sample1.zBoundaryMpc[1] - sample1.zBoundaryMpc[0] - sample1.boxLen <= 1.e-1: + if np.abs(sample1.zBoundaryMpc[1] - sample1.zBoundaryMpc[0] - sample1.boxLen) <= 1.e-1: periodicLine += "z" periodicLine += "' " From aaef7634cdceba01cf925201756c4ea10379d854 Mon Sep 17 00:00:00 2001 From: Guilhem Lavaux Date: Tue, 26 Mar 2013 09:18:24 +0100 Subject: [PATCH 31/39] Harden zobov for problems link to single precisions (not fool proof yet but make the problem more unlikely) --- zobov/jozov.c | 25 +++++++++----- zobov/voz1b1.c | 36 +++++++++---------- zobov/voztie.c | 93 +++++++++++++++++++++----------------------------- 3 files changed, 72 insertions(+), 82 deletions(-) diff --git a/zobov/jozov.c b/zobov/jozov.c index db6909f..a4ee12b 100644 --- a/zobov/jozov.c +++ b/zobov/jozov.c @@ -124,19 +124,26 @@ int main(int argc,char **argv) { fread(&j,1,sizeof(pid_t),adj); if (j < np) { /* Set both halves of the pair */ - assert(i < j); + assert(i < j); if (p[i].ncnt == p[i].nadj) { - p[i].adj = (pid_t *)realloc(p[i].adj, (p[i].nadj+1)*sizeof(pid_t)); - p[i].nadj++; + int q; + printf("OVERFLOW for particle %d (pending %d). List of accepted:\n", i, j); + for (q=0;q 0.5) rtemp[d] --; if (rtemp[d] < -0.5) rtemp[d] ++; isitinmain = isitinmain && (fabs(rtemp[d]) <= width2); @@ -186,7 +186,7 @@ int main(int argc, char *argv[]) { isitinmain = 1; DL { - rtemp[d] = r[i][d] - c[d]; + rtemp[d] = (realT)r[i][d] - (realT)c[d]; if (rtemp[d] > 0.5) rtemp[d] --; if (rtemp[d] < -0.5) rtemp[d] ++; isitinbuf = isitinbuf && (fabs(rtemp[d]) #include #include #include @@ -96,7 +97,11 @@ int main(int argc, char *argv[]) { exit(0); } for (p=0;p -1.) -// PMS if (fabs(vols[orig[p]]-volstemp)/volstemp > 1.5e-3 && orig[p] < mockIndex) { - //if (fabs(vols[orig[p]]-volstemp)/volstemp > 1.5e-3) { -// END PMS printf("Inconsistent volumes for p. 
%d: (%10g,%10g)!\n", orig[p],vols[orig[p]],volstemp); -// TEST //exit(0); } vols[orig[p]] = volstemp; @@ -132,10 +133,13 @@ int main(int argc, char *argv[]) { for (p=0;p 0) { + assert(adjs[pid].nadj == 0);// || adjs[pid].nadj == na); adjs[orig[p]].nadj = na; - adjs[orig[p]].adj = (pid_t *)malloc(na*sizeof(pid_t)); - if (adjs[orig[p]].adj == NULL) { + if (adjs[pid].adj == 0) + adjs[orig[p]].adj = (pid_t *)malloc(na*sizeof(pid_t)); + if (adjs[orig[p]].adj == 0) { printf("Couldn't allocate adjs[orig[%d]].adj.\n",p); exit(0); } @@ -160,51 +164,37 @@ int main(int argc, char *argv[]) { adjs[i].nadj = 0; } - // unlink particles adjacent to mock galaxies - for (i = 0; i < mockIndex; i++) { - for (j = 0; j < adjs[i].nadj; j++) { - if (adjs[i].adj[j] > mockIndex) { -//printf("KILLING %d\n", i); - vols[i] = 1.e-29; - adjs[i].nadj = 0; - numRemoved++; - break; - } - } - } - - // update all other adjacencies - for (i = 0; i < mockIndex; i++) { - - int numAdjSaved = 0; - for (j = 0; j < adjs[i].nadj; j++) { - - //if ( vols[adjs[i].adj[j]] != -99) { - if ( adjs[adjs[i].adj[j]].nadj != 0) { - adjs[i].adj[numAdjSaved] = adjs[i].adj[j]; - numAdjSaved++; - } - - } - adjs[i].nadj = numAdjSaved; - - } - -/* + // unlink particles adjacent to mock galaxies for (i = 0; i < mockIndex; i++) { -printf("ADJ: %d %d : ", i, adjs[i].nadj); for (j = 0; j < adjs[i].nadj; j++) { -printf(" %d", adjs[i].adj[j]); + if (adjs[i].adj[j] > mockIndex) { +//printf("KILLING %d\n", i); + vols[i] = 1.e-29; + adjs[i].nadj = 0; + numRemoved++; + break; + } } -printf("\n"); } -*/ - + // update all other adjacencies + for (i = 0; i < mockIndex; i++) { + + int numAdjSaved = 0; + for (j = 0; j < adjs[i].nadj; j++) { - printf("Removed %d mock galaxies and %d adjacent galaxies.\n", np-mockIndex, + if ( adjs[adjs[i].adj[j]].nadj != 0) { + adjs[i].adj[numAdjSaved] = adjs[i].adj[j]; + numAdjSaved++; + } + + } + adjs[i].nadj = numAdjSaved; + } + + printf("Removed %d mock galaxies and %d adjacent galaxies.\n", np-mockIndex, numRemoved); - printf("There are %d galaxies remaining.\n", mockIndex-numRemoved); + printf("There are %d galaxies remaining.\n", mockIndex-numRemoved); // END PMS @@ -238,10 +228,7 @@ printf("\n"); printf("Unable to open %s\n",adjfile); exit(0); } -// PMS fwrite(&mockIndex,1, sizeof(int),adj); - //fwrite(&np,1, sizeof(int),adj); -// END OMS /* Adjacencies: first the numbers of adjacencies, and the number we're actually going to write per particle */ @@ -249,19 +236,15 @@ printf("\n"); for(i=0;i 0) { -// END PMS nout = 0; - for (j=0;j i) nout++; + for (j=0;j i) + nout++; fwrite(&nout,1,sizeof(int),adj); for (j=0;j Date: Thu, 28 Mar 2013 20:37:28 -0500 Subject: [PATCH 32/39] pruneVoids now (rather clumsily) loads tree information; rejects voids that are not leaf nodes --- c_tools/stacking/pruneVoids.cpp | 41 ++++++++++++++++++++++++++++++++- 1 file changed, 40 insertions(+), 1 deletion(-) diff --git a/c_tools/stacking/pruneVoids.cpp b/c_tools/stacking/pruneVoids.cpp index 26c2abd..513a046 100644 --- a/c_tools/stacking/pruneVoids.cpp +++ b/c_tools/stacking/pruneVoids.cpp @@ -40,6 +40,9 @@ #include #include "pruneVoids_conf.h" #include +#include "assert.h" +#include "voidTree.hpp" +#include "loadZobov.hpp" #define LIGHT_SPEED 299792.458 #define MPC2Z 100./LIGHT_SPEED @@ -70,6 +73,7 @@ typedef struct voidStruct { float center[3], barycenter[3]; int accepted; int voidType; + int parentID, numChildren; gsl_vector *eval; gsl_matrix *evec; } VOID; @@ -306,6 +310,27 @@ int main(int argc, char **argv) { fclose(fp); free(temp); + // load 
voids *again* using Guilhem's code so we can get tree + printf(" Re-loading voids and building tree..\n"); + ZobovRep zobovCat; + if (!loadZobov(args.voidDesc_arg, args.zone2Part_arg, + args.void2Zone_arg, + 0, zobovCat)) { + printf("Error loading catalog!\n"); + return -1; + } + VoidTree *tree; + tree = new VoidTree(zobovCat); + zobovCat.allZones.erase(zobovCat.allZones.begin(), zobovCat.allZones.end()); + //zobovCat.allZones.erase(zobovCat.allVoids.begin(), zobovCat.allVoids.end()); + + // copy tree information to our own data structures + for (iVoid = 0; iVoid < numVoids; iVoid++) { + voidID = voids[iVoid].voidID; + voids[iVoid].parentID = tree->getParent(voidID); + voids[iVoid].numChildren = tree->getChildren(voidID).size(); + } + // check boundaries printf(" Computing void properties...\n"); @@ -541,6 +566,7 @@ int main(int argc, char **argv) { int numCentral = 0; int numEdge = 0; int numNearZ = 0; + int numAreParents = 0; int numTooSmall = 0; printf(" Picking winners and losers...\n"); @@ -607,7 +633,20 @@ int main(int argc, char **argv) { } } voids.resize(iGood); - printf(" 4th filter: rejected %d too close to low redshift boundaries\n", numNearZ); + printf(" 4th filter: rejected %d below redshift boundaries\n", numNearZ); + + numAreParents = 0; + iGood = 0; + for (iVoid = 0; iVoid < voids.size(); iVoid++) { + if (voids[iVoid].numChildren > 0) { + numAreParents++; + } else { + voids[iGood++] = voids[iVoid]; + } + } + voids.resize(iGood); + printf(" 5th filter: rejected %d that are not leaf nodes\n", numAreParents); + for (iVoid = 0; iVoid < voids.size(); iVoid++) { if (voids[iVoid].centralDen > args.maxCentralDen_arg) { From 94a4261bb9043965fe55b8593c652ce69a68983d Mon Sep 17 00:00:00 2001 From: "P.M. Sutter" Date: Thu, 28 Mar 2013 21:31:16 -0500 Subject: [PATCH 33/39] switched to shorter 'parm' argument, more informative naming of subsamples --- .../pipeline_source/prepareCatalogs.in.py | 29 +++++++++++++++++-- 1 file changed, 26 insertions(+), 3 deletions(-) diff --git a/python_tools/pipeline_source/prepareCatalogs.in.py b/python_tools/pipeline_source/prepareCatalogs.in.py index 839a544..15daf55 100644 --- a/python_tools/pipeline_source/prepareCatalogs.in.py +++ b/python_tools/pipeline_source/prepareCatalogs.in.py @@ -50,13 +50,13 @@ parser.add_argument('--hod', dest='hod', action='store_const', parser.add_argument('--all', dest='all', action='store_const', const=True, default=False, help='write everything') -parser.add_argument('--parmFile', dest='parmFile', +parser.add_argument('--parm', dest='parm', default="", help='path to parameter file') args = parser.parse_args() -filename = args.parmFile +filename = args.parm print " Loading parameters from", filename if not os.access(filename, os.F_OK): print " Cannot find parameter file %s!" 
% filename @@ -76,6 +76,26 @@ def getSampleName(setName, redshift, useVel, iSlice=-1, iVol=-1): return sampleName +#------------------------------------------------------------------------------ +def getNickName(sampleName): + + splitName = sampleName.split('_') + + if "ss" in splitName[1]: + nickName = "Subsample = " + splitName[1].replace("ss","") + nickName += ", z = " + splitName[2].replace("z","") + elif "hod" in splitName[1]: + nickName = "HOD = " + splitName[2] + nickName += ", z = " + splitName[3].replace("z","") + elif "halos" in splitName[1]: + nickName = "Halos, min mass = " + splitName[2].replace("min","") + nickName += ", z = " + splitName[3].replace("z","") + else: + nickName = sampleName + + return nickName + + #------------------------------------------------------------------------------ # for given dataset parameters, outputs a script for use with analyzeVoids def writeScript(setName, dataFileNameBase, dataFormat, @@ -137,7 +157,7 @@ newSample = Sample(dataFile = "{dataFile}", dataFormat = "{dataFormat}", dataUnit = {dataUnit}, fullName = "{sampleName}", - nickName = "{sampleName}", + nickName = "{nickName}", dataType = "simulation", zBoundary = ({zMin}, {zMax}), zRange = ({zMin}, {zMax}), @@ -218,11 +238,14 @@ newSample.addStack(0.0, 5.0, 90, 95, False, False) sampleName = getSampleName(setName, sliceMin, useVel, iSlice=iSlice, iVol=mySubvolume) + nickName = getNickName(sampleName) + scriptFile.write(sampleInfo.format(dataFile=dataFileName, dataFormat=dataFormat, dataUnit=dataUnit, sampleName=sampleName, + nickName=nickName, zMin=sliceMin, zMax=sliceMax, zMinMpc=sliceMinMpc, From 5259137de7f5601bff0336d5f01d759c32f2da74 Mon Sep 17 00:00:00 2001 From: "P.M. Sutter" Date: Thu, 28 Mar 2013 21:31:38 -0500 Subject: [PATCH 34/39] switched to shorter 'parm' argument From 765ba3b500be764566c70c61a58639a52644c7b5 Mon Sep 17 00:00:00 2001 From: "P.M. Sutter" Date: Thu, 28 Mar 2013 21:33:29 -0500 Subject: [PATCH 35/39] removed temporary child/parent output; better handling of acceptable ID list From 92ce95f5d2e8a422bbfa2577d9c627f1a301874b Mon Sep 17 00:00:00 2001 From: Guilhem Lavaux Date: Sat, 30 Mar 2013 15:56:34 -0400 Subject: [PATCH 36/39] Introduced error checking at the end of the tesselation to be sure if it is worth continuing. --- zobov/voz1b1.c | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/zobov/voz1b1.c b/zobov/voz1b1.c index c497a23..17d9e22 100644 --- a/zobov/voz1b1.c +++ b/zobov/voz1b1.c @@ -277,6 +277,11 @@ int main(int argc, char *argv[]) { /* Do tesselation*/ printf("File read. Tessellating ...\n"); fflush(stdout); exitcode = delaunadj(parts, nvp, nvpbuf, nvpall, &adjs); + if (exitcode != 0) + { + printf("Error while tesselating. 
Stopping here."); fflush(stdout); + exit(1); + } /* Now calculate volumes*/ printf("Now finding volumes ...\n"); fflush(stdout); From 7770f40aff7ce9037299629544294e6a698a0dc2 Mon Sep 17 00:00:00 2001 From: Guilhem Lavaux Date: Sat, 30 Mar 2013 16:17:34 -0400 Subject: [PATCH 37/39] Early check for Qhull overflow --- zobov/vozinit.c | 13 ++++++++++++- 1 file changed, 12 insertions(+), 1 deletion(-) diff --git a/zobov/vozinit.c b/zobov/vozinit.c index d9b9973..c93f3f6 100644 --- a/zobov/vozinit.c +++ b/zobov/vozinit.c @@ -5,6 +5,8 @@ #define DL for (d=0;d<3;d++) #define BF 1e30 +#define QHULL_MAX_PARTICLES (1L<<24-1) + int posread(char *posfile, float ***p, float fact); int main(int argc, char *argv[]) { @@ -22,6 +24,7 @@ int main(int argc, char *argv[]) { float width, width2, totwidth, totwidth2, bf, s, g; float border, boxsize; float c[3]; + int numGuards; int b[3]; int numThreads; int mockIndex; @@ -145,6 +148,14 @@ int main(int argc, char *argv[]) { printf("Nvp range: %d,%d\n",nvpmin,nvpmax); printf("Nvpbuf range: %d,%d\n",nvpbufmin,nvpbufmax); + numGuards = 6*(NGUARD+1)*(NGUARD+1); + if (nvpbufmax+numGuards >= QHULL_MAX_PARTICLES) + { + printf("Too many particles to tesselate per division (Qhull will overflow). Increase divisions or reduce buffer size."); + fflush(stdout); + exit(1); + } + /* Output script file */ sprintf(scrfile,"scr%s",suffix); printf("Writing script file to %s.\n",scrfile);fflush(stdout); @@ -152,7 +163,7 @@ int main(int argc, char *argv[]) { if (scr == NULL) { printf("Problem opening script file.\n"); fflush(stdout); - exit(0); + exit(1); } fprintf(scr,"#!/bin/bash -f\n"); p = 0; From 151c605335cd257ea708f0a70e8ab3d03e40759a Mon Sep 17 00:00:00 2001 From: "P.M. Sutter" Date: Sat, 30 Mar 2013 15:24:45 -0500 Subject: [PATCH 38/39] pruneVoids now cleans out non-leaf voids; adjusted stackVoidsZero launcher to accept custom stacks --- c_tools/stacking/pruneVoids.cpp | 199 +++++++++++------- .../void_python_tools/backend/launchers.py | 17 +- 2 files changed, 131 insertions(+), 85 deletions(-) diff --git a/c_tools/stacking/pruneVoids.cpp b/c_tools/stacking/pruneVoids.cpp index 513a046..8bc2bf6 100644 --- a/c_tools/stacking/pruneVoids.cpp +++ b/c_tools/stacking/pruneVoids.cpp @@ -51,6 +51,8 @@ #define CENTRAL_VOID 1 #define EDGE_VOID 2 +using namespace std; + typedef struct partStruct { float x, y, z, vol; } PART; @@ -74,12 +76,25 @@ typedef struct voidStruct { int accepted; int voidType; int parentID, numChildren; + bool isLeaf, hasHighCentralDen; gsl_vector *eval; gsl_matrix *evec; } VOID; +void openFiles(string outputDir, string sampleName, string name, + int mockIndex, int numKept, + FILE** fpZobov, FILE** fpCenters, + FILE** fpCentersNoCut, + FILE** fpBarycenter, FILE** fpDistances, FILE** fpShapes, + FILE** fpSkyPositions); + +void closeFiles(FILE* fpZobov, FILE* fpCenters, + FILE* fpCentersNoCut, + FILE* fpBarycenter, FILE* fpDistances, FILE* fpShapes, + FILE* fpSkyPositions); + void outputVoid(int iVoid, VOID outVoid, FILE* fpZobov, FILE* fpCenters, - FILE* fpCenterNoCut, + FILE* fpCentersNoCut, FILE* fpSkyPositions, FILE* fpBarycenters, FILE* fpDistances, FILE* fpShapes, bool isObservation, double *boxLen); @@ -112,15 +127,12 @@ int main(int argc, char **argv) { int i, p, p2, numPartTot, numZonesTot, dummy, iVoid, iZ; int numVoids, mockIndex, numKept; double tolerance; - FILE *fp, *fpZobovCentral, *fpZobovAll, *fpCentersCentral, *fpCentersAll, - *fpCentersNoCutCentral, *fpCentersNoCutAll, *fpBarycenterCentral, - *fpBarycenterAll, *fpDistancesCentral, 
*fpDistancesAll, - *fpShapesCentral, *fpShapesAll, *fpSkyPositionsCentral, - *fpSkyPositionsAll; + FILE *fp, *fpZobov, *fpCenters, *fpCentersNoCut, *fpBarycenter, + *fpDistances, *fpShapes, *fpSkyPositions; PART *part, *voidPart; ZONE2PART *zones2Parts; VOID2ZONE *void2Zones; - std::vector voids; + vector voids; float *temp, junk, voidVol; int junkInt, voidID, numPart, numZones, zoneID, partID, maxNumPart; int coreParticle, zoneNumPart; @@ -128,6 +140,7 @@ int main(int argc, char **argv) { float centralRad, centralDen; double nearestEdge, redshift; char line[500], junkStr[10]; + string outputDir, sampleName, name; int mask_index; double ranges[3][2], boxLen[3], mul; double volNorm, radius; @@ -253,6 +266,9 @@ int main(int argc, char **argv) { voids[i-1].radius = pow(voidVol/volNorm*3./4./M_PI, 1./3.); voids[i-1].accepted = 1; + + voids[i-1].isLeaf = true; + voids[i-1].hasHighCentralDen = false; voids[i-1].eval = gsl_vector_alloc(3); voids[i-1].evec = gsl_matrix_alloc(3,3); @@ -640,18 +656,23 @@ int main(int argc, char **argv) { for (iVoid = 0; iVoid < voids.size(); iVoid++) { if (voids[iVoid].numChildren > 0) { numAreParents++; + voids[iVoid].isLeaf = false; } else { - voids[iGood++] = voids[iVoid]; + //voids[iGood++] = voids[iVoid]; + voids[iVoid].isLeaf = true; } } - voids.resize(iGood); - printf(" 5th filter: rejected %d that are not leaf nodes\n", numAreParents); + //voids.resize(iGood); + //printf(" 5th filter: rejected %d that are not leaf nodes\n", numAreParents); for (iVoid = 0; iVoid < voids.size(); iVoid++) { if (voids[iVoid].centralDen > args.maxCentralDen_arg) { voids[iVoid].accepted = -1; + voids[iVoid].hasHighCentralDen = true; numHighDen++; + } else { + voids[iVoid].hasHighCentralDen = false; } } @@ -669,80 +690,46 @@ int main(int argc, char **argv) { printf(" We have %d edge voids\n", numEdge); printf(" We have %d central voids\n", numCentral); printf(" We have %d too high central density\n", numHighDen); - + printf(" We have %d that are not leaf nodes\n", numAreParents); + printf(" Output...\n"); - fpZobovCentral = fopen((std::string(args.outputDir_arg)+"/voidDesc_central_"+std::string(args.sampleName_arg)+".out").c_str(), "w"); - fprintf(fpZobovCentral, "%d particles, %d voids.\n", mockIndex, numKept); - fprintf(fpZobovCentral, "Void# FileVoid# CoreParticle CoreDens ZoneVol Zone#Part Void#Zones VoidVol Void#Part VoidDensContrast VoidProb\n"); - - fpZobovAll = fopen((std::string(args.outputDir_arg)+"/voidDesc_all_"+std::string(args.sampleName_arg)+".out").c_str(), "w"); - fprintf(fpZobovAll, "%d particles, %d voids.\n", mockIndex, numKept); - fprintf(fpZobovAll, "Void# FileVoid# CoreParticle CoreDens ZoneVol Zone#Part Void#Zones VoidVol Void#Part VoidDensContrast VoidProb\n"); - - fpBarycenterCentral = fopen((std::string(args.outputDir_arg)+"/barycenters_central_"+std::string(args.sampleName_arg)+".out").c_str(), "w"); - fpBarycenterAll = fopen((std::string(args.outputDir_arg)+"/barycenters_all_"+std::string(args.sampleName_arg)+".out").c_str(), "w"); - - fpCentersCentral = fopen((std::string(args.outputDir_arg)+"/centers_central_"+std::string(args.sampleName_arg)+".out").c_str(), "w"); - fprintf(fpCentersCentral, "# center x,y,z (Mpc/h), volume (normalized), radius (Mpc/h), redshift, volume (Mpc/h^3), void ID, density contrast, num part\n"); - - fpCentersAll = fopen((std::string(args.outputDir_arg)+"/centers_all_"+std::string(args.sampleName_arg)+".out").c_str(), "w"); - fprintf(fpCentersAll, "# center x,y,z (Mpc/h), volume (normalized), radius (Mpc/h), redshift, volume 
(Mpc/h^3), void ID, density contrast, num part\n"); - - fpCentersNoCutCentral = fopen((std::string(args.outputDir_arg)+"/centers_nocut_central_"+std::string(args.sampleName_arg)+".out").c_str(), "w"); - fprintf(fpCentersNoCutCentral, "# center x,y,z (Mpc/h), volume (normalized), radius (Mpc/h), redshift, volume (Mpc/h^3), void ID, density contrast, num part\n"); - - fpCentersNoCutAll = fopen((std::string(args.outputDir_arg)+"/centers_nocut_all_"+std::string(args.sampleName_arg)+".out").c_str(), "w"); - fprintf(fpCentersNoCutAll, "# center x,y,z (Mpc/h), volume (normalized), radius (Mpc/h), redshift, volume (Mpc/h^3), void ID, density contrast, num part\n"); - - - fpDistancesCentral = fopen((std::string(args.outputDir_arg)+"boundaryDistancesCentral_"+std::string(args.sampleName_arg)+".out").c_str(), "w"); - fpDistancesAll = fopen((std::string(args.outputDir_arg)+"boundaryDistancesAll_"+std::string(args.sampleName_arg)+".out").c_str(), "w"); - - fpSkyPositionsCentral = fopen((std::string(args.outputDir_arg)+"/sky_positions_central_"+std::string(args.sampleName_arg)+".out").c_str(), "w"); - fprintf(fpSkyPositionsCentral, "# RA, dec, redshift, radius (Mpc/h), void ID\n"); - - fpSkyPositionsAll = fopen((std::string(args.outputDir_arg)+"/sky_positions_all_"+std::string(args.sampleName_arg)+".out").c_str(), "w"); - fprintf(fpSkyPositionsAll, "# RA, dec, redshift, radius (Mpc/h), void ID\n"); - - fpShapesCentral = fopen((std::string(args.outputDir_arg)+"/shapes_central_"+std::string(args.sampleName_arg)+".out").c_str(), "w"); - fprintf(fpShapesCentral, "# void ID, eig(1), eig(2), eig(3), eigv(1)-x, eiv(1)-y, eigv(1)-z, eigv(2)-x, eigv(2)-y, eigv(2)-z, eigv(3)-x, eigv(3)-y, eigv(3)-z\n"); - fpShapesAll = fopen((std::string(args.outputDir_arg)+"/shapes_all_"+std::string(args.sampleName_arg)+".out").c_str(), "w"); - fprintf(fpShapesAll, "# void ID, eig(1), eig(2), eig(3), eigv(1)-x, eiv(1)-y, eigv(1)-z, eigv(2)-x, eigv(2)-y, eigv(2)-z, eigv(3)-x, eigv(3)-y, eigv(3)-z\n"); - - + outputDir = string(args.outputDir_arg); + sampleName = (string(args.sampleName_arg)+".out"); + + name = "central"; + openFiles(outputDir, sampleName, name, + mockIndex, numKept, + &fpZobov, &fpCenters, &fpCentersNoCut, &fpBarycenter, + &fpDistances, &fpShapes, &fpSkyPositions); + for (iVoid = 0; iVoid < voids.size(); iVoid++) { - if (voids[iVoid].voidType == CENTRAL_VOID) { - outputVoid(iVoid, voids[iVoid], fpZobovCentral, fpCentersCentral, - fpCentersNoCutCentral, fpSkyPositionsCentral, - fpBarycenterCentral, fpDistancesCentral, fpShapesCentral, + outputVoid(iVoid, voids[iVoid], fpZobov, fpCenters, + fpCentersNoCut, fpSkyPositions, + fpBarycenter, fpDistances, fpShapes, args.isObservation_flag, boxLen); - } + } + } + closeFiles(fpZobov, fpCenters, fpCentersNoCut, fpBarycenter, + fpDistances, fpShapes, fpSkyPositions); + name = "all"; + openFiles(outputDir, sampleName, name, + mockIndex, numKept, + &fpZobov, &fpCenters, &fpCentersNoCut, &fpBarycenter, + &fpDistances, &fpShapes, &fpSkyPositions); + for (iVoid = 0; iVoid < voids.size(); iVoid++) { if (voids[iVoid].voidType == EDGE_VOID || voids[iVoid].voidType == CENTRAL_VOID) { - outputVoid(iVoid, voids[iVoid], fpZobovAll, fpCentersAll, - fpCentersNoCutAll, fpSkyPositionsAll, - fpBarycenterAll, fpDistancesAll, fpShapesAll, + outputVoid(iVoid, voids[iVoid], fpZobov, fpCenters, + fpCentersNoCut, fpSkyPositions, + fpBarycenter, fpDistances, fpShapes, args.isObservation_flag, boxLen); } - } - - fclose(fpZobovCentral); - fclose(fpZobovAll); - fclose(fpCentersCentral); - 
fclose(fpCentersAll); - fclose(fpCentersNoCutCentral); - fclose(fpCentersNoCutAll); - fclose(fpBarycenterCentral); - fclose(fpBarycenterAll); - fclose(fpDistancesCentral); - fclose(fpDistancesAll); - fclose(fpShapesCentral); - fclose(fpShapesAll); - fclose(fpSkyPositionsCentral); - fclose(fpSkyPositionsAll); + } + closeFiles(fpZobov, fpCenters, fpCentersNoCut, fpBarycenter, + fpDistances, fpShapes, fpSkyPositions); clock2 = clock(); printf(" Time: %f sec (for %d voids)\n", @@ -755,9 +742,57 @@ int main(int argc, char **argv) { } // end main +// ---------------------------------------------------------------------------- +void openFiles(string outputDir, string sampleName, string name, + int mockIndex, int numKept, + FILE** fpZobov, FILE** fpCenters, + FILE** fpCentersNoCut, + FILE** fpBarycenter, FILE** fpDistances, FILE** fpShapes, + FILE** fpSkyPositions) { + + *fpZobov = fopen((outputDir+"/voidDesc_"+name+"_"+sampleName).c_str(), "w"); + fprintf(*fpZobov, "%d particles, %d voids.\n", mockIndex, numKept); + fprintf(*fpZobov, "Void# FileVoid# CoreParticle CoreDens ZoneVol Zone#Part Void#Zones VoidVol Void#Part VoidDensContrast VoidProb\n"); + + *fpBarycenter = fopen((outputDir+"/barycenters_"+name+"_"+sampleName).c_str(), "w"); + + *fpCenters = fopen((outputDir+"/centers_"+name+"_"+sampleName).c_str(), "w"); + fprintf(*fpCenters, "# center x,y,z (Mpc/h), volume (normalized), radius (Mpc/h), redshift, volume (Mpc/h^3), void ID, density contrast, num part, parent ID, number of children\n"); + + *fpCentersNoCut = fopen((outputDir+"/centers_nocut_"+name+"_"+sampleName).c_str(), "w"); + fprintf(*fpCentersNoCut, "# center x,y,z (Mpc/h), volume (normalized), radius (Mpc/h), redshift, volume (Mpc/h^3), void ID, density contrast, num part, parent ID, number of children\n"); + + + *fpDistances = fopen((outputDir+"boundaryDistances_"+name+"_"+sampleName).c_str(), "w"); + + *fpSkyPositions = fopen((outputDir+"/sky_positions_"+name+"_"+sampleName).c_str(), "w"); + fprintf(*fpSkyPositions, "# RA, dec, redshift, radius (Mpc/h), void ID\n"); + + *fpShapes = fopen((outputDir+"/shapes_"+name+"_"+sampleName).c_str(), "w"); + fprintf(*fpShapes, "# void ID, eig(1), eig(2), eig(3), eigv(1)-x, eiv(1)-y, eigv(1)-z, eigv(2)-x, eigv(2)-y, eigv(2)-z, eigv(3)-x, eigv(3)-y, eigv(3)-z\n"); + +} // end openFiles + + +// ---------------------------------------------------------------------------- +void closeFiles(FILE* fpZobov, FILE* fpCenters, + FILE* fpCentersNoCut, + FILE* fpBarycenter, FILE* fpDistances, FILE* fpShapes, + FILE* fpSkyPositions) { + + fclose(fpZobov); + fclose(fpCenters); + fclose(fpCentersNoCut); + fclose(fpBarycenter); + fclose(fpDistances); + fclose(fpShapes); + fclose(fpSkyPositions); + +} // end closeFile + // ---------------------------------------------------------------------------- void outputVoid(int iVoid, VOID outVoid, FILE* fpZobov, FILE* fpCenters, - FILE* fpCenterNoCut, FILE* fpSkyPositions, + FILE* fpCentersNoCut, FILE* fpSkyPositions, FILE* fpBarycenters, FILE* fpDistances, FILE* fpShapes, bool isObservation, double *boxLen) { @@ -795,8 +830,8 @@ void outputVoid(int iVoid, VOID outVoid, FILE* fpZobov, FILE* fpCenters, outCenter[2] = (outVoid.barycenter[2]-boxLen[2]/2.)*100.; } - if (outVoid.accepted == 1) { - fprintf(fpCenters, "%.2f %.2f %.2f %.2f %.2f %.5f %.2f %d %f %d\n", + if (!outVoid.hasHighCentralDen && outVoid.isLeaf) { + fprintf(fpCenters, "%.2f %.2f %.2f %.2f %.2f %.5f %.2f %d %f %d %d %d\n", outCenter[0], outCenter[1], outCenter[2], @@ -806,10 +841,12 @@ void 
outputVoid(int iVoid, VOID outVoid, FILE* fpZobov, FILE* fpCenters, 4./3.*M_PI*pow(outVoid.radius, 3), outVoid.voidID, outVoid.densCon, - outVoid.numPart); + outVoid.numPart, + outVoid.parentID, + outVoid.numChildren); } - fprintf(fpCenterNoCut, "%.2f %.2f %.2f %.2f %.2f %.5f %.2f %d %f %d\n", + fprintf(fpCentersNoCut, "%.2f %.2f %.2f %.2f %.2f %.5f %.2f %d %f %d %d %d\n", outCenter[0], outCenter[1], outCenter[2], @@ -819,7 +856,9 @@ void outputVoid(int iVoid, VOID outVoid, FILE* fpZobov, FILE* fpCenters, 4./3.*M_PI*pow(outVoid.radius, 3), outVoid.voidID, outVoid.densCon, - outVoid.numPart); + outVoid.numPart, + outVoid.parentID, + outVoid.numChildren); fprintf(fpSkyPositions, "%.2f %.2f %.5f %.2f %d\n", atan((outVoid.barycenter[1]-boxLen[1]/2.) / diff --git a/python_tools/void_python_tools/backend/launchers.py b/python_tools/void_python_tools/backend/launchers.py index 0589ed8..3011ec7 100644 --- a/python_tools/void_python_tools/backend/launchers.py +++ b/python_tools/void_python_tools/backend/launchers.py @@ -420,6 +420,7 @@ def launchVoidOverlap(sample1, sample2, sample1Dir, sample2Dir, cmd += periodicLine cmd += " --outfile=" + outputFile cmd += " &> " + logFile + open("temp.par",'w').write(cmd) os.system(cmd) if jobSuccessful(logFile, "Done!\n"): @@ -438,7 +439,7 @@ def launchStack(sample, stack, binPath, thisDataPortion=None, logDir=None, zobovDir=None, INCOHERENT=False, ranSeed=None, summaryFile=None, continueRun=None, dataType=None, prefixRun="", - idList=None): + idList=None, rescaleOverride=None): sampleName = sample.fullName @@ -485,13 +486,18 @@ def launchStack(sample, stack, binPath, thisDataPortion=None, logDir=None, else: nullTestFlag = "" - if stack.rescaleMode == "rmax": + if rescaleOverride == None: rescaleOverride = stack.rescaleMode + if rescaleOverride == "rmax": rescaleFlag = "rescale" else: rescaleFlag = "" + idListFile = "idlist.temp" if idList != None: - idListFlag = "idList '" + str(idList).strip('[]') + "'" + idListFlag = "idList " + idListFile + idFile = open(idListFile, 'w') + for id in idList: idFile.write(str(id)+"\n") + idFile.close() rMinToUse = 0. rMaxToUse = 100000. centralRadius = rMaxToUse @@ -696,8 +702,9 @@ def launchStack(sample, stack, binPath, thisDataPortion=None, logDir=None, os.system("mv %s %s" % ("normalizations.txt", voidDir+"/")) os.system("mv %s %s" % ("boundaryDistances.txt", voidDir+"/")) - if os.access(parmFile, os.F_OK): - os.unlink(parmFile) + if os.access(idListFile, os.F_OK): os.unlink(idListFile) + + if os.access(parmFile, os.F_OK): os.unlink(parmFile) return From 9c329086aff70226dcfb13a658cacde8b493c491 Mon Sep 17 00:00:00 2001 From: Guilhem Lavaux Date: Sat, 30 Mar 2013 15:26:37 -0500 Subject: [PATCH 39/39] Fixed computation of the maximum number of particles accepted by Qhull --- zobov/vozinit.c | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/zobov/vozinit.c b/zobov/vozinit.c index c93f3f6..c1e7865 100644 --- a/zobov/vozinit.c +++ b/zobov/vozinit.c @@ -5,7 +5,7 @@ #define DL for (d=0;d<3;d++) #define BF 1e30 -#define QHULL_MAX_PARTICLES (1L<<24-1) +#define QHULL_MAX_PARTICLES ((1L<<24)-1) int posread(char *posfile, float ***p, float fact); @@ -149,9 +149,10 @@ int main(int argc, char *argv[]) { printf("Nvpbuf range: %d,%d\n",nvpbufmin,nvpbufmax); numGuards = 6*(NGUARD+1)*(NGUARD+1); + printf("Total max particles: %d\n" , nvpbufmax+numGuards); if (nvpbufmax+numGuards >= QHULL_MAX_PARTICLES) { - printf("Too many particles to tesselate per division (Qhull will overflow). 
Increase divisions or reduce buffer size."); + printf("Too many particles to tesselate per division (Qhull will overflow). Increase divisions or reduce buffer size.\n"); fflush(stdout); exit(1); }
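The parenthesization fixed in this final patch matters because of C operator precedence: subtraction binds tighter than the shift, so the original (1L<<24-1) evaluates to 1L<<23, roughly half the intended limit of 2^24 - 1. A quick check of the numbers behind the vozinit.c guard, with Python used only for the arithmetic and NGUARD, nvpbufmax standing in as placeholders for the values the C code computes:

    # original macro: 1L<<24-1 parses as 1L << (24-1)
    wrong_limit = 1 << (24 - 1)             # 8388608
    # fixed macro: (1L<<24)-1
    qhull_max_particles = (1 << 24) - 1     # 16777215

    NGUARD = 42           # placeholder; the real value lives in voz.h
    nvpbufmax = 4000000   # placeholder; largest buffered particle count per division
    numGuards = 6 * (NGUARD + 1) ** 2
    overflow = nvpbufmax + numGuards >= qhull_max_particles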