Mirror of https://bitbucket.org/cosmicvoids/vide_public.git (synced 2025-07-05 07:41:11 +00:00)

Merge branch 'master' of bitbucket.org:cosmicvoids/void_identification

Commit 85073516a0
16 changed files with 328 additions and 620 deletions
@@ -61,8 +61,9 @@ typedef struct voidZoneStruct {
typedef struct matchProps {
int matchID;
float commonVol;
float commonVolOrig, commonVolProg;
float dist;
float merit;
} MATCHPROPS;

typedef struct voidStruct {

@@ -71,6 +72,7 @@ typedef struct voidStruct {
float maxRadius, nearestMock, centralDen, redshift, redshiftInMpc;
float nearestEdge;
float barycenter[3];
float ellipticity;
std::vector<MATCHPROPS> matches;
int numMatches;
int numBigMatches;

@@ -89,12 +91,14 @@ typedef struct catalog {
void loadCatalog(const char *partFile, const char *volFile,
const char *voidFile, const char *zoneFile,
const char *infoFile, const char *centerFile,
const char *shapeFile,
const char *zonePartFile, CATALOG& catalog);

float getDist(CATALOG& catalog1, CATALOG& catalog2, int& iVoid1, int& iVoid2,
bool periodicX, bool periodicY, bool operiodicZ);

void sortMatches(std::vector<MATCHPROPS>& matches);
void sortMatches(std::vector<MATCHPROPS>& matches,
CATALOG& catalog2, int mode);

// ----------------------------------------------------------------------------
@@ -110,7 +114,9 @@ int main(int argc, char **argv) {
float closestMatchDist;
float commonVolRatio;
MATCHPROPS newMatch;
int MAX_MATCHES = 10;
int MAX_MATCHES = 100;
float matchDist = 0.25;
bool alreadyMatched;

CATALOG catalog1, catalog2;

@@ -137,10 +143,12 @@ int main(int argc, char **argv) {
loadCatalog(args.partFile1_arg, args.volFile1_arg, args.voidFile1_arg,
args.zoneFile1_arg, args.infoFile1_arg, args.centerFile1_arg,
args.shapeFile1_arg,
args.zonePartFile1_arg, catalog1);

loadCatalog(args.partFile2_arg, args.volFile2_arg, args.voidFile2_arg,
args.zoneFile2_arg, args.infoFile2_arg, args.centerFile2_arg,
args.shapeFile2_arg,
args.zonePartFile2_arg, catalog2);

// check for periodic box

@@ -165,9 +173,12 @@ int main(int argc, char **argv) {
periodicX, periodicY, periodicZ);

newMatch.matchID = iVoid2;
newMatch.commonVol = 0;
newMatch.commonVolOrig = 0;
newMatch.commonVolProg = 0;
newMatch.dist = rdist;

if (rdist/catalog1.voids[iVoid1].radius > 1.5) continue;

if (catalog1.voids[iVoid1].matches.size() < MAX_MATCHES) {
catalog1.voids[iVoid1].matches.push_back(newMatch);
} else {

@@ -201,6 +212,8 @@ int main(int argc, char **argv) {
for (p1 = 0; p1 < catalog1.zones2Parts[zoneID1].numPart; p1++) {
partID1 = catalog1.zones2Parts[zoneID1].partIDs[p1];

alreadyMatched = false;

for (iZ2 = 0; iZ2 < catalog2.void2Zones[voidID2].numZones; iZ2++) {
zoneID2 = catalog2.void2Zones[voidID2].zoneIDs[iZ2];

@@ -231,12 +244,17 @@ int main(int argc, char **argv) {
catalog1.numPartTot, 1./3.);
r2 = pow(3./4./M_PI*catalog2.part[partID2].volume /
catalog2.numPartTot, 1./3.);
if (rdist <= 0.1*r1 || rdist <= 0.1*r2) match = true;
if (rdist <= matchDist*(r1+r2)) match = true;
}

if (match) {
catalog1.voids[iVoid1].matches[iMatch].commonVol +=
if (!alreadyMatched) {
catalog1.voids[iVoid1].matches[iMatch].commonVolOrig +=
catalog1.part[partID1].volume;
}
catalog1.voids[iVoid1].matches[iMatch].commonVolProg +=
catalog2.part[partID2].volume;
alreadyMatched = true;
} // end if match

} // end p2
@@ -244,11 +262,28 @@ int main(int argc, char **argv) {
} // end p1
} // end iZ1
} // end iVoid2

for (iMatch = 0; iMatch < catalog1.voids[iVoid1].matches.size(); iMatch++) {
int matchID = catalog1.voids[iVoid1].matches[iMatch].matchID;
catalog1.voids[iVoid1].matches[iMatch].merit =
catalog1.voids[iVoid1].matches[iMatch].commonVolOrig *
catalog1.voids[iVoid1].matches[iMatch].commonVolProg /
catalog1.voids[iVoid1].vol /
catalog2.voids[matchID].vol;
//pow(catalog1.voids[iVoid1].matches[iMatch].commonVol,2) /
//catalog1.voids[iVoid1].vol /
//catalog2.voids[matchID].vol;
}
// sortMatches(catalog1.voids[iVoid1].matches, catalog2);
//printf("BEST VOL %e\n", catalog2.voids[catalog1.voids[iVoid1].matches[0].matchID].vol/catalog1.voids[iVoid1].vol);
} // end match finding

printf(" Sorting matches...\n");
for (iVoid1 = 0; iVoid1 < catalog1.numVoids; iVoid1++) {
sortMatches(catalog1.voids[iVoid1].matches);
sortMatches(catalog1.voids[iVoid1].matches, catalog2, 0);
if (catalog1.voids[iVoid1].matches.size() > 0 &&
catalog1.voids[iVoid1].matches[0].merit < 1.e-6)
sortMatches(catalog1.voids[iVoid1].matches, catalog2, 1);
}

// count up significant matches

@@ -256,10 +291,10 @@ int main(int argc, char **argv) {
for (iVoid1 = 0; iVoid1 < catalog1.numVoids; iVoid1++) {
closestMatchDist = 0.;
for (iMatch = 0; iMatch < catalog1.voids[iVoid1].matches.size(); iMatch++) {
commonVolRatio = catalog1.voids[iVoid1].matches[iMatch].commonVol /
commonVolRatio = catalog1.voids[iVoid1].matches[iMatch].commonVolOrig /
// catalog1.voids[iVoid1].numPart;
catalog1.voids[iVoid1].vol;
if (commonVolRatio > 0.1) catalog1.voids[iVoid1].numBigMatches++;
if (commonVolRatio > 0.2) catalog1.voids[iVoid1].numBigMatches++;
}
catalog1.voids[iVoid1].numMatches = catalog1.voids[iVoid1].matches.size();
}
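For reference, the merit introduced in the hunks above is the product of the shared volume as measured in each catalog, normalized by both void volumes; an overlap that is large relative to both voids scores near unity. A minimal Python sketch of the same quantity (function and argument names are illustrative, not from the source):

    def match_merit(common_vol_orig, common_vol_prog, vol1, vol2):
        # merit = (shared volume counted with catalog-1 cell volumes)
        #       * (shared volume counted with catalog-2 cell volumes)
        #       / (volume of void 1) / (volume of void 2)
        return (common_vol_orig * common_vol_prog) / (vol1 * vol2)

    # Example: a void sharing half of its volume with an equal-size match
    print(match_merit(0.5, 0.5, 1.0, 1.0))  # 0.25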
@@ -270,22 +305,25 @@ int main(int argc, char **argv) {
filename = string(args.outfile_arg);
filename = filename.append("summary.out");
fp = fopen(filename.c_str(), "w");
fprintf(fp, "# void ID, radius, radius ratio, common volume ratio, common volume ratio 2, relative dist, num matches, num significant matches, match ID\n");
fprintf(fp, "# void ID, radius, radius ratio, common volume ratio (to original), common volume ratio (to match), relative dist, num matches, num significant matches, match ID, merit, ellipticity ratio\n");
for (iVoid1 = 0; iVoid1 < catalog1.numVoids; iVoid1++) {
int voidID = catalog1.voids[iVoid1].voidID;
if (catalog1.voids[iVoid1].numMatches > 0) {
iVoid2 = catalog1.voids[iVoid1].matches[0].matchID;
float rRatio = catalog2.voids[iVoid2].radius /
catalog1.voids[iVoid1].radius;
commonVolRatio = catalog1.voids[iVoid1].matches[0].commonVol /
float ellipRatio = catalog2.voids[iVoid2].ellipticity /
catalog1.voids[iVoid1].ellipticity;
commonVolRatio = catalog1.voids[iVoid1].matches[0].commonVolOrig /
//catalog1.voids[iVoid1].numPart;
catalog1.voids[iVoid1].vol;
float volRatio = catalog1.voids[iVoid1].matches[0].commonVol /
float volRatio = catalog1.voids[iVoid1].matches[0].commonVolProg /
catalog2.voids[iVoid2].vol;
rdist = catalog1.voids[iVoid1].matches[0].dist;
rdist /= catalog1.voids[iVoid1].radius;

fprintf(fp, "%d %.4f %.4f %.4f %.4f %.4f %d %d %d\n", voidID,
fprintf(fp, "%d %.4f %.4f %e %e %.4f %d %d %d %e %e\n", voidID,
//fprintf(fp, "%d %.4f %.4f %.4f %.4f %.4f %d %d %d\n", voidID,
catalog1.voids[iVoid1].radiusMpc,
rRatio,
commonVolRatio,

@@ -293,10 +331,12 @@ int main(int argc, char **argv) {
rdist,
catalog1.voids[iVoid1].numMatches,
catalog1.voids[iVoid1].numBigMatches,
catalog2.voids[iVoid2].voidID);
catalog2.voids[iVoid2].voidID,
catalog1.voids[iVoid1].matches[0].merit,
ellipRatio);

} else {
fprintf(fp, "%d %.2f 0.0 0.0 0.0 0.0 0 0\n", voidID,
fprintf(fp, "%d %.4f 0.0 0.0 0.0 0.0 0.0 0 0 0 0.0\n", voidID,
catalog1.voids[iVoid1].radiusMpc);
}
} // end printing

@@ -314,7 +354,7 @@ int main(int argc, char **argv) {
fprintf(fp,"%d: ", voidID);
for (iMatch = 0; iMatch < MAX_OUT; iMatch++) {
if (iMatch < catalog1.voids[iVoid1].matches.size()) {
commonVolRatio = catalog1.voids[iVoid1].matches[iMatch].commonVol /
commonVolRatio = catalog1.voids[iVoid1].matches[iMatch].commonVolOrig /
//catalog1.voids[iVoid1].numPart;
catalog1.voids[iVoid1].vol;

@@ -339,6 +379,7 @@ int main(int argc, char **argv) {
void loadCatalog(const char *partFile, const char *volFile,
const char *voidFile, const char *zoneFile,
const char *infoFile, const char *centerFile,
const char *shapeFile,
const char *zonePartFile, CATALOG& catalog) {

int i, p, numPartTot, numZonesTot, dummy, iVoid, iZ, numVolTot;
@@ -435,7 +476,7 @@ void loadCatalog(const char *partFile, const char *volFile,
fgets(line, sizeof(line), fp);
sscanf(line, "%d %s %d %s", &junkInt, junkStr, &catalog.numVoids, junkStr);
fgets(line, sizeof(line), fp);

catalog.voids.resize(catalog.numVoids);
i = 0;
while (fgets(line, sizeof(line), fp) != NULL) {

@@ -474,12 +515,16 @@ void loadCatalog(const char *partFile, const char *volFile,
float tempFloat;
int tempInt;
iVoid = 0;
//fgets(line, sizeof(line), fp);
while (fgets(line, sizeof(line), fp) != NULL) {
sscanf(line, "%f %f %f %f %f %f %f %d %f %d %d %d\n",
&tempBary[0], &tempBary[1], &tempBary[2],
&tempFloat, &tempFloat, &tempFloat, &tempFloat, &tempInt,
&tempFloat, &tempInt, &tempInt);
sscanf(line, "%d %f %f %f\n",
&tempInt, &tempBary[0], &tempBary[1], &tempBary[2]);
//sscanf(line, "%f %f %f %f %f %f %f %d %f %d %d %d %d\n",
// &tempBary[0], &tempBary[1], &tempBary[2],
// &tempFloat, &tempFloat, &tempFloat, &tempFloat, &tempInt,
// &tempFloat, &tempInt, &tempInt, &tempInt, &tempInt);

//if (iVoid < 10) printf("BARY %d %d %e %e %e\n", iVoid, catalog.voids[iVoid].voidID, tempBary[0], tempBary[1], tempBary[2]);
tempBary[0] = (tempBary[0] - ranges[0][0])/catalog.boxLen[0];
tempBary[1] = (tempBary[1] - ranges[1][0])/catalog.boxLen[1];
tempBary[2] = (tempBary[2] - ranges[2][0])/catalog.boxLen[2];

@@ -490,6 +535,25 @@ void loadCatalog(const char *partFile, const char *volFile,
}
fclose(fp);

printf(" Loading shapes\n");
fp = fopen(shapeFile, "r");
iVoid = 0;
float tempEllip;
fgets(line, sizeof(line), fp);
while (fgets(line, sizeof(line), fp) != NULL) {
sscanf(line, "%d %f %f %f %f %f %f %f %f %f %f %f %f %f\n",
&tempInt, &tempEllip, &tempFloat, &tempFloat, &tempFloat,
&tempFloat, &tempFloat, &tempFloat,
&tempFloat, &tempFloat, &tempFloat,
&tempFloat, &tempFloat, &tempFloat);

//if (iVoid < 10) printf("SHAPE %d %d %e\n", iVoid, catalog.voids[iVoid].voidID, tempEllip);
catalog.voids[iVoid].ellipticity = tempEllip;
iVoid++;
}
fclose(fp);

// load up the zone membership for each void
printf(" Loading zone-void membership info...\n");
fp = fopen(zoneFile, "r");
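The shape file read in the hunk above carries one line per void: an integer void ID, the ellipticity, and (judging from the matching fprintf in pruneVoids later in this diff) the three inertia-tensor eigenvalues followed by eigenvector components. A hedged Python sketch of pulling out the ellipticity column, mirroring the C++ reader's skip of the first line (file path and function name are hypothetical):

    def load_shapes(shape_file):
        shapes = {}
        with open(shape_file) as fp:
            fp.readline()                 # skip one line, as the C++ reader does
            for line in fp:
                cols = line.split()
                if len(cols) < 2:
                    continue
                shapes[int(cols[0])] = float(cols[1])   # void ID -> ellipticity
        return shapes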
@@ -557,11 +621,13 @@ float getDist(CATALOG& catalog1, CATALOG& catalog2, int& iVoid1, int& iVoid2,

// ----------------------------------------------------------------------------
void sortMatches(std::vector<MATCHPROPS>& matches) {
void sortMatches(std::vector<MATCHPROPS>& matches,
CATALOG& catalog2, int mode) {

//printf("SORTING %d\n", matches.size());
MATCHPROPS tempMatch;
bool swapped;
int matchID, matchID2;

if (matches.size() <= 1) return;

@@ -569,8 +635,10 @@ void sortMatches(std::vector<MATCHPROPS>& matches) {
while (swapped) {
swapped = false;
for (int iMatch = 0; iMatch < matches.size() - 1; iMatch++) {
if (matches[iMatch].dist > matches[iMatch+1].dist) {
//if (matches[iMatch].commonVol < matches[iMatch+1].commonVol) {
matchID = matches[iMatch].matchID;
matchID2 = matches[iMatch+1].matchID;
if ((mode == 0 && matches[iMatch].merit < matches[iMatch+1].merit) ||
(mode == 1 && matches[iMatch].dist > matches[iMatch+1].dist)) {
tempMatch = matches[iMatch+1];
matches[iMatch+1] = matches[iMatch];
matches[iMatch] = tempMatch;
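sortMatches now carries a mode flag: mode 0 ranks candidates by descending merit, and mode 1, used in main() as a fallback when the best merit is below 1.e-6, ranks by increasing distance. An illustrative Python restatement of that selection rule (the C++ uses an in-place bubble sort; sorted() is used here only for brevity, and the dict layout is not from the source):

    def sort_matches(matches, mode=0):
        # matches: list of dicts with 'merit' and 'dist' keys (illustrative structure)
        if mode == 0:
            return sorted(matches, key=lambda m: m['merit'], reverse=True)
        return sorted(matches, key=lambda m: m['dist'])

    matches = [{'merit': 1e-8, 'dist': 0.3}, {'merit': 2e-8, 'dist': 0.9}]
    best = sort_matches(matches, mode=0)[0]
    if best['merit'] < 1.e-6:              # same threshold as in main()
        best = sort_matches(matches, mode=1)[0]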
@@ -11,6 +11,7 @@ option "infoFile1" - "Extra info file for catalog 1" string yes
option "zoneFile1" - "Zone file for catalog 1" string yes
option "zonePartFile1" - "Zone-particle file for catalog 1" string yes
option "centerFile1" - "Barycenter file for catalog 1" string yes
option "shapeFile1" - "Shape file for catalog 1" string yes

# void data for file 2
option "partFile2" - "Particle file for catalog 2" string yes

@@ -20,6 +21,7 @@ option "infoFile2" - "Extra info file for catalog 2" string yes
option "zoneFile2" - "Zone file for catalog 2" string yes
option "zonePartFile2" - "Zone-particle file for catalog 2" string yes
option "centerFile2" - "Barycenter file for catalog 2" string yes
option "shapeFile2" - "Shape file for catalog 2" string yes

# options
option "outfile" - "Output file" string yes
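The two new gengetopt options become --shapeFile1 and --shapeFile2 flags on the voidOverlap binary, and launchVoidOverlap later in this diff assembles them alongside the barycenter files. A trimmed sketch of that command assembly (paths and names are placeholders, not a complete flag list):

    def build_overlap_cmd(bin_path, sample_dir, sample_name, data_portion="central"):
        cmd = bin_path
        cmd += " --shapeFile1=" + sample_dir + \
            "/trimmed_nodencut_shapes_" + data_portion + "_" + sample_name + ".out"
        cmd += " --centerFile1=" + sample_dir + \
            "/trimmed_nodencut_barycenters_" + data_portion + "_" + sample_name + ".out"
        # ...the remaining --*File1 / --*File2 flags are appended the same way...
        return cmd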
@@ -152,6 +152,22 @@ void metricTransform(SimuData *data, int axis, bool reshift, bool pecvel, double

}

// slightly perturb particle positions
void joggleParticles(SimuData *data) {
cout << "Joggling particle positions..." << endl;
gsl_rng *myRng = gsl_rng_alloc(gsl_rng_taus);
int seed = 314159;
gsl_rng_set(myRng, seed);
for (uint32_t i = 0; i < data->NumPart; i++) {
data->Pos[0][i] += 1.e-3*gsl_rng_uniform(myRng);
data->Pos[1][i] += 1.e-3*gsl_rng_uniform(myRng);
data->Pos[2][i] += 1.e-3*gsl_rng_uniform(myRng);
data->Pos[0][i] -= 1.e-3*gsl_rng_uniform(myRng);
data->Pos[1][i] -= 1.e-3*gsl_rng_uniform(myRng);
data->Pos[2][i] -= 1.e-3*gsl_rng_uniform(myRng);
}
} // end joggleParticles

void generateOutput(SimuData *data, int axis,
const std::string& fname)
{

@@ -652,11 +668,11 @@ int main(int argc, char **argv)
{
loader = flashLoader(args_info.flash_arg, NEED_POSITION|NEED_VELOCITY|NEED_GADGET_ID, preselector);
}
#ifdef SDF_SUPPORT
else if (args_info.multidark_given)
{
loader = multidarkLoader(args_info.multidark_arg, preselector);
}
#ifdef SDF_SUPPORT
else if (args_info.sdf_given)
{
loader = sdfLoader(args_info.sdf_arg, NEED_POSITION|NEED_VELOCITY|NEED_GADGET_ID, args_info.sdf_splitting_arg, preselector);

@@ -723,6 +739,9 @@ int main(int argc, char **argv)
delete[] efac;
}

if (args_info.joggleParticles_flag)
joggleParticles(simuOut);

saveBox(simuOut, args_info.outputParameter_arg, args_info);
generateOutput(simuOut, args_info.axis_arg,
args_info.output_arg);
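joggleParticles above nudges every coordinate by adding and then subtracting independent uniform deviates of amplitude 1.e-3, which breaks exact positional degeneracies before the tessellation step. A rough numpy analogue of the same idea (array and function names are illustrative only):

    import numpy as np

    def joggle(pos, amplitude=1.e-3, seed=314159):
        # pos: (N, 3) array of particle positions
        rng = np.random.RandomState(seed)
        pos = pos + amplitude * rng.uniform(size=pos.shape)
        pos = pos - amplitude * rng.uniform(size=pos.shape)
        return pos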
@@ -41,3 +41,4 @@ option "subsample_seed" - "Seed for random number generation to select the subsa
option "resubsample" - "Resubsampling factor compared to the subsampled simulation" double optional
option "resubsample_seed" - "Seed for resubsampling from a subsampled simulation" int optional default="20132011"

option "joggleParticles" - "Slightly joggle the input particle positions" flag off
@@ -98,7 +98,7 @@ public:
SingleParticle p;

fp >> p.ID >> p.Pos[0] >> p.Pos[1]
>> p.Pos[2] >> p.Vel[2] >> tempData >> tempData;
>> p.Pos[2] >> p.Vel[2] >> tempData >> tempData >> tempData;

if (p.ID == -99 &&
p.Pos[0] == -99 && p.Pos[1] == -99 &&
@@ -75,10 +75,11 @@ typedef struct voidStruct {
float center[3], barycenter[3];
int accepted;
int voidType;
int parentID, numChildren;
int parentID, numChildren, level;
bool isLeaf, hasHighCentralDen;
gsl_vector *eval;
gsl_matrix *evec;
float ellip;
} VOID;

void openFiles(string outputDir, string sampleName,

@@ -98,7 +99,7 @@ void outputVoids(string outputDir, string sampleName, string prefix,
string dataPortion, int mockIndex,
vector<VOID> voids,
bool isObservation, double *boxLen,
bool doTrim);
bool doTrim, bool doCentralDenCut);

int main(int argc, char **argv) {

@@ -280,9 +281,11 @@ int main(int argc, char **argv) {
voids[i-1].hasHighCentralDen = false;
voids[i-1].numChildren = 0;
voids[i-1].parentID = -1;
voids[i-1].level = 0;

voids[i-1].eval = gsl_vector_alloc(3);
voids[i-1].evec = gsl_matrix_alloc(3,3);
voids[i-1].ellip = 0;
}
fclose(fp);

@@ -356,6 +359,15 @@ int main(int argc, char **argv) {
voidID = voids[iVoid].voidID;
voids[iVoid].parentID = tree->getParent(voidID);
voids[iVoid].numChildren = tree->getChildren(voidID).size();

// compute level in tree
int level = 0;
int parentID = tree->getParent(voidID);
while (parentID != -1) {
level++;
parentID = tree->getParent(parentID);
}
voids[iVoid].level = level;
}
} // end re-load
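The new level field records how deep a void sits in the hierarchy: walk parent pointers until a void with no parent (parentID == -1) is reached and count the steps. Equivalent logic in Python, with a plain dict standing in for the tree object used above:

    def void_level(void_id, parent_of):
        # parent_of: dict mapping voidID -> parentID, with -1 meaning "no parent"
        level = 0
        parent = parent_of.get(void_id, -1)
        while parent != -1:
            level += 1
            parent = parent_of.get(parent, -1)
        return level

    parent_of = {10: 4, 4: 1, 1: -1}
    print(void_level(10, parent_of))  # 2: two steps up to the top-level void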
@@ -571,6 +583,13 @@ int main(int argc, char **argv) {
dist[1] = voidPart[p].y - voids[iVoid].barycenter[1];
dist[2] = voidPart[p].z - voids[iVoid].barycenter[2];

if (periodicX && fabs(dist[0]) > boxLen[0]/2.)
dist[0] = dist[0] - copysign(boxLen[0], dist[0]);
if (periodicY && fabs(dist[1]) > boxLen[1]/2.)
dist[1] = dist[1] - copysign(boxLen[1], dist[1]);
if (periodicZ && fabs(dist[2]) > boxLen[2]/2.)
dist[2] = dist[2] - copysign(boxLen[2], dist[2]);

for (int i = 0; i < 3; i++) {
for (int j = i; j < 3; j++) {
if (i == j) inertia[i*3+j] += dist[0]*dist[0] + dist[1]*dist[1] +

@@ -585,6 +604,20 @@ int main(int argc, char **argv) {

gsl_matrix_view m = gsl_matrix_view_array(inertia, 3, 3);
gsl_eigen_symmv(&m.matrix, voids[iVoid].eval, voids[iVoid].evec, eigw);

float a = sqrt(2.5*(gsl_vector_get(voids[iVoid].eval,1) +
gsl_vector_get(voids[iVoid].eval,2) -
gsl_vector_get(voids[iVoid].eval,0)));
float b = sqrt(2.5*(gsl_vector_get(voids[iVoid].eval,2) +
gsl_vector_get(voids[iVoid].eval,0) -
gsl_vector_get(voids[iVoid].eval,1)));
float c = sqrt(2.5*(gsl_vector_get(voids[iVoid].eval,0) +
gsl_vector_get(voids[iVoid].eval,1) -
gsl_vector_get(voids[iVoid].eval,2)));
float ca = c/a;
float cb = c/b;
voids[iVoid].ellip = 1.0 - c/a;

} // iVoid

gsl_eigen_symmv_free(eigw);
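The ellipticity block above converts the three eigenvalues of the particle inertia tensor into effective axis lengths and defines the ellipticity as 1 - c/a. The same arithmetic in Python, keeping the eigenvalue ordering exactly as it is used in the hunk (a standalone restatement, not library code from the repository):

    import numpy as np

    def void_ellipticity(eigenvalues):
        # eigenvalues: the three inertia-tensor eigenvalues (e0, e1, e2),
        # in the same order as the gsl_eigen_symmv output used above
        e0, e1, e2 = eigenvalues
        a = np.sqrt(2.5 * (e1 + e2 - e0))
        b = np.sqrt(2.5 * (e2 + e0 - e1))
        c = np.sqrt(2.5 * (e0 + e1 - e2))
        return 1.0 - c / a

    print(void_ellipticity((1.0, 1.0, 1.0)))  # a spherical void gives 0.0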
@@ -665,34 +698,17 @@ int main(int argc, char **argv) {
voids.resize(iGood);
printf(" 4th filter: rejected %d below redshift boundaries\n", numNearZ);

numAreParents = 0;
iGood = 0;
for (iVoid = 0; iVoid < voids.size(); iVoid++) {
if (voids[iVoid].numChildren > 0) {
numAreParents++;
voids[iVoid].isLeaf = false;
} else {
//voids[iGood++] = voids[iVoid];
voids[iVoid].isLeaf = true;
}
}

// TEST - grabbing only top-level voids
// take only top-level voids
numAreParents = 0;
iGood = 0;
for (iVoid = 0; iVoid < voids.size(); iVoid++) {
if (voids[iVoid].parentID != -1) {
numAreParents++;
voids[iVoid].isLeaf = false;
} else {
//voids[iGood++] = voids[iVoid];
voids[iVoid].isLeaf = true;
} else {
voids[iVoid].isLeaf = false;
}
}
// END TEST

//voids.resize(iGood);
//printf(" 5th filter: rejected %d that are not leaf nodes\n", numAreParents);

for (iVoid = 0; iVoid < voids.size(); iVoid++) {

@@ -721,7 +737,6 @@ int main(int argc, char **argv) {
printf(" We have %d too high central density\n", numHighDen);
printf(" We have %d that are not leaf nodes\n", numAreParents);

printf(" Output trimmed catalog...\n");

outputDir = string(args.outputDir_arg);
sampleName = (string(args.sampleName_arg)+".out");

@@ -729,6 +744,7 @@ int main(int argc, char **argv) {
dataPortions[0] = "central";
dataPortions[1] = "all";

printf(" Output fully trimmed catalog...\n");
prefix = "";
for (int i = 0; i < 2; i++) {
dataPortion = dataPortions[i];

@@ -736,10 +752,10 @@ int main(int argc, char **argv) {
outputVoids(outputDir, sampleName, prefix, dataPortion,
mockIndex,
voids,
args.isObservation_flag, boxLen, true);
args.isObservation_flag, boxLen, true, true);
}

printf(" Output untrimmed catalog...\n");
printf(" Output fully untrimmed catalog...\n");
prefix = "untrimmed_";
for (int i = 0; i < 2; i++) {
dataPortion = dataPortions[i];

@@ -747,9 +763,33 @@ int main(int argc, char **argv) {
outputVoids(outputDir, sampleName, prefix, dataPortion,
mockIndex,
voids,
args.isObservation_flag, boxLen, false);
args.isObservation_flag, boxLen, false, false);
}

printf(" Output partially-trimmed catalogs...\n");
prefix = "untrimmed_dencut_";
for (int i = 0; i < 2; i++) {
dataPortion = dataPortions[i];

outputVoids(outputDir, sampleName, prefix, dataPortion,
mockIndex,
voids,
args.isObservation_flag, boxLen, false, true);
}

prefix = "trimmed_nodencut_";
for (int i = 0; i < 2; i++) {
dataPortion = dataPortions[i];

outputVoids(outputDir, sampleName, prefix, dataPortion,
mockIndex,
voids,
args.isObservation_flag, boxLen, true, false);
}

clock2 = clock();
printf(" Time: %f sec (for %d voids)\n",
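With the second doCentralDenCut flag, pruneVoids now emits four catalog variants, distinguished by output prefix and by the (doTrim, doCentralDenCut) pair passed to outputVoids. A compact restatement of those four calls (the Python structure is illustrative; the labels follow the printf messages above):

    # (prefix, doTrim, doCentralDenCut) as passed to outputVoids above
    catalog_variants = [
        ("",                  True,  True),   # fully trimmed
        ("untrimmed_",        False, False),  # fully untrimmed
        ("untrimmed_dencut_", False, True),   # central-density cut only
        ("trimmed_nodencut_", True,  False),  # tree-based trim only
    ]
    for prefix, do_trim, do_den_cut in catalog_variants:
        for data_portion in ("central", "all"):
            # outputVoids(output_dir, sample_name, prefix, data_portion, ..., do_trim, do_den_cut)
            pass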
@@ -776,7 +816,7 @@ void openFiles(string outputDir, string sampleName,
*fpBarycenter = fopen((outputDir+"/"+prefix+"barycenters_"+dataPortion+"_"+sampleName).c_str(), "w");

*fpCenters = fopen((outputDir+"/"+prefix+"centers_"+dataPortion+"_"+sampleName).c_str(), "w");
fprintf(*fpCenters, "# center x,y,z (Mpc/h), volume (normalized), radius (Mpc/h), redshift, volume (Mpc/h^3), void ID, density contrast, num part, parent ID, number of children\n");
fprintf(*fpCenters, "# center x,y,z (Mpc/h), volume (normalized), radius (Mpc/h), redshift, volume (Mpc/h^3), void ID, density contrast, num part, parent ID, tree level, number of children, central density\n");

*fpDistances = fopen((outputDir+"/"+prefix+"boundaryDistances_"+dataPortion+"_"+sampleName).c_str(), "w");

@@ -807,7 +847,8 @@ void closeFiles(FILE* fpZobov, FILE* fpCenters,
void outputVoids(string outputDir, string sampleName, string prefix,
string dataPortion, int mockIndex,
vector<VOID> voids,
bool isObservation, double *boxLen, bool doTrim) {
bool isObservation, double *boxLen, bool doTrim,
bool doCentralDenCut) {

int iVoid;
VOID outVoid;

@@ -829,8 +870,11 @@ void outputVoids(string outputDir, string sampleName, string prefix,
continue;
}

//if (doTrim && (outVoid.hasHighCentralDen)) {
if (doTrim && (outVoid.hasHighCentralDen || !outVoid.isLeaf)) {
if (doTrim && outVoid.isLeaf) {
continue;
}

if (doCentralDenCut && outVoid.hasHighCentralDen) {
continue;
}

@@ -869,7 +913,7 @@ void outputVoids(string outputDir, string sampleName, string prefix,
outVoid.voidID,
outVoid.nearestMock);

fprintf(fpCenters, "%.2f %.2f %.2f %.2f %.2f %.5f %.2f %d %f %d %d %d\n",
fprintf(fpCenters, "%.2f %.2f %.2f %.2f %.2f %.5f %.2f %d %f %d %d %d %d %.2f\n",
outCenter[0],
outCenter[1],
outCenter[2],

@@ -881,7 +925,9 @@ void outputVoids(string outputDir, string sampleName, string prefix,
outVoid.densCon,
outVoid.numPart,
outVoid.parentID,
outVoid.numChildren);
outVoid.level,
outVoid.numChildren,
outVoid.centralDen);

fprintf(fpSkyPositions, "%.2f %.2f %.5f %.2f %d\n",
atan((outVoid.barycenter[1]-boxLen[1]/2.) /

@@ -892,8 +938,9 @@ void outputVoids(string outputDir, string sampleName, string prefix,
outVoid.radius,
outVoid.voidID);

fprintf(fpShapes, "%d %.2f %.2f %.2f %.2f %.2f %.2f %.2f %.2f %.2f %.2f %.2f %.2f\n",
fprintf(fpShapes, "%d %.2f %.2f %.2f %.2f %.2f %.2f %.2f %.2f %.2f %.2f %.2f %.2f %.2f\n",
outVoid.voidID,
outVoid.ellip,
gsl_vector_get(outVoid.eval, 0),
gsl_vector_get(outVoid.eval, 1),
gsl_vector_get(outVoid.eval, 2),
@@ -178,6 +178,13 @@ void buildZones(PARTICLE *p, pid_t np, pid_t *&jumped,
{
if (numinh[i] > 0) {
z[h].core = i;
z[h].vol = 0;
z[h].np = z[h].npjoin = z[h].nadj = z[h].nhl = 0;
z[h].leak = 0;
z[h].adj = 0;
z[h].slv = 0;
z[h].denscontrast = 0;
z[h].vol = z[h].voljoin = 0;
zonenum[i] = h;
h++;
} else {
|
|||
#!/usr/bin/env python
|
||||
#+
|
||||
# VIDE -- Void IDEntification pipeline -- ./pipeline/apAnalysis.py
|
||||
# Copyright (C) 2010-2013 Guilhem Lavaux
|
||||
# Copyright (C) 2011-2013 P. M. Sutter
|
||||
#
|
||||
# This program is free software; you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
# the Free Software Foundation; version 2 of the License.
|
||||
#
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License along
|
||||
# with this program; if not, write to the Free Software Foundation, Inc.,
|
||||
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
|
||||
#+
|
||||
|
||||
# takes a set of voids with given radii, and stacks and plots matches in
|
||||
# other catalogs
|
||||
|
||||
from void_python_tools.backend import *
|
||||
import imp
|
||||
import pickle
|
||||
import os
|
||||
import numpy as np
|
||||
import argparse
|
||||
|
||||
# ------------------------------------------------------------------------------
|
||||
|
||||
mergerNameBase = "mergerTree"
|
||||
|
||||
parser = argparse.ArgumentParser(description='Analyze.')
|
||||
parser.add_argument('--parm', dest='parm', default='datasetsToAnalyze.py', help='path to parameter file')
|
||||
args = parser.parse_args()
|
||||
|
||||
# ------------------------------------------------------------------------------
|
||||
|
||||
filename = args.parm
|
||||
print " Loading parameters from", filename
|
||||
if not os.access(filename, os.F_OK):
|
||||
print " Cannot find parameter file %s!" % filename
|
||||
exit(-1)
|
||||
parms = imp.load_source("name", filename)
|
||||
globals().update(vars(parms))
|
||||
|
||||
if not os.access(dataDir, os.F_OK):
|
||||
os.makedirs(dataDir)
|
||||
|
||||
if not os.access(logDir, os.F_OK):
|
||||
os.makedirs(logDir)
|
||||
|
||||
mergerFileBase = dataDir + "/" + mergerNameBase
|
||||
|
||||
# get list of base voids
|
||||
with open(workDir+baseSampleDir+"/sample_info.dat", 'rb') as input:
|
||||
baseSample = pickle.load(input)
|
||||
baseSampleName = baseSample.fullName
|
||||
baseVoidList = np.loadtxt(workDir+baseSampleDir+"/untrimmed_centers_central_"+\
|
||||
baseSampleName+".out")
|
||||
|
||||
for stack in baseSample.stacks:
|
||||
print " Stack:", stack.rMin
|
||||
|
||||
accepted = (baseVoidList[:,4] > stack.rMin) & (baseVoidList[:,4] < stack.rMax)
|
||||
baseIDList = baseVoidList[:,7][accepted]
|
||||
|
||||
for (iSample, sampleDir) in enumerate(sampleDirList):
|
||||
with open(workDir+sampleDir+"/sample_info.dat", 'rb') as input:
|
||||
sample = pickle.load(input)
|
||||
|
||||
print " Working with", sample.fullName, "..."
|
||||
sys.stdout.flush()
|
||||
sampleName = sample.fullName
|
||||
|
||||
# get list of appropriate voids
|
||||
if sample.fullName == baseSample.fullName:
|
||||
idList = baseIDList
|
||||
else:
|
||||
matchList = np.loadtxt(mergerFileBase+"_"+baseSampleName+"_"+sampleName+\
|
||||
"_summary.out")
|
||||
idList = []
|
||||
for i,testID in enumerate(matchList[:,8]):
|
||||
if np.any(testID == baseIDList):
|
||||
idList.append(matchList[i,0])
|
||||
idList = np.array(idList)
|
||||
|
||||
idList = idList.astype(int)
|
||||
|
||||
print " Found", len(idList), "voids to work with"
|
||||
|
||||
voidBaseDir = workDir+"/"+sampleDir+"stacks"
|
||||
|
||||
runSuffix = getStackSuffix(stack.zMin, stack.zMax, stack.rMin,
|
||||
stack.rMax, dataPortion,
|
||||
customLine="selected")
|
||||
voidDir = voidBaseDir+"_"+runSuffix
|
||||
|
||||
if not os.access(voidDir,os.F_OK): os.makedirs(voidDir)
|
||||
|
||||
if len(idList) == 0:
|
||||
print " No voids here anyway, skipping..."
|
||||
continue
|
||||
|
||||
print " Stacking voids...",
|
||||
sys.stdout.flush()
|
||||
|
||||
STACK_PATH = CTOOLS_PATH+"/stacking/stackVoidsZero"
|
||||
|
||||
launchStack(sample, stack, STACK_PATH,
|
||||
thisDataPortion=dataPortion,
|
||||
logDir=logDir, voidDir=voidDir,
|
||||
zobovDir=workDir+"/"+sampleDir,
|
||||
freshStack=True, INCOHERENT=False,
|
||||
ranSeed=101010, summaryFile=None,
|
||||
continueRun=False,
|
||||
dataType=sample.dataType,
|
||||
idList=idList, rescaleOverride="")
|
||||
|
||||
print " Profiling stacks...",
|
||||
sys.stdout.flush()
|
||||
logFile = logDir+"/profile_"+sampleName+"_"+runSuffix+".out"
|
||||
launchProfile(sample, stack, voidDir=voidDir,
|
||||
logFile=logFile, continueRun=False)
|
||||
|
||||
print " Done!"
|
|
@@ -1,44 +0,0 @@
#+
# VIDE -- Void IDEntification pipeline -- ./crossCompare/analysis/datasetsToAnalyze.py
# Copyright (C) 2010-2013 Guilhem Lavaux
# Copyright (C) 2011-2013 P. M. Sutter
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; version 2 of the License.
#
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#+
#!/usr/bin/env python

workDir = "/home/psutter2/workspace/Voids/"
dataDir = "/home/psutter2/workspace/Voids/crossCompare/mergerTree/"

CTOOLS_PATH = "../../c_tools/"

baseSampleDir = "mergertree512/mt_ss0.1/sample_md_ss0.1_z0.00_d00/"

sampleDirList = [
"mergertree512/mt_ss1e-05/sample_md_ss1e-05_z0.00_d00/",
#"mergertree512/mt_ss0.000175/sample_md_ss0.000175_z0.00_d00/",
#"mergertree512/mt_ss0.0004/sample_md_ss0.0004_z0.00_d00/",
#"mergertree512/mt_ss0.001/sample_md_ss0.001_z0.00_d00/",
#"mergertree512/mt_ss0.002/sample_md_ss0.002_z0.00_d00/",
"mergertree512/mt_ss0.01/sample_md_ss0.01_z0.00_d00/",
"mergertree512/mt_hod_dr72dim2/sample_md_hod_dr72dim2_z0.00_d00/",
"mergertree512/mt_hod_dr9mid/sample_md_hod_dr9mid_z0.00_d00/",
"mergertree512/mt_halos_min1.2e+13/sample_md_halos_min1.2e+13_z0.00_d00/",
]

dataPortion = "central"
@@ -1,104 +0,0 @@
#!/usr/bin/env python
#+
# VIDE -- Void IDEntification pipeline -- ./crossCompare/analysis/mergerTree.py
# Copyright (C) 2010-2013 Guilhem Lavaux
# Copyright (C) 2011-2013 P. M. Sutter
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; version 2 of the License.
#
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#+

from void_python_tools.backend import *
from void_python_tools.plotting import *
import imp
import pickle
import os
import matplotlib.pyplot as plt
import numpy as np
import argparse

# ------------------------------------------------------------------------------

dataNameBase = "mergerTree"

parser = argparse.ArgumentParser(description='Analyze.')
parser.add_argument('--parm', dest='parm', default='datasetsToAnalyze.py',
help='path to parameter file')
args = parser.parse_args()

# ------------------------------------------------------------------------------

filename = args.parm
print " Loading parameters from", filename
if not os.access(filename, os.F_OK):
print " Cannot find parameter file %s!" % filename
exit(-1)
parms = imp.load_source("name", filename)
globals().update(vars(parms))

if not os.access(dataDir, os.F_OK):
os.makedirs(dataDir)

outFileName = dataDir + "/" + dataNameBase #+ ".dat"

with open(workDir+baseSampleDir+"/sample_info.dat", 'rb') as input:
baseSample = pickle.load(input)

for (iSample, sampleDir) in enumerate(sampleDirList):

with open(workDir+sampleDir+"/sample_info.dat", 'rb') as input:
sample = pickle.load(input)

print " Working with", sample.fullName, "...",
sys.stdout.flush()

sampleName = sample.fullName

binPath = CTOOLS_PATH+"/analysis/voidOverlap"
logFile = logDir+"/mergertree_"+baseSample.fullName+"_"+sampleName+".out"
stepOutputFileName = outFileName + "_" + baseSample.fullName + "_" + \
sampleName + "_"
#stepOutputFileName = os.getcwd()+"/data/overlap_"

launchVoidOverlap(sample, baseSample, workDir+sampleDir,
workDir+baseSampleDir, binPath,
thisDataPortion="central", logFile=logFile,
continueRun=False, workDir=workDir,
outputFile=stepOutputFileName,
matchMethod="useID")
#matchMethod="prox")

# attach columns to summary file
#if iSample == 1:
#  os.system("cp %s %s" % (stepOutputFileName, outFileName))
#else:
#  outFile = open("temp.out", "w")
#  stepOutFile1 = open(outFileName, "r")
#  stepOutFile2 = open(stepOutputFileName, "r")
#
#  for line1 in stepOutFile1:
#    line1 = line1.rstrip()
#    line2 = stepOutFile2.readline()
#    outFile.write(line1+" "+line2)
#
#  os.system("cp %s %s" % ("temp.out", outFileName))
#  outFile.close()
#  stepOutFile1.close()
#  stepOutFile2.close()

#if os.access("mergerTree.log", os.F_OK): os.unlink("mergerTree.log")
#if os.access("temp.out", os.F_OK): os.unlink("temp.out")
#if os.access("thisStep.out", os.F_OK): os.unlink("thisStep.out")

print " Done!"
@@ -1,81 +0,0 @@
#+
# VIDE -- Void IDEntification pipeline -- ./pipeline/datasets/mock_dr9mid.py
# Copyright (C) 2010-2013 Guilhem Lavaux
# Copyright (C) 2011-2013 P. M. Sutter
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; version 2 of the License.
#
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#+
import os

# -----------------------------------------------------------------------------
# -----------------------------------------------------------------------------
# CONFIGURATION

# directory for the input simulation/observational particle files
catalogDir = os.getenv("HOME")+"/workspace/Voids/catalogs/mock_dr9mid/"

# path to HOD code
hodPath = os.getenv("HOME")+"/projects/Voids/hod/HOD.x"

# path to mask
maskFile = os.getenv("HOME")+"/workspace/Voids/catalogs/boss/final_boss_mask.fits")

# where to put the final void catalog, figures, and output logs
voidOutputDir = os.getenv("HOME")+"/workspace/Voids/mock_dr9mid/"
figDir = os.getenv("PWD")+"/../figs/mock_dr9mid/"
logDir = os.getenv("PWD")+"/../logs/mock_dr9mid/"

# where to place the pipeline scripts
scriptDir = os.getenv("PWD")+"/mock_dr9mid/"

# simulation or observation?
dataType = "observation"

# available formats for simulation: gadget, multidark
dataFormat = "multidark"
dataUnit = 1 # as multiple of Mpc/h

# place particles on the lightcone?
useLightCone = True

# list of file numbers for the particle files
# to get particle file name, we take particleFileBase+fileNum
fileNums = (("0.53"))

# redshift range of the mock
redshiftRange = (0.53, 0.6)

# prefix to give all outputs
prefix = "mock_"

# common filename of halo files
haloFileBase = "mdr1_halos_z"

# adjust these two parameters given the memory contraints on your system:
#   numZobovDivisions: how many sub-volumes per dimension will zobov process
#   numZobovThreads: how many sub-volumes to process at once?
numZobovDivisions = 2
numZobovThreads = 2

# simulation information
numPart = 1024*1024*1024
lbox = 1000 # Mpc/h
omegaM = 0.27
hubble = 0.70

# END CONFIGURATION
# -----------------------------------------------------------------------------
# -----------------------------------------------------------------------------
@@ -1,137 +0,0 @@
#+
# VIDE -- Void IDEntification pipeline -- ./pipeline/datasets/multidark.py
# Copyright (C) 2010-2013 Guilhem Lavaux
# Copyright (C) 2011-2013 P. M. Sutter
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; version 2 of the License.
#
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#+
import os

# -----------------------------------------------------------------------------
# -----------------------------------------------------------------------------
# CONFIGURATION

# directory for the input simulation/observational particle files
catalogDir = os.getenv("HOME")+"/workspace/Voids/catalogs/multidark/"

# path to HOD code
hodPath = os.getenv("HOME")+"/projects/Voids/hod/HOD.x"

# where to put the final void catalog, figures, and output logs
voidOutputDir = os.getenv("HOME")+"/workspace/Voids/multidark/"
figDir = os.getenv("PWD")+"/../figs/multidark/"
logDir = os.getenv("PWD")+"/../logs/multidark/"

# where to place the pipeline scripts
scriptDir = os.getenv("PWD")+"/multidark/"

# simulation or observation?
dataType = "simulation"

# available formats for simulation: gadget, multidark
dataFormat = "multidark"
dataUnit = 1 # as multiple of Mpc/h

# place particles on the lightcone?
useLightCone = False

# common filename of particle files
particleFileBase = "mdr1_particles_z"
particleFileDummy = ''

# list of file numbers for the particle files
# to get particle file name, we take particleFileBase+fileNum
#fileNums = ["0.53"]
fileNums = ["0.0"]
#fileNums = ["0.0", "0.53", "1.0"]

# redshift of each file in the above list
#redshifts = ["0.53"]
redshifts = ["0.0"]
#redshifts = ["0.0", "0.53"]
#redshifts = ["0.0", "0.53", "1.0"]

# how many independent slices along the z-axis?
numSlices = 1
#numSlices = 4

# how many subdivisions along the x- and y- axis?
# ( = 2 will make 4 subvolumes for each slice, = 3 will make 9, etc.)
numSubvolumes = 1

# prefix to give all outputs
prefix = "md_"

# list of desired subsamples - these are in unts of h Mpc^-3!
#subSamples = [0.01]
subSamples = [0.1, 0.05, 0.01, 0.002, 0.001, 0.0004, 0.000175, 0.00001]

# common filename of halo files, leave blank to ignore halos
haloFileBase = "mdr1_halos_z"
haloFileDummy = ''

# minimum halo mass cuts to apply for the halo catalog
# use "none" to get all halos
minHaloMasses = [1.2e13]
#minHaloMasses = ["none", 1.2e13]

# locations of data in the halo catalog
haloFileMCol = 6
haloFileXCol = 0
haloFileYCol = 1
haloFileZCol = 2
haloFileVXCol = 3
haloFileVYCol = 4
haloFileVZCol = 5
haloFileColSep = ','
haloFileNumComLines = 0

# adjust these two parameters given the memory contraints on your system:
#   numZobovDivisions: how many sub-volumes per dimension will zobov process
#   numZobovThreads: how many sub-volumes to process at once?
numZobovDivisions = 2
numZobovThreads = 2

# simulation information
numPart = 100000000
#numPart = 2048*2048*2048
lbox = 1000 # Mpc/h
omegaM = 0.27
hubble = 0.70

#galDens = 0.000225
hodParmList = [
{'name' : "dr9mid", #BOSS: Manera et al. 2012, eq. 26
'Mmin' : 0.0,
'M1' : 1.e14,
'sigma_logM' : 0.596,
'alpha' : 1.0127,
'Mcut' : 1.19399e13,
'galDens' : 0.00016,
},

{'name' : "dr7dim2",
'Mmin' : 1.99525e12,
'M1' : 3.80189e13,
'sigma_logM' : 0.21,
'alpha' : 1.12,
'Mcut' : 6.91831e11,
'galDens' : 0.02,
}
]

# END CONFIGURATION
# -----------------------------------------------------------------------------
# -----------------------------------------------------------------------------
@@ -81,6 +81,9 @@ redshifts = ["0.5", "1.0"]
# how many independent slices along the z-axis?
numSlices = 1

# how many slices for analysis?
numAPSlices = 1

# how many subdivisions along the x- and y- axis?
# ( = 2 will make 4 subvolumes for each slice, = 3 will make 9, etc.)
numSubvolumes = 1
|
|||
if stackMode == "fixed":
|
||||
stackInfo = """
|
||||
# {zMin}, {zMax}, {minRadius}
|
||||
newSample.addStack(0.0, 5.0, 5 , 10, False, False)
|
||||
newSample.addStack(0.0, 5.0, 10, 15, False, False)
|
||||
newSample.addStack(0.0, 5.0, 15, 20, False, False)
|
||||
newSample.addStack(0.0, 5.0, 20, 25, False, False)
|
||||
newSample.addStack(0.0, 5.0, 30, 35, False, False)
|
||||
newSample.addStack(0.0, 5.0, 40, 45, False, False)
|
||||
newSample.addStack(0.0, 5.0, 50, 55, False, False)
|
||||
newSample.addStack(0.0, 5.0, 60, 65, False, False)
|
||||
newSample.addStack(0.0, 5.0, 70, 75, False, False)
|
||||
newSample.addStack(0.0, 5.0, 80, 85, False, False)
|
||||
newSample.addStack(0.0, 5.0, 90, 95, False, False)
|
||||
newSample.addStack(0.0, 5.0, 5 , 10, False, False, rescaleMode="rv")
|
||||
newSample.addStack(0.0, 5.0, 10, 15, False, False, rescaleMode="rv")
|
||||
newSample.addStack(0.0, 5.0, 15, 20, False, False, rescaleMode="rv")
|
||||
newSample.addStack(0.0, 5.0, 20, 25, False, False, rescaleMode="rv")
|
||||
newSample.addStack(0.0, 5.0, 30, 35, False, False, rescaleMode="rv")
|
||||
newSample.addStack(0.0, 5.0, 40, 45, False, False, rescaleMode="rv")
|
||||
newSample.addStack(0.0, 5.0, 50, 55, False, False, rescaleMode="rv")
|
||||
newSample.addStack(0.0, 5.0, 60, 65, False, False, rescaleMode="rv")
|
||||
newSample.addStack(0.0, 5.0, 70, 75, False, False, rescaleMode="rv")
|
||||
newSample.addStack(0.0, 5.0, 80, 85, False, False, rescaleMode="rv")
|
||||
newSample.addStack(0.0, 5.0, 90, 95, False, False, rescaleMode="rv")
|
||||
"""
|
||||
|
||||
elif stackMode == "auto":
|
||||
stackInfo = """
|
||||
newSample.addStack({zMin}, {zMax}, 2*{minRadius} , 2*{minRadius}+2, True, False)
|
||||
newSample.addStack({zMin}, {zMax}, 2*{minRadius} , 2*{minRadius}+4, True, False)
|
||||
newSample.addStack({zMin}, {zMax}, 2*{minRadius}+2, 2*{minRadius}+6, True, False)
|
||||
newSample.addStack({zMin}, {zMax}, 2*{minRadius}+6, 2*{minRadius}+10, True, False)
|
||||
newSample.addStack({zMin}, {zMax}, 2*{minRadius}+10, 2*{minRadius}+18, True, False)
|
||||
newSample.addStack({zMin}, {zMax}, 2*{minRadius}+18, 2*{minRadius}+24, True, False)
|
||||
newSample.addStack({zMin}, {zMax}, 2*{minRadius} , 2*{minRadius}+2, True, False, rescaleMode="rv")
|
||||
newSample.addStack({zMin}, {zMax}, 2*{minRadius} , 2*{minRadius}+4, True, False, rescaleMode="rv")
|
||||
newSample.addStack({zMin}, {zMax}, 2*{minRadius}+2, 2*{minRadius}+6, True, False, rescaleMode="rv")
|
||||
newSample.addStack({zMin}, {zMax}, 2*{minRadius}+6, 2*{minRadius}+10, True, False, rescaleMode="rv")
|
||||
newSample.addStack({zMin}, {zMax}, 2*{minRadius}+10, 2*{minRadius}+18, True, False, rescaleMode="rv")
|
||||
newSample.addStack({zMin}, {zMax}, 2*{minRadius}+18, 2*{minRadius}+24, True, False, rescaleMode="rv")
|
||||
"""
|
||||
else:
|
||||
stackInfo = """
|
||||
|
@ -259,8 +259,8 @@ newSample.addStack({zMin}, {zMax}, 2*{minRadius}+18, 2*{minRadius}+24, True, Fal
|
|||
|
||||
sliceMin = "%0.2f" % sliceMin
|
||||
sliceMax = "%0.2f" % sliceMax
|
||||
sliceMinMpc = "%0.1f" % sliceMinMpc
|
||||
sliceMaxMpc = "%0.1f" % sliceMaxMpc
|
||||
sliceMinMpc = "%0.2f" % sliceMinMpc
|
||||
sliceMaxMpc = "%0.2f" % sliceMaxMpc
|
||||
|
||||
if (dataFileNameList != None):
|
||||
dataFileName = dataFileNameList[iFile]
|
||||
|
@ -295,9 +295,15 @@ newSample.addStack({zMin}, {zMax}, 2*{minRadius}+18, 2*{minRadius}+24, True, Fal
|
|||
useLightCone=useLightCone,
|
||||
subsample=str(subsample).strip('[]')))
|
||||
|
||||
scriptFile.write(stackInfo.format(zMin=sliceMin,
|
||||
zMax=sliceMax,
|
||||
minRadius=minRadius))
|
||||
for iAPSlice in xrange(numAPSlices):
|
||||
sliceWidth = float(sliceMax) - float(sliceMin)
|
||||
sliceAPMin = float(sliceMin) + iAPSlice*sliceWidth/numAPSlices
|
||||
sliceAPMax = float(sliceMin) + (iAPSlice+1)*sliceWidth/numAPSlices
|
||||
sliceAPMin = "%0.2f" % sliceAPMin
|
||||
sliceAPMax = "%0.2f" % sliceAPMax
|
||||
scriptFile.write(stackInfo.format(zMin=sliceAPMin,
|
||||
zMax=sliceAPMax,
|
||||
minRadius=minRadius))
|
||||
|
||||
|
||||
scriptFile.close()
|
||||
|
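The new loop above splits each redshift slice into numAPSlices equal sub-slices before writing the stack definitions, so the AP analysis can run on narrower redshift bins. The sub-slice bounds reduce to simple arithmetic; a standalone restatement (function name is illustrative):

    def ap_subslices(slice_min, slice_max, num_ap_slices):
        width = float(slice_max) - float(slice_min)
        bounds = []
        for i in range(num_ap_slices):
            lo = float(slice_min) + i * width / num_ap_slices
            hi = float(slice_min) + (i + 1) * width / num_ap_slices
            bounds.append(("%0.2f" % lo, "%0.2f" % hi))
        return bounds

    print(ap_subslices(0.0, 0.5, 2))  # [('0.00', '0.25'), ('0.25', '0.50')]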
@@ -390,7 +396,7 @@ for iSubSample in xrange(len(subSamples)):
dataFileNameList=partFileList)

if args.subsample or args.all:
if (args.subsample or args.all) and doSubSampling:
print " Doing subsample", thisSubSample
sys.stdout.flush()

@@ -422,7 +428,7 @@ for iSubSample in xrange(len(subSamples)):
rescale_position = hubble/1000./scale
shift = lbox/2.
rescale_velocity = 3.08567802e16/3.1558149984e16
command = "%s %s x y z vz vy vx | awk '{print $1*%g+%g, $2*%g+%g, $3*%g+%g, $4*%g, $5*%g, $6*%g}' > %s" % (SDFcvt_PATH, dataFile,
command = "%s %s x y z vz vy vx mass | awk '{print $1*%g+%g, $2*%g+%g, $3*%g+%g, $4*%g, $5*%g, $6*%g, $7}' > %s" % (SDFcvt_PATH, dataFile,
rescale_position,
shift,
rescale_position,

@@ -470,12 +476,14 @@ for iSubSample in xrange(len(subSamples)):
vz = float(line[3])
vy = float(line[4])
vx = float(line[5])
mass = float(line[6])
uniqueID = i
outFile.write("%d %e %e %e %e %e %e\n" %(uniqueID,x,y,z,vz,vy,vx))
outFile.write("%d %e %e %e %e %e %e %e\n" %(uniqueID,x,y,z,
vz,vy,vx,mass))
else:
outFile.write(line)

outFile.write("-99 -99 -99 -99 -99 -99 -99\n")
outFile.write("-99 -99 -99 -99 -99 -99 -99 -99\n")
inFile.close()
outFile.close()

@@ -497,9 +505,9 @@ for iSubSample in xrange(len(subSamples)):
y = np.random.uniform()*lbox
z = np.random.uniform()*lbox

outFile.write("%d %e %e %e 0. 0. 0.\n" % (i, x,y,z))
outFile.write("%d %e %e %e 0. 0. 0. 0.\n" % (i, x,y,z))

outFile.write("-99 -99 -99 -99 -99 -99 -99\n")
outFile.write("-99 -99 -99 -99 -99 -99 -99 -99\n")
outFile.close()

prevSubSample = thisSubSample

@@ -597,10 +605,10 @@ if (args.halos or args.all) and haloFileBase != "":
if dataFormat == "sdf":
SDFcvt_PATH = "@CMAKE_BINARY_DIR@/external/libsdf/apps/SDFcvt/SDFcvt.x86_64"
if minHaloMass == "none": minHaloMass = 0.0
command = "%s %s mass id x y z vz vy vx | awk '{if ($1>%g) print $2, $3, $4, $5, $6, $7, $8}'>>%s" % (SDFcvt_PATH, dataFile, minHaloMass, outFileName )
command = "%s %s mass id x y z vz vy vx | awk '{if ($1>%g) print $2, $3, $4, $5, $6, $7, $8, $1}'>>%s" % (SDFcvt_PATH, dataFile, minHaloMass, outFileName )
os.system(command)
outFile = open(outFileName, 'a')
outFile.write("-99 -99 -99 -99 -99 -99 -99\n")
outFile.write("-99 -99 -99 -99 -99 -99 -99 -99\n")
outFile.close()
else:
outFile = open(outFileName, 'a')

@@ -615,11 +623,13 @@ if (args.halos or args.all) and haloFileBase != "":
vz = float(line[haloFileVZCol])
vy = float(line[haloFileVYCol])
vx = float(line[haloFileVXCol])
mass = float(line[haloFileMCol])

# write to output file
outFile.write("%d %e %e %e %e %e %e\n" %(iHalo,x,y,z,vz,vy,vx))
outFile.write("%d %e %e %e %e %e %e %e\n" %(iHalo,x,y,z,
vz,vy,vx,mass))

outFile.write("-99 -99 -99 -99 -99 -99 -99\n")
outFile.write("-99 -99 -99 -99 -99 -99 -99 -99\n")
outFile.close()
inFile.close()
|
|||
# classes and routines used to support scripts
|
||||
|
||||
import os
|
||||
from numpy import abs
|
||||
|
||||
LIGHT_SPEED = 299792.458
|
||||
|
||||
|
@ -188,6 +189,15 @@ def getStackSuffix(zMin, zMax, rMin, rMax, dataPortion, customLine=""):
|
|||
return "z"+str(zMin)+"-"+str(zMax)+"_"+str(rMin)+"-"+str(rMax)+\
|
||||
"Mpc"+customLine+"_"+dataPortion
|
||||
|
||||
def getPeriodic(sample):
|
||||
periodicLine = ""
|
||||
if sample.dataType != "observation":
|
||||
if sample.numSubvolumes == 1: periodicLine += "xy"
|
||||
if abs(sample.zBoundaryMpc[1] - sample.zBoundaryMpc[0] - \
|
||||
sample.boxLen) <= 1.e-1:
|
||||
periodicLine += "z"
|
||||
return periodicLine
|
||||
|
||||
def my_import(name):
|
||||
mod = __import__(name)
|
||||
components = name.split('.')
|
||||
|
|
|
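getPeriodic centralizes the periodic-boundary string that was previously rebuilt in each launcher: "xy" when the sample is a single, unsplit subvolume of a simulation box, plus "z" when the slice spans essentially the full box depth. A usage sketch (the Sample stub below is illustrative, and the import path is assumed rather than confirmed by this diff):

    from void_python_tools.backend import getPeriodic  # assumed import location

    class Sample(object):
        dataType = "simulation"
        numSubvolumes = 1
        zBoundaryMpc = (0.0, 1000.0)
        boxLen = 1000.0

    periodicLine = " --periodic='" + getPeriodic(Sample()) + "'"
    print(periodicLine)   # --periodic='xyz' for a full, unsplit box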
@@ -84,9 +84,8 @@ def launchGenerate(sample, binPath, workDir=None, inputDataDir=None,

parmFile = os.getcwd()+"/generate_"+sample.fullName+".par"

file(parmFile, mode="w").write(conf)

if regenerate or not (continueRun and jobSuccessful(logFile, "Done!\n")):
file(parmFile, mode="w").write(conf)
cmd = "%s --configFile=%s &> %s" % (binPath,parmFile,logFile)
os.system(cmd)
if jobSuccessful(logFile, "Done!\n"):

@@ -98,8 +97,7 @@ def launchGenerate(sample, binPath, workDir=None, inputDataDir=None,
else:
print "already done!"

if os.access(parmFile, os.F_OK):
os.unlink(parmFile)
if os.access(parmFile, os.F_OK): os.unlink(parmFile)

if os.access("contour_map.fits", os.F_OK):
os.system("mv %s %s" % ("contour_map.fits", zobovDir))

@@ -316,18 +314,20 @@ def launchPrune(sample, binPath,
totalPart = open(zobovDir+"/total_particles.txt", "r").read()
maxDen = 0.2*float(mockIndex)/float(totalPart)
observationLine = " --isObservation"
periodicLine = " --periodic=''"
#periodicLine = " --periodic=''"
else:
mockIndex = -1
maxDen = 0.2
observationLine = ""

periodicLine = " --periodic='"
if sample.numSubvolumes == 1: periodicLine += "xy"
if np.abs(sample.zBoundaryMpc[1] - sample.zBoundaryMpc[0] - \
sample.boxLen) <= 1.e-1:
periodicLine += "z"
periodicLine += "' "
#periodicLine = " --periodic='"
#if sample.numSubvolumes == 1: periodicLine += "xy"
#if np.abs(sample.zBoundaryMpc[1] - sample.zBoundaryMpc[0] - \
#                sample.boxLen) <= 1.e-1:
#  periodicLine += "z"
#periodicLine += "' "

periodicLine = " --periodic='" + getPeriodic(sample) + "'"

if not (continueRun and (jobSuccessful(logFile, "NetCDF: Not a valid ID\n") \
or jobSuccessful(logFile, "Done!\n"))):

@@ -361,7 +361,7 @@ def launchPrune(sample, binPath,
print "done"
else:
print "FAILED!"
exit(-1)
#exit(-1)

else:
print "already done!"

@@ -377,12 +377,12 @@ def launchVoidOverlap(sample1, sample2, sample1Dir, sample2Dir,
sampleName1 = sample1.fullName
sampleName2 = sample2.fullName

periodicLine = " --periodic='"
if sample1.dataType != "observation":
if sample1.numSubvolumes == 1: periodicLine += "xy"
if np.abs(sample1.zBoundaryMpc[1] - sample1.zBoundaryMpc[0] - sample1.boxLen) <= 1.e-1:
periodicLine += "z"
periodicLine += "' "
periodicLine = " --periodic='" + getPeriodic(sample1) + "' "
#if sample1.dataType != "observation":
#  if sample1.numSubvolumes == 1: periodicLine += "xy"
#  if np.abs(sample1.zBoundaryMpc[1] - sample1.zBoundaryMpc[0] - sample1.boxLen) <= 1.e-1:
#    periodicLine += "z"
#periodicLine += "' "

if not (continueRun and jobSuccessful(logFile, "Done!\n")):
cmd = binPath

@@ -390,12 +390,14 @@ def launchVoidOverlap(sample1, sample2, sample1Dir, sample2Dir,
str(sampleName1)
cmd += " --volFile1=" + sample1Dir+"/vol_" + \
str(sampleName1)+".dat"
cmd += " --voidFile1=" + sample1Dir+"/untrimmed_voidDesc_" + \
cmd += " --voidFile1=" + sample1Dir+"/trimmed_nodencut_voidDesc_" + \
thisDataPortion+"_"+str(sampleName1)+".out"
cmd += " --infoFile1=" + sample1Dir+"/zobov_slice_" + \
str(sampleName1)+".par"
cmd += " --centerFile1=" + sample1Dir + \
"/untrimmed_centers_"+thisDataPortion+"_"+str(sampleName1)+".out"
"/trimmed_nodencut_barycenters_"+thisDataPortion+"_"+str(sampleName1)+".out"
cmd += " --shapeFile1=" + sample1Dir + \
"/trimmed_nodencut_shapes_"+thisDataPortion+"_"+str(sampleName1)+".out"
cmd += " --zoneFile1=" + sample1Dir+"/voidZone_" + \
str(sampleName1)+".dat"
cmd += " --zonePartFile1=" + sample1Dir+"/voidPart_" + \

@@ -405,12 +407,14 @@ def launchVoidOverlap(sample1, sample2, sample1Dir, sample2Dir,
str(sampleName2)
cmd += " --volFile2=" + sample2Dir+"/vol_" + \
str(sampleName2)+".dat"
cmd += " --voidFile2=" + sample2Dir+"/untrimmed_voidDesc_" + \
cmd += " --voidFile2=" + sample2Dir+"/trimmed_nodencut_voidDesc_" + \
thisDataPortion+"_"+str(sampleName2)+".out"
cmd += " --infoFile2=" + sample2Dir+"/zobov_slice_" + \
str(sampleName2)+".par"
cmd += " --centerFile2=" + sample2Dir + \
"/untrimmed_centers_"+thisDataPortion+"_"+str(sampleName2)+".out"
"/trimmed_nodencut_barycenters_"+thisDataPortion+"_"+str(sampleName2)+".out"
cmd += " --shapeFile2=" + sample1Dir + \
"/trimmed_nodencut_shapes_"+thisDataPortion+"_"+str(sampleName1)+".out"
cmd += " --zoneFile2=" + sample2Dir+"/voidZone_" + \
str(sampleName2)+".dat"
cmd += " --zonePartFile2=" + sample2Dir+"/voidPart_" + \
@@ -506,6 +510,17 @@ def launchStack(sample, stack, binPath, thisDataPortion=None, logDir=None,
rMinToUse = stack.rMin
rMaxToUse = stack.rMax

periodicLine = "periodic='" + getPeriodic(sample) + "'"
#if sample.dataType == "observation":
#  periodicLine = "periodic=''"
#else:
#  periodicLine = "periodic='"
#  if sample.numSubvolumes == 1: periodicLine += "xy"
#  if np.abs(sample.zBoundaryMpc[1] - sample.zBoundaryMpc[0] - \
#             sample.boxLen) <= 1.e-1:
#    periodicLine += "z"
#  periodicLine += "' "

conf="""
desc %s
partzone %s

@@ -529,6 +544,7 @@ def launchStack(sample, stack, binPath, thisDataPortion=None, logDir=None,
boundaryDistances %s
%s
%s
%s
""" % \
(zobovDir+"/voidDesc_"+thisDataPortion+"_"+sampleName+".out",
zobovDir+"/voidPart_"+sampleName+".dat",

@@ -551,11 +567,11 @@ def launchStack(sample, stack, binPath, thisDataPortion=None, logDir=None,
zobovDir+"/barycenters_"+thisDataPortion+"_"+sampleName+".out",
zobovDir+"/boundaryDistances_"+thisDataPortion+"_"+sampleName+".out",
rescaleFlag,
idListFlag
idListFlag,
periodicLine
)

parmFile = os.getcwd()+("/%sstack_"%prefixRun)+sample.fullName+".par"

fp = file(parmFile, mode="w")
fp.write(conf)

@@ -584,6 +600,7 @@ def launchStack(sample, stack, binPath, thisDataPortion=None, logDir=None,

else:
print "already done!"
if os.access(parmFile, os.F_OK): os.unlink(parmFile)
return

minDist = None

@@ -662,7 +679,17 @@ def launchStack(sample, stack, binPath, thisDataPortion=None, logDir=None,
nbar = 1.0
else:
nbar = 1.0
boxVol = sample.boxLen**3

iX = float(sample.mySubvolume[0])
iY = float(sample.mySubvolume[1])
xMin = iX/sample.numSubvolumes * sample.boxLen
yMin = iY/sample.numSubvolumes * sample.boxLen
xMax = (iX+1)/sample.numSubvolumes * sample.boxLen
yMax = (iY+1)/sample.numSubvolumes * sample.boxLen
zMin = sample.zBoundaryMpc[0]
zMax = sample.zBoundaryMpc[1]

boxVol = (xMax-xMin)*(yMax-yMin)*(zMax-zMin)

summaryLine = runSuffix + " " + \
thisDataPortion + " " + \
@@ -1091,10 +1118,8 @@ def launchFit(sample, stack, logFile=None, voidDir=None, figDir=None,
else:
rescaleFactor = 1.0

vp.draw_fit(*args, delta_tick=0.2, vmax=10.0, delta_v=0.1,
nocontour=True, model_max=10.0, delta_model=1.0,
#vp.draw_fit(*args, delta_tick=0.2, vmax=1.0, delta_v=0.01,
#            nocontour=True, model_max=1.0, delta_model=0.1,
vp.draw_fit(*args, delta_tick=0.2, vmax=1.0, delta_v=0.01,
nocontour=True, model_max=1.0, delta_model=0.1,
plotTitle=plotTitle, show_ratio=showRatio,
showContours=showContours,
rescaleFactor=rescaleFactor)

@@ -1513,14 +1538,20 @@ def launchLikelihood(dataPortions=None, logDir=None, workDir=None,

# -----------------------------------------------------------------------------
def launchEstimateErrorBars(workDir=None, nullDir=None, numNulls=None,
dataPortion=None):
dataPortion=None, nullDirList=None):

IVALUE = 0
IERROR = 1

# open first null file to get sizes
nullFile = nullDir + "/" + str(1) + "/calculatedExpansions_" + \
str(dataPortion) + ".txt"
if nullDirList == None:
nullFile = nullDir + "/" + str(1) + "/calculatedExpansions_" + \
str(dataPortion) + ".txt"
else:
nullFile = nullDirList[0] + "/calculatedExpansions_" + str(dataPortion) + \
".txt"
numNulls = len(nullDirList)

fp = open(nullFile, "r")
binLine = fp.readline()
numSamples = int(binLine.split(" ")[0])

@@ -1534,8 +1565,13 @@ def launchEstimateErrorBars(workDir=None, nullDir=None, numNulls=None,
nullData = np.zeros([numNulls, numBins, 2])

for iNull in xrange(numNulls):
nullFile = nullDir + "/" + str(iNull) + "/calculatedExpansions_" + \
str(dataPortion) + ".txt"
if nullDirList == None:
nullFile = nullDir + "/" + str(iNull) + "/calculatedExpansions_" + \
str(dataPortion) + ".txt"
else:
nullFile = nullDirList[iNull] + "/calculatedExpansions_" + \
str(dataPortion) + ".txt"

fp = open(nullFile, "r")

binLine = fp.readline()
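launchEstimateErrorBars can now take an explicit nullDirList instead of the numbered nullDir/<i>/ layout, and the per-null expansion file is resolved the same way in both the size-probing read and the main loop. That selection reduces to the following (a condensed restatement of the branch above; the helper name is illustrative):

    def null_expansion_file(i_null, data_portion, null_dir=None, null_dir_list=None):
        # an explicit directory list takes precedence over the numbered
        # nullDir/<i>/ convention, as in the hunks above
        if null_dir_list is None:
            return "%s/%d/calculatedExpansions_%s.txt" % (null_dir, i_null, data_portion)
        return "%s/calculatedExpansions_%s.txt" % (null_dir_list[i_null], data_portion)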