Compare commits
No commits in common. "d6222df77d0bb75b39ad11b9eb2251bbd9e61956" and "78d747d5aa265806a62b5625e4d9375785d3628e" have entirely different histories.
d6222df77d ... 78d747d5aa
27 changed files with 342 additions and 1406 deletions
.gitignore (vendored, 21 changes)

@@ -1,3 +1,10 @@
+# Ignore feature_* directory (usually used as git worktree)
+feature_*/
+
+# Ignore extensions directory
+extensions/*
+!extensions/__init__.py
+
 # Ignore some symbolic links
 install
 
@@ -7,5 +14,17 @@ install
 .vscode/
 *.pyc
 __pycache__/
-sbmy_control.egg-info/
+pysbmy.egg-info/
 *.h5
+*.rng
+*.gadget1
+*.gadget2
+*.gadget3
+log.txt
+testpdf.txt
+tests/
+params/
+results/
+slurm_logs/
+slurm_scripts/
+work/
@@ -3,9 +3,9 @@ def main_ICs(parsed_args):
     """
     Main function for the initial conditions generator.
     """
-    from .args_main import parse_arguments_main
-    from .parameters_card import parse_arguments_card_for_ICs
-    from .low_level import print_starting_module, print_message, print_ending_module
+    from args_main import parse_arguments_main
+    from parameters_card import parse_arguments_card_for_ICs
+    from low_level import print_starting_module, print_message, print_ending_module
     from os.path import isfile
 
     main_dict = parse_arguments_main(parsed_args)
@@ -52,10 +52,10 @@ def main_ICs(parsed_args):
 
 
 def ICs_monofonic(parsed_args):
-    from .monofonic import main_monofonic
+    from monofonic import main_monofonic
     from os.path import isfile
-    from .low_level import print_message
-    from .parameters_monofonic import parse_arguments_monofonic
+    from low_level import print_message
+    from parameters_monofonic import parse_arguments_monofonic
 
     monofonic_dict = parse_arguments_monofonic(parsed_args)
 
@@ -67,9 +67,9 @@ def ICs_monofonic(parsed_args):
 
 
 def ICs_sbmy(parsed_args):
-    from .low_level import print_starting_module, print_message
+    from low_level import print_starting_module, print_message
     from os.path import isfile
-    from .parameters_card import parse_arguments_card_for_ICs
+    from parameters_card import parse_arguments_card_for_ICs
 
     print_starting_module("sbmy IC", verbose=parsed_args.verbose)
 
@@ -99,14 +99,14 @@ def ICs_sbmy(parsed_args):
 
 
 def create_sbmy_power_spectrum_file(parsed_args, card_dict, power_spectrum_file):
-    from .cosmo_params import parse_arguments_cosmo
+    from cosmo_params import parse_arguments_cosmo
     from pysbmy.power import PowerSpectrum
 
     cosmo_dict = parse_arguments_cosmo(parsed_args)
 
     if parsed_args.verbose < 2:
         from io import BytesIO
-        from .low_level import stdout_redirector, stderr_redirector
+        from low_level import stdout_redirector, stderr_redirector
         f = BytesIO()
         g = BytesIO()
         with stdout_redirector(f):
@@ -138,7 +138,7 @@ def create_sbmy_power_spectrum_file(parsed_args, card_dict, power_spectrum_file)
 def create_sbmy_white_noise_field(parsed_args, card_dict, white_noise_field_file):
     import numpy as np
     from gc import collect
-    from .low_level import print_message
+    from low_level import print_message
     from pysbmy.field import BaseField
 
     print_message(f"Seed: {parsed_args.seed}", 3, "sbmy IC", verbose=parsed_args.verbose)
@@ -161,7 +161,7 @@ def create_sbmy_white_noise_field(parsed_args, card_dict, white_noise_field_file
 
     if parsed_args.verbose < 2:
         from io import BytesIO
-        from .low_level import stdout_redirector, stderr_redirector
+        from low_level import stdout_redirector, stderr_redirector
         f = BytesIO()
         g = BytesIO()
         with stdout_redirector(f):
@@ -175,11 +175,11 @@ def create_sbmy_white_noise_field(parsed_args, card_dict, white_noise_field_file
 
 if __name__ == "__main__":
     from argparse import ArgumentParser
-    from .args_main import register_arguments_main
-    from .parameters_card import register_arguments_card_for_ICs
-    from .cosmo_params import register_arguments_cosmo
-    from .parameters_monofonic import register_arguments_monofonic
-    from .slurm_submission import register_arguments_slurm
+    from args_main import register_arguments_main
+    from parameters_card import register_arguments_card_for_ICs
+    from cosmo_params import register_arguments_cosmo
+    from parameters_monofonic import register_arguments_monofonic
+    from slurm_submission import register_arguments_slurm
 
     parser = ArgumentParser(description="Generate initial conditions for a Simbelmyne simulation.")
     # TODO: reduce the volume of arguments
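Both sides of this file share the quiet-mode pattern visible in the context lines: when verbosity is below 2, pysbmy's console output is captured into in-memory buffers via the project's stdout_redirector/stderr_redirector context managers from low_level. A minimal sketch of that pattern, assuming only that the two redirectors behave as their names suggest; the wrapper name write_quietly is hypothetical:

from io import BytesIO

def write_quietly(write_fn, *args, verbose=1, **kwargs):
    # Below verbosity 2, swallow whatever the wrapped pysbmy call prints.
    if verbose < 2:
        f = BytesIO()
        g = BytesIO()
        with stdout_redirector(f):
            with stderr_redirector(g):
                write_fn(*args, **kwargs)
    else:
        write_fn(*args, **kwargs)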
__init__.py (new binary file). Binary file not shown.
@@ -1,29 +1,10 @@
 import numpy as np
-import os
 
 kmin = 1e-1
 kmax = 2e0
 Nk = 50
 AliasingCorr=False
 
-def crop_field(field, Ncrop):
-
-    if Ncrop is None or Ncrop == 0:
-        return
-
-    elif Ncrop > 0:
-        field.data = field.data[Ncrop:-Ncrop, Ncrop:-Ncrop, Ncrop:-Ncrop]
-        d0 = field.L0/field.N0
-        d1 = field.L1/field.N1
-        d2 = field.L2/field.N2
-        field.N0 -= 2*Ncrop
-        field.N1 -= 2*Ncrop
-        field.N2 -= 2*Ncrop
-        field.L0 = field.N0*d0
-        field.L1 = field.N1*d1
-        field.L2 = field.N2*d2
-
-
 def get_power_spectrum(field, kmin=kmin, kmax=kmax, Nk=Nk, G=None):
     from pysbmy.power import PowerSpectrum
     from pysbmy.fft import FourierGrid
@@ -34,10 +15,10 @@ def get_power_spectrum(field, kmin=kmin, kmax=kmax, Nk=Nk, G=None):
         field.L0,
         field.L1,
         field.L2,
-        int(field.N0),
-        int(field.N1),
-        int(field.N2),
-        k_modes=np.concat([PowerSpectrum(field.L0,field.L1,field.L2,int(field.N0),int(field.N1),int(field.N2),).FourierGrid.k_modes[:10],np.logspace(
+        field.N0,
+        field.N1,
+        field.N2,
+        k_modes=np.concat([PowerSpectrum(field.L0,field.L1,field.L2,field.N0,field.N1,field.N2,).FourierGrid.k_modes[:10],np.logspace(
            np.log10(kmin),
            np.log10(kmax),
            Nk,
@@ -62,10 +43,10 @@ def get_cross_correlations(field_A, field_B, kmin=kmin, kmax=kmax, Nk=Nk, G=None
         field_A.L0,
         field_A.L1,
         field_A.L2,
-        int(field_A.N0),
-        int(field_A.N1),
-        int(field_A.N2),
-        k_modes=np.concat([PowerSpectrum(field_A.L0,field_A.L1,field_A.L2,int(field_A.N0),int(field_A.N1),int(field_A.N2),).FourierGrid.k_modes[:10],np.logspace(
+        field_A.N0,
+        field_A.N1,
+        field_A.N2,
+        k_modes=np.concat([PowerSpectrum(field_A.L0,field_A.L1,field_A.L2,field_A.N0,field_A.N1,field_A.N2,).FourierGrid.k_modes[:10],np.logspace(
            np.log10(kmin),
            np.log10(kmax),
            Nk,
@@ -110,8 +91,7 @@ def plot_power_spectra(filenames,
        figsize=(8,4),
        dpi=300,
        ax=None,
-       fig=None,
-       Ncrop=None,):
+       fig=None,):
 
     import matplotlib.pyplot as plt
     from pysbmy.field import read_field
@@ -130,7 +110,6 @@ def plot_power_spectra(filenames,
 
     for i, filename in enumerate(filenames):
         field = read_field(filename)
-        crop_field(field, Ncrop)
         _, G, k, _ = add_power_spectrum_to_plot(ax=ax,
            field=field,
            Pk_ref=Pk_ref,
@@ -149,7 +128,7 @@ def plot_power_spectra(filenames,
     ax.set_ylim(ylims)
     if yticks is not None:
         ax.set_yticks(yticks)
-    ax.set_xlabel(r'$k$ [$h/\mathrm{Mpc}$]', labelpad=-0)
+    ax.set_xlabel(r'$k$ [$h/\mathrm{Mpc}$]', labelpad=-10)
 
     if Pk_ref is not None:
         ax.set_ylabel(r'$P(k)/P_\mathrm{ref}(k)$')
@@ -184,9 +163,7 @@ def plot_cross_correlations(filenames_A,
        figsize=(8,4),
        dpi=300,
        ax=None,
-       fig=None,
-       Ncrop=None,
-       ):
+       fig=None,):
 
     import matplotlib.pyplot as plt
     from pysbmy.field import read_field
@@ -204,11 +181,9 @@ def plot_cross_correlations(filenames_A,
         markers = [None for f in filenames_A]
 
     field_B = read_field(filename_B)
-    crop_field(field_B, Ncrop)
 
     for i, filename_A in enumerate(filenames_A):
         field_A = read_field(filename_A)
-        crop_field(field_A, Ncrop)
         _, G, k, _ = add_cross_correlations_to_plot(ax=ax,
            field_A=field_A,
            field_B=field_B,
@@ -227,7 +202,7 @@ def plot_cross_correlations(filenames_A,
     ax.set_ylim(ylims)
     if yticks is not None:
         ax.set_yticks(yticks)
-    ax.set_xlabel(r'$k$ [$h/\mathrm{Mpc}$]', labelpad=-0)
+    ax.set_xlabel(r'$k$ [$h/\mathrm{Mpc}$]', labelpad=-10)
     ax.set_ylabel('$R(k)$')
 
     if bound1 is not None:
@@ -263,15 +238,13 @@ def get_ylims_and_yticks(ylims):
     return ylims, yticks
 
 
-
-def console_main():
-
+if __name__ == "__main__":
     from argparse import ArgumentParser
     parser = ArgumentParser(description='Plot power spectra of fields')
 
     parser.add_argument('-ps', '--power_spectrum', action='store_true', help='Plot power spectra.')
     parser.add_argument('-cc', '--cross_correlation', action='store_true', help='Plot cross correlations.')
-    parser.add_argument('-d', '--directory', type=str, default='./', help='Directory containing the fields files.')
+    parser.add_argument('-d', '--directory', type=str, required=True, help='Directory containing the fields files.')
     parser.add_argument('-ref', '--reference', type=str, default=None, help='Reference field file.')
     parser.add_argument('-f', '--filenames', type=str, nargs='+', required=True, help='Field files to be plotted.')
     parser.add_argument('-o', '--output', type=str, default=None, help='Output plot file name.')
@@ -282,7 +255,6 @@ def console_main():
     parser.add_argument('-t','--title', type=str, default=None, help='Title of the plot.')
     parser.add_argument('-yrp', '--ylim_power', type=float, nargs=2, default=[0.9,1.1], help='Y-axis limits.')
     parser.add_argument('-yrc', '--ylim_corr', type=float, nargs=2, default=[0.99,1.001], help='Y-axis limits.')
-    parser.add_argument('--crop', type=int, default=None, help='Remove the outter N pixels of the fields.')
 
     args = parser.parse_args()
 
@@ -290,15 +262,9 @@ def console_main():
         print('You must choose between power_spectrum and cross_correlation.')
         exit()
 
-    for _k,f in enumerate(args.filenames):
-        if not os.path.exists(args.directory+f):
-            raise FileNotFoundError(f"File {args.directory+f} does not exist.")
-
     if args.reference is not None:
         from pysbmy.field import read_field
-        F_ref = read_field(args.directory+args.reference)
-        crop_field(F_ref, args.crop)
-        G, _, Pk_ref = get_power_spectrum(F_ref, kmin=kmin, kmax=kmax, Nk=Nk)
+        G, _, Pk_ref = get_power_spectrum(read_field(args.directory+args.reference), kmin=kmin, kmax=kmax, Nk=Nk)
     else:
         Pk_ref = None
         G = None
@@ -313,7 +279,6 @@ def console_main():
     if args.power_spectrum and args.cross_correlation:
         import matplotlib.pyplot as plt
         fig, axes = plt.subplots(2, 1, figsize=(8,8))
-        fig.subplots_adjust(hspace=0.3)
         plot_power_spectra(filenames=filenames,
            labels=args.labels,
            colors=args.colors,
@@ -329,9 +294,7 @@ def console_main():
            kmax=kmax,
            Nk=Nk,
            ax=axes[0],
-           fig=fig,
-           Ncrop=args.crop,
-           )
+           fig=fig)
 
         plot_cross_correlations(filenames_A=filenames,
            filename_B=args.directory+args.reference,
@@ -348,9 +311,7 @@ def console_main():
            kmax=kmax,
            Nk=Nk,
            ax=axes[1],
-           fig=fig,
-           Ncrop=args.crop,
-           )
+           fig=fig)
 
         axes[1].legend(loc='lower left')
         axes[0].set_title("Power Spectrum")
@@ -373,9 +334,7 @@ def console_main():
            bound2=0.02,
            kmin=kmin,
            kmax=kmax,
-           Nk=Nk,
-           Ncrop=args.crop,
-           )
+           Nk=Nk)
         ax.legend()
         if args.title is not None:
             ax.set_title(args.title)
@@ -394,9 +353,7 @@ def console_main():
            bound2=0.002,
            kmin=kmin,
            kmax=kmax,
-           Nk=Nk,
-           Ncrop=args.crop,
-           )
+           Nk=Nk)
         ax.legend(loc='lower left')
         if args.title is not None:
             ax.set_title(args.title)
@@ -407,6 +364,3 @@ def console_main():
         fig.savefig(args.directory+'power_spectrum.png')
 
 
-if __name__ == "__main__":
-    console_main()
-
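A note on the k_modes construction kept on both sides above: np.concat is a NumPy 2.0 alias of np.concatenate, so the same binning can be written portably for older NumPy releases. A small sketch with placeholder values standing in for FourierGrid.k_modes[:10]:

import numpy as np

kmin, kmax, Nk = 1e-1, 2e0, 50            # module-level defaults of the script above
low_k = np.array([0.005, 0.01, 0.02])     # placeholder for FourierGrid.k_modes[:10]
k_modes = np.concatenate([low_k, np.logspace(np.log10(kmin), np.log10(kmax), Nk)])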
analysis/slices.py (new file, 176 lines)
@@ -0,0 +1,176 @@
+import numpy as np
+
+import sys
+sys.path.append('/home/aubin/Simbelmyne/sbmy_control/')
+from cosmo_params import register_arguments_cosmo, parse_arguments_cosmo
+
+fs = 18
+fs_titles = fs -4
+
+def plot_imshow_with_reference( data_list,
+                                reference=None,
+                                titles=None,
+                                vmin=None,
+                                vmax=None,
+                                L=None,
+                                cmap='viridis'):
+    """
+    Plot the imshow of a list of 2D arrays with two rows: one for the data itself,
+    one for the data compared to a reference. Each row will have a common colorbar.
+
+    Parameters:
+    - data_list: list of 2D arrays to be plotted
+    - reference: 2D array to be used as reference for comparison
+    - titles: list of titles for each subplot
+    - cmap: colormap to be used for plotting
+    """
+    import matplotlib.pyplot as plt
+
+    if titles is None:
+        titles = [None for f in data_list]
+
+    if L is None:
+        L = [len(data) for data in data_list]
+    elif isinstance(L, int) or isinstance(L, float):
+        L = [L for data in data_list]
+
+    sep = 10 if L[0] < 50 else 20 if L[0] < 200 else 100
+    ticks = [np.arange(0, l+1, sep)*len(dat)/l for l, dat in zip(L,data_list)]
+    tick_labels = [np.arange(0, l+1, sep) for l in L]
+
+    def score(data, reference):
+        return np.linalg.norm(data-reference)/np.linalg.norm(reference)
+
+    n = len(data_list)
+    fig, axes = plt.subplots(1 if reference is None else 2, n, figsize=(5 * n, 5 if reference is None else 5*2), dpi=max(500, data_list[0].shape[0]//2))
+
+    if vmin is None or vmax is None:
+        vmin = min(np.quantile(data,0.01) for data in data_list)
+        vmax = max(np.quantile(data,0.99) for data in data_list)
+
+    if reference is not None:
+        vmin_diff = min(np.quantile((data-reference),0.01) for data in data_list)
+        vmax_diff = max(np.quantile((data-reference),0.99) for data in data_list)
+    else:
+        vmin_diff = vmin
+        vmax_diff = vmax
+
+    if reference is not None:
+        # Plot the data itself
+        for i, data in enumerate(data_list):
+            im = axes[0, i].imshow(data, cmap=cmap, origin='lower', vmin=vmin, vmax=vmax)
+            axes[0, i].set_title(titles[i], fontsize=fs_titles)
+            axes[0, i].set_xticks(ticks[i])
+            axes[0, i].set_yticks(ticks[i])
+            axes[0, i].set_xticklabels(tick_labels[i])
+            axes[0, i].set_yticklabels(tick_labels[i])
+            axes[0, i].set_xlabel('Mpc/h')
+        fig.colorbar(im, ax=axes[0, :], orientation='vertical')
+
+        # Plot the data compared to the reference
+        for i, data in enumerate(data_list):
+            im = axes[1, i].imshow(data - reference, cmap=cmap, origin='lower', vmin=vmin_diff, vmax=vmax_diff)
+            axes[1, i].set_title(f'{titles[i]} - Reference', fontsize=fs_titles)
+            axes[1, i].set_xticks(ticks[i])
+            axes[1, i].set_yticks(ticks[i])
+            axes[1, i].set_xticklabels(tick_labels[i])
+            axes[1, i].set_yticklabels(tick_labels[i])
+            axes[1, i].set_xlabel('Mpc/h')
+        fig.colorbar(im, ax=axes[1, :], orientation='vertical')
+
+        # Add the score on the plots
+        for i, data in enumerate(data_list):
+            axes[1, i].text(0.5, 0.9, f"Score: {score(data, reference):.2e}", fontsize=10, transform=axes[1, i].transAxes, color='white')
+        # plt.tight_layout()
+    else:
+
+        if len(data_list) == 1:
+            data_list = data_list[0]
+            im = axes.imshow(data_list, cmap=cmap, origin='lower', vmin=vmin, vmax=vmax)
+            axes.set_title(titles[0], fontsize=fs_titles)
+            axes.set_xticks(ticks[0])
+            axes.set_yticks(ticks[0])
+            axes.set_xticklabels(tick_labels[0])
+            axes.set_yticklabels(tick_labels[0])
+            axes.set_xlabel('Mpc/h')
+            fig.colorbar(im, ax=axes, orientation='vertical')
+
+        else:
+            for i, data in enumerate(data_list):
+                im = axes[i].imshow(data, cmap=cmap, origin='lower', vmin=vmin, vmax=vmax)
+                axes[i].set_title(titles[i], fontsize=fs_titles)
+                axes[i].set_xticks(ticks[i])
+                axes[i].set_yticks(ticks[i])
+                axes[i].set_xticklabels(tick_labels[i])
+                axes[i].set_yticklabels(tick_labels[i])
+                axes[i].set_xlabel('Mpc/h')
+            fig.colorbar(im, ax=axes[:], orientation='vertical')
+
+    return fig, axes
+
+
+if __name__ == "__main__":
+    from argparse import ArgumentParser
+    parser = ArgumentParser(description='Comparisons of fields slices.')
+
+    parser.add_argument('-a','--axis', type=int, default=0, help='Axis along which the slices will be taken.')
+    parser.add_argument('-i','--index', type=int, default=None, help='Index of the slice along the axis.')
+    parser.add_argument('-d', '--directory', type=str, required=True, help='Directory containing the fields files.')
+    parser.add_argument('-ref', '--reference', type=str, default=None, help='Reference field file.')
+    parser.add_argument('-f', '--filenames', type=str, nargs='+', required=True, help='Field files to be plotted.')
+    parser.add_argument('-o', '--output', type=str, default=None, help='Output plot file name.')
+    parser.add_argument('-l', '--labels', type=str, nargs='+', default=None, help='Labels for each field.')
+    parser.add_argument('-c', '--cmap', type=str, default='viridis', help='Colormap to be used for plotting.')
+    parser.add_argument('-vmin', type=float, default=None, help='Minimum value for the colorbar.')
+    parser.add_argument('-vmax', type=float, default=None, help='Maximum value for the colorbar.')
+    parser.add_argument('-t', '--title', type=str, default=None, help='Title for the plot.')
+    parser.add_argument('-log','--log_scale', action='store_true', help='Use log scale for the data.')
+
+    register_arguments_cosmo(parser)
+
+    args = parser.parse_args()
+
+    from pysbmy.field import read_field
+    from pysbmy.cosmology import d_plus
+
+    ref_field = read_field(args.directory+args.reference) if args.reference is not None else None
+    fields = [read_field(args.directory+f) for f in args.filenames]
+
+    if args.index is None:
+        index = fields[0].N0//2
+    else:
+        index=args.index
+
+    # args.labels=[f"a={f.time:.2f}" for f in fields]
+    L = [f.L0 for f in fields]
+
+    match args.axis:
+        case 0 | 'x':
+            reference = ref_field.data[index,:,:] if ref_field is not None else None
+            fields = [f.data[index,:,:] for f in fields]
+            # reference = ref_field.data[index,:,:]/d_plus(1e-3,ref_field.time,parse_arguments_cosmo(args))
+            # fields = [f.data[index,:,:]/d_plus(1e-3,f.time,parse_arguments_cosmo(args)) for f in fields]
+            # reference = ref_field.data[index,:,:]/d_plus(1e-3,0.05,parse_arguments_cosmo(args))
+            # fields = [f.data[index,:,:]/d_plus(1e-3,time,parse_arguments_cosmo(args)) for f,time in zip(fields,[0.05, 1.0])]
+        case 1 | 'y':
+            reference = ref_field.data[:,index,:] if ref_field is not None else None
+            fields = [f.data[:,index,:] for f in fields]
+        case 2 | 'z':
+            reference = ref_field.data[:,:,index] if ref_field is not None else None
+            fields = [f.data[:,:,index] for f in fields]
+        case _:
+            raise ValueError(f"Wrong axis provided : {args.axis}")
+
+    if args.log_scale:
+        reference = np.log10(2.+reference) if ref_field is not None else None
+        fields = [np.log10(2.+f) for f in fields]
+
+
+
+    fig, axes = plot_imshow_with_reference(fields,reference,args.labels, vmin=args.vmin, vmax=args.vmax,cmap=args.cmap, L=L)
+    fig.suptitle(args.title)
+
+    if args.output is not None:
+        fig.savefig(args.output)
+    else:
+        fig.savefig(args.directory+'slices.png')
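plot_imshow_with_reference can also be driven directly from Python instead of the argparse block; a minimal sketch using the same pysbmy calls as the script. The file paths are hypothetical and the function is assumed to be importable from the new module:

import numpy as np
from pysbmy.field import read_field

ref = read_field("./results/ref_density.h5")                     # hypothetical paths
runs = [read_field("./results/run1.h5"), read_field("./results/run2.h5")]

i = runs[0].N0 // 2                                              # central slice along axis 0
fig, axes = plot_imshow_with_reference(
    [np.log10(2. + f.data[i, :, :]) for f in runs],
    reference=np.log10(2. + ref.data[i, :, :]),
    titles=["run1", "run2"],
    L=[f.L0 for f in runs],
)
fig.savefig("slices.png")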
@@ -225,8 +225,6 @@ def get_progress_from_logfile(filename):
             pass
         elif "Fatal" in line or "Error" in line:
             return -1, -1
-        elif "Everything done successfully, exiting." in line:
-            current_operation = total_operations
     return current_operation, total_operations
 
 
@@ -239,6 +237,7 @@ def progress_bar_from_logfile(filename:str, desc:str="", verbose:int=1, **kwargs
     k=0
     limit=600
     update_interval=0.2
+    sleep(2) # Wait for the process to be launched, and for the previous log file to be overwritten if necessary.
     wait_until_file_exists(filename, verbose=verbose, limit=limit)
     current_operation, total_operations = get_progress_from_logfile(filename)
     previous_operation = 0
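For context on the added sleep(2): progress_bar_from_logfile polls a log file created by a job that has just been launched, so it must first wait for the file to appear. wait_until_file_exists itself is not shown in this diff; an illustrative stand-in (not the project's implementation) could look like this:

import os
from time import sleep

def wait_until_file_exists(filename, verbose=1, limit=600, interval=0.2):
    # Poll until the file shows up or the limit (in seconds) is exceeded.
    waited = 0.0
    while not os.path.isfile(filename):
        sleep(interval)
        waited += interval
        if waited >= limit:
            raise TimeoutError(f"{filename} did not appear within {limit} s")
    if verbose > 1:
        print(f"Found {filename} after {waited:.1f} s")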
@@ -1,9 +1,9 @@
 
 
 def main(parsed_args):
-    from .low_level import print_starting_module, print_message, print_ending_module, wait_until_file_exists
+    from low_level import print_starting_module, print_message, print_ending_module, wait_until_file_exists
     from os.path import isfile
-    from .args_main import parse_arguments_main
+    from args_main import parse_arguments_main
 
     print_starting_module("control", verbose=parsed_args.verbose)
     main_dict = parse_arguments_main(parsed_args)
@@ -12,22 +12,22 @@ def main(parsed_args):
 
         case "ICs" | "InitialConditions" | "InitialConditionsGenerator" | "ICsGenerator" | "ICsGen" | "ini":
             print_message("Running initial conditions generator.", 1, "control", verbose=parsed_args.verbose)
-            from .ICs import main_ICs
+            from ICs import main_ICs
             main_ICs(parsed_args)
             print_message("Initial conditions generator finished.", 1, "control", verbose=parsed_args.verbose)
 
 
         case "TS" | "timestepping":
             print_message("Running timestepping generator.", 1, "control", verbose=parsed_args.verbose)
-            from .timestepping import main_timestepping
+            from timestepping import main_timestepping
             main_timestepping(parsed_args)
             print_message("Timestepping generator finished.", 1, "control", verbose=parsed_args.verbose)
 
 
         case "PM" | "LPT" | "tCOLA" | "simbelmyne" | "sbmy":
             print_message(f"Running Simbelmyne in mode {main_dict["mode"]}.", 1, "control", verbose=parsed_args.verbose)
-            from .simbelmyne import main_simbelmyne
-            from .parameters_card import parse_arguments_card
+            from simbelmyne import main_simbelmyne
+            from parameters_card import parse_arguments_card
             from os.path import isfile
 
             card_dict = parse_arguments_card(parsed_args)
@@ -47,8 +47,8 @@ def main(parsed_args):
 
         case "pre_sCOLA":
             print_message("Running pre-sCOLA.", 1, "control", verbose=parsed_args.verbose)
-            from .scola import main_pre_scola
-            from .parameters_card import parse_arguments_card
+            from scola import main_pre_scola
+            from parameters_card import parse_arguments_card
 
             card_dict = parse_arguments_card(parsed_args)
 
@@ -60,8 +60,8 @@ def main(parsed_args):
 
         case "post_sCOLA":
             print_message("Running post-sCOLA.", 1, "control", verbose=parsed_args.verbose)
-            from .scola import main_post_scola
-            from .parameters_card import parse_arguments_card
+            from scola import main_post_scola
+            from parameters_card import parse_arguments_card
 
             card_dict = parse_arguments_card(parsed_args)
 
@@ -73,8 +73,8 @@ def main(parsed_args):
 
         case "sCOLA":
             print_message("Running sCOLA.", 1, "control", verbose=parsed_args.verbose)
-            from .scola import main_scola
-            from .parameters_card import parse_arguments_card
+            from scola import main_scola
+            from parameters_card import parse_arguments_card
 
             card_dict = parse_arguments_card(parsed_args)
 
@@ -86,12 +86,12 @@ def main(parsed_args):
 
         case "alltCOLA" | "allPM":
             print_message(f"Running ICs and Simbelmyne in mode {main_dict["mode"]}.", 1, "control", verbose=parsed_args.verbose)
-            from .parameters_card import parse_arguments_card, main_parameter_card
-            from .timestepping import main_timestepping
-            from .ICs import main_ICs
-            from .simbelmyne import main_simbelmyne
+            from parameters_card import parse_arguments_card, main_parameter_card
+            from timestepping import main_timestepping
+            from ICs import main_ICs
+            from simbelmyne import main_simbelmyne
             from os.path import isfile
-            from .low_level import wait_until_file_exists
+            from low_level import wait_until_file_exists
 
             card_dict = main_parameter_card(parsed_args)
 
@@ -101,7 +101,7 @@ def main(parsed_args):
 
             ## Check consistency of ICs_gen and ICs
             if main_dict["ICs_gen"] == "monofonic":
-                from .parameters_monofonic import parse_arguments_monofonic
+                from parameters_monofonic import parse_arguments_monofonic
                 monofonic_dict = parse_arguments_monofonic(parsed_args)
                 if monofonic_dict["output"]+"DM_delta.h5" != card_dict["ICs"]:
                     raise ValueError(f"ICs {card_dict['ICs']} does not match monofonic output {monofonic_dict['output']+'DM_delta.h5'}")
@@ -133,17 +133,10 @@ def main(parsed_args):
 
         case "allsCOLA":
             print_message(f"Running ICs, pre_sCOLA, sCOLA and post_sCOLA.", 1, "control", verbose=parsed_args.verbose)
-<<<<<<< HEAD:main.py
             from parameters_card import parse_arguments_card, main_parameter_card
             from timestepping import main_timestepping
             from ICs import main_ICs
             from scola import main_scola, main_pre_scola, main_post_scola, have_all_tiles
-=======
-            from .parameters_card import parse_arguments_card, main_parameter_card
-            from .timestepping import main_timestepping
-            from .ICs import main_ICs
-            from .scola import main_scola, main_pre_scola, main_post_scola, have_all_tiles
->>>>>>> main:sbmy_control/main.py
 
             card_dict = main_parameter_card(parsed_args)
 
@@ -153,7 +146,7 @@ def main(parsed_args):
 
             ## Check consistency of ICs_gen and OutputLPTPotential
             if main_dict["ICs_gen"] == "monofonic":
-                from .parameters_monofonic import parse_arguments_monofonic
+                from parameters_monofonic import parse_arguments_monofonic
                 monofonic_dict = parse_arguments_monofonic(parsed_args)
                 if monofonic_dict["output"]+"DM_phi.h5" != card_dict["OutputLPTPotential1"]:
                     raise ValueError(f"OutputLPTPotential1 {card_dict['OutputLPTPotential1']} does not match monofonic output {monofonic_dict['output']+'DM_phi.h5'}")
@@ -186,7 +179,7 @@ def main(parsed_args):
             main_scola(parsed_args)
             if parsed_args.execution == "slurm" and not have_all_tiles(parsed_args):
                 from tqdm import tqdm
-                from .low_level import progress_bar_from_logfile
+                from low_level import progress_bar_from_logfile
                 for b in tqdm(range(1,parsed_args.N_tiles**3+1), desc="sCOLA", unit="box", disable=(parsed_args.verbose==0)):
                     progress_bar_from_logfile(main_dict["logdir"]+main_dict["simname"]+".log_"+str(b), desc=f"Box {b}/{parsed_args.N_tiles**3}", verbose=parsed_args.verbose, leave=False)
             print_message("sCOLA finished.", 1, "control", verbose=parsed_args.verbose)
@@ -227,15 +220,15 @@ def check_consistency(card_dict, mode):
         raise ValueError(f"ModulePMCOLA is not 1: ModulePMCOLA={card_dict["ModulePMCOLA"]}")
 
 
-def console_main():
+if __name__ == "__main__":
     from argparse import ArgumentParser
-    from .args_main import register_arguments_main
-    from .timestepping import register_arguments_timestepping, main_timestepping
-    from .parameters_card import register_arguments_card, main_parameter_card
-    from .cosmo_params import register_arguments_cosmo
-    from .parameters_monofonic import register_arguments_monofonic
-    from .slurm_submission import register_arguments_slurm
-    from .low_level import wait_until_file_exists
+    from args_main import register_arguments_main
+    from timestepping import register_arguments_timestepping, main_timestepping
+    from parameters_card import register_arguments_card, main_parameter_card
+    from cosmo_params import register_arguments_cosmo
+    from parameters_monofonic import register_arguments_monofonic
+    from slurm_submission import register_arguments_slurm
+    from low_level import wait_until_file_exists
 
     parser = ArgumentParser(description="Run sCOLA.")
     register_arguments_main(parser)
@@ -246,8 +239,3 @@ def console_main():
     register_arguments_cosmo(parser)
     parsed_args = parser.parse_args()
     main(parsed_args)
-
-
-if __name__ == "__main__":
-    console_main()
-
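The match/case dispatch in main() keys off main_dict["mode"], so the module can in principle be driven without the CLI by handing it a pre-filled namespace. A sketch under the assumption that the attributes below are the ones parse_arguments_main expects; only mode, verbose and execution are actually visible in this diff, so treat the attribute set as hypothetical:

from argparse import Namespace

parsed_args = Namespace(mode="ICs", verbose=1, execution="local")   # hypothetical attribute set
main(parsed_args)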
@@ -2,8 +2,8 @@
 path_to_monofonic_binary = "/home/aubin/monofonic/build/monofonIC"
 
 def main_monofonic(parsed_args):
-    from .parameters_monofonic import main_parameters_monofonic
-    from .low_level import print_starting_module, print_message, print_ending_module
+    from parameters_monofonic import main_parameters_monofonic
+    from low_level import print_starting_module, print_message, print_ending_module
     from os.path import isfile
     import subprocess
 
@@ -27,8 +27,8 @@ def main_monofonic(parsed_args):
         print_message("Monofonic finished.", 1, "monofonic", verbose=parsed_args.verbose)
 
     elif parsed_args.execution == "slurm":
-        from .slurm_submission import create_slurm_script, parse_arguments_slurm
-        from .args_main import parse_arguments_main
+        from slurm_submission import create_slurm_script, parse_arguments_slurm
+        from args_main import parse_arguments_main
         print_message("Running monofonic in slurm mode.", 1, "monofonic", verbose=parsed_args.verbose)
         slurm_dict=parse_arguments_slurm(parsed_args)
         main_dict=parse_arguments_main(parsed_args)
@@ -67,11 +67,11 @@ def main_monofonic(parsed_args):
 
 if __name__ == "__main__":
     from argparse import ArgumentParser
-    from .parameters_monofonic import register_arguments_monofonic
-    from .args_main import register_arguments_main
-    from .parameters_card import register_arguments_card_for_ICs
-    from .cosmo_params import register_arguments_cosmo
-    from .slurm_submission import register_arguments_slurm
+    from parameters_monofonic import register_arguments_monofonic
+    from args_main import register_arguments_main
+    from parameters_card import register_arguments_card_for_ICs
+    from cosmo_params import register_arguments_cosmo
+    from slurm_submission import register_arguments_slurm
 
     parser = ArgumentParser(description="Run monofonIC initial conditions generator for a Simbelmyne simulation.")
     # TODO: reduce the volume of arguments
@@ -47,7 +47,6 @@ def register_arguments_card(parser:ArgumentParser):
     parser.add_argument("--OutputFCsDensity", type=str, default=None, help="Output FCs density file.")
     parser.add_argument("--OutputFCsSnapshot", type=str, default=None, help="Output FCs snapshot file.")
     parser.add_argument("--OutputRngStateLPT", type=str, default=None, help="Output RNG state file.")
-<<<<<<< HEAD:parameters_card.py
     ## Tests with phiBCs and density
     parser.add_argument("--WriteGravPot", type=bool, default=False, help="Write gravitational potential.")
     parser.add_argument("--OutputGravitationalPotentialBase", type=str, default=None, help="Output gravitational potential base.")
@@ -57,12 +56,6 @@ def register_arguments_card(parser:ArgumentParser):
     parser.add_argument("--MeshDensity", type=int, default=None, help="Mesh for density.")
     parser.add_argument("--LoadPhiBCs", type=bool, default=False, help="Load phiBCs.")
     parser.add_argument("--InputPhiBCsBase", type=str, default=None, help="Input phiBCs file base.")
-=======
-    parser.add_argument("--WriteReferenceFrame", type=bool, default=False, help="Write reference frame (COCA).")
-    parser.add_argument("--OutputMomentaBase", type=str, default=None, help="Output momenta base (COCA).")
-    parser.add_argument("--ReadReferenceFrame", type=bool, default=False, help="Read reference frame (COCA).")
-    parser.add_argument("--InputMomentaBase", type=str, default=None, help="Read momenta base (COCA).")
->>>>>>> main:sbmy_control/parameters_card.py
 
 
 def register_arguments_card_for_ICs(parser:ArgumentParser):
@@ -89,8 +82,8 @@ def parse_arguments_card(parsed_args):
     """
     Parse the arguments for the parameter card.
     """
-    from .args_main import parse_arguments_main
-    from .cosmo_params import parse_arguments_cosmo
+    from args_main import parse_arguments_main
+    from cosmo_params import parse_arguments_cosmo
 
     cosmo_dict=parse_arguments_cosmo(parsed_args)
     card_dict=dict(
@@ -128,10 +121,6 @@ def parse_arguments_card(parsed_args):
         GenerateLightcone=parsed_args.GenerateLightcone,
         OutputAlsoFCs=parsed_args.OutputAlsoFCs,
         Observer=parsed_args.Observer,
-        WriteReferenceFrame=parsed_args.WriteReferenceFrame,
-        OutputMomentaBase=parsed_args.OutputMomentaBase,
-        ReadReferenceFrame=parsed_args.ReadReferenceFrame,
-        InputMomentaBase=parsed_args.InputMomentaBase,
         N_tiles=parsed_args.N_tiles,
         Np_buffer=parsed_args.Np_buffer,
         Np_lpt_buffer=parsed_args.Np_lpt_buffer,
@@ -233,7 +222,6 @@ def parse_arguments_card(parsed_args):
         card_dict["OutputFCsSnapshot"] = main_dict["resultdir"]+"final_particles_"+main_dict["simname"]+".gadget3"
     if card_dict["OutputRngStateLPT"] is None:
         card_dict["OutputRngStateLPT"] = main_dict["workdir"]+"rng_state.h5"
-<<<<<<< HEAD:parameters_card.py
     ## Tests with phiBCs and density
     if card_dict["OutputGravitationalPotentialBase"] is None:
         card_dict["OutputGravitationalPotentialBase"] = main_dict["workdir"]+"gravpot_"+main_dict["simname"]
@@ -245,12 +233,6 @@ def parse_arguments_card(parsed_args):
         card_dict["MeshDensity"] = card_dict["N_PM_mesh"]
     if card_dict["InputPhiBCsBase"] is None:
         card_dict["InputPhiBCsBase"] = main_dict["workdir"]+"gravpot_tCOLA"
-=======
-    if card_dict["OutputMomentaBase"] is None:
-        card_dict["OutputMomentaBase"] = main_dict["workdir"]+"momenta_"+main_dict["simname"]+"_"
-    if card_dict["InputMomentaBase"] is None:
-        card_dict["InputMomentaBase"] = main_dict["workdir"]+"momenta_"+main_dict["simname"]+"_"
->>>>>>> main:sbmy_control/parameters_card.py
 
     return card_dict
 
@@ -260,7 +242,7 @@ def parse_arguments_card_for_ICs(parsed_args):
     """
     Parse the arguments for the parameter card for ICs.
     """
-    from .args_main import parse_arguments_main
+    from args_main import parse_arguments_main
 
     main_dict = parse_arguments_main(parsed_args)
     card_dict = dict(
@@ -300,7 +282,7 @@ def parse_arguments_card_for_timestepping(parsed_args):
     """
     Parse the arguments for the parameter card for timestepping.
     """
-    from .args_main import parse_arguments_main
+    from args_main import parse_arguments_main
 
     main_dict = parse_arguments_main(parsed_args)
     card_dict = dict(
@@ -369,7 +351,6 @@ def create_parameter_card_dict(
        OutputFCsDensity:str = 'fcs_density.h5',
        OutputFCsSnapshot:str = 'fcs_particles.gadget3',
 
-<<<<<<< HEAD:parameters_card.py
        ## Tests with phiBCs and density
        WriteGravPot:bool = True,
        OutputGravitationalPotentialBase:str = 'gravitational_potential.h5',
@@ -379,13 +360,6 @@ def create_parameter_card_dict(
        MeshDensity:int = 128,
        LoadPhiBCs:bool = False,
        InputPhiBCsBase:str = 'gravitational_potential.h5',
-=======
-       ## COCA parameters
-       WriteReferenceFrame:bool = False,
-       OutputMomentaBase:str = 'momenta_',
-       ReadReferenceFrame:bool = False,
-       InputMomentaBase:str = 'momenta_',
->>>>>>> main:sbmy_control/parameters_card.py
 
        ## Cosmological parameters
        h:float = 0.6732,
@@ -441,10 +415,6 @@ def create_parameter_card_dict(
         Observer2=Observer[2],
         OutputFCsDensity=OutputFCsDensity,
         OutputFCsSnapshot=OutputFCsSnapshot,
-        WriteReferenceFrame=int(WriteReferenceFrame),
-        OutputMomentaBase=OutputMomentaBase,
-        ReadReferenceFrame=int(ReadReferenceFrame),
-        InputMomentaBase=InputMomentaBase,
         NumberOfTilesPerDimension=N_tiles,
         NumberOfParticlesInBuffer=Np_buffer,
         NumberOfParticlesInLPTBuffer=Np_lpt_buffer,
@@ -482,7 +452,7 @@ def write_parameter_card(parameter_card_dict:dict, filename:str, verbose:int = 1
 
     if verbose < 2:
         from io import BytesIO
-        from .low_level import stdout_redirector, stderr_redirector
+        from low_level import stdout_redirector, stderr_redirector
 
         f = BytesIO()
         g = BytesIO()
@@ -501,7 +471,7 @@ def main_parameter_card(parsed_args):
     """
     Main function for the parameter card.
     """
-    from .low_level import print_message, print_starting_module, print_ending_module
+    from low_level import print_message, print_starting_module, print_ending_module
     print_starting_module("card", verbose=parsed_args.verbose)
     print_message("Parsing arguments for the parameter card.", 1, "card", verbose=parsed_args.verbose)
     card_dict = parse_arguments_card(parsed_args)
@@ -517,8 +487,8 @@ def main_parameter_card(parsed_args):
     return card_dict
 
 if __name__ == "__main__":
-    from .args_main import register_arguments_main
-    from .cosmo_params import register_arguments_cosmo
+    from args_main import register_arguments_main
+    from cosmo_params import register_arguments_cosmo
 
     parser = ArgumentParser(description="Create sbmy parameter card.")
     register_arguments_main(parser)
@@ -16,9 +16,9 @@ def parse_arguments_monofonic(parsed_args):
     """
     Parse the arguments for the monofonIC parameters.
     """
-    from .args_main import parse_arguments_main
-    from .parameters_card import parse_arguments_card_for_ICs
-    from .cosmo_params import parse_arguments_cosmo
+    from args_main import parse_arguments_main
+    from parameters_card import parse_arguments_card_for_ICs
+    from cosmo_params import parse_arguments_cosmo
 
     main_dict = parse_arguments_main(parsed_args)
     card_dict = parse_arguments_card_for_ICs(parsed_args)
@@ -71,7 +71,7 @@ def get_config_from_dict(monofonic_dict):
     config["setup"] = {
         "GridRes": monofonic_dict["gridres"],
         "BoxLength": monofonic_dict["boxlength"],
-        "zstart": 99.0,
+        "zstart": 999.0,
         "LPTorder": 2,
         "DoBaryons": False,
         "DoBaryonVrel": False,
@@ -131,7 +131,7 @@ def get_config_from_dict(monofonic_dict):
 
 
 def main_parameters_monofonic(parsed_args):
-    from .low_level import print_message
+    from low_level import print_message
 
     print_message("Parsing arguments for the config file.", 1, "monofonic", verbose=parsed_args.verbose)
     monofonic_dict = parse_arguments_monofonic(parsed_args)
@@ -144,9 +144,9 @@ def main_parameters_monofonic(parsed_args):
 
 
 if __name__ == "__main__":
-    from .args_main import register_arguments_main
-    from .parameters_card import register_arguments_card_for_ICs
-    from .cosmo_params import register_arguments_cosmo
+    from args_main import register_arguments_main
+    from parameters_card import register_arguments_card_for_ICs
+    from cosmo_params import register_arguments_cosmo
 
     parser = ArgumentParser(description="Create monofonIC configuration file.")
     register_arguments_main(parser)
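The config["setup"] dictionary built in get_config_from_dict maps onto the ini-style file that monofonIC reads. A hedged sketch of that serialisation step using configparser; the key values and output path are placeholders, not the project's defaults:

import configparser

config = configparser.ConfigParser()
config["setup"] = {"GridRes": "128", "BoxLength": "250.0", "zstart": "99.0", "LPTorder": "2"}
with open("monofonic.conf", "w") as fh:    # hypothetical output path
    config.write(fh)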
@@ -1,7 +1,7 @@
 if __name__ == "__main__":
     from argparse import ArgumentParser
     from tqdm import tqdm
-    from .low_level import progress_bar_from_logfile
+    from low_level import progress_bar_from_logfile
 
     parser = ArgumentParser(description="Progress bar from log files.")
     parser.add_argument("-l","--logdir", type=str, help="Log directory.")
@@ -1,36 +0,0 @@
def register_colormaps(colormaps):

    # Register cmasher
    try:
        import cmasher as cma
        for name, cmap in cma.cm.cmap_d.items():
            try:
                colormaps.register(name=name, cmap=cmap)
            except ValueError:
                pass
    except ImportError:
        pass

    # Register cmocean
    try:
        import cmocean as cmo
        for name, cmap in cmo.cm.cmap_d.items():
            try:
                colormaps.register(name=name, cmap=cmap)
            except ValueError:
                pass
    except ImportError:
        pass

    # Register cmcrameri
    try:
        import cmcrameri as cmc
        for name, cmap in cmc.cm.cmaps.items():
            try:
                colormaps.register(name=name, cmap=cmap)
            except ValueError:
                pass
    except ImportError:
        pass
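For reference, a minimal usage sketch of the removed register_colormaps helper. The import path is the one used by the (also removed) slices module and is only valid on the d6222df side of this compare; it assumes a matplotlib version recent enough to expose the colormap registry as matplotlib.colormaps, and the optional packages (cmasher, cmocean, cmcrameri) are picked up only if installed.

import matplotlib
from sbmy_control.analysis.colormaps import register_colormaps  # old package layout

register_colormaps(matplotlib.colormaps)   # registry object exposing .register()
print("thermal" in matplotlib.colormaps)   # True if cmocean is installed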
@@ -1,228 +0,0 @@
import numpy as np

import sys
import os

fs = 18
fs_titles = fs - 4

def add_ax_ticks(ax, ticks, tick_labels):
    from matplotlib import ticker
    ax.set_xticks(ticks)
    ax.set_yticks(ticks)
    ax.set_xticklabels([int(t) for t in tick_labels])
    ax.set_yticklabels([int(t) for t in tick_labels])
    ax.set_xlabel('Mpc/h')
    # ax.xaxis.set_major_formatter(ticker.FormatStrFormatter('%d')) # Does not work
    # ax.yaxis.set_major_formatter(ticker.FormatStrFormatter('%d'))


def plot_imshow_with_reference(data_list,
                               reference=None,
                               titles=None,
                               vmin=None,
                               vmax=None,
                               L=None,
                               cmap='viridis',
                               cmap_diff='PuOr',
                               ref_label="Reference"):
    """
    Plot the imshow of a list of 2D arrays with two rows: one for the data itself,
    one for the data compared to a reference. Each row will have a common colorbar.

    Parameters:
    - data_list: list of 2D arrays to be plotted
    - reference: 2D array to be used as reference for comparison
    - titles: list of titles for each subplot
    - cmap: colormap to be used for plotting
    """
    import matplotlib.pyplot as plt
    from sbmy_control.analysis.colormaps import register_colormaps
    register_colormaps(plt.colormaps)

    if titles is None:
        titles = [None for f in data_list]

    if L is None:
        L = [len(data) for data in data_list]
    elif isinstance(L, int) or isinstance(L, float):
        L = [L for data in data_list]

    sep = 10 if L[0] < 50 else 20 if L[0] < 100 else 50 if L[0]<250 else 100 if L[0] < 500 else 200 if L[0] < 1000 else 500 if L[0] < 2500 else 1000
    ticks = [np.arange(0, l+1, sep)*len(dat)/l for l, dat in zip(L,data_list)]
    tick_labels = [np.arange(0, l+1, sep) for l in L]

    def score(data, reference):
        return np.linalg.norm(data-reference)/np.linalg.norm(reference)

    n = len(data_list)
    fig, axes = plt.subplots(1 if reference is None else 2, n, figsize=(5 * n, 5 if reference is None else 5*2), dpi=max(500, data_list[0].shape[0]//2), squeeze = False)

    if vmin is None or vmax is None:
        vmin = min(np.quantile(data,0.01) for data in data_list)
        vmax = max(np.quantile(data,0.99) for data in data_list)

    if reference is not None:
        vmin_diff = min(np.quantile((data-reference),0.01) for data in data_list)
        vmax_diff = max(np.quantile((data-reference),0.99) for data in data_list)
        vmin_diff = min(vmin_diff, -vmax_diff)
        vmax_diff = -vmin_diff
    else:
        vmin_diff = vmin
        vmax_diff = vmax

    # Plot the data itself
    for i, data in enumerate(data_list):
        im = axes[0, i].imshow(data, cmap=cmap, origin='lower', vmin=vmin, vmax=vmax)
        axes[0, i].set_title(titles[i], fontsize=fs_titles)
        add_ax_ticks(axes[0, i], ticks[i], tick_labels[i])
    fig.colorbar(im, ax=axes[0, :], orientation='vertical')

    if reference is not None:
        # Plot the data compared to the reference
        for i, data in enumerate(data_list):
            im = axes[1, i].imshow(data - reference, cmap=cmap_diff, origin='lower', vmin=vmin_diff, vmax=vmax_diff)
            axes[1, i].set_title(f'{titles[i]} - {ref_label}', fontsize=fs_titles)
            add_ax_ticks(axes[1, i], ticks[i], tick_labels[i])
        fig.colorbar(im, ax=axes[1, :], orientation='vertical')

        # Add the score on the plots
        for i, data in enumerate(data_list):
            axes[1, i].text(0.5, 0.9, f"RMS: {score(data, reference):.2e}", fontsize=10, transform=axes[1, i].transAxes, color='black')
    # plt.tight_layout()

    return fig, axes


def plot_imshow_diff(data_list,
                     reference,
                     titles,
                     vmin=None,
                     vmax=None,
                     L=None,
                     cmap='viridis',
                     ref_label="Reference"):

    import matplotlib.pyplot as plt
    from sbmy_control.analysis.colormaps import register_colormaps
    register_colormaps(plt.colormaps)

    if reference is None:
        raise ValueError("Reference field is None")

    if titles is None:
        titles = [None for f in data_list]

    if L is None:
        L = [len(data) for data in data_list]
    elif isinstance(L, int) or isinstance(L, float):
        L = [L for data in data_list]

    sep = 10 if L[0] < 50 else 20 if L[0] < 100 else 50 if L[0]<250 else 100 if L[0] < 500 else 200 if L[0] < 1000 else 500 if L[0] < 2500 else 1000
    ticks = [np.arange(0, l+1, sep)*len(dat)/l for l, dat in zip(L,data_list)]
    tick_labels = [np.arange(0, l+1, sep) for l in L]

    def score(data, reference):
        return np.linalg.norm(data-reference)/np.linalg.norm(reference)

    n = len(data_list)
    fig, axes = plt.subplots(1, n, figsize=(5 * n, 5), dpi=max(500, data_list[0].shape[0]//2), squeeze = False)

    if vmin is None or vmax is None:
        vmin = min(np.quantile(data-reference,0.01) for data in data_list)
        vmax = max(np.quantile(data-reference,0.99) for data in data_list)
        vmin = min(vmin, -vmax)
        vmax = -vmin

    # Plot the data compared to the reference
    for i, data in enumerate(data_list):
        im = axes[0, i].imshow(data - reference, cmap=cmap, origin='lower', vmin=vmin, vmax=vmax)
        axes[0, i].set_title(f'{titles[i]} - {ref_label}', fontsize=fs_titles)
        add_ax_ticks(axes[0, i], ticks[i], tick_labels[i])
    fig.colorbar(im, ax=axes[0, :], orientation='vertical')

    return fig, axes


def console_main():
    from argparse import ArgumentParser
    parser = ArgumentParser(description='Comparisons of fields slices.')

    parser.add_argument('-a','--axis', type=int, default=0, help='Axis along which the slices will be taken.')
    parser.add_argument('-i','--index', type=int, default=None, help='Index of the slice along the axis.')
    parser.add_argument('-d', '--directory', type=str, default='./', help='Directory containing the fields files.')
    parser.add_argument('-ref', '--reference', type=str, default=None, help='Reference field file.')
    parser.add_argument('-f', '--filenames', type=str, nargs='+', required=True, help='Field files to be plotted.')
    parser.add_argument('-o', '--output', type=str, default=None, help='Output plot file name.')
    parser.add_argument('-l', '--labels', type=str, nargs='+', default=None, help='Labels for each field.')
    parser.add_argument('-c', '--cmap', type=str, default='viridis', help='Colormap to be used for plotting.')
    parser.add_argument('-vmin', type=float, default=None, help='Minimum value for the colorbar.')
    parser.add_argument('-vmax', type=float, default=None, help='Maximum value for the colorbar.')
    parser.add_argument('-t', '--title', type=str, default=None, help='Title for the plot.')
    parser.add_argument('-log','--log_scale', action='store_true', help='Use log scale for the data.')
    parser.add_argument('--diff', action='store_true', help='Plot only the difference with the reference field.')
    parser.add_argument('--ref_label', type=str, default='Reference', help='Label for the reference field.')
    parser.add_argument('--cmap_diff', type=str, default='PuOr', help='Colormap to be used for the difference plot.')

    args = parser.parse_args()

    from pysbmy.field import read_field

    ref_label = args.ref_label
    ref_field = read_field(args.directory+args.reference) if args.reference is not None else None
    fields = []
    for k,f in enumerate(args.filenames):
        if not os.path.exists(args.directory+f):
            raise FileNotFoundError(f"File {args.directory+f} does not exist.")
        if args.reference is not None and f == args.reference:
            fields.append(ref_field) # Simply copy the reference field instead of reading it again
            if args.labels is not None:
                ref_label = args.labels[k] # Use the label of the field as the reference label
        else:
            fields.append(read_field(args.directory+f))

    if args.index is None:
        index = fields[0].N0//2
    else:
        index = args.index

    # args.labels=[f"a={f.time:.2f}" for f in fields]
    L = [f.L0 for f in fields]

    match args.axis:
        case 0 | 'x':
            reference = ref_field.data[index,:,:] if ref_field is not None else None
            fields = [f.data[index,:,:] for f in fields]
        case 1 | 'y':
            reference = ref_field.data[:,index,:] if ref_field is not None else None
            fields = [f.data[:,index,:] for f in fields]
        case 2 | 'z':
            reference = ref_field.data[:,:,index] if ref_field is not None else None
            fields = [f.data[:,:,index] for f in fields]
        case _:
            raise ValueError(f"Wrong axis provided : {args.axis}")

    if args.log_scale:
        reference = np.log10(2.+reference) if ref_field is not None else None
        fields = [np.log10(2.+f) for f in fields]

    if args.diff:
        fig, axes = plot_imshow_diff(fields, reference, args.labels, vmin=args.vmin, vmax=args.vmax, cmap=args.cmap_diff, L=L, ref_label=ref_label)
    else:
        fig, axes = plot_imshow_with_reference(fields, reference, args.labels, vmin=args.vmin, vmax=args.vmax, cmap=args.cmap, L=L, ref_label=ref_label, cmap_diff=args.cmap_diff)
    fig.suptitle(args.title)

    if args.output is not None:
        fig.savefig(args.output, bbox_inches='tight')
    else:
        fig.savefig(args.directory+'slices.jpg', bbox_inches='tight')


if __name__ == "__main__":
    console_main()
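A usage sketch for the removed plot_imshow_with_reference helper, with made-up arrays standing in for Simbelmyne field slices; the import path follows the old package layout and is only meaningful on the d6222df side of this compare.

import numpy as np
from sbmy_control.analysis.slices import plot_imshow_with_reference  # old package layout

rng = np.random.default_rng(0)
ref = rng.normal(size=(64, 64))                 # stand-in for a reference density slice
noisy = ref + 0.1 * rng.normal(size=(64, 64))   # stand-in for a second run

fig, axes = plot_imshow_with_reference(
    [ref, noisy],
    reference=ref,                  # adds a second row showing data - reference and an RMS score
    titles=["reference", "noisy"],
    L=250.0,                        # box size in Mpc/h, only used for the axis ticks
)
fig.savefig("slices_example.jpg", bbox_inches="tight")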
@@ -1,82 +0,0 @@
from pysbmy.density import get_density_pm_snapshot
from pysbmy.snapshot import read_snapshot
import argparse


def convert_snapshot_to_density(snapshot_path, output_path, N=None, corner=(0.0, 0.0, 0.0)):
    """
    Convert a snapshot to a density field.

    Parameters
    ----------
    snapshot_path : str
        Path to the snapshot file.
    output_path : str
        Path to the output density file.
    N : int
        Size of the density field grid (N x N x N).
    corner : tuple of float
        Corner of the box (x, y, z).
    """
    # Read the snapshot
    print("Reading snapshot...")
    snap = read_snapshot(snapshot_path)

    if N is None:
        N = snap.Np0

    # Calculate density
    print("Calculating density...")
    F = get_density_pm_snapshot(snap, N, N, N, corner[0], corner[1], corner[2])

    # Write density to file
    print("Writing density...")
    F.write(output_path)
    print("Density written to", output_path)
    print("Done.")


def console_main():
    parser = argparse.ArgumentParser(description="Convert snapshot to density.")
    parser.add_argument(
        "-S",
        "--snapshot",
        type=str,
        required=True,
        help="Path to the snapshot file.",
    )
    parser.add_argument(
        "-o",
        "--output",
        type=str,
        required=True,
        help="Path to the output density file.",
    )
    parser.add_argument(
        "-N",
        "--N",
        type=int,
        default=None,
        help="Size of the density field grid (N x N x N).",
    )
    parser.add_argument(
        "-c",
        "--corner",
        type=float,
        nargs=3,
        default=[0.0, 0.0, 0.0],
        help="Corner of the box (x, y, z).",
    )

    args = parser.parse_args()

    convert_snapshot_to_density(
        snapshot_path=args.snapshot,
        output_path=args.output,
        N=args.N,
        corner=args.corner,
    )


if __name__ == "__main__":
    console_main()
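A usage sketch of the removed converter called as a function rather than through its console script; the file paths are hypothetical and the import path follows the old package layout.

from sbmy_control.scripts.convert_snapshot_to_density import convert_snapshot_to_density

convert_snapshot_to_density(
    snapshot_path="work/final_snapshot.gadget3",  # hypothetical input snapshot
    output_path="results/final_density.h5",       # hypothetical output density file
    N=128,                                         # 128^3 density mesh
    corner=(0.0, 0.0, 0.0),
)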
@@ -1,134 +0,0 @@
from pysbmy.density import mesh_to_mesh
from pysbmy.field import Field, read_field
import numpy as np
import os


def field_to_field(
    input_file:str,
    output_file:str,
    output_size:int|tuple[int,int,int]|list[int],
    output_L:float|tuple[float,float,float]|list[float],
    output_dpm:float|tuple[float,float,float]|list[float],
    output_corner:tuple[float,float,float]|list[float],
    boundary_conditions:int,
):

    ### Make sure all inputs are valid
    if output_L is not None and output_dpm is not None:
        raise ValueError("Either output_L or output_dpm can be specified, not both.")

    if isinstance(output_size, int):
        output_size = (output_size, output_size, output_size)  # N0, N1, N2
    elif len(output_size) == 1:
        output_size = (output_size[0], output_size[0], output_size[0])

    if output_L is not None:
        if isinstance(output_L, float):
            output_L = (output_L, output_L, output_L)
        elif len(output_L) == 1:
            output_L = (output_L[0], output_L[0], output_L[0])

    if output_dpm is None:
        if output_L is None:
            output_dpm = (-1, -1, -1)
        else:
            output_dpm = (output_L[0] / output_size[0], output_L[1] / output_size[1], output_L[2] / output_size[2])
    elif isinstance(output_dpm, float):
        output_dpm = (output_dpm, output_dpm, output_dpm)  # d0, d1, d2
    elif len(output_dpm) == 1:
        output_dpm = (output_dpm[0], output_dpm[0], output_dpm[0])

    if not os.path.exists(input_file):
        raise FileNotFoundError(f"Input file {input_file} does not exist.")

    # Read the input field
    print(f"Reading input field from {input_file}")
    input_field = read_field(input_file)

    if input_field.rank != 1 or input_field.data.ndim != 3:
        raise NotImplementedError("Only 3D scalar fields are supported for now.")

    L0 = input_field.L0
    L1 = input_field.L1
    L2 = input_field.L2
    N0 = input_field.N0
    N1 = input_field.N1
    N2 = input_field.N2
    corner0 = input_field.corner0
    corner1 = input_field.corner1
    corner2 = input_field.corner2

    d0_in = L0 / N0
    d1_in = L1 / N1
    d2_in = L2 / N2

    d0_out = output_dpm[0] if output_dpm[0] > 0 else d0_in
    d1_out = output_dpm[1] if output_dpm[1] > 0 else d1_in
    d2_out = output_dpm[2] if output_dpm[2] > 0 else d2_in

    offset_out_x = (output_corner[0] - corner0)
    offset_out_y = (output_corner[1] - corner1)
    offset_out_z = (output_corner[2] - corner2)

    print("-----------------------------------------")
    print(f"Input field size: {N0} x {N1} x {N2}")
    print(f"Output field size: {output_size[0]} x {output_size[1]} x {output_size[2]}")
    print(f"Input field dpm: {d0_in:.3f} x {d1_in:.3f} x {d2_in:.3f}")
    print(f"Output field dpm: {d0_out:.3f} x {d1_out:.3f} x {d2_out:.3f}")
    print(f"Input field corner: ({corner0:.1f}, {corner1:.1f}, {corner2:.1f})")
    print(f"Output field corner: ({output_corner[0]:.1f}, {output_corner[1]:.1f}, {output_corner[2]:.1f})")
    print(f"Boundary conditions: {'periodic' if boundary_conditions == 1 else 'non-periodic'}")
    print("-----------------------------------------")

    input_grid = input_field.data
    output_grid = np.zeros(output_size, dtype=input_grid.dtype)

    # Mesh to mesh interpolation
    print("Interpolating field...")
    mesh_to_mesh(input_grid, output_grid, d0_in, d1_in, d2_in, d0_out, d1_out, d2_out, offset_out_x, offset_out_y, offset_out_z, boundary_conditions)

    # Create the output field
    output_field = Field(
        L0=output_size[0] * d0_out,
        L1=output_size[1] * d1_out,
        L2=output_size[2] * d2_out,
        corner0=output_corner[0],
        corner1=output_corner[1],
        corner2=output_corner[2],
        rank=input_field.rank,
        N0=output_size[0],
        N1=output_size[1],
        N2=output_size[2],
        time=input_field.time,
        data=output_grid
    )

    # Write the output field
    print(f"Writing output field to {output_file}")
    output_field.write(output_file)
    print("Done.")


def console_main():
    import argparse
    parser = argparse.ArgumentParser(description="Convert a field from one size to another.")
    parser.add_argument("-i","--input_file", type=str, help="Input field file")
    parser.add_argument("-o","--output_file", type=str, help="Output field file")
    parser.add_argument("-N","--output_size", type=int, nargs="+", help="Output field size (N0, N1, N2)")
    parser.add_argument("-L","--output_L", type=float, nargs="+", default=None, help="Output field size (L0, L1, L2)")
    parser.add_argument("-dpm","--output_dpm", type=float, nargs="+", default=None, help="Output field dpm (d0, d1, d2)")
    parser.add_argument("-corner","--output_corner", type=float, nargs=3, default=(0.,0.,0.), help="Output field corner (corner0, corner1, corner2)")
    parser.add_argument("-BC","--boundary_conditions", type=int, default=1, help="Boundary conditions (1: periodic, 3: non-periodic)")
    args = parser.parse_args()

    field_to_field(args.input_file, args.output_file, args.output_size, args.output_L, args.output_dpm, args.output_corner, args.boundary_conditions)


if __name__ == "__main__":
    console_main()
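A worked example of the resolution bookkeeping in the removed field_to_field script (pure Python, no pysbmy needed); the numbers are made up.

# Giving -L fixes the output cell size as dpm = L / N per axis, exactly as in
# the branch above where output_dpm is derived from output_L and output_size.
output_size = (128, 128, 128)
output_L = (250.0, 250.0, 250.0)   # Mpc/h, hypothetical
output_dpm = tuple(L / N for L, N in zip(output_L, output_size))
print(output_dpm)                   # (1.953125, 1.953125, 1.953125)
# Giving -dpm instead makes the output box length N * dpm on each axis, which
# is how the script fills in the output Field header.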
@@ -1,128 +0,0 @@
import numpy as np
from pysbmy.snapshot import read_tile
from pysbmy.field import Field
from pysbmy.density import get_density_pm, density_to_delta
from pysbmy import c_float_t
import tqdm
import argparse

def get_indices(tile, N_TILES0, N_TILES1, N_TILES2):
    """Get the indices of the tile in a 3D grid."""
    i = (tile // (N_TILES1 * N_TILES2))%N_TILES0
    j = ((tile - N_TILES1 * N_TILES2 * i) // N_TILES2)%N_TILES1
    k = (tile - N_TILES2 * j - N_TILES1 * N_TILES2 * i)%N_TILES2
    return i, j, k


def tile_to_density_with_buffer(T, N, d, buffer):
    """
    Convert a tile to density with a buffer.
    """
    # Create a buffer for the density
    A = np.zeros((N+2*buffer, N+2*buffer, N+2*buffer), dtype=np.float32)

    # Get the density of the tile
    A = get_density_pm(T.pos-T.corner_position.astype(c_float_t)+d*buffer, A, d)
    return A


def add_density_tile(A, A_tile, corner_indices):
    N = A.shape[0]
    Np = A_tile.shape[0]
    i0, j0, k0 = corner_indices

    # Create index arrays for the tile
    i_idx = (np.arange(Np) + i0) % N
    j_idx = (np.arange(Np) + j0) % N
    k_idx = (np.arange(Np) + k0) % N

    # Use np.ix_ to get all combinations of indices
    A[np.ix_(i_idx, j_idx, k_idx)] += A_tile


def gather_density(A, folder, tile_base, Np_tile, dpm, buffer, N_TILES):
    """
    Gather the density from the tiles.
    """
    for tile in tqdm.tqdm(range(N_TILES**3), desc="Density of tiles", unit="tiles"):
        T = read_tile(folder+tile_base+str(tile+1)+".h5", int(Np_tile**3))
        # Get the corner position of the tile
        corner_position = T.corner_position
        # Get the corner indices of the tile
        i,j,k = get_indices(tile, N_TILES, N_TILES, N_TILES)
        corner_grid_indices = (i*Np_tile-buffer, j*Np_tile-buffer, k*Np_tile-buffer)
        # Get the density of the tile with a buffer
        A_tile = tile_to_density_with_buffer(T, Np_tile, dpm, buffer)
        # Add the density of the tile to the grid
        add_density_tile(A, A_tile, corner_grid_indices)


def gather_tiles(folder, tile_base, L, Np, N_TILES, buffer):
    """
    Gather sCOLA tiles into a single density field.

    Parameters
    ----------
    folder : str
        Folder containing the tiles.
    tile_base : str
        Base name of the tiles.
    L : float
        Size of the box in Mpc/h.
    Np : int
        Number of cells per dimension for the full box.
    N_TILES : int
        Number of tiles per dimension.
    buffer : int
        Buffer size for the density field of tiles.
    """
    Np_tile = Np//N_TILES
    dpm = L/Np_tile

    print("Memory allocation for the grid...")
    A = np.zeros((Np,Np,Np), dtype=np.float32)

    print("Starting to read the tiles...")
    gather_density(A, folder, tile_base, Np_tile, dpm, buffer, N_TILES)

    print("Finished reading the tiles.")
    A = density_to_delta(A,-1)

    print("Converting to field...")
    F = Field(L,L,L, 0.,0.,0., 1, Np,Np,Np, 1., A)

    print("Saving field...")
    F.write(folder+"../results/final_density_sCOLA.h5")
    print("Density field saved to", folder+"../results/final_density_sCOLA.h5")
    print("Done.")


def console_main():
    parser = argparse.ArgumentParser(description="Gather density from tiles.")
    parser.add_argument("-d","--folder", type=str, default="./", help="Folder containing the tiles")
    parser.add_argument("--tile_base", type=str, default="sCOLA_tile", help="Base name of the tiles")
    parser.add_argument("-L","--L", type=int, default=1920, help="Size of the box in Mpc/h")
    parser.add_argument("-Np","--Np", type=int, default=80, help="Number of cells per dimension for the full box")
    parser.add_argument("-Nt","--N_tiles", type=int, default=4, help="Number of tiles per dimension.")
    parser.add_argument("--buffer", type=int, default=40, help="Buffer size for the density field of tiles")
    args = parser.parse_args()

    L = args.L
    Np = args.Np
    N_TILES = args.N_tiles
    buffer = args.buffer

    folder = args.folder
    tile_base = args.tile_base

    Np_tile = Np//N_TILES
    dpm = L/Np_tile

    gather_tiles(folder, tile_base, L, Np, N_TILES, buffer)


if __name__ == "__main__":
    console_main()
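A small, self-contained check of the tile-index arithmetic used above (the function body is copied verbatim from the removed script, so no pysbmy import is needed): tiles are numbered in row-major order, i.e. tile = i*N_TILES1*N_TILES2 + j*N_TILES2 + k.

def get_indices(tile, N_TILES0, N_TILES1, N_TILES2):
    """Get the indices of the tile in a 3D grid (copied from the removed script)."""
    i = (tile // (N_TILES1 * N_TILES2)) % N_TILES0
    j = ((tile - N_TILES1 * N_TILES2 * i) // N_TILES2) % N_TILES1
    k = (tile - N_TILES2 * j - N_TILES1 * N_TILES2 * i) % N_TILES2
    return i, j, k

assert get_indices(0, 4, 4, 4) == (0, 0, 0)
assert get_indices(27, 4, 4, 4) == (1, 2, 3)   # 1*16 + 2*4 + 3 == 27
# Note that the tile files themselves are numbered from 1 (tile+1 in gather_density).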
@@ -1,502 +0,0 @@
import tqdm
import argparse
import numpy as np
import os
import subprocess
import time


def create_scola_slurm_script(slurmfile, box):
    """
    Create a slurm script for sCOLA.

    Parameters
    ----------
    slurmfile : str
        Path to the slurm script file.
    box : str
        Box number to be replaced in the slurm script.
    """
    # Read the slurm file
    with open(slurmfile, "r") as f:
        slurm_script = f.read()

    # Replace the placeholders in the slurm script
    slurm_script = slurm_script.replace("BOX", box)

    # Write the modified slurm script to a new file
    with open(slurmfile+f".{box}", "w") as f:
        f.write(slurm_script)


def submit_slurm_job(slurmfile):
    """
    Submit a slurm job using the sbatch command.

    Parameters
    ----------
    slurmfile : str
        Path to the slurm script file.

    Returns
    -------
    str
        Job ID of the submitted job. None if the submission failed.
    """
    # Submit the job
    result = subprocess.run(["sbatch", slurmfile], capture_output=True, text=True)

    # Check if the submission was successful
    if result.returncode != 0:
        print(f"Error submitting job: {result.stderr}")
        return None

    # Get the job ID from the output
    job_id = result.stdout.split()[-1]

    return job_id


def convert_time_to_seconds(time_str):
    """
    Convert a time string of the format D-HH:MM:SS or HH:MM:SS or MM:SS to seconds.
    """
    time_parts = time_str.split("-")
    if len(time_parts) == 2:
        days = int(time_parts[0])
        time_str = time_parts[1]
    else:
        days = 0

    time_parts = time_str.split(":")
    if len(time_parts) == 3:
        hours, minutes, seconds = map(int, time_parts)
    elif len(time_parts) == 2:
        hours, minutes = map(int, time_parts)
        seconds = 0
    else:
        raise ValueError("Invalid time format")

    total_seconds = days * 86400 + hours * 3600 + minutes * 60 + seconds
    return total_seconds


def convert_seconds_to_time(seconds):
    """
    Convert seconds to a time string of the format D-HH:MM:SS.
    """
    seconds = int(seconds)
    if seconds < 0:
        return "N/A"
    days = seconds // 86400
    seconds %= 86400
    hours = seconds // 3600
    seconds %= 3600
    minutes = seconds // 60
    seconds %= 60

    if days > 0:
        return f"{days}-{hours:02}:{minutes:02}:{seconds:02}"
    if hours > 0:
        return f"{hours:02}:{minutes:02}:{seconds:02}"
    return f"{minutes:02}:{seconds:02}"


def check_job_status(job_id):
    """
    Check the status of a job using the squeue command.
    Returns the job status and running time.

    Parameters
    ----------
    job_id : str
        Job ID of the job to check.

    Returns
    -------
    str
        Job status. Possible values are 'R' (running), 'PD' (pending), 'X' (failed), 'CP' (completed).
    int
        Running time in seconds. -1 if the job is not found.
    """
    # Check the job status
    result = subprocess.run(["squeue", "-j", str(job_id)], capture_output=True, text=True)

    # Check if the job is still running
    if result.returncode == 1:
        return "X", -1

    if len(result.stdout.split("\n")[1].split()) == 0:
        return "X", -1

    status = result.stdout.split("\n")[1].split()[4]
    job_time = convert_time_to_seconds(result.stdout.split("\n")[1].split()[5])

    return status, job_time


def get_job_id(jobname):
    """
    Get the job ID from the job name using the squeue command.
    """
    # Check the job status
    result = subprocess.run(["squeue", "-n", jobname], capture_output=True, text=True)

    # Check if the job is still running
    if result.returncode == 1:
        return None
    if len(result.stdout.split("\n")[1].split()) == 0:
        return None

    # Get the job ID
    job_id = result.stdout.split("\n")[1].split()[0]

    return job_id


def resubmit_job(slurmdir, slurmfile, job_ids_array, box, resubmit_count, error_count, MAX_RESUBMIT=10, MAX_ERRORS=10):
    """
    Resubmit a job if it has failed.

    Parameters
    ----------
    slurmdir : str
        Directory where the slurm scripts are saved.
    slurmfile : str
        Slurm script file.
    job_ids_array : array
        Array of job IDs for all previously submitted jobs. Indexed by box-1 number.
    box : int
        Box number of the job to resubmit.
    resubmit_count : int
        Number of resubmissions so far.
    error_count : int
        Number of errors so far.
    MAX_RESUBMIT : int
        Maximum number of resubmissions allowed.
    MAX_ERRORS : int
        Maximum number of errors allowed.

    Returns
    -------
    int
        Updated resubmit count.
    int
        Updated error count.
    """
    # Resubmit the job
    job_id = submit_slurm_job(slurmdir+slurmfile+"."+str(box))

    # Check if the job was submitted successfully
    if job_id is None:
        print(f"Error resubmitting job for box {box}")
        error_count += 1
        # Check if the error count exceeds the maximum
        if error_count >= MAX_ERRORS:
            raise RuntimeError(f"Error count exceeded {MAX_ERRORS}. Stopping job submission.")
    else:
        job_ids_array[box-1] = int(job_id)

    resubmit_count += 1
    # Check if the resubmit count exceeds the maximum
    if resubmit_count >= MAX_RESUBMIT:
        raise RuntimeError(f"Resubmit count exceeded {MAX_RESUBMIT}. Stopping job submission.")

    return resubmit_count, error_count


def check_previous_jobs(workdir, slurmdir, slurmfile, tilefile, sleeptime, job_ids_array, box, resubmit_count, error_count, MAX_RESUBMIT=10, MAX_ERRORS=10):
    """
    Get the status of all previously submitted jobs.
    For each job, check if it is running, completed, or failed.
    If the job is failed, resubmit it.

    Parameters
    ----------
    workdir : str
        Directory where the tiles are saved.
    slurmdir : str
        Directory where the slurm scripts are saved.
    slurmfile : str
        Slurm script file.
    tilefile : str
        Tile file name.
    sleeptime : float
        Sleep time between each job submission (in s).
    job_ids_array : array
        Array of job IDs for all previously submitted jobs. Indexed by box-1 number.
    box : int
        Up to which box the job status is checked.
    resubmit_count : int
        Number of resubmissions so far.
    error_count : int
        Number of errors so far.
    MAX_RESUBMIT : int
        Maximum number of resubmissions allowed.
    MAX_ERRORS : int
        Maximum number of errors allowed.

    Returns
    -------
    dict
        Dictionary with the job status categories and their corresponding box numbers.
    int
        Updated resubmit count.
    int
        Updated error count.
    """
    job_status_categories = {'R':[], 'CP':[], 'PD':[], 'X':[]}

    # Check the status of every previously submitted job
    for prev_box in tqdm.tqdm(range(1,box), desc="Checking jobs", unit="boxes", leave=False, position=1):
        # Check the job status
        status, job_time = check_job_status(job_ids_array[prev_box-1])
        # Add the job status to the dictionary
        if status not in job_status_categories:
            job_status_categories[status] = []
        # If the status is 'X', check if the tile file was created
        if status == 'X':
            # Check if the tile file was created
            if os.path.exists(workdir+f"{tilefile}{prev_box}.h5"):
                job_status_categories['CP'].append(prev_box) # Classify as completed
            else:
                resubmit_count, error_count = resubmit_job(slurmdir,slurmfile,job_ids_array,prev_box,resubmit_count,error_count,MAX_RESUBMIT,MAX_ERRORS)
                job_status_categories[status].append(prev_box) # Classify as failed
                # Sleep for a while before resubmitting the next job
                time.sleep(sleeptime)
        # If the status is not 'X', record the job status
        else:
            job_status_categories[status].append(prev_box)

    return job_status_categories, resubmit_count, error_count


def cap_number_of_jobs(job_status_categories, job_ids_array, max_jobs, sleep_time):
    """
    Cap the number of jobs to a maximum number.

    Parameters
    ----------
    job_status_categories : dict
        Dictionary with the job status categories and their corresponding box numbers.
    job_ids_array : array
        Array of job IDs for all previously submitted jobs. Indexed by box-1 number.
    max_jobs : int
        Maximum number of jobs allowed.
    sleep_time : float
        Sleep time between each job submission (in s).

    Returns
    -------
    dict
        Updated dictionary with the job status categories and their corresponding box numbers.
    """
    discard_categories = ['CP', 'X'] # Completed and Failed
    # Check the number of running/pending jobs
    job_num = 0
    for status in job_status_categories.keys():
        if status not in discard_categories:
            job_num += len(job_status_categories[status])

    # We wait until the number of jobs is below the maximum
    while job_num > max_jobs:
        print(f"Number of open jobs: {job_num} > {max_jobs}. Waiting...")
        for status in job_status_categories.keys():
            if status not in discard_categories:
                for box in job_status_categories[status]:
                    # Check the new job status
                    new_status, job_time = check_job_status(job_ids_array[box-1])
                    time.sleep(sleep_time)
                    if new_status in discard_categories:
                        job_num -= 1
                        job_status_categories[new_status].append(box) # WARNING: We do not reclassify 'X' jobs as 'CP'
                        job_status_categories[status].remove(box)

    return job_status_categories


def print_summary_job_status(job_status_categories, box, resubmit_count, error_count):
    print("---------------------------------------------------")
    # Print summary of job statuses
    print(f"Job statuses after box {box}:")
    # Print a table with columns for each status and below the % of jobs in that status
    row0 = f"{'Status':<14}"
    for status in job_status_categories.keys():
        row0 += f"{status:>9} "
    print(row0)
    row1 = f"{'Percentage':<14}"
    for status in job_status_categories.keys():
        row1 += f"{len(job_status_categories[status])/box*100:>9.1f}%"
    print(row1)
    # Print the rate of resubmissions
    print(f"Resubmission rate: {resubmit_count/box*100:.2f}%")
    print(f"Error count: {error_count}")


def scola_submit(directory,
                 slurmdir=None,
                 workdir=None,
                 slurmfile="scola_sCOLA.sh",
                 tilefile="scola_tile",
                 jobname="sCOLA_",
                 N_tiles=4,
                 sleep=1.5,
                 force=False,
                 MAX_ERRORS=10,
                 MAX_RESUBMIT=10,
                 MAX_JOBS_AT_ONCE=48,
                 CHECK_EVERY=100):

    if slurmdir is None:
        slurmdir = directory + "slurm_scripts/"
    if workdir is None:
        workdir = directory + "work/"

    # Check that the slurm file exists
    if not os.path.exists(slurmdir+slurmfile):
        raise FileNotFoundError(f"Slurm file {slurmdir+slurmfile} does not exist.")
    # Check that the work directory exists
    if not os.path.exists(workdir):
        raise FileNotFoundError(f"Work directory {workdir} does not exist.")

    # If force, remove all pre-existing tile files
    if force:
        count_removed = 0
        for box in range(1,N_tiles**3+1):
            if os.path.exists(workdir+f"{tilefile}{box}.h5"):
                os.remove(workdir+f"{tilefile}{box}.h5")
                count_removed += 1
        print(f"Removed {count_removed} ({100*count_removed/N_tiles**3:.1f}%) pre-existing tile files.")

    # MAX_ERRORS = 10
    if MAX_RESUBMIT is None:
        MAX_RESUBMIT = int(0.1*N_tiles**3) # 10% of the total number of jobs
    # MAX_JOBS_AT_ONCE = int(3*128/8) # 3 nodes with 128 cores each, 8 jobs per core
    # CHECK_EVERY = 100

    error_count = 0
    resubmit_count = 0
    counter_for_checks = 0

    job_ids_array = np.zeros((N_tiles**3,), dtype=int)

    print("---------------------------------------------------")
    print("Starting job submission for sCOLA tiles with the following parameters:")
    print(f"Directory: {directory}")
    print(f"Slurm file: {slurmdir}{slurmfile}")
    print(f"Work directory: {workdir}")
    print(f"Number of tiles: {N_tiles**3} tiles")
    print(f"Sleep time: {sleep} s")
    print(f"Max errors: {MAX_ERRORS} errors")
    print(f"Max resubmits: {MAX_RESUBMIT} resubmits")
    print(f"Max jobs at once: {MAX_JOBS_AT_ONCE} jobs")
    print(f"Check every: {CHECK_EVERY} jobs")
    print("---------------------------------------------------")
    print(f"ETA: {convert_seconds_to_time(N_tiles**3*sleep*1.2)}")
    print("Starting job submission...")

    for box in tqdm.tqdm(range(1,N_tiles**3+1), desc="Submitting jobs", unit="boxes"):

        # Check if the tile file already exists
        if os.path.exists(workdir+f"{tilefile}{box}.h5"):
            continue

        # Check if the slurm job is already running
        job_id = get_job_id(f"{jobname}{box}")
        if job_id is not None:
            job_ids_array[box-1] = int(job_id)
            time.sleep(sleep)
            continue

        # Create the slurm script for the box
        create_scola_slurm_script(slurmdir+slurmfile, str(box))

        # Submit the job
        job_id = submit_slurm_job(slurmdir+slurmfile+"."+str(box))

        # Check if the job was submitted successfully
        if job_id is None:
            print(f"Error submitting job for box {box}")
            error_count += 1
        else:
            job_ids_array[box-1] = int(job_id)

        # Sleep for a while before submitting the next job
        time.sleep(sleep)

        counter_for_checks += 1

        # Check if the error count exceeds the maximum
        if error_count >= MAX_ERRORS:
            raise RuntimeError(f"Error count exceeded {MAX_ERRORS}. Stopping job submission.")

        # Check the job status every CHECK_EVERY jobs
        if counter_for_checks >= CHECK_EVERY:

            counter_for_checks = 0

            job_status_categories, resubmit_count, error_count = check_previous_jobs(workdir,slurmdir,slurmfile,tilefile,sleep,job_ids_array,box,resubmit_count,error_count,MAX_RESUBMIT,MAX_ERRORS)
            print_summary_job_status(job_status_categories, box, resubmit_count, error_count)
            job_status_categories = cap_number_of_jobs(job_status_categories,job_ids_array,MAX_JOBS_AT_ONCE,sleep)


    print("All jobs submitted. Now checking the status of the jobs.")

    job_status_categories, resubmit_count, error_count = check_previous_jobs(workdir,slurmdir,slurmfile,tilefile,sleep,job_ids_array,N_tiles**3+1,resubmit_count,error_count,MAX_RESUBMIT,MAX_ERRORS)
    print_summary_job_status(job_status_categories, box, resubmit_count, error_count)
    # Now wait for all jobs to finish
    while len(job_status_categories['CP']) < N_tiles**3:
        time.sleep(10*sleep)
        job_status_categories, resubmit_count, error_count = check_previous_jobs(workdir,slurmdir,slurmfile,tilefile,sleep,job_ids_array,N_tiles**3+1,resubmit_count,error_count,MAX_RESUBMIT,MAX_ERRORS)
        print_summary_job_status(job_status_categories, N_tiles**3, resubmit_count, error_count)
        job_status_categories = cap_number_of_jobs(job_status_categories,job_ids_array,MAX_JOBS_AT_ONCE,sleep)

    print("All jobs finished.")
    # Remove the slurm scripts
    for box in range(1,N_tiles**3+1):
        if os.path.exists(slurmdir+slurmfile+"."+str(box)):
            os.remove(slurmdir+slurmfile+"."+str(box))


def console_main():
    parser = argparse.ArgumentParser(description="Submit slurm jobs for sCOLA tiles.")

    parser.add_argument("-d", "--directory", type=str, default="./", help="Main directory where the output will be saved (if other dir and filenames are not specified).")
    parser.add_argument("-sd", "--slurmdir", type=str, default=None, help="Directory where the slurm scripts are saved (default is -d/slurm_scripts).")
    parser.add_argument("-wd", "--workdir", type=str, default=None, help="Directory where the tiles are saved (default is -d/work).")

    parser.add_argument("-sf","--slurmfile", type=str, default="scola_sCOLA.sh", help="Slurm script file (located in slurmdir, default is scola_sCOLA.sh).")
    parser.add_argument("-tf","--tilefile", type=str, default="scola_tile", help="Tile file name (located in workdir, default is scola_tile).")
    parser.add_argument("--jobname", type=str, default="sCOLA_", help="Job name for the slurm jobs (default is sCOLA_).")

    parser.add_argument("-Nt","--N_tiles", type=int, default=4, help="Number of tiles per dimension.")

    parser.add_argument("--sleep", type=float, default=1.5, help="Sleep time between each job submission (in s).")

    parser.add_argument("-F","--force", action="store_true", help="Force to resimulate all tiles, even if they already exist.")

    args = parser.parse_args()

    scola_submit(args.directory,
                 slurmdir=args.slurmdir,
                 workdir=args.workdir,
                 slurmfile=args.slurmfile,
                 tilefile=args.tilefile,
                 jobname=args.jobname,
                 N_tiles=args.N_tiles,
                 sleep=args.sleep,
                 force=args.force)


if __name__ == "__main__":
    console_main()
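A worked example of the squeue time handling in the removed submission script; the arithmetic can be checked by hand against the two helpers above.

# "1-02:03:04" means 1 day, 2 hours, 3 minutes, 4 seconds:
total = 1 * 86400 + 2 * 3600 + 3 * 60 + 4
print(total)   # 93784, which is what convert_time_to_seconds("1-02:03:04") returns
# convert_seconds_to_time(93784) formats it back as "1-02:03:04", and the ETA
# printed by scola_submit uses the same formatter on N_tiles**3 * sleep * 1.2.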
@@ -1,8 +1,6 @@
-
-
 def main_scola(parsed_args):
-    from .args_main import parse_arguments_main
-    from .low_level import print_starting_module, print_message, print_ending_module, progress_bar_from_logfile, wait_until_file_exists
+    from args_main import parse_arguments_main
+    from low_level import print_starting_module, print_message, print_ending_module, progress_bar_from_logfile, wait_until_file_exists
     from os.path import isfile
     import subprocess

@@ -21,7 +19,7 @@ def main_scola(parsed_args):
         return

     if parsed_args.execution == "local":
-        from .parameters_card import parse_arguments_card
+        from parameters_card import parse_arguments_card
         from tqdm import tqdm
         card_dict = parse_arguments_card(parsed_args)
         print_message("Running sCOLA in local mode.", 1, "scola", verbose=parsed_args.verbose)
@@ -52,16 +50,14 @@ def main_scola(parsed_args):
         print_message("sCOLA finished.", 1, "scola", verbose=parsed_args.verbose)

     elif parsed_args.execution == "slurm":
-        from .slurm_submission import create_slurm_script, parse_arguments_slurm, limit_slurm_arrays
-        from .args_main import parse_arguments_main
-        from .parameters_card import parse_arguments_card
+        from slurm_submission import create_slurm_script, parse_arguments_slurm
+        from args_main import parse_arguments_main
         import os
         print_message("Running scola in slurm mode.", 1, "scola", verbose=parsed_args.verbose)
         slurm_dict=parse_arguments_slurm(parsed_args)
         main_dict=parse_arguments_main(parsed_args)
-        card_dict=parse_arguments_card(parsed_args)

-        if (have_no_tiles(parsed_args) or parsed_args.force) and card_dict["N_tiles"]**3 < limit_slurm_arrays :
+        if have_no_tiles(parsed_args) or parsed_args.force:
             ## Submit all boxes
             print_message("Submitting all boxes.", 2, "scola", verbose=parsed_args.verbose)
             slurm_script = slurm_dict["scripts"]+"scola_"+main_dict["simname"]+".sh"
@@ -97,7 +93,6 @@ def main_scola(parsed_args):

         else:
             ## Submit missing boxes
-            from time import sleep
             missing_tiles_arrays = get_missing_tiles_arrays(parsed_args)
             print_message(f"Submitting missing boxes: {missing_tiles_arrays}.", 2, "scola", verbose=parsed_args.verbose)

@@ -132,7 +127,6 @@ def main_scola(parsed_args):
                 subprocess.run(command_args)

                 print_message("sCOLA job submitted.", 2, "scola", verbose=parsed_args.verbose)
-                sleep((missing_tiles[1]-missing_tiles[0])*1.0) # Sleep for a bit to avoid overloading the scheduler

             os.remove(slurm_script) # Remove the script after submission (because it is specific to the missing tiles)

@@ -148,15 +142,15 @@ def main_pre_scola(parsed_args):
     If they already exist, it does nothing. If all tiles exist, raises error.
     """
     from os.path import isfile
-    from .low_level import print_starting_module, print_message, print_ending_module
-    from .parameters_card import parse_arguments_card
+    from low_level import print_starting_module, print_message, print_ending_module
+    from parameters_card import parse_arguments_card
     print_starting_module("pre-scola", verbose=parsed_args.verbose)
     card_dict = parse_arguments_card(parsed_args)

     if not isfile(card_dict["OutputLPTPotential1"]) or not isfile(card_dict["OutputLPTPotential2"]) or parsed_args.force:
         if not have_all_tiles(parsed_args):
             print_message("Running pre-scola.", 1, "pre-scola", verbose=parsed_args.verbose)
-            from .simbelmyne import main_simbelmyne
+            from simbelmyne import main_simbelmyne
             main_simbelmyne(parsed_args)
         else:
             raise NotImplementedError("All tiles exists, so calling simbelmyne would generate the final output instead of the LPT potentials.")
@@ -172,8 +166,8 @@ def main_post_scola(parsed_args):
     If the output already exists, it does nothing. If tiles are missing, print the missing tiles.
     """
     from os.path import isfile
-    from .low_level import print_starting_module, print_message, print_ending_module
-    from .parameters_card import parse_arguments_card
+    from low_level import print_starting_module, print_message, print_ending_module
+    from parameters_card import parse_arguments_card
     print_starting_module("post-scola", verbose=parsed_args.verbose)
     card_dict = parse_arguments_card(parsed_args)

@@ -186,7 +180,7 @@ def main_post_scola(parsed_args):
     if (card_dict["WriteFinalDensity"] and not isfile(card_dict["OutputFinalDensity"])) or (card_dict["WriteFinalSnapshot"] and not isfile(card_dict["OutputFinalSnapshot"])) or parsed_args.force:
         if have_all_tiles(parsed_args):
             print_message("Running post-scola.", 1, "post-scola", verbose=parsed_args.verbose)
-            from .simbelmyne import main_simbelmyne
+            from simbelmyne import main_simbelmyne
             main_simbelmyne(parsed_args)
         else:
             missing_tiles_arrays = get_missing_tiles_arrays(parsed_args)
@@ -203,7 +197,7 @@ def main_post_scola(parsed_args):

 def have_all_tiles(parsed_args):
     from os.path import isfile
-    from .parameters_card import parse_arguments_card
+    from parameters_card import parse_arguments_card

     card_dict = parse_arguments_card(parsed_args)
     nboxes_tot = int(parsed_args.N_tiles**3)
@@ -216,7 +210,7 @@ def have_all_tiles(parsed_args):

 def have_no_tiles(parsed_args):
     from os.path import isfile
-    from .parameters_card import parse_arguments_card
+    from parameters_card import parse_arguments_card

     card_dict = parse_arguments_card(parsed_args)
     nboxes_tot = int(parsed_args.N_tiles**3)
@@ -229,24 +223,17 @@ def have_no_tiles(parsed_args):

 def get_missing_tiles_arrays(parsed_args):
     from os.path import isfile
-    from .parameters_card import parse_arguments_card
-    from .slurm_submission import limit_slurm_arrays
+    from parameters_card import parse_arguments_card

     card_dict = parse_arguments_card(parsed_args)
     nboxes_tot = int(parsed_args.N_tiles**3)
     missing_tiles_arrays = []
     in_sequence_of_missing = False
-    len_sequence_of_missing = 0
     for b in range(1,nboxes_tot+1):
         if not isfile(card_dict["OutputTilesBase"]+str(b)+".h5"):
-            len_sequence_of_missing += 1
             if not in_sequence_of_missing:
                 missing_tiles_arrays.append([b])
                 in_sequence_of_missing = True
-                len_sequence_of_missing = 1
-            if b%limit_slurm_arrays==limit_slurm_arrays-1:
-                missing_tiles_arrays[-1].append(b)
-                in_sequence_of_missing = False
         elif in_sequence_of_missing:
             missing_tiles_arrays[-1].append(b-1)
             in_sequence_of_missing = False
@@ -260,13 +247,13 @@ def get_missing_tiles_arrays(parsed_args):

 if __name__ == "__main__":
     from argparse import ArgumentParser
-    from .args_main import register_arguments_main
-    from .timestepping import register_arguments_timestepping, main_timestepping
-    from .parameters_card import register_arguments_card, main_parameter_card
-    from .cosmo_params import register_arguments_cosmo
-    from .parameters_monofonic import register_arguments_monofonic
-    from .slurm_submission import register_arguments_slurm
-    from .low_level import wait_until_file_exists
+    from args_main import register_arguments_main
+    from timestepping import register_arguments_timestepping, main_timestepping
+    from parameters_card import register_arguments_card, main_parameter_card
+    from cosmo_params import register_arguments_cosmo
+    from parameters_monofonic import register_arguments_monofonic
+    from slurm_submission import register_arguments_slurm
+    from low_level import wait_until_file_exists

     parser = ArgumentParser(description="Run sCOLA.")
     register_arguments_main(parser)
42 setup.py
@@ -1,42 +0,0 @@
-from setuptools import setup, find_packages
-setup(
-    name='sbmy_control',
-    version='0.1.0',
-    author='Mayeul Aubin',
-    author_email='mayeul.aubin@iap.fr',
-    description='Simbelmyne control package',
-    long_description='This package provides control functionalities for Simbelmyne. It allows to create automatically all the required files and scripts to run a N-body simulation with Simbelmyne, using monofonIC for initial conditions. The subpackage `analysis` provides tools to analyze the results of the simulation (slices and power spectra), while the subpackage `scripts` includes tools to handle snapshots, tiles and density fields.',
-    long_description_content_type='text/markdown',
-    url='https://git.aquila-consortium.org/maubin/sbmy_control',
-    packages=find_packages(),
-    install_requires=[
-        'pysbmy',
-        'numpy',
-        'matplotlib',
-        'h5py',
-        'tqdm',
-    ],
-    license='GPL-3.0',
-    classifiers=[
-        'Development Status :: 3 - Alpha',
-        'Intended Audience :: Science/Research',
-        'License :: OSI Approved :: GNU General Public License v3 (GPLv3)',
-        'Programming Language :: Python :: 3',
-        'Programming Language :: Python :: 3.8',
-        'Programming Language :: Python :: 3.9',
-        'Programming Language :: Python :: 3.10',
-        'Programming Language :: Python :: 3.11',
-    ],
-    python_requires='>=3.8',
-    entry_points={
-        'console_scripts': [
-            'sbmy_control=sbmy_control.main:console_main',
-            'slices=sbmy_control.analysis.slices:console_main',
-            'power_spectrum=sbmy_control.analysis.power_spectrum:console_main',
-            'field_to_field=sbmy_control.scripts.field_to_field:console_main',
-            'gather_tiles=sbmy_control.scripts.gather_tiles:console_main',
-            'convert_snapshot_to_density=sbmy_control.scripts.convert_snapshot_to_density:console_main',
-            'scola_submit=sbmy_control.scripts.scola_submit:console_main',
-        ],
-    },
-)
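The deleted setup.py above wired each command (sbmy_control, slices, gather_tiles, ...) to a module:function target through console_scripts. For reference, the generated command wrapper does little more than import that target and call it; a minimal sketch of that resolution step (the helper name here is hypothetical):

from importlib import import_module

def run_entry_point(spec):
    """Resolve a console_scripts target such as 'sbmy_control.main:console_main'
    roughly the way the generated wrapper does: import the module, call the function."""
    module_name, func_name = spec.split(":")
    func = getattr(import_module(module_name), func_name)
    return func()

# Running `sbmy_control` from the shell was therefore roughly equivalent to:
#   run_entry_point("sbmy_control.main:console_main")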
@@ -1,6 +1,6 @@
def main_simbelmyne(parsed_args):
-    from .args_main import parse_arguments_main
-    from .low_level import print_starting_module, print_message, print_ending_module, progress_bar_from_logfile
+    from args_main import parse_arguments_main
+    from low_level import print_starting_module, print_message, print_ending_module, progress_bar_from_logfile
    from os.path import isfile
    import subprocess

@@ -30,8 +30,8 @@ def main_simbelmyne(parsed_args):
        print_message("Simbelyne finished.", 1, "simbelmyne", verbose=parsed_args.verbose)

    elif parsed_args.execution == "slurm":
-        from .slurm_submission import create_slurm_script, parse_arguments_slurm
-        from .args_main import parse_arguments_main
+        from slurm_submission import create_slurm_script, parse_arguments_slurm
+        from args_main import parse_arguments_main
        print_message("Running simbelmyne in slurm mode.", 1, "simbelmyne", verbose=parsed_args.verbose)
        slurm_dict=parse_arguments_slurm(parsed_args)
        main_dict=parse_arguments_main(parsed_args)
@@ -76,12 +76,12 @@ def main_simbelmyne(parsed_args):

if __name__ == "__main__":
    from argparse import ArgumentParser
-    from .args_main import register_arguments_main
-    from .timestepping import register_arguments_timestepping, main_timestepping
-    from .parameters_card import register_arguments_card, main_parameter_card
-    from .cosmo_params import register_arguments_cosmo
-    from .parameters_simbelmyne import register_arguments_simbelmyne
-    from .slurm_submission import register_arguments_slurm
+    from args_main import register_arguments_main
+    from timestepping import register_arguments_timestepping, main_timestepping
+    from parameters_card import register_arguments_card, main_parameter_card
+    from cosmo_params import register_arguments_cosmo
+    from parameters_simbelmyne import register_arguments_simbelmyne
+    from slurm_submission import register_arguments_slurm

    parser = ArgumentParser(description="Run Simbelmyne.")
    register_arguments_main(parser)
@@ -1,8 +1,7 @@
from argparse import ArgumentParser
-from .args_main import parse_arguments_main
+from args_main import parse_arguments_main

path_to_monofonic_binary = "/home/aubin/monofonic/build/monofonIC"
-limit_slurm_arrays=799

def register_arguments_slurm(parser:ArgumentParser):
    """
@@ -134,15 +133,11 @@ def create_slurm_script(slurm_template:str,
        - simbelmyne
        - scola
    """
-    index_sub=0

    if array is not None and job != "scola":
        raise ValueError(f"Array job range provided for job type {job}.")
    if array is None and job == "scola":
        raise ValueError(f"Array job range not provided for job type {job}.")
-    elif job == "scola":
-        index_sub=array[0]//limit_slurm_arrays
-        array=(array[0]%limit_slurm_arrays, array[1]%limit_slurm_arrays)

    from os.path import isfile
    if not isfile(slurm_template):
@@ -160,7 +155,7 @@ def create_slurm_script(slurm_template:str,
        case "simbelmyne":
            command_line = f"{job} {job_config_file} {job_log}"
        case "scola":
-            command_line = f"{job} {job_config_file} {job_log} "+f"-b $(({index_sub*limit_slurm_arrays} + $SLURM_ARRAY_TASK_ID))"
+            command_line = f"{job} {job_config_file} {job_log} "+"-b ${SLURM_ARRAY_TASK_ID}"
        case _:
            raise ValueError(f"Job type {job} not recognized.")

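For the scola job, the old command line reconstructed the global tile index from two pieces (the sub-array index index_sub times limit_slurm_arrays, plus the SLURM array task ID), while the new one passes the task ID straight through. A small sketch of what each form expands to, with hypothetical example values (the config and log file names below are illustrative only):

# Hypothetical example values, chosen only to show the two expansions.
job, job_config_file, job_log = "scola", "scola.sbmy", "log_scola.txt"
index_sub, limit_slurm_arrays = 2, 799   # old scheme: third chunk of array indices

old_cmd = f"{job} {job_config_file} {job_log} " + f"-b $(({index_sub*limit_slurm_arrays} + $SLURM_ARRAY_TASK_ID))"
new_cmd = f"{job} {job_config_file} {job_log} " + "-b ${SLURM_ARRAY_TASK_ID}"

print(old_cmd)  # scola scola.sbmy log_scola.txt -b $((1598 + $SLURM_ARRAY_TASK_ID))
print(new_cmd)  # scola scola.sbmy log_scola.txt -b ${SLURM_ARRAY_TASK_ID}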
@@ -168,7 +163,7 @@ def create_slurm_script(slurm_template:str,
    with open(slurm_script, "w") as f:
        for line in template:
            if job_name is not None and "--job-name" in line:
-                line = f"#SBATCH --job-name={index_sub if index_sub!=0 else ''}{job_name}\n"
+                line = f"#SBATCH --job-name={job_name}\n"
            if array is not None and "--array" in line:
                line = f"#SBATCH --array={array[0]}-{array[1]}\n"
            if array is not None and ("--output" in line or "--error" in line):
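The loop above patches a generic SLURM template line by line, overriding the #SBATCH directives that depend on the job. A compact sketch of the same substitution applied to an in-memory template (the template text and values are hypothetical, and the --output/--error branch that continues past this hunk is omitted):

def patch_sbatch_lines(template_lines, job_name=None, array=None):
    """Rewrite the #SBATCH directives that depend on the job, mirroring the loop above."""
    patched = []
    for line in template_lines:
        if job_name is not None and "--job-name" in line:
            line = f"#SBATCH --job-name={job_name}\n"
        if array is not None and "--array" in line:
            line = f"#SBATCH --array={array[0]}-{array[1]}\n"
        patched.append(line)
    return patched

template = ["#SBATCH --job-name=placeholder\n", "#SBATCH --array=0-0\n", "srun ./run.sh\n"]
print(patch_sbatch_lines(template, job_name="scola", array=(0, 63)))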
@@ -18,8 +18,8 @@ def parse_arguments_timestepping(parsed_args):
    """
    Parse the arguments for the timestepping.
    """
-    from .parameters_card import parse_arguments_card_for_timestepping
-    from .cosmo_params import parse_arguments_cosmo, z2a
+    from parameters_card import parse_arguments_card_for_timestepping
+    from cosmo_params import parse_arguments_cosmo, z2a

    card_dict = parse_arguments_card_for_timestepping(parsed_args)
    cosmo_dict = parse_arguments_cosmo(parsed_args)
@@ -79,7 +79,7 @@ def create_timestepping(timestepping_dict, ts_filename:str, verbose:int=1):
    TS = StandardTimeStepping(**timestepping_dict)
    if verbose < 2:
        from io import BytesIO
-        from .low_level import stdout_redirector, stderr_redirector
+        from low_level import stdout_redirector, stderr_redirector
        f = BytesIO()
        g = BytesIO()
        with stdout_redirector(f):
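stdout_redirector and stderr_redirector come from the project's low_level module, whose implementation is not shown in this diff; the pattern is simply to silence the timestepping writer unless verbosity is high enough. As an illustration of the same idea using only the standard library (this captures Python-level output only, which may be weaker than the project's redirector if the writer prints from compiled code):

import contextlib
import io

def quiet_call(func, *args, verbose=1, **kwargs):
    """Run func, discarding its stdout/stderr unless verbose >= 2 (illustration only)."""
    if verbose >= 2:
        return func(*args, **kwargs)
    buf_out, buf_err = io.StringIO(), io.StringIO()
    with contextlib.redirect_stdout(buf_out), contextlib.redirect_stderr(buf_err):
        return func(*args, **kwargs)

# quiet_call(lambda: print("noisy"), verbose=1) prints nothing;
# quiet_call(lambda: print("noisy"), verbose=2) prints "noisy".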
@@ -95,7 +95,7 @@ def main_timestepping(parsed_args):
    """
    Main function for the timestepping.
    """
-    from .low_level import print_message, print_ending_module, print_starting_module
+    from low_level import print_message, print_ending_module, print_starting_module

    print_starting_module("timestepping", verbose=parsed_args.verbose)
    print_message("Parsing arguments for the timestepping file.", 1, "timestepping", verbose=parsed_args.verbose)
@@ -110,9 +110,9 @@ def main_timestepping(parsed_args):
    return timestepping_dict

if __name__ == "__main__":
-    from .args_main import register_arguments_main
-    from .parameters_card import register_arguments_card_for_timestepping
-    from .cosmo_params import register_arguments_cosmo
+    from args_main import register_arguments_main
+    from parameters_card import register_arguments_card_for_timestepping
+    from cosmo_params import register_arguments_cosmo

    parser = ArgumentParser(description="Create timestepping file.")
    # TODO: reduce the volume of arguments