"""
Test the adjoint gradient of a BORG Gaussian likelihood: compare the autodiff
gradient of the log-likelihood with respect to the initial conditions against
a finite-difference estimate, store both in an HDF5 file and plot the comparison.
"""

import aquila_borg as borg
import configparser
import numpy as np
import matplotlib.pyplot as plt
import matplotlib
import itertools
import h5py as h5
import os
import sys
import contextlib
from tqdm import tqdm

sys.path.insert(0, '../mgborg_emulator/')

import likelihood
import forwards
import utils

run_test = True
# run_test = False
epsilon = 1e-2


# Context manager to suppress stdout (the BORG likelihood calls are verbose)
@contextlib.contextmanager
def suppress_stdout():
    with open(os.devnull, 'w') as devnull:
        old_stdout = sys.stdout
        sys.stdout = devnull
        try:
            yield
        finally:
            sys.stdout = old_stdout


def compare_gradients(
    ag_lh_auto_real: np.ndarray,
    ag_lh_auto_imag: np.ndarray,
    ag_lh_num_real: np.ndarray,
    ag_lh_num_imag: np.ndarray,
    plot_step: int,
    filename: str = "gradients.png",
) -> None:
    """
    Compare the autodiff adjoint gradient of the likelihood against a
    numerical one evaluated with finite differences.

    Args:
        - ag_lh_auto_real (np.ndarray): Real part of the adjoint gradient (autodiff)
        - ag_lh_auto_imag (np.ndarray): Imaginary part of the adjoint gradient (autodiff)
        - ag_lh_num_real (np.ndarray): Real part of the adjoint gradient (numerical)
        - ag_lh_num_imag (np.ndarray): Imaginary part of the adjoint gradient (numerical)
        - plot_step (int): How frequently to sample the arrays
        - filename (str): Name of the file to save the figure
    """

    # Plot colors
    colors = {
        "red": "#ba3d3b",
        "blue": "#3d5792",
    }

    fig, axs = plt.subplots(2, 2, figsize=(10, 7))

    # Real part
    axs[0, 0].axhline(0.0, color="black", linestyle=":")
    axs[0, 0].plot(ag_lh_num_real[::plot_step], c=colors["blue"], label="Finite differences")
    axs[0, 0].plot(ag_lh_auto_real[::plot_step], "o", c=colors["red"], ms=3, label="Autodiff")
    axs[0, 0].yaxis.get_major_formatter().set_powerlimits((-2, 2))
    axs[0, 0].set_ylabel("Real part")
    axs[0, 0].legend()

    axs[0, 1].plot(
        ag_lh_num_real[::plot_step],
        ag_lh_auto_real[::plot_step] - ag_lh_num_real[::plot_step],
        "o", c=colors["red"], ms=3,
    )
    axs[0, 1].axhline(y=0, color='k')
    axs[0, 1].set_xlabel("Numerical")
    axs[0, 1].set_ylabel("Autodiff - Numerical (real)")

    # Imaginary part
    axs[1, 0].axhline(0.0, color="black", linestyle=":")
    axs[1, 0].plot(ag_lh_num_imag[::plot_step][4:], c=colors["blue"], label="Finite differences")
    axs[1, 0].plot(ag_lh_auto_imag[::plot_step][4:], "o", c=colors["red"], ms=3, label="Autodiff")
    axs[1, 0].yaxis.get_major_formatter().set_powerlimits((-2, 2))
    axs[1, 0].set_ylabel("Imaginary part")
    axs[1, 0].set_xlabel("Voxel ID")

    axs[1, 1].plot(
        ag_lh_num_imag[::plot_step],
        ag_lh_auto_imag[::plot_step] - ag_lh_num_imag[::plot_step],
        ".", c=colors["red"],
    )
    axs[1, 1].axhline(y=0, color='k')
    axs[1, 1].set_xlabel("Numerical")
    axs[1, 1].set_ylabel("Autodiff - Numerical (imag)")

    fig.suptitle("Adjoint gradient of the likelihood w.r.t. initial conditions")
    fig.tight_layout()
    fig.subplots_adjust(hspace=0.)
path = "../figs/" fig.savefig(path + filename, bbox_inches="tight") ini_file = '../conf/basic_ini.ini' # Input box box_in = borg.forward.BoxModel() config = configparser.ConfigParser() config.read(ini_file) box_in.L = (float(config['system']['L0']), float(config['system']['L1']), float(config['system']['L2'])) box_in.N = (int(config['system']['N0']), int(config['system']['N1']), int(config['system']['N2'])) box_in.xmin = (float(config['system']['corner0']), float(config['system']['corner1']), float(config['system']['corner2'])) # Setup BORG forward model and likelihood model = likelihood.build_gravity_model(None, box_in, ini_file=ini_file) cosmo = utils.get_cosmopar(ini_file) model.setCosmoParams(cosmo) fwd_param = forwards.NullForward(box_in) mylike = likelihood.GaussianLikelihood(model, fwd_param, ini_file) # Create mock data state = borg.likelihood.MarkovState() mylike.initializeLikelihood(state) mylike.updateCosmology(cosmo) s_hat = np.fft.rfftn(np.random.randn(*box_in.N)) / box_in.Ntot ** (0.5) mylike.generateMockData(s_hat, state) # Compute density field output_density = np.zeros(box_in.N) mylike.fwd.forwardModel_v2(s_hat) print('SUM START', output_density.sum()) mylike.fwd.getDensityFinal(output_density) print('SUM NOW', output_density.sum()) L = mylike.logLikelihoodComplex(s_hat, None) print(L) quit() # Autodiff autodiff_gradient = mylike.gradientLikelihoodComplex(s_hat) print(autodiff_gradient.min(), autodiff_gradient.max(), np.sum(np.isfinite(autodiff_gradient)), np.prod(autodiff_gradient.shape)) # Finite differences if run_test: s_hat_epsilon = s_hat.copy() num_gradient = np.zeros(s_hat.shape, dtype=np.complex128) for i, j, k in tqdm( itertools.product(*map(range, [box_in.N[0], box_in.N[1], box_in.N[2] // 2 + 1])), total=box_in.N[0] * box_in.N[1] * (box_in.N[2] // 2 + 1), mininterval=1, ): # +/- epsilon s_hat_epsilon[i, j, k] = s_hat[i, j, k] + epsilon with suppress_stdout(): L = mylike.logLikelihoodComplex(s_hat_epsilon, None) s_hat_epsilon[i, j, k] = s_hat[i, j, k] - epsilon with suppress_stdout(): L -= mylike.logLikelihoodComplex(s_hat_epsilon, None) QQ = L / (2.0 * epsilon) # +/- i * epsilon s_hat_epsilon[i, j, k] = s_hat[i, j, k] + 1j * epsilon with suppress_stdout(): L = mylike.logLikelihoodComplex(s_hat_epsilon, None) s_hat_epsilon[i, j, k] = s_hat[i, j, k] - 1j * epsilon with suppress_stdout(): L -= mylike.logLikelihoodComplex(s_hat_epsilon, None) QQ = QQ + L * 1j / (2.0 * epsilon) s_hat_epsilon[i, j, k] = s_hat[i, j, k] num_gradient[i, j, k] = QQ with h5.File(f"gradients_{box_in.N}.h5", mode="w") as ff: ff["scalars/gradient_array_lh"] = autodiff_gradient ff["scalars/gradient_array_lh_ref"] = num_gradient ff["scalars/gradient_array_prior"] = np.zeros_like(autodiff_gradient) ff["scalars/gradient_array_prior_ref"] = np.zeros_like(autodiff_gradient) slice_step = 2 plot_step = 2 with h5.File(f'gradients_{box_in.N}.h5', 'r') as f: ag_lh_auto_real = f["scalars"]["gradient_array_lh"][::slice_step, :, :].flatten().real ag_lh_auto_imag = f["scalars"]["gradient_array_lh"][::slice_step, :, :].flatten().imag ag_lh_num_real = f["scalars"]["gradient_array_lh_ref"][::slice_step, :, :].flatten().real ag_lh_num_imag = f["scalars"]["gradient_array_lh_ref"][::slice_step, :, :].flatten().imag compare_gradients( ag_lh_auto_real, ag_lh_auto_imag, ag_lh_num_real, ag_lh_num_imag, plot_step, f'gradient_test_{box_in.N[0]}.png', )