diff --git a/tests/data/conf_covariance_ssc_fsky.yaml b/tests/data/conf_covariance_ssc_fsky.yaml
new file mode 100644
index 00000000..e49033ac
--- /dev/null
+++ b/tests/data/conf_covariance_ssc_fsky.yaml
@@ -0,0 +1,37 @@
+tjpcov:
+  # sacc input file
+  sacc_file: ./tests/benchmarks/32_DES_tjpcov_bm/cls_cov.fits
+
+  # 'set' from parameters OR pass CCL cosmology object OR yaml file
+  cosmo: 'set'
+
+  outdir: ./tests/tmp/
+
+  # Survey params:
+  # 5 lens bins
+  Ngal_DESgc__0: 26
+
+  Ngal_DESwl__0: 26
+  Ngal_DESwl__1: 26
+  # # constant bin sigma_e
+  sigma_e_DESwl__0: 0.26
+  sigma_e_DESwl__1: 0.26
+
+  # linear bias for lenses, constant per redshift bin (example notebook)
+  bias_DESgc__0: 1.48
+
+  # IA: 0.5
+
+parameters:
+  # Not used for now (read by ccl.cosmo):
+  Omega_c: 0.2640
+  Omega_b: 0.0493
+  h: 0.6736
+  n_s: 0.9649
+  sigma8: 0.8111
+  w0: -1
+  wa: 0
+  transfer_function: 'boltzmann_camb'
+
+GaussianFsky:
+  fsky: 0.05
diff --git a/tests/test_covariance_fourier_ssc_fsky.py b/tests/test_covariance_fourier_ssc_fsky.py
new file mode 100644
index 00000000..c9196b91
--- /dev/null
+++ b/tests/test_covariance_fourier_ssc_fsky.py
@@ -0,0 +1,305 @@
+#!/usr/bin/python3
+import os
+import shutil
+
+import numpy as np
+import pyccl as ccl
+import pytest
+import sacc
+import yaml
+
+from tjpcov.covariance_fourier_ssc_fsky import FourierSSCHaloModelFsky
+
+ROOT = "./tests/benchmarks/32_DES_tjpcov_bm/"
+INPUT_YML_SSC = "./tests/data/conf_covariance_ssc_fsky.yaml"
+OUTDIR = "./tests/tmp/"
+NSIDE = 32
+
+
+def setup_module():
+    os.makedirs(OUTDIR, exist_ok=True)
+
+
+def teardown_module():
+    shutil.rmtree(OUTDIR)
+
+
+@pytest.fixture(autouse=True)
+def teardown_test():
+    clean_outdir()
+
+
+def clean_outdir():
+    os.system(f"rm -f {OUTDIR}*")
+
+
+@pytest.fixture
+def sacc_file():
+    return sacc.Sacc.load_fits(ROOT + "cls_cov.fits")
+
+
+@pytest.fixture
+def cov_fssc():
+    return FourierSSCHaloModelFsky(INPUT_YML_SSC)
+
+
+def get_config():
+    with open(INPUT_YML_SSC) as f:
+        config = yaml.safe_load(f)
+    return config
+
+
+def get_halo_model(cosmo):
+    md = ccl.halos.MassDef200m
+    mf = ccl.halos.MassFuncTinker08(mass_def=md)
+    hb = ccl.halos.HaloBiasTinker10(mass_def=md)
+    hmc = ccl.halos.HMCalculator(mass_function=mf, halo_bias=hb, mass_def=md)
+
+    return hmc
+
+
+def get_NFW_profile():
+    md = ccl.halos.MassDef200m
+    cm = ccl.halos.ConcentrationDuffy08(mass_def=md)
+    pNFW = ccl.halos.HaloProfileNFW(mass_def=md, concentration=cm)
+
+    return pNFW
+
+
+def test_smoke():
+    FourierSSCHaloModelFsky(INPUT_YML_SSC)
+
+
+@pytest.mark.parametrize(
+    "tracer_comb1,tracer_comb2",
+    [
+        (("DESgc__0", "DESgc__0"), ("DESgc__0", "DESgc__0")),
+        (("DESgc__0", "DESwl__0"), ("DESwl__0", "DESwl__0")),
+        (("DESgc__0", "DESgc__0"), ("DESwl__0", "DESwl__0")),
+        (("DESwl__0", "DESwl__0"), ("DESwl__0", "DESwl__0")),
+        (("DESwl__0", "DESwl__0"), ("DESwl__1", "DESwl__1")),
+    ],
+)
+def test_get_covariance_block(cov_fssc, tracer_comb1, tracer_comb2):
+    # TJPCov covariance
+    cosmo = cov_fssc.get_cosmology()
+    s = cov_fssc.io.get_sacc_file()
+    ell, _ = s.get_ell_cl("cl_00", "DESgc__0", "DESgc__0")
+    fsky = cov_fssc.fsky
+    cov_ssc = cov_fssc.get_covariance_block(
+        tracer_comb1=tracer_comb1,
+        tracer_comb2=tracer_comb2,
+        include_b_modes=False,
+    )
+
+    # Check saved file
+    covf = np.load(
+        OUTDIR + "ssc_{}_{}_{}_{}.npz".format(*tracer_comb1, *tracer_comb2)
+    )
+    assert (
+        np.max(np.abs((covf["cov_nob"] + 1e-100) / (cov_ssc + 1e-100) - 1))
+        < 1e-10
+    )
+
+    # CCL covariance
+    na = ccl.ccllib.get_pk_spline_na(cosmo.cosmo)
+    a_arr, _ = ccl.ccllib.get_pk_spline_a(cosmo.cosmo, na, 0)
+
+    bias1 = 1
+    is_nc1 = False
+    if "gc" in tracer_comb1[0]:
+        bias1 = cov_fssc.bias_lens[tracer_comb1[0]]
+        is_nc1 = True
+
+    bias2 = 1
+    is_nc2 = False
+    if "gc" in tracer_comb1[1]:
+        bias2 = cov_fssc.bias_lens[tracer_comb1[1]]
+        is_nc2 = True
+
+    bias3 = 1
+    is_nc3 = False
+    if "gc" in tracer_comb2[0]:
+        bias3 = cov_fssc.bias_lens[tracer_comb2[0]]
+        is_nc3 = True
+
+    bias4 = 1
+    is_nc4 = False
+    if "gc" in tracer_comb2[1]:
+        bias4 = cov_fssc.bias_lens[tracer_comb2[1]]
+        is_nc4 = True
+
+    hmc = get_halo_model(cosmo)
+    nfw_profile = get_NFW_profile()
+    tkk_ssc = ccl.halos.halomod_Tk3D_SSC_linear_bias(
+        cosmo,
+        hmc,
+        prof=nfw_profile,
+        bias1=bias1,
+        bias2=bias2,
+        bias3=bias3,
+        bias4=bias4,
+        is_number_counts1=is_nc1,
+        is_number_counts2=is_nc2,
+        is_number_counts3=is_nc3,
+        is_number_counts4=is_nc4,
+    )
+
+    sigma2_B = ccl.sigma2_B_disc(cosmo, a_arr=a_arr, fsky=fsky)
+
+    ccl_tracers, _ = cov_fssc.get_tracer_info()
+    tr1 = ccl_tracers[tracer_comb1[0]]
+    tr2 = ccl_tracers[tracer_comb1[1]]
+    tr3 = ccl_tracers[tracer_comb2[0]]
+    tr4 = ccl_tracers[tracer_comb2[1]]
+    cov_ccl = ccl.angular_cl_cov_SSC(
+        cosmo,
+        tracer1=tr1,
+        tracer2=tr2,
+        ell=ell,
+        t_of_kk_a=tkk_ssc,
+        sigma2_B=(a_arr, sigma2_B),
+        tracer3=tr3,
+        tracer4=tr4,
+    )
+
+    assert np.max(np.fabs(np.diag(cov_ssc / cov_ccl - 1))) < 1e-5
+    assert np.max(np.fabs(cov_ssc / cov_ccl - 1)) < 1e-3
+
+    # Check you get zeroed B-modes
+    cov_ssc_zb = cov_fssc.get_covariance_block(
+        tracer_comb1=tracer_comb1,
+        tracer_comb2=tracer_comb2,
+        include_b_modes=True,
+    )
+    # Check saved
+    assert (
+        np.max(np.abs((covf["cov"] + 1e-100) / (cov_ssc_zb + 1e-100) - 1))
+        < 1e-10
+    )
+
+    ix1 = s.indices(tracers=tracer_comb1)
+    ix2 = s.indices(tracers=tracer_comb2)
+    ncell1 = int(ix1.size / ell.size)
+    ncell2 = int(ix2.size / ell.size)
+
+    # The covariance will have all correlations, including when EB == BE
+    if (ncell1 == 3) and (tracer_comb1[0] == tracer_comb1[1]):
+        ncell1 += 1
+    if (ncell2 == 3) and (tracer_comb2[0] == tracer_comb2[1]):
+        ncell2 += 1
+
+    assert cov_ssc_zb.shape == (ell.size * ncell1, ell.size * ncell2)
+    # Check the blocks
+    cov_ssc_zb = cov_ssc_zb.reshape((ell.size, ncell1, ell.size, ncell2))
+    # Check the reshape has the correct ordering
+    assert cov_ssc_zb[:, 0, :, 0].flatten() == pytest.approx(
+        cov_ssc.flatten(), rel=1e-10
+    )
+    assert np.all(cov_ssc_zb[:, 1::, :, 1::] == 0)
+
+    # Check get_SSC_cov reads file
+    covf = np.load(
+        OUTDIR + "ssc_{}_{}_{}_{}.npz".format(*tracer_comb1, *tracer_comb2)
+    )
+    cov_ssc = cov_fssc.get_covariance_block(
+        tracer_comb1=tracer_comb1,
+        tracer_comb2=tracer_comb2,
+        include_b_modes=False,
+    )
+    assert np.all(covf["cov_nob"] == cov_ssc)
+
+    cov_ssc_zb = cov_fssc.get_covariance_block(
+        tracer_comb1=tracer_comb1,
+        tracer_comb2=tracer_comb2,
+        include_b_modes=True,
+    )
+
+    assert np.all(covf["cov"] == cov_ssc_zb)
+
+
+def test_get_covariance_block_WL_benchmark(cov_fssc):
+    # Based on the CCL benchmark test in benchmarks/test_covariances.py
+    #
+    # Compare against Benjamin Joachimi's code. An overview of the methodology
+    # is given in appendix E.2 of 2007.01844.
+    #
+    # The approximation is different, so one cannot expect perfect agreement.
+    # However, an agreement at the ~5% level should be enough to convince
+    # ourselves that get_SSC_cov is doing what it's supposed to do.
+
+    # Read benchmark data
+    data_dir = "tests/benchmarks/SSC/"
+    z, nofz = np.loadtxt(
+        os.path.join(data_dir, "ssc_WL_nofz.txt"), unpack=True
+    )
+    ell = np.loadtxt(os.path.join(data_dir, "ssc_WL_ell.txt"))
+    cov_ssc_bj = np.loadtxt(os.path.join(data_dir, "ssc_WL_cov_matrix.txt"))
+
+    # TJPCov SSC
+    h = 0.7
+    cosmo = ccl.Cosmology(
+        Omega_c=0.25, Omega_b=0.05, h=h, n_s=0.97, sigma8=0.8, m_nu=0.0
+    )
+    WL_tracer = ccl.WeakLensingTracer(cosmo, dndz=(z, nofz))
+
+    # Trick TJPCov into using the ells we want instead of the ones in the file
+    s = sacc.Sacc()
+    s.tracers = cov_fssc.io.get_sacc_file().tracers.copy()
+    for dt in ["cl_ee", "cl_eb", "cl_be", "cl_bb"]:
+        s.add_ell_cl(dt, "DESwl__0", "DESwl__0", ell, np.ones_like(ell))
+
+    # Modify the tjpcov instance
+    cov_fssc.io.sacc_file = s
+    # Pass fsky to generate a disc mask.
+    cov_fssc.fsky = 0.05
+    cov_fssc.cosmo = cosmo
+
+    trs = ("DESwl__0", "DESwl__0")
+    cov_fssc.get_tracer_info()
+    cov_fssc.ccl_tracers.update({"DESwl__0": WL_tracer})
+    cov_ssc = cov_fssc.get_covariance_block(trs, trs, include_b_modes=False)
+
+    # Tests
+    var_ssc_ccl = np.diag(cov_ssc)
+    off_diag_1_ccl = np.diag(cov_ssc, k=1)
+
+    # At large scales, CCL uses a different convention for the Limber
+    # approximation. This factor accounts for that difference.
+    ccl_limber_shear_fac = (
+        np.sqrt((ell - 1) * ell * (ell + 1) * (ell + 2)) / (ell + 1 / 2) ** 2
+    )
+    cov_ssc_bj_corrected = cov_ssc_bj * np.outer(
+        ccl_limber_shear_fac**2, ccl_limber_shear_fac**2
+    )
+    var_bj = np.diag(cov_ssc_bj_corrected)
+    off_diag_1_bj = np.diag(cov_ssc_bj_corrected, k=1)
+
+    # TODO: After the refactoring, and freeing some of the halo model choices
+    # for the SSC, we should update this test to a greater accuracy.
+    #
+    # Note: the array of a in the CCL test: a = np.linspace(1/(1+6), 1, 100)
+    #
+    # - If I use the halomod_Tk3D_SSC, the same mass function
+    #   (MassFuncTinker10, instead of MassFuncTinker08, as is currently in
+    #   tjpcov/main.py) and the array of a in the CCL test, the test passes
+    #   with the same precision as in CCL (<3%)
+    # - If I change the mass function to MassFuncTinker08, the error goes up
+    #   to 6.8%
+    # - If I use the halomod_Tk3D_SSC_linear_bias, MassFuncTinker10 and the
+    #   same a's as in the test, the error goes to ~6%. (This, then, is the
+    #   error introduced by the approximation)
+    # - If I use the halomod_Tk3D_SSC_linear_bias, MassFuncTinker08 and the
+    #   same a's as in the test, the test passes with <3% error
+    # - If I use the a's as implemented in tjpcov/main.py (i.e. from the pk
+    #   splines): the error goes up to ~18% irrespective of the Tk3D or mass
+    #   function that one chooses. That's why the threshold is 0.2
+    #
+    # This error instability seems to appear only at the first ell, so the
+    # test is done without including the first column & row of the
+    # covariance.
+
+    assert np.max(np.fabs(var_ssc_ccl / var_bj - 1)[1:]) < 0.07
+    assert np.max(np.fabs(off_diag_1_ccl / off_diag_1_bj - 1)[1:]) < 0.07
+    assert (
+        np.max(np.fabs(cov_ssc / cov_ssc_bj_corrected - 1)[1:][:, 1:]) < 0.07
+    )
diff --git a/tjpcov/__init__.py b/tjpcov/__init__.py
index 01461d65..719eb7cc 100644
--- a/tjpcov/__init__.py
+++ b/tjpcov/__init__.py
@@ -15,6 +15,8 @@ def covariance_from_name(name):
         from .covariance_fourier_gaussian_nmt import FourierGaussianNmt as Cov
     elif name == "FourierSSCHaloModel":
         from .covariance_fourier_ssc import FourierSSCHaloModel as Cov
+    elif name == "FourierSSCHaloModelFsky":
+        from .covariance_fourier_ssc_fsky import FourierSSCHaloModelFsky as Cov
     elif name == "ClusterCountsSSC":
         from .covariance_cluster_counts_ssc import ClusterCountsSSC as Cov
     elif name == "ClusterCountsGaussian":
diff --git a/tjpcov/clusters_helpers.py b/tjpcov/clusters_helpers.py
index eba73e66..604f3cfe 100644
--- a/tjpcov/clusters_helpers.py
+++ b/tjpcov/clusters_helpers.py
@@ -142,7 +142,7 @@ def two_fast_algorithm(self, z1, z2):
             print(
                 err,
                 f"""\n
-                Value you tried to interpolate: {max(r1,r2)} Mpc,
+                Value you tried to interpolate: {max(r1, r2)} Mpc,
                 Input r {r1}, {r2}
                 Valid range range: [{self.r_grid[self.idx_min]},
                 {self.r_grid[self.idx_max]}]
diff --git a/tjpcov/covariance_fourier_ssc.py b/tjpcov/covariance_fourier_ssc.py
index efc8cdee..64820363 100644
--- a/tjpcov/covariance_fourier_ssc.py
+++ b/tjpcov/covariance_fourier_ssc.py
@@ -1,5 +1,4 @@
 import os
-
 import healpy as hp
 import numpy as np
 import pyccl as ccl
@@ -28,6 +27,40 @@ def __init__(self, config):
         self.ssc_conf = self.config.get("SSC", {})
 
+    def _get_sigma2_B(self, cosmo, a_arr, tr=None):
+        """Returns the variance of the projected linear density field,
+        for the case when a survey mask is provided.
+
+        Args:
+            cosmo (:class:`~pyccl.cosmology.Cosmology`): a Cosmology object.
+            a_arr (:obj:`float`, `array` or :obj:`None`): an array of
+                scale factor values at which to evaluate
+                the projected variance.
+            tr (:obj:`dict`): dictionary containing the
+                tracer name combinations.
+
+        Returns:
+            - (:obj:`float` or `array`): projected variance.
+        """
+
+        # TODO: Optimize this, avoid computing the mask_wl for all blocks.
+        # Note that this is correct for same footprint cross-correlations. In
+        # case of multisurvey analyses this approximation might break.
+        masks = self.get_masks_dict(tr, {})
+
+        m12 = masks[1] * masks[2]
+        m34 = masks[3] * masks[4]
+        area = hp.nside2pixarea(hp.npix2nside(m12.size))
+
+        alm = hp.map2alm(m12)
+        blm = hp.map2alm(m34)
+
+        mask_wl = hp.alm2cl(alm, blm)
+        mask_wl *= 2 * np.arange(mask_wl.size) + 1
+        mask_wl /= np.sum(m12) * np.sum(m34) * area**2
+
+        return ccl.sigma2_B_from_mask(cosmo, a_arr=a_arr, mask_wl=mask_wl)
+
     def get_covariance_block(
         self,
         tracer_comb1,
         tracer_comb2,
@@ -141,23 +174,7 @@ def get_covariance_block(
             is_number_counts4=isnc[4],
         )
 
-        masks = self.get_masks_dict(tr, {})
-        # TODO: Optimize this, avoid computing the mask_wl for all blocks.
-        # Note that this is correct for same footprint cross-correlations. In
-        # case of multisurvey analyses this approximation might break.
-        m12 = masks[1] * masks[2]
-        m34 = masks[3] * masks[4]
-        area = hp.nside2pixarea(hp.npix2nside(m12.size))
-
-        alm = hp.map2alm(m12)
-        blm = hp.map2alm(m34)
-
-        mask_wl = hp.alm2cl(alm, blm)
-        mask_wl *= 2 * np.arange(mask_wl.size) + 1
-        mask_wl /= np.sum(m12) * np.sum(m34) * area**2
-
-        # TODO: Allow using fsky instead of the masks?
-        sigma2_B = ccl.sigma2_B_from_mask(cosmo, a_arr=a, mask_wl=mask_wl)
+        sigma2_B = self._get_sigma2_B(cosmo, a, tr=tr)
 
         ell = self.get_ell_eff()
         cov_ssc = ccl.covariances.angular_cl_cov_SSC(
diff --git a/tjpcov/covariance_fourier_ssc_fsky.py b/tjpcov/covariance_fourier_ssc_fsky.py
new file mode 100644
index 00000000..30b74cef
--- /dev/null
+++ b/tjpcov/covariance_fourier_ssc_fsky.py
@@ -0,0 +1,45 @@
+import pyccl as ccl
+from .covariance_fourier_ssc import FourierSSCHaloModel
+
+
+class FourierSSCHaloModelFsky(FourierSSCHaloModel):
+    """Class to compute the CellxCell Halo Model Super Sample Covariance
+    with the fsky approximation.
+
+    The SSC is computed in CCL with the "linear bias" approximation using
+    :func:`pyccl.halos.halo_model.halomod_Tk3D_SSC_linear_bias`.
+    """
+
+    cov_type = "SSC"
+
+    def __init__(self, config):
+        """Initialize the class with a config file or dictionary.
+
+        Args:
+            config (dict or str): If dict, it uses the configuration
+                dictionary directly. If string, it assumes a YAML file and
+                parses it.
+        """
+        super().__init__(config)
+
+        self.fsky = self.config["GaussianFsky"].get("fsky", None)
+        if self.fsky is None:
+            raise ValueError(
+                "You need to set fsky for FourierSSCHaloModelFsky"
+            )
+
+    def _get_sigma2_B(self, cosmo, a_arr, tr=None):
+        """Returns the variance of the projected linear density field
+        for the fsky/disc approximation case.
+
+        Args:
+            cosmo (:class:`~pyccl.cosmology.Cosmology`): a Cosmology object.
+            a_arr (:obj:`float`, `array` or :obj:`None`): an array of
+                scale factor values at which to evaluate
+                the projected variance.
+            tr (:obj:`dict`): dictionary containing the
+                tracer name combinations.
+        Returns:
+            - (:obj:`float` or `array`): projected variance.
+        """
+        return ccl.sigma2_B_disc(cosmo, a_arr=a_arr, fsky=self.fsky)
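
For orientation, here is a minimal usage sketch of the new class; it is not part of the patch and simply mirrors what the test above does. The config path and tracer names are taken from the test fixtures in this PR; a real analysis would point to its own sacc file and survey parameters.

```python
# Hypothetical usage sketch, assuming the config and sacc file shipped with
# the tests in this PR.
from tjpcov.covariance_fourier_ssc_fsky import FourierSSCHaloModelFsky

# The YAML must contain a "GaussianFsky" block with an "fsky" entry;
# otherwise the constructor raises a ValueError.
cov_fssc = FourierSSCHaloModelFsky("./tests/data/conf_covariance_ssc_fsky.yaml")

# Compute one SSC block for a pair of tracer combinations (E-modes only).
block = cov_fssc.get_covariance_block(
    tracer_comb1=("DESgc__0", "DESgc__0"),
    tracer_comb2=("DESwl__0", "DESwl__0"),
    include_b_modes=False,
)
print(block.shape)
```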
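The `covariance_from_name` branch added in `tjpcov/__init__.py` is what lets config-driven runs select the new term by its class name. A quick sketch of that lookup (again, illustrative only, not part of the patch):

```python
# Resolve the new covariance class by name, as registered in tjpcov/__init__.py.
from tjpcov import covariance_from_name

cls = covariance_from_name("FourierSSCHaloModelFsky")
assert cls.__name__ == "FourierSSCHaloModelFsky"
```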