diff --git a/mflike/mflike.py b/mflike/mflike.py index e3cf08d..f73b758 100644 --- a/mflike/mflike.py +++ b/mflike/mflike.py @@ -21,13 +21,12 @@ import os from typing import Optional - import numpy as np +from numbers import Real +import sacc from cobaya.conventions import data_path, packages_path_input -from cobaya.likelihoods.base_classes import InstallableLikelihood +from cobaya.likelihoods.base_classes import InstallableLikelihood, _fast_chi_square from cobaya.log import LoggedError -from cobaya.tools import are_different_params_lists - from .theoryforge import TheoryForge @@ -39,11 +38,15 @@ class MFLike(InstallableLikelihood): # attributes set from .yaml input_file: Optional[str] cov_Bbl_file: Optional[str] + data_folder: str data: dict defaults: dict foregrounds: dict top_hat_band: dict systematics_template: dict + lmax_theory: Optional[int] + + _fast_chi_squared = _fast_chi_square() def initialize(self): # Set default values to data member not initialized via yaml file @@ -51,7 +54,7 @@ def initialize(self): self.spec_meta = [] # Set path to data - if (not getattr(self, "path", None)) and (not getattr(self, packages_path_input, None)): + if not getattr(self, "path", None) and not getattr(self, packages_path_input, None): raise LoggedError( self.log, "No path given to MFLike data. Set the likelihood property " @@ -78,75 +81,10 @@ def initialize(self): self.lmax_theory = self.lmax_theory or 9000 self.log.debug(f"Maximum multipole value: {self.lmax_theory}") - self.expected_params_fg = [ - "a_tSZ", - "a_kSZ", - "a_p", - "beta_p", - "a_c", - "beta_c", - "a_s", - "a_gtt", - "a_gte", - "a_gee", - "a_psee", - "a_pste", - "xi", - "T_d", - "beta_s", - "alpha_s", - "T_effd", - "beta_d", - "alpha_dT", - "alpha_dE", - "alpha_tSZ", - "alpha_p", - ] - - self.expected_params_nuis = ["calG_all"] - for f in self.experiments: - self.expected_params_nuis += [ - f"bandint_shift_{f}", - f"calT_{f}", - f"cal_{f}", - f"calE_{f}", - f"alpha_{f}", - ] - + self._constant_nuisance: Optional[dict] = None self.ThFo = TheoryForge(self) self.log.info("Initialized!") - def initialize_non_sampled_params(self): - """ - Allows for systematic params such as polarization angles and ``calT`` - not to be sampled and not to be required - """ - self.non_sampled_params = {} - for exp in self.experiments: - self.non_sampled_params.update({f"calT_{exp}": 1.0, f"alpha_{exp}": 0.0}) - - def initialize_with_params(self): - # Check that the parameters are the right ones - self.initialize_non_sampled_params() - - # Remove the parameters if it appears in the input/samples ones - for par in self.input_params: - self.non_sampled_params.pop(par, None) - - # Finally set the list of nuisance params - self.expected_params_nuis = [ - par for par in self.expected_params_nuis if par not in self.non_sampled_params - ] - - differences = are_different_params_lists( - self.input_params, - self.expected_params_fg + self.expected_params_nuis, - name_A="given", - name_B="expected", - ) - if differences: - raise LoggedError(self.log, f"Configuration error in parameters: {differences}.") - def get_requirements(self): r""" Gets the theory :math:`D_{\ell}` from the Boltzmann solver code used, @@ -158,38 +96,51 @@ def get_requirements(self): def logp(self, **params_values): cl = self.provider.get_Cl(ell_factor=True) - params_values_nocosmo = { - k: params_values[k] for k in self.expected_params_fg + self.expected_params_nuis - } + return self._loglike(cl, **params_values) - return self.loglike(cl, **params_values_nocosmo) - - def loglike(self, cl, 
**params_values_nocosmo):
+    def _loglike(self, cl, **params_values):
         """
         Computes the gaussian log-likelihood
 
         :param cl: the dictionary of theory + foregrounds :math:`D_{\ell}`
-        :param params_values_nocosmo: the dictionary of required foreground + systematic parameters
+        :param params_values: the dictionary of all foreground + systematic parameters
 
         :return: the exact loglikelihood :math:`\ln \mathcal{L}`
         """
-        # This is needed if someone calls the function without initializing the likelihood
-        # (typically a call with a precomputed Cl and no cobaya initialization steps e.g
-        # test_mflike)
-        if not hasattr(self, "non_sampled_params"):
-            self.initialize_non_sampled_params()
-
-        params_values_nocosmo = self.non_sampled_params | params_values_nocosmo
-
-        ps_vec = self._get_power_spectra(cl, **params_values_nocosmo)
+        ps_vec = self._get_power_spectra(cl, **params_values)
         delta = self.data_vec - ps_vec
-        logp = -0.5 * (delta @ self.inv_cov @ delta)
+        # logp = -0.5 * (delta @ self.inv_cov @ delta)
+        logp = -0.5 * self._fast_chi_squared(self.inv_cov, delta)
         logp += self.logp_const
         self.log.debug(
             f"Log-likelihood value computed = {logp} (Χ² = {-2 * (logp - self.logp_const)})"
         )
         return logp
 
+    def loglike(self, cl, **params_values):
+        """
+        Computes the gaussian log-likelihood, callable independently of Cobaya.
+
+        :param cl: the dictionary of theory + foregrounds :math:`D_{\ell}`
+        :param params_values: the dictionary of required foreground + systematic parameters
+
+        :return: the exact loglikelihood :math:`\ln \mathcal{L}`
+        """
+        # This is needed if someone calls the function without initializing the likelihood
+        # (typically a call with a precomputed Cl and no cobaya initialization steps e.g.
+        # test_mflike)
+        if self._constant_nuisance is None:
+            from cobaya.parameterization import expand_info_param
+            # pre-set default nuisance parameters
+            self._constant_nuisance = {p: float(v) for p, info in self.params.items()
+                                       if isinstance(v := expand_info_param(info).get("value"), Real)}
+        if unknown := (set(params_values) - set(self.params)):
+            raise ValueError(f"Unknown parameters: {unknown}")
+
+        params_values = self._constant_nuisance | params_values
+
+        return self._loglike(cl, **params_values)
+
     def prepare_data(self):
         r"""
         Reads the sacc data, extracts the data tracers,
@@ -201,8 +152,6 @@
         range, bandpowers and :math:`D_{\ell}` for each power spectrum
         required in the yaml.
""" - import sacc - data = self.data # Read data input_fname = os.path.join(self.data_folder, self.input_file) @@ -222,7 +171,7 @@ def prepare_data(self): except AttributeError: raise KeyError("You must provide a list of default cuts") - # Translation betwen TEB and sacc C_ell types + # Translation between TEB and sacc C_ell types pol_dict = {"T": "0", "E": "e", "B": "b"} ppol_dict = { "TT": "tt", @@ -234,7 +183,6 @@ def prepare_data(self): "BE": "eb", "TB": "tb", "BT": "tb", - "BB": "bb", } def get_cl_meta(spec): @@ -369,6 +317,11 @@ def get_sacc_names(pol, exp_1, exp_2): ws = s_b.get_bandpower_windows(ind_b) else: ws = s.get_bandpower_windows(ind) + # pre-compute the actual slices of the weights that are needed + nonzeros = np.array([np.nonzero(ws.weight[:, i])[0][[0, -1]] for i in range(ws.weight.shape[1])]) + ws.nonzeros = [slice(i[0], i[1] + 1) for i in nonzeros] + ws.sliced_weights = [np.ascontiguousarray(ws.weight[ws.nonzeros[i], i]) + for i in range(len(nonzeros))] if self.l_bpws is None: # The assumption here is that bandpower windows @@ -454,18 +407,22 @@ def _get_power_spectra(self, cl, **params_values_nocosmo): Dls = {s: cl[s][self.l_bpws] for s, _ in self.lcuts.items()} DlsObs = self.ThFo.get_modified_theory(Dls, **params_values_nocosmo) + return self._get_ps_vec(DlsObs) + + def _get_ps_vec(self, DlsObs): ps_vec = np.zeros_like(self.data_vec) for m in self.spec_meta: p = m["pol"] - i = m["ids"] - w = m["bpw"].weight.T + w = m["bpw"] # If symmetrize = False, the (ET, exp1, exp2) spectrum # will have the flag m["hasYX_xsp"] = True. # In this case, the power spectrum # is computed as DlsObs["te", m["t2"], m["t1"]], to associate # T --> exp2, E --> exp1 dls_obs = DlsObs[p, m["t2"], m["t1"]] if m["hasYX_xsp"] else DlsObs[p, m["t1"], m["t2"]] - clt = w @ dls_obs - ps_vec[i] = clt + for i, nonzero, weights in zip(m["ids"], w.nonzeros, w.sliced_weights): + ps_vec[i] = weights @ dls_obs[nonzero] + # can check against unoptimized version + # assert np.allclose(ps_vec[m["ids"]], np.dot(w.weight.T, dls_obs)) return ps_vec diff --git a/mflike/theoryforge.py b/mflike/theoryforge.py index bedc12f..435a40c 100644 --- a/mflike/theoryforge.py +++ b/mflike/theoryforge.py @@ -3,8 +3,8 @@ The ``TheoryForge`` class applies the foreground spectra and systematics effects to the theory spectra provided by ``MFLike``. To do that, ``TheoryForge`` gets from ``MFLike`` the appropriate -list of arrays, the requested temperature/polarization fields, the :math:`\ell` ranges, the list of -expected parameters, a dictionary of the passbands read from the ``sacc`` file: +list of arrays, the requested temperature/polarization fields, the :math:`\ell` ranges, +a dictionary of the passbands read from the ``sacc`` file: .. code-block:: python @@ -55,7 +55,8 @@ :math:`\nu_{\rm{low/high}} = \nu_{\rm{center}}(1 \mp \delta/2) + \Delta^{\nu}_{\rm band}` (with :math:`\Delta^{\nu}_{\rm band}` being the possible bandpass shift). ``bandwidth`` has to be 0 if ``nstep`` = 1, > 0 otherwise. - ``bandwidth`` can be a list if you want a different width for each band e.g. ``bandwidth: [0.3,0.2,0.3]`` for 3 bands. + ``bandwidth`` can be a list if you want a different width for each band + e.g. ``bandwidth: [0.3,0.2,0.3]`` for 3 bands. The effective frequencies, used as central frequencies to build the bandpasses, are read from the ``bands`` dictionary as before. 
To build a Dirac delta, use: @@ -69,10 +70,9 @@ """ import os -from itertools import product - import numpy as np from cobaya.log import LoggedError +from scipy import constants # Converts from cmb temperature to differential source intensity @@ -94,8 +94,6 @@ def _cmb2bb(nu): :return: the array :math:`\frac{\partial B_{\nu}}{\partial T}`. See note above. """ - from scipy import constants - T_CMB = 2.72548 x = nu * constants.h * 1e9 / constants.k / T_CMB return np.exp(x) * (nu * x / np.expm1(x)) ** 2 @@ -108,7 +106,7 @@ def __init__(self, mflike=None): self.log = logging.getLogger(self.__class__.__name__.lower()) self.data_folder = None - self.experiments = np.array(["LAT_93", "LAT_145", "LAT_225"]) + self.experiments = ["LAT_93", "LAT_145", "LAT_225"] self.foregrounds = { "normalisation": {"nu_0": 150.0, "ell_0": 3000, "T_CMB": 2.725}, "components": { @@ -133,8 +131,6 @@ def __init__(self, mflike=None): self.bands = mflike.bands self.l_bpws = mflike.l_bpws self.requested_cls = mflike.requested_cls - self.expected_params_fg = mflike.expected_params_fg - self.expected_params_nuis = mflike.expected_params_nuis self.spec_meta = mflike.spec_meta self.defaults_cuts = mflike.defaults @@ -164,6 +160,7 @@ def __init__(self, mflike=None): raise LoggedError( self.log, "One band has width = 0, set a positive width and run again" ) + self._bandint_shift_params = [f"bandint_shift_{f}" for f in self.experiments] # Takes care of the bandpass construction. It returns a list of nu-transmittance # for each frequency or an array with the effective freqs. @@ -192,8 +189,7 @@ def _bandpass_construction(self, **params): """ data_are_monofreq = False self.bandint_freqs = [] - for iexp, exp in enumerate(self.experiments): - bandpar = f"bandint_shift_{exp}" + for iexp, (bandpar, exp) in enumerate(zip(self._bandint_shift_params, self.experiments)): # Only temperature bandpass for the time being bands = self.bands[f"{exp}_s0"] nu_ghz, bp = np.asarray(bands["nu"]), np.asarray(bands["bandpass"]) @@ -239,7 +235,7 @@ def _bandpass_construction(self, **params): self.bandint_freqs = np.asarray(self.bandint_freqs) self.log.info("bandpass is delta function, no band integration performed") - def get_modified_theory(self, Dls, **params): + def get_modified_theory(self, Dls, **nuis_params): r""" Takes the theory :math:`D_{\ell}`, sums it to the total foreground power spectrum (possibly computed with bandpass @@ -248,35 +244,31 @@ def get_modified_theory(self, Dls, **params): polarization angles rotation and systematic templates. 
:param Dls: CMB theory spectra
-        :param \**params: dictionary of nuisance and foregrounds parameters
+        :param \**nuis_params: dictionary of nuisance and foregrounds parameters
 
         :return: the CMB+foregrounds :math:`D_{\ell}` dictionary,
                  modulated by systematics
         """
-        fg_params = {k: params[k] for k in self.expected_params_fg}
-        nuis_params = {k: params[k] for k in self.expected_params_nuis}
-
         # compute bandpasses at each step only if bandint_shift params are not null
         # and bandint_freqs has been computed at least once
-        if np.all(
-            np.array([nuis_params[k] for k in nuis_params.keys() if "bandint_shift_" in k]) == 0.0
-        ):
-            if not hasattr(self, "bandint_freqs"):
-                self.log.info("Computing bandpass at first step, no shifts")
-                self._bandpass_construction(**nuis_params)
-        else:
+        if any(nuis_params[k] for k in self._bandint_shift_params):
+            self._bandpass_construction(**nuis_params)
+        elif not hasattr(self, "bandint_freqs"):
+            self.log.info("Computing bandpass at first step, no shifts")
             self._bandpass_construction(**nuis_params)
 
-        fg_dict = self._get_foreground_model(**fg_params)
+        model = self._get_foreground_model_arrays(nuis_params)
+        # get total foregrounds; model is dictionary of n x n arrays for each frequency combo
+        totals = [np.sum([model[s, comp] for comp in self.fg_component_list[s]], axis=0)
+                  for s in self.requested_cls]
 
-        cmbfg_dict = {}
-        # Sum CMB and FGs
-        for exp1, exp2 in product(self.experiments, self.experiments):
-            for s in self.requested_cls:
-                cmbfg_dict[s, exp1, exp2] = Dls[s] + fg_dict[s, "all", exp1, exp2]
+        cmbfg_dict = {(s, exp1, exp2): Dls[s] + total_fg[i, j]  # Sum CMB and FGs
+                      for i, exp1 in enumerate(self.experiments)
+                      for j, exp2 in enumerate(self.experiments)
+                      for s, total_fg in zip(self.requested_cls, totals)}
 
         # Apply alm based calibration factors
-        cmbfg_dict = self._get_calibrated_spectra(cmbfg_dict, **nuis_params)
+        self._calibrate_spectra(cmbfg_dict, **nuis_params)
 
         # Introduce spectra rotations
         cmbfg_dict = self._get_rotated_spectra(cmbfg_dict, **nuis_params)
@@ -354,26 +346,22 @@ def _init_foreground_model(self):
         self.fg_component_list = {s: components[s] for s in self.requested_cls}
 
     # Gets the actual power spectrum of foregrounds given the passed parameters
-    def _get_foreground_model(self, ell=None, freqs_order=None, **fg_params):
+    def _get_foreground_model_arrays(self, fg_params, ell=None):
         r"""
         Gets the foreground power spectra for each component computed by ``fgspectra``.
         The computation assumes the bandpass transmissions computed in ``_bandpass_construction``
         and integration in frequency is performed if the passbands are not Dirac delta.
 
+        :param fg_params: parameters of the foreground components
         :param ell: ell range. If ``None`` the default range
-             set in ``mflike.l_bpws`` is used
-        :param freqs_order: list of the effective frequencies for each channel
-             used to compute the foreground components. 
Useful when - this function is called outside of mflike, used in place of - ``self.experiments`` - :param \**fg_params: parameters of the foreground components + set in ``mflike.l_bpws`` is used - :return: the foreground dictionary + :return: the foreground dictionary of arrays """ # if ell = None, it uses the l_bpws, otherwise the ell array provided # useful to make tests at different l_max than the data - if not hasattr(ell, "__len__"): + if ell is None: ell = self.l_bpws ell_0 = self.fg_ell_0 nu_0 = self.fg_nu_0 @@ -479,28 +467,43 @@ def _get_foreground_model(self, ell=None, freqs_order=None, **fg_params): {"ell": ell, "ell_0": 500.0, "alpha": fg_params["alpha_dE"]}, ) + return model + + def _get_foreground_model(self, ell=None, freqs_order=None, **fg_params): + r""" + Gets the foreground power spectra for each component computed by ``fgspectra``. + The computation assumes the bandpass transmissions computed in ``_bandpass_construction`` + and integration in frequency is performed if the passbands are not Dirac delta. + + :param ell: ell range. If ``None`` the default range + set in ``mflike.l_bpws`` is used + :param freqs_order: list of the effective frequencies for each channel + used to compute the foreground components. Useful when + this function is called outside of mflike, used in place of + ``self.experiments`` + :param \**fg_params: parameters of the foreground components + + :return: the foreground dictionary + """ + model = self._get_foreground_model_arrays(fg_params, ell=ell) + experiments = self.experiments if freqs_order is None else freqs_order fg_dict = {} - if not hasattr(freqs_order, "__len__"): - experiments = self.experiments - else: - experiments = freqs_order for c1, exp1 in enumerate(experiments): for c2, exp2 in enumerate(experiments): for s in self.requested_cls: - fg_dict[s, "all", exp1, exp2] = np.zeros(len(ell)) + sum_all = np.zeros(len(ell)) for comp in self.fg_component_list[s]: + term = model[s, comp][c1, c2] if comp == "tSZ_and_CIB": fg_dict[s, "tSZ", exp1, exp2] = model[s, "tSZ"][c1, c2] fg_dict[s, "cibc", exp1, exp2] = model[s, "cibc"][c1, c2] fg_dict[s, "tSZxCIB", exp1, exp2] = ( - model[s, comp][c1, c2] - - model[s, "tSZ"][c1, c2] - - model[s, "cibc"][c1, c2] + term - model[s, "tSZ"][c1, c2] - model[s, "cibc"][c1, c2] ) - fg_dict[s, "all", exp1, exp2] += model[s, comp][c1, c2] else: - fg_dict[s, comp, exp1, exp2] = model[s, comp][c1, c2] - fg_dict[s, "all", exp1, exp2] += fg_dict[s, comp, exp1, exp2] + fg_dict[s, comp, exp1, exp2] = term + sum_all += term + fg_dict[s, "all", exp1, exp2] = sum_all return fg_dict @@ -513,9 +516,9 @@ def _get_foreground_model(self, ell=None, freqs_order=None, **fg_params): ## A global calibration factor calG_all is also considered. ########################################################################### - def _get_calibrated_spectra(self, dls_dict, **nuis_params): + def _calibrate_spectra(self, dls_dict, **nuis_params): r""" - Calibrates the spectra through calibration factors at + Calibrates the spectra in place through calibration factors at the map level: .. math:: @@ -535,36 +538,30 @@ def _get_calibrated_spectra(self, dls_dict, **nuis_params): {\rm cal}^{\nu_1}_{\rm E}\, {\rm cal}^{\nu_2}_{\rm E}}\, D^{EE, \nu_1 \nu_2}_{\ell} - It uses the ``syslibrary.syslib_mflike.Calibration_alm`` function. 
- :param dls_dict: the CMB+foregrounds :math:`D_{\ell}` dictionary + :param dls_dict: the CMB+foregrounds :math:`D_{\ell}` dictionary, calibrated in place :param \**nuis_params: dictionary of nuisance parameters - :return: dictionary of calibrated CMB+foregrounds :math:`D_{\ell}` - """ - from syslibrary import syslib_mflike as syl + """ # allowing for not having calT_{exp} in the yaml cal_pars = {} + calG_all = 1 / nuis_params["calG_all"] if "tt" in self.requested_cls or "te" in self.requested_cls: - cal = nuis_params["calG_all"] * np.array( - [ - nuis_params[f"cal_{exp}"] * nuis_params.get(f"calT_{exp}", 1) - for exp in self.experiments - ] - ) - cal_pars["t"] = 1 / cal + cal_pars["t"] = {exp: calG_all / (nuis_params[f"cal_{exp}"] * nuis_params.get(f"calT_{exp}", 1)) + for exp in self.experiments} if "ee" in self.requested_cls or "te" in self.requested_cls: - cal = nuis_params["calG_all"] * np.array( - [nuis_params[f"cal_{exp}"] * nuis_params[f"calE_{exp}"] for exp in self.experiments] - ) - cal_pars["e"] = 1 / cal + cal_pars["e"] = {exp: calG_all / (nuis_params[f"cal_{exp}"] * nuis_params[f"calE_{exp}"]) + for exp in self.experiments} - calib = syl.Calibration_alm(ell=self.l_bpws, spectra=dls_dict) + self._mul_calibrations(dls_dict, cal1=cal_pars, cal2=cal_pars) - return calib(cal1=cal_pars, cal2=cal_pars, nu=self.experiments) + def _mul_calibrations(self, dls_dict, cal1, cal2): + for (spec, freq1, freq2), cl in dls_dict.items(): + if (cal := (cal1[spec[0]][freq1] * cal2[spec[1]][freq2])) != 1: + cl *= cal ########################################################################### ## This part deals with rotation of spectra @@ -610,17 +607,17 @@ def _get_rotated_spectra(self, dls_dict, **nuis_params): def _init_template_from_file(self): """ Reads the systematics template from file, using - the ``syslibrary.syslib_mflike.ReadTemplateFromFile`` + the ``syslibrary.syslib.ReadTemplateFromFile`` function. """ if not self.systematics_template.get("rootname"): raise LoggedError(self.log, "Missing 'rootname' for systematics template!") - from syslibrary import syslib_mflike as syl + from syslibrary import syslib # decide where to store systematics template. # Currently stored inside syslibrary package - templ_from_file = syl.ReadTemplateFromFile(rootname=self.systematics_template["rootname"]) + templ_from_file = syslib.ReadTemplateFromFile(rootname=self.systematics_template["rootname"]) self.dltempl_from_file = templ_from_file(ell=self.l_bpws) def _get_template_from_file(self, dls_dict, **nuis_params): @@ -646,7 +643,7 @@ def _get_template_from_file(self, dls_dict, **nuis_params): for i1, exp1 in enumerate(self.experiments): for i2, exp2 in enumerate(self.experiments): dls_dict[cls, exp1, exp2] += ( - templ_pars[cls][i1][i2] * self.dltempl_from_file[cls, exp1, exp2] + templ_pars[cls][i1][i2] * self.dltempl_from_file[cls, exp1, exp2] ) return dls_dict diff --git a/pyproject.toml b/pyproject.toml index c24fd61..5277cb4 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -20,6 +20,7 @@ classifiers = [ "Programming Language :: Python :: 3.9", "Programming Language :: Python :: 3.10", "Programming Language :: Python :: 3.11", + "Programming Language :: Python :: 3.12" ] requires-python = ">=3.9.0" dependencies = [