Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Parameters defaults etc #85

Merged
merged 6 commits into from
Sep 4, 2024
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
151 changes: 54 additions & 97 deletions mflike/mflike.py
Original file line number Diff line number Diff line change
Expand Up @@ -21,13 +21,12 @@

import os
from typing import Optional

import numpy as np
from numbers import Real
import sacc
from cobaya.conventions import data_path, packages_path_input
from cobaya.likelihoods.base_classes import InstallableLikelihood
from cobaya.likelihoods.base_classes import InstallableLikelihood, _fast_chi_square
from cobaya.log import LoggedError
from cobaya.tools import are_different_params_lists

from .theoryforge import TheoryForge


Expand All @@ -39,19 +38,23 @@ class MFLike(InstallableLikelihood):
# attributes set from .yaml
input_file: Optional[str]
cov_Bbl_file: Optional[str]
data_folder: str
data: dict
defaults: dict
foregrounds: dict
top_hat_band: dict
systematics_template: dict
lmax_theory: Optional[int]

_fast_chi_squared = _fast_chi_square()

def initialize(self):
# Set default values to data member not initialized via yaml file
self.l_bpws = None
self.spec_meta = []

# Set path to data
if (not getattr(self, "path", None)) and (not getattr(self, packages_path_input, None)):
if not getattr(self, "path", None) and not getattr(self, packages_path_input, None):
raise LoggedError(
self.log,
"No path given to MFLike data. Set the likelihood property "
Expand All @@ -78,75 +81,10 @@ def initialize(self):
self.lmax_theory = self.lmax_theory or 9000
self.log.debug(f"Maximum multipole value: {self.lmax_theory}")

self.expected_params_fg = [
"a_tSZ",
"a_kSZ",
"a_p",
"beta_p",
"a_c",
"beta_c",
"a_s",
"a_gtt",
"a_gte",
"a_gee",
"a_psee",
"a_pste",
"xi",
"T_d",
"beta_s",
"alpha_s",
"T_effd",
"beta_d",
"alpha_dT",
"alpha_dE",
"alpha_tSZ",
"alpha_p",
]

self.expected_params_nuis = ["calG_all"]
for f in self.experiments:
self.expected_params_nuis += [
f"bandint_shift_{f}",
f"calT_{f}",
f"cal_{f}",
f"calE_{f}",
f"alpha_{f}",
]

self._constant_nuisance: Optional[dict] = None
self.ThFo = TheoryForge(self)
self.log.info("Initialized!")

def initialize_non_sampled_params(self):
    """
    Builds the dictionary of systematic parameters (``calT`` and the
    polarization angles ``alpha``, one per experiment) that default to
    fixed values, so they need not be sampled nor required from the user.
    """
    fixed_defaults = {}
    for exp in self.experiments:
        fixed_defaults[f"calT_{exp}"] = 1.0
        fixed_defaults[f"alpha_{exp}"] = 0.0
    self.non_sampled_params = fixed_defaults

def initialize_with_params(self):
    """
    Checks that the input/sampled parameters are exactly the expected ones,
    after removing from the non-sampled set any parameter the user actually
    provided. Raises a ``LoggedError`` on any mismatch.
    """
    self.initialize_non_sampled_params()

    # A default (non-sampled) parameter that appears among the inputs is
    # effectively sampled: drop it from the non-sampled set.
    for name in self.input_params:
        self.non_sampled_params.pop(name, None)

    # Whatever is not fixed by default remains an expected nuisance parameter.
    self.expected_params_nuis = [
        name
        for name in self.expected_params_nuis
        if name not in self.non_sampled_params
    ]

    expected = self.expected_params_fg + self.expected_params_nuis
    differences = are_different_params_lists(
        self.input_params,
        expected,
        name_A="given",
        name_B="expected",
    )
    if differences:
        raise LoggedError(self.log, f"Configuration error in parameters: {differences}.")

def get_requirements(self):
r"""
Gets the theory :math:`D_{\ell}` from the Boltzmann solver code used,
Expand All @@ -158,38 +96,51 @@ def get_requirements(self):

def logp(self, **params_values):
    r"""
    Cobaya entry point: gets the theory :math:`D_{\ell}` from the provider
    and evaluates the Gaussian log-likelihood.

    NOTE(review): the source here is diff residue — the old body (an unused
    ``params_values_nocosmo`` dict and an unreachable second ``return``) was
    interleaved with the new one; this is the reconstructed post-PR version.

    :param params_values: all sampled parameter values for this evaluation

    :return: the exact loglikelihood :math:`\ln \mathcal{L}`
    """
    cl = self.provider.get_Cl(ell_factor=True)
    return self._loglike(cl, **params_values)

def _loglike(self, cl, **params_values):
    r"""
    Computes the Gaussian log-likelihood.

    NOTE(review): the source span mixed removed pre-PR lines (a reference to
    the undefined name ``params_values_nocosmo`` and a duplicated, garbled
    debug f-string) with the new implementation; this is the coherent
    reconstruction.

    :param cl: the dictionary of theory + foregrounds :math:`D_{\ell}`
    :param params_values: the dictionary of all foreground + systematic parameters

    :return: the exact loglikelihood :math:`\ln \mathcal{L}`
    """
    ps_vec = self._get_power_spectra(cl, **params_values)
    delta = self.data_vec - ps_vec
    # _fast_chi_square() provides an optimized delta @ inv_cov @ delta
    logp = -0.5 * self._fast_chi_squared(self.inv_cov, delta)
    logp += self.logp_const
    self.log.debug(
        f"Log-likelihood value computed = {logp} (Χ² = {-2 * (logp - self.logp_const)})"
    )
    return logp

def loglike(self, cl, **params_values):
    """
    Computes the gaussian log-likelihood, callable independent of Cobaya.

    :param cl: the dictionary of theory + foregrounds :math:`D_{\ell}`
    :param params_values: the dictionary of required foreground + systematic parameters

    :return: the exact loglikelihood :math:`\ln \mathcal{L}`
    """
    # Needed when the likelihood is called without the Cobaya initialization
    # steps (typically with a precomputed Cl, e.g. test_mflike): cache the
    # constant default values of the nuisance parameters once.
    if self._constant_nuisance is None:
        from cobaya.parameterization import expand_info_param

        constants = {}
        for name, info in self.params.items():
            default = expand_info_param(info).get("value")
            if isinstance(default, Real):
                constants[name] = float(default)
        self._constant_nuisance = constants

    unknown = set(params_values) - set(self.params)
    if unknown:
        raise ValueError(f"Unknown parameters: {unknown}")

    # Explicitly passed values take precedence over the cached defaults.
    merged = self._constant_nuisance | params_values

    return self._loglike(cl, **merged)

def prepare_data(self):
r"""
Reads the sacc data, extracts the data tracers,
Expand All @@ -201,8 +152,6 @@ def prepare_data(self):
range, bandpowers and :math:`D_{\ell}` for each power spectrum required
in the yaml.
"""
import sacc

data = self.data
# Read data
input_fname = os.path.join(self.data_folder, self.input_file)
Expand All @@ -222,7 +171,7 @@ def prepare_data(self):
except AttributeError:
raise KeyError("You must provide a list of default cuts")

# Translation between TEB and sacc C_ell types
pol_dict = {"T": "0", "E": "e", "B": "b"}
ppol_dict = {
"TT": "tt",
Expand All @@ -234,7 +183,6 @@ def prepare_data(self):
"BE": "eb",
"TB": "tb",
"BT": "tb",
"BB": "bb",
}

def get_cl_meta(spec):
Expand Down Expand Up @@ -369,6 +317,11 @@ def get_sacc_names(pol, exp_1, exp_2):
ws = s_b.get_bandpower_windows(ind_b)
else:
ws = s.get_bandpower_windows(ind)
# pre-compute the actual slices of the weights that are needed
nonzeros = np.array([np.nonzero(ws.weight[:, i])[0][[0, -1]] for i in range(ws.weight.shape[1])])
ws.nonzeros = [slice(i[0], i[1] + 1) for i in nonzeros]
ws.sliced_weights = [np.ascontiguousarray(ws.weight[ws.nonzeros[i], i])
for i in range(len(nonzeros))]

if self.l_bpws is None:
# The assumption here is that bandpower windows
Expand Down Expand Up @@ -454,18 +407,22 @@ def _get_power_spectra(self, cl, **params_values_nocosmo):
Dls = {s: cl[s][self.l_bpws] for s, _ in self.lcuts.items()}
DlsObs = self.ThFo.get_modified_theory(Dls, **params_values_nocosmo)

return self._get_ps_vec(DlsObs)

def _get_ps_vec(self, DlsObs):
ps_vec = np.zeros_like(self.data_vec)
for m in self.spec_meta:
p = m["pol"]
i = m["ids"]
w = m["bpw"].weight.T
w = m["bpw"]
# If symmetrize = False, the (ET, exp1, exp2) spectrum
# will have the flag m["hasYX_xsp"] = True.
# In this case, the power spectrum
# is computed as DlsObs["te", m["t2"], m["t1"]], to associate
# T --> exp2, E --> exp1
dls_obs = DlsObs[p, m["t2"], m["t1"]] if m["hasYX_xsp"] else DlsObs[p, m["t1"], m["t2"]]
clt = w @ dls_obs
ps_vec[i] = clt

for i, nonzero, weights in zip(m["ids"], w.nonzeros, w.sliced_weights):
ps_vec[i] = weights @ dls_obs[nonzero]
# can check against unoptimized version
# assert np.allclose(ps_vec[m["ids"]], np.dot(w.weight.T, dls_obs))
return ps_vec
Loading