Skip to content

Commit

Permalink
fixed bug when computing Fisher biases
Browse files Browse the repository at this point in the history
  • Loading branch information
Javier Sanchez authored and Javier Sanchez committed Dec 5, 2024
1 parent 3945568 commit 6a1f575
Showing 1 changed file with 9 additions and 6 deletions.
15 changes: 9 additions & 6 deletions augur/analyze.py
Original file line number Diff line number Diff line change
Expand Up @@ -140,7 +140,7 @@ def __init__(self, config, likelihood=None, tools=None, req_params=None,
self.norm = self.par_bounds[:, 1] - self.par_bounds[:, 0]
self.x = (self.x - self.par_bounds[:, 0]) * 1/self.norm

def f(self, x, labels, pars_fid, sys_fid):
def f(self, x, labels, pars_fid, sys_fid, donorm=False):
"""
Auxiliary Function that returns a theory vector evaluated at x.
Labels are the name of the parameters x (with the same length and order)
Expand All @@ -157,6 +157,8 @@ def f(self, x, labels, pars_fid, sys_fid):
sys_fid: dict
Dictionary containing the fiducial `systematic` (required) parameters
for the likelihood.
donorm: bool
    If `True`, undo the normalization of the input parameter vector `x`,
    mapping normalized values back to the parameter bounds (useful when
    taking derivatives with a normalized sampling step).
Returns:
--------
f_out : np.ndarray
Expand All @@ -169,7 +171,7 @@ def f(self, x, labels, pars_fid, sys_fid):
if isinstance(x, list):
x = np.array(x)
# If we normalize the sampling we need to undo the normalization
if self.norm_step:
if donorm:
x = self.norm * x + self.par_bounds[:, 0]

if x.ndim == 1:
Expand Down Expand Up @@ -222,7 +224,7 @@ def get_derivatives(self, force=False, method='5pt_stencil', step=None):
if (self.derivatives is None) or (force):
if '5pt_stencil' in method:
self.derivatives = five_pt_stencil(lambda y: self.f(y, self.var_pars, self.pars_fid,
self.req_params),
self.req_params, donorm=self.norm_step),
self.x, h=step)
elif 'numdifftools' in method:
import numdifftools as nd
Expand All @@ -231,7 +233,8 @@ def get_derivatives(self, force=False, method='5pt_stencil', step=None):
else:
ndkwargs = {}
jacobian_calc = nd.Jacobian(lambda y: self.f(y, self.var_pars, self.pars_fid,
self.req_params),
self.req_params,
donorm=self.norm_step),
step=step,
**ndkwargs)
self.derivatives = jacobian_calc(self.x).T
Expand Down Expand Up @@ -322,8 +325,8 @@ def get_fisher_bias(self):
else:
raise ValueError('bias_params is required if no biased_dv file is passed')

self.biased_cls = self.f(_x_here, _labels_here, _pars_here, _sys_here) \
- self.data_fid
self.biased_cls = self.f(_x_here, _labels_here, _pars_here, _sys_here,
donorm=False) - self.data_fid

Bj = np.einsum('l, lm, jm', self.biased_cls, self.lk.inv_cov, self.derivatives)
bi = np.einsum('ij, j', np.linalg.inv(self.Fij), Bj)
Expand Down

0 comments on commit 6a1f575

Please sign in to comment.