Merge pull request #161 from pberkes/add-mean-estimate
Add the "posterior mean" parameter estimate type
pberkes authored Nov 27, 2024
2 parents daa73be + 58b2eb6 commit f198fac
Showing 11 changed files with 784 additions and 85 deletions.
3 changes: 2 additions & 1 deletion psignifit/_configuration.py
@@ -7,7 +7,7 @@

from . import sigmoids
from ._utils import PsignifitException
from ._typing import ExperimentType, Prior
from ._typing import EstimateType, ExperimentType, Prior


_PARAMETERS = {'threshold', 'width', 'lambda', 'gamma', 'eta'}
@@ -38,6 +38,7 @@ class Configuration:
beta_prior: int = 10
CI_method: str = 'percentiles'
confP: Tuple[float, float, float] = (.95, .9, .68)
estimate_type: EstimateType = 'MAP'
experiment_type: str = ExperimentType.YES_NO.value
experiment_choices: Optional[int] = None
fast_optim: bool = False
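A quick sketch of the new configuration field in use. Importing `Configuration` directly from `psignifit._configuration` and constructing it with defaults are assumptions based on this diff, not something the commit itself demonstrates:

```python
from psignifit._configuration import Configuration

# The new field defaults to 'MAP'; override it to make the posterior mean the default estimate.
conf = Configuration(estimate_type='mean')
print(conf.estimate_type)  # -> 'mean'
```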
53 changes: 42 additions & 11 deletions psignifit/_result.py
@@ -1,12 +1,13 @@
import dataclasses
import json
from typing import Any, Dict, Tuple, List, TextIO, Union
from typing import Any, Dict, Tuple, List, Optional, TextIO, Union
from pathlib import Path

import numpy as np
from numpy.typing import NDArray

from ._configuration import Configuration
from ._typing import EstimateType


class NumpyEncoder(json.JSONEncoder):
@@ -18,7 +19,8 @@ def default(self, obj):

@dataclasses.dataclass
class Result:
parameter_estimate: Dict[str, float]
parameters_estimate_MAP: Dict[str, float]
parameters_estimate_mean: Dict[str, float]
configuration: Configuration
confidence_intervals: Dict[str, List[Tuple[float, float]]]
data: NDArray[float]
@@ -66,8 +68,29 @@ def load_json(cls, file: Union[TextIO, str, Path], **kwargs: Any):
result_dict['data'] = np.asarray(result_dict['data'])
return cls.from_dict(result_dict)

def threshold(self, proportion_correct: np.ndarray, unscaled: bool = False, return_ci: bool = True
) -> Union[np.ndarray, List[Tuple[np.ndarray, np.ndarray]]]:
def get_parameters_estimate(self, estimate_type: Optional[EstimateType]=None):
""" Get the estimate of the parameters by type.
Args:
estimate_type: Type of estimate, either "MAP" or "mean".
If None, the value of `estimate_type` in `Result.configuration` is used instead.
Returns:
A dictionary mapping parameter names to parameter estimates.
"""
if estimate_type is None:
estimate_type = self.configuration.estimate_type

if estimate_type == 'MAP':
estimate = self.parameters_estimate_MAP
elif estimate_type == 'mean':
estimate = self.parameters_estimate_mean
else:
raise ValueError("`estimate_type` must be either 'MAP' or 'mean'")

return estimate

def threshold(self, proportion_correct: np.ndarray, unscaled: bool = False, return_ci: bool = True,
estimate_type: Optional[EstimateType]=None) -> Union[np.ndarray, List[Tuple[np.ndarray, np.ndarray]]]:
""" Threshold stimulus value and confidence interval for a different proportion correct cutoff.
The CIs you may obtain from this are calculated based on the confidence
@@ -83,6 +106,8 @@ def threshold(self, proportion_correct: np.ndarray, unscaled: bool = False, retu
sigmoid or for the one scaled by lambda and gamma.
By default this function returns the one for the scaled sigmoid.
return_ci: If the confidence bounds should be returned along with the stimulus value
estimate_type: Type of estimate, either "MAP" or "mean".
If None, the value of `estimate_type` in `Result.configuration` is used instead.
Returns:
thresholds: stimulus values for all provided proportion_correct (if return_ci=False)
(thresholds, ci): stimulus values along with confidence intervals
@@ -91,12 +116,13 @@ def threshold(self, proportion_correct: np.ndarray, unscaled: bool = False, retu
proportion_correct = np.asarray(proportion_correct)
sigmoid = self.configuration.make_sigmoid()

estimate = self.get_parameters_estimate(estimate_type)
if unscaled: # set asymptotes to 0 for everything.
lambd, gamma = 0, 0
else:
lambd, gamma = self.parameter_estimate['lambda'], self.parameter_estimate['gamma']
new_threshold = sigmoid.inverse(proportion_correct, self.parameter_estimate['threshold'],
self.parameter_estimate['width'], gamma, lambd)
lambd, gamma = estimate['lambda'], estimate['gamma']
new_threshold = sigmoid.inverse(proportion_correct, estimate['threshold'],
estimate['width'], gamma, lambd)
if not return_ci:
return new_threshold

@@ -113,29 +139,34 @@ def threshold(self, proportion_correct: np.ndarray, unscaled: bool = False, retu

return new_threshold, new_ci

def slope(self, stimulus_level: np.ndarray) -> np.ndarray:
def slope(self, stimulus_level: np.ndarray, estimate_type: Optional[EstimateType]=None) -> np.ndarray:
""" Slope of the psychometric function at a given stimulus levels.
Args:
stimulus_level: stimulus levels at which to evaluate the slope.
estimate_type: Type of estimate, either "MAP" or "mean".
If None, the value of `estimate_type` in `Result.configuration` is used instead.
Returns:
Slopes of the psychometric function at the stimulus levels.
"""
stimulus_level, param = np.asarray(stimulus_level), self.parameter_estimate
stimulus_level, param = np.asarray(stimulus_level), self.get_parameters_estimate(estimate_type)
sigmoid = self.configuration.make_sigmoid()
return sigmoid.slope(stimulus_level, param['threshold'], param['width'], param['gamma'], param['lambda'])

def slope_at_proportion_correct(self, proportion_correct: np.ndarray, unscaled: bool = False):
def slope_at_proportion_correct(self, proportion_correct: np.ndarray, unscaled: bool = False,
estimate_type: Optional[EstimateType]=None):
""" Slope of the psychometric function at a given performance level in proportion correct.
Args:
proportion_correct: proportion correct at the threshold you want to calculate
unscaled: Whether the proportion correct you provide is for the unscaled
sigmoid or for the one scaled by lambda and gamma.
By default this function returns the one for the scaled sigmoid.
estimate_type: Type of estimate, either "MAP" or "mean".
If None, the value of `estimate_type` in `Result.configuration` is used instead.
Returns:
Slopes of the psychometric function at the stimulus levels which
correspond to the given proportion correct.
"""
stimulus_levels = self.threshold(proportion_correct, unscaled, return_ci=False)
stimulus_levels = self.threshold(proportion_correct, unscaled, return_ci=False, estimate_type=estimate_type)
return self.slope(stimulus_levels)
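A hedged usage sketch of the accessors added above. The data values are invented, the row layout (stimulus level, number correct, number of trials) follows psignifit's usual convention, and passing configuration options as keyword arguments to `psignifit.psignifit` is assumed here rather than shown by the diff:

```python
import numpy as np
import psignifit

# Invented example data: each row is (stimulus level, number correct, number of trials).
data = np.array([
    [0.05, 10, 90],
    [0.10, 25, 90],
    [0.20, 50, 90],
    [0.40, 80, 90],
    [0.80, 88, 90],
])

# Make the posterior mean the default estimate for this fit.
result = psignifit.psignifit(data, estimate_type='mean')

print(result.get_parameters_estimate())                     # uses the configured default ('mean')
print(result.get_parameters_estimate(estimate_type='MAP'))  # explicit override
# threshold() and slope() accept the same keyword.
print(result.threshold(np.array([0.5]), return_ci=False, estimate_type='MAP'))
print(result.slope(np.array([0.4]), estimate_type='mean'))
```

Both estimates are stored on the `Result`, so switching between them never requires refitting.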
4 changes: 3 additions & 1 deletion psignifit/_typing.py
@@ -1,5 +1,5 @@
import enum
from typing import Callable, Dict, Tuple, Optional
from typing import Callable, Dict, Literal, Tuple, Optional

import numpy as np

@@ -33,3 +33,5 @@ class ParameterGrid(TypedDict):


Prior = Callable[[np.ndarray], np.ndarray]

EstimateType = Literal['MAP', 'mean']
13 changes: 13 additions & 0 deletions psignifit/demos/demo_002.py
@@ -41,6 +41,19 @@
# may provide a reference to your own sigmoid, which has to be
# a subclass of :class:`psignifit.sigmoids.Sigmoid`.

# %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
# Estimation Type
# ---------------

# Set the default parameter estimate type to use when looking at the results of a fit.
# 'mean': the posterior mean. In a Bayesian framework, this is sometimes considered
#         a more suitable estimate.
# 'MAP': the MAP (Maximum-A-Posteriori) estimate, i.e. the mode of the posterior.
# If not specified, the estimate type defaults to 'MAP'.

config['estimate_type'] = 'MAP'
config['estimate_type'] = 'mean'

# %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
# Optimization Steps
# ------------------
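Staying in the demo's style, a short sketch of how the configured default interacts with an explicit override. It assumes `data` and the `config` dictionary built up earlier in the demo, and the `psignifit(data, **config)` call pattern, neither of which appears in this hunk:

```python
res = psignifit.psignifit(data, **config)

# With config['estimate_type'] = 'mean', the accessors default to the posterior mean ...
mean_estimate = res.get_parameters_estimate()
# ... while the MAP estimate stays available on request.
map_estimate = res.get_parameters_estimate(estimate_type='MAP')
print(mean_estimate['threshold'], map_estimate['threshold'])
```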
480 changes: 480 additions & 0 deletions psignifit/demos/parameters_estimates.ipynb

Large diffs are not rendered by default.

25 changes: 19 additions & 6 deletions psignifit/psignifit.py
@@ -86,8 +86,10 @@ def psignifit(data: np.ndarray, conf: Optional[Configuration] = None,
priors = setup_priors(custom_priors=conf.priors, bounds=bounds,
stimulus_range=stimulus_range, width_min=width_min, width_alpha=conf.width_alpha,
beta_prior=conf.beta_prior, threshold_prop_correct=conf.thresh_PC)
fit_dict, posteriors, grid = _fit_parameters(data, bounds, priors, sigmoid, conf.steps_moving_bounds,
conf.max_bound_value, conf.grid_steps)
estimate_MAP_dict, estimate_mean_dict, posteriors, grid = _fit_parameters(
data, bounds, priors, sigmoid,
conf.steps_moving_bounds, conf.max_bound_value, conf.grid_steps,
)

grid_values = [grid_value for _, grid_value in sorted(grid.items())]
intervals = confidence_intervals(posteriors, grid_values, conf.confP, conf.CI_method)
@@ -100,7 +102,8 @@ def psignifit(data: np.ndarray, conf: Optional[Configuration] = None,
_warn_marginal_sanity_checks(marginals)

if conf.experiment_type == 'equal asymptote':
fit_dict['gamma'] = fit_dict['lambda'].copy()
estimate_MAP_dict['gamma'] = estimate_MAP_dict['lambda'].copy()
estimate_mean_dict['gamma'] = estimate_mean_dict['lambda'].copy()
grid['gamma'] = grid['lambda'].copy()
priors['gamma'] = priors['lambda']
marginals['gamma'] = marginals['lambda'].copy()
@@ -113,7 +116,8 @@ def psignifit(data: np.ndarray, conf: Optional[Configuration] = None,
debug_dict['priors'] = priors
debug_dict['bounds'] = bounds

return Result(parameter_estimate=fit_dict,
return Result(parameters_estimate_MAP=estimate_MAP_dict,
parameters_estimate_mean=estimate_mean_dict,
configuration=conf,
confidence_intervals=intervals_dict,
parameter_values=grid,
@@ -235,11 +239,20 @@ def _fit_parameters(data: np.ndarray, bounds: ParameterBounds,
grid = parameter_grid(tighter_bounds, grid_steps)
posteriors, grid_max = posterior_grid(data, sigmoid=sigmoid, priors=priors, grid=grid)

# Estimate parameters as the mean of the posterior
estimate_mean_dict = {}
params_values = [grid[p] for p in sorted(grid.keys())]
params_grid = np.meshgrid(*params_values, indexing='ij')
for idx, p in enumerate(sorted(grid.keys())):
estimate_mean_dict[p] = (params_grid[idx] * posteriors).sum()

# Estimate parameters as the mode of the posterior (MAP)
fixed_param = {}
for parm_name, parm_values in grid.items():
if len(parm_values) == 1:
fixed_param[parm_name] = parm_values[0]
# Compute MAP estimate of parameters on the joint posterior
fit_dict = maximize_posterior(data, param_init=grid_max, param_fixed=fixed_param, sigmoid=sigmoid, priors=priors)
estimate_MAP_dict = maximize_posterior(data, param_init=grid_max, param_fixed=fixed_param,
sigmoid=sigmoid, priors=priors)

return fit_dict, posteriors, grid
return estimate_MAP_dict, estimate_mean_dict, posteriors, grid
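For reference, a standalone sketch of the posterior-mean step introduced in `_fit_parameters` above. The two-parameter grid and posterior mass are invented for illustration; only the meshgrid-and-weighted-sum pattern mirrors the committed code:

```python
import numpy as np

# Invented example: a 3x2 grid over two parameters with normalized posterior mass.
grid = {'threshold': np.array([0.2, 0.4, 0.6]),
        'width': np.array([0.1, 0.3])}
posteriors = np.array([[0.05, 0.10],
                       [0.20, 0.30],
                       [0.15, 0.20]])  # sums to 1 over the grid

# Posterior mean of each parameter: weight every grid value by the posterior mass at that point.
estimate_mean = {}
params_values = [grid[p] for p in sorted(grid.keys())]
params_grid = np.meshgrid(*params_values, indexing='ij')
for idx, p in enumerate(sorted(grid.keys())):
    estimate_mean[p] = (params_grid[idx] * posteriors).sum()

print(estimate_mean)  # -> approximately {'threshold': 0.44, 'width': 0.22}
```

The MAP estimate, by contrast, remains the mode of the posterior found by the existing `maximize_posterior` call.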