make stimulus_range a private configuration attribute
otizonaizit committed Nov 28, 2024
1 parent 34965ea commit 822c5eb
Showing 5 changed files with 15 additions and 27 deletions.
7 changes: 5 additions & 2 deletions psignifit/_configuration.py
@@ -46,12 +46,15 @@ class Configuration:
pool_max_blocks: int = 25
priors: Optional[Dict[str, Prior]] = dataclasses.field(default=None, hash=False)
sigmoid: Union[str, sigmoids.Sigmoid] = 'norm'
stimulus_range: Optional[Tuple[float, float]] = None
thresh_PC: float = 0.5
verbose: bool = True
width_alpha: float = 0.05
width_min: Optional[float] = None

# private attributes, used for testing and benchmarking -> don't use unless you know
# what you are doing
_stimulus_range: Optional[Tuple[float, float]] = None

# attributes, if not specified, will be initialized based on others
bounds: Optional[Dict[str, Tuple[float, float]]] = None
grid_steps: Dict[str, int] = dataclasses.field(default_factory=dict)
@@ -206,7 +209,7 @@ def check_sigmoid(self, value):
except KeyError:
raise PsignifitException(f'Invalid sigmoid name "{value}", use one of {sigmoids.ALL_SIGMOID_NAMES}')

def check_stimulus_range(self, value):
def check__stimulus_range(self, value):
if value:
try:
len_ = len(value)
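
For context, the renamed field is now meant for tests and benchmarks only, while regular callers simply omit it. A short usage sketch of how the field and its check__stimulus_range validator behave, as read off this diff and the updated tests below (the exception type is caught generically here because its exact import path is not shown in this commit):

from psignifit import Configuration

# Normal use: leave the private attribute alone; psignifit() infers the
# stimulus range from the data.
conf = Configuration()

# Testing/benchmarking only: override the inferred range explicitly.
conf_test = Configuration(_stimulus_range=(0.001, 0.2))

# The validator rejects values that are not a pair of numbers and raises
# PsignifitException (caught generically in this sketch).
for bad_value in (10, (1, 2, 3)):
    try:
        Configuration(_stimulus_range=bad_value)
    except Exception:
        pass
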
20 changes: 7 additions & 13 deletions psignifit/psignifit.py
@@ -60,20 +60,19 @@ def psignifit(data: np.ndarray, conf: Optional[Configuration] = None,

levels, ntrials = data[:, 0], data[:, 2]
if conf.verbose:
_warn_common_data_mistakes(levels, ntrials, has_user_stimulus_range=conf.stimulus_range is not None,
pool_max_blocks=conf.pool_max_blocks)
_warn_common_data_mistakes(levels, ntrials, pool_max_blocks=conf.pool_max_blocks)

stimulus_range = conf.stimulus_range
stimulus_range = conf._stimulus_range
if stimulus_range is None:
stimulus_range = (levels.min(), levels.max())

width_min = conf.width_min
if width_min is None:
if conf.stimulus_range is None:
if conf._stimulus_range is None:
width_min = np.diff(np.unique(levels)).min()
else:
# For user specified stimulus range, use conservative estimate of width_min.
width_min = (conf.stimulus_range[1] - conf.stimulus_range[0]) / 100
width_min = (conf._stimulus_range[1] - conf._stimulus_range[0]) / 100

bounds = parameter_bounds(min_width=width_min, experiment_type=conf.experiment_type, stimulus_range=stimulus_range,
alpha=conf.width_alpha, nafc_choices=conf.experiment_choices)
@@ -127,7 +126,7 @@ def psignifit(data: np.ndarray, conf: Optional[Configuration] = None,
data=data)


def _warn_common_data_mistakes(levels, ntrials, has_user_stimulus_range, pool_max_blocks) -> None:
def _warn_common_data_mistakes(levels, ntrials, pool_max_blocks) -> None:
""" Show warnings for common mistakes.
Checks for too many blocks and too few trials.
@@ -136,7 +135,6 @@ def _warn_common_data_mistakes(levels, ntrials, has_user_stimulus_range, pool_ma
Args:
level: Array of stimulus level per block
ntrial: Array of trial numbers per block
has_user_stimulus_range: User configured the stimulus range
pool_max_blocks: Maximum number of blocks until print of pool warning.
Returns:
None
@@ -151,19 +149,15 @@ def _warn_common_data_mistakes(levels, ntrials, has_user_stimulus_range, pool_ma
"psignifit.pool_blocks(data).\n"
"Hide this warning by increasing conf.pool_max_blocks.")
# warning if many blocks were measured
if len(levels) >= 25 and not has_user_stimulus_range:
if len(levels) >= 25:
warnings.warn(f"""The data you supplied contained {len(levels)} >= 25 stimulus levels.
Did you sample adaptively?
If so please specify a range which contains the whole psychometric function in
conf.stimulus_range.
An appropriate prior will then be chosen. For now we use the standard
heuristic, assuming that the psychometric function is covered by the stimulus
levels, which is frequently invalid for adaptive procedures!""")
if ntrials.max() <= 5 and not has_user_stimulus_range:
if ntrials.max() <= 5:
warnings.warn("""All provided data blocks contain <= 5 trials.
Did you sample adaptively?
If so please specify a range which contains the whole psychometric function in
conf.stimulus_range.
An appropriate prior will then be chosen. For now we use the standard
heuristic, assuming that the psychometric function is covered by the stimulus
levels, which is frequently invalid for adaptive procedures!""")
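
Restated outside the diff, the selection logic in the first hunk of psignifit.py reduces to the following sketch (conf and levels as in the hunk; this mirrors the code above rather than adding new behavior):

import numpy as np

def resolve_range_and_width_min(conf, levels):
    # Stimulus range: use the private override if it was set, otherwise
    # infer it from the measured stimulus levels.
    stimulus_range = conf._stimulus_range
    if stimulus_range is None:
        stimulus_range = (levels.min(), levels.max())

    # Minimal width: smallest gap between distinct levels by default, or a
    # conservative 1/100 of an explicitly given range.
    width_min = conf.width_min
    if width_min is None:
        if conf._stimulus_range is None:
            width_min = np.diff(np.unique(levels)).min()
        else:
            width_min = (conf._stimulus_range[1] - conf._stimulus_range[0]) / 100

    return stimulus_range, width_min
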
8 changes: 2 additions & 6 deletions psignifit/tests/test_configuration.py
@@ -76,12 +76,12 @@ def test_set_wrong_sigmoid():

def test_set_stimulus_range_wrong_type():
with pytest.raises(PsignifitException):
Configuration(stimulus_range=10)
Configuration(_stimulus_range=10)


def test_set_stimulus_range_wrong_length():
with pytest.raises(PsignifitException):
Configuration(stimulus_range=(1, 2, 3))
Configuration(_stimulus_range=(1, 2, 3))


def test_set_width_alpha_wrong_type():
@@ -107,7 +107,6 @@ def test_set_wrong_CI_method():

def test_warning_for_2afc_and_wrong_gamma():
sigmoid = "norm"
stim_range = [0.001, 0.2]
lambda_ = 0.0232
gamma = 0.1

@@ -116,14 +115,12 @@ def test_warning_for_2afc_and_wrong_gamma():
options['experiment_type'] = '2AFC'
options['fixed_parameters'] = {'lambda': lambda_,
'gamma': gamma}
options["stimulus_range"] = stim_range

with pytest.warns(UserWarning, match='gamma was fixed'):
Configuration(**options)

def test_warning_for_equal_asymptote_fixing_lambda_and_gamma():
sigmoid = "norm"
stim_range = [0.001, 0.2]
lambda_ = 0.2
gamma = 0.1

@@ -132,7 +129,6 @@ def test_warning_for_equal_asymptote_fixing_lambda_and_gamma():
options['experiment_type'] = 'equal asymptote'
options['fixed_parameters'] = {'lambda': lambda_,
'gamma': gamma}
options["stimulus_range"] = stim_range

with pytest.raises(PsignifitException, match='were fixed to different values'):
Configuration(**options)
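
The surviving assertions in this test file show the behavior that matters here: in a 2AFC design gamma is fixed by the task itself, so fixing it to another value only triggers a warning, while contradictory lambda and gamma values under 'equal asymptote' raise. A minimal sketch of the warning case (warning text taken from the test's match pattern; remaining defaults assumed):

import warnings

from psignifit import Configuration

# For 2AFC, gamma is determined by the task (0.5), so fixing it to 0.1 is
# suspicious; the configuration warns instead of failing.
with warnings.catch_warnings(record=True) as caught:
    warnings.simplefilter("always")
    Configuration(experiment_type='2AFC',
                  fixed_parameters={'lambda': 0.0232, 'gamma': 0.1})
assert any('gamma was fixed' in str(w.message) for w in caught)
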
5 changes: 0 additions & 5 deletions psignifit/tests/test_param_recovery.py
@@ -29,7 +29,6 @@ def test_parameter_recovery_2afc(sigmoid):
options['sigmoid'] = sigmoid # choose a cumulative Gauss as the sigmoid
options['experiment_type'] = '2AFC'
options['fixed_parameters'] = {'lambda': lambda_}
options["stimulus_range"] = stim_range

res = psignifit(data, **options)

@@ -64,7 +63,6 @@ def test_parameter_recovery_2afc_eta(random_state, eta):
options['sigmoid'] = sigmoid # choose a cumulative Gauss as the sigmoid
options['experiment_type'] = '2AFC'
options['fixed_parameters'] = {'lambda': lambda_}
options["stimulus_range"] = stim_range

res = psignifit(data, **options)

@@ -108,7 +106,6 @@ def test_parameter_recovery_fixed_params(fixed_param):
options = {}
options['sigmoid'] = sigmoid # choose a cumulative Gauss as the sigmoid
options['experiment_type'] = 'yes/no'
options["stimulus_range"] = stim_range
options['fixed_parameters'] = {}
# we fix it to a slightly off value, so we can check that it stays fixed
options['fixed_parameters'][fixed_param] = sim_params[fixed_param]
@@ -144,7 +141,6 @@ def test_parameter_recovery_YN(sigmoid):
options['sigmoid'] = sigmoid # choose a cumulative Gauss as the sigmoid
options['experiment_type'] = 'yes/no'
options['fixed_parameters'] = {'lambda': lambda_}
options["stimulus_range"] = stim_range

res = psignifit(data, **options)

@@ -177,7 +173,6 @@ def test_parameter_recovery_eq_asymptote(sigmoid):
options['sigmoid'] = sigmoid # choose a cumulative Gauss as the sigmoid
options['experiment_type'] = 'equal asymptote'
options['fixed_parameters'] = {}
options["stimulus_range"] = stim_range

res = psignifit(data, **options)

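
After this change the recovery tests no longer pass any stimulus range; the range is inferred from the first column of the data inside psignifit(). A call now looks roughly like this (the data block below is a made-up placeholder with columns stimulus level, hits, trials, matching the data[:, 0] and data[:, 2] usage shown above):

import numpy as np

from psignifit import psignifit

# Placeholder block data: columns are stimulus level, number correct, trials.
data = np.array([
    [0.001, 28, 50],
    [0.005, 32, 50],
    [0.020, 41, 50],
    [0.080, 48, 50],
    [0.200, 50, 50],
])

options = {}
options['sigmoid'] = 'norm'
options['experiment_type'] = '2AFC'
options['fixed_parameters'] = {'lambda': 0.0232}
# No stimulus range option: it is inferred as (data[:, 0].min(), data[:, 0].max()).

res = psignifit(data, **options)
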
2 changes: 1 addition & 1 deletion psignifit/tests/test_priors.py
@@ -57,4 +57,4 @@ def func(x):

# For a fixed value, the integral should be one
norm = psignifit._priors.normalize_prior(func, (1, 1))
assert np.allclose([1], norm(1))
assert np.allclose([1], norm(1))
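
The assertion above pins down a boundary case: when the range collapses to a single value, the normalized prior evaluates to 1 at that value. A standalone illustration of that behavior follows; this is my own sketch with a hypothetical helper name, not psignifit._priors.normalize_prior itself:

import numpy as np
from scipy.integrate import quad

def normalize_prior_sketch(prior, bounds):
    # Hypothetical illustration of prior normalization over a range.
    lo, hi = bounds
    if lo == hi:
        # Degenerate range: the parameter is fixed, so the normalized prior
        # is taken to be 1 at that value and 0 elsewhere.
        return lambda x: np.where(np.isclose(x, lo), 1.0, 0.0)
    area, _ = quad(prior, lo, hi)
    return lambda x: prior(x) / area

norm = normalize_prior_sketch(lambda x: 2.0 * x, (1, 1))
assert np.allclose([1], norm(1))
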
