-
Notifications
You must be signed in to change notification settings - Fork 81
/
test_infer.py
221 lines (181 loc) · 7.15 KB
/
test_infer.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
import pytest
import pyhf
import numpy as np
import scipy.stats
@pytest.fixture(scope='module')
def hypotest_args():
    """Provide a shared (mu_test, data, model) tuple for the hypotest tests."""
    model = pyhf.simplemodels.hepdata_like(
        signal_data=[12.0, 11.0], bkg_data=[50.0, 52.0], bkg_uncerts=[3.0, 7.0]
    )
    observations = [51, 48]
    # data = main-measurement counts followed by the auxiliary measurements
    return 1.0, observations + model.config.auxdata, model
def check_uniform_type(in_list):
    """
    Return True if every item in ``in_list`` is of the current backend's
    tensor type (the type produced by ``pyhf.tensorlib.astensor``).
    """
    # Generator expression (no list wrapper) lets all() short-circuit on the
    # first non-tensor item instead of materializing the whole list first.
    return all(
        isinstance(item, type(pyhf.tensorlib.astensor(item))) for item in in_list
    )
def test_mle_fit_default(tmpdir, hypotest_args):
    """
    Check that the default return structure of pyhf.infer.mle.fit is as expected
    """
    tensorlib = pyhf.tensorlib
    _, data, model = hypotest_args
    bestfit_pars = pyhf.infer.mle.fit(data, model)
    # By default a single tensor of best-fit parameters comes back
    assert isinstance(bestfit_pars, type(tensorlib.astensor(bestfit_pars)))
    assert tensorlib.shape(bestfit_pars) == (model.config.npars,)
def test_mle_fit_return_fitted_val(tmpdir, hypotest_args):
    """
    Check that the return structure of pyhf.infer.mle.fit with the
    return_fitted_val keyword arg is as expected
    """
    tensorlib = pyhf.tensorlib
    _, data, model = hypotest_args
    fit_result = pyhf.infer.mle.fit(data, model, return_fitted_val=True)
    # Expect a (bestfit_pars, twice_nll) pair
    bestfit_pars, twice_nll = fit_result[0], fit_result[1]
    assert tensorlib.shape(bestfit_pars) == (model.config.npars,)
    assert isinstance(bestfit_pars, type(tensorlib.astensor(bestfit_pars)))
    # twice_nll is a scalar
    assert tensorlib.shape(twice_nll) == ()
def test_hypotest_default(tmpdir, hypotest_args):
    """
    Check that the default return structure of pyhf.infer.hypotest is as expected
    """
    tensorlib = pyhf.tensorlib
    cls_obs = pyhf.infer.hypotest(*hypotest_args)
    # Default return is a scalar CLs_obs tensor
    assert tensorlib.shape(cls_obs) == ()
    assert isinstance(cls_obs, type(tensorlib.astensor(cls_obs)))
def test_hypotest_return_tail_probs(tmpdir, hypotest_args):
    """
    Check that the return structure of pyhf.infer.hypotest with the
    return_tail_probs keyword arg is as expected
    """
    tensorlib = pyhf.tensorlib
    result = pyhf.infer.hypotest(*hypotest_args, return_tail_probs=True)
    # Expect (CLs_obs, [CL_sb, CL_b])
    assert len(list(result)) == 2
    cls_obs, tail_probs = result[0], result[1]
    assert isinstance(cls_obs, type(tensorlib.astensor(cls_obs)))
    assert len(tail_probs) == 2
    assert check_uniform_type(tail_probs)
def test_hypotest_return_expected(tmpdir, hypotest_args):
    """
    Check that the return structure of pyhf.infer.hypotest with the
    addition of the return_expected keyword arg is as expected
    """
    tensorlib = pyhf.tensorlib
    result = pyhf.infer.hypotest(
        *hypotest_args, return_tail_probs=True, return_expected=True
    )
    # Expect (CLs_obs, [CLsb, CLb], CLs_exp)
    assert len(list(result)) == 3
    cls_obs, tail_probs, cls_exp = result[0], result[1], result[2]
    assert isinstance(cls_obs, type(tensorlib.astensor(cls_obs)))
    assert len(tail_probs) == 2
    assert check_uniform_type(tail_probs)
    assert isinstance(cls_exp, type(tensorlib.astensor(cls_exp)))
def test_hypotest_return_expected_set(tmpdir, hypotest_args):
    """
    Check that the return structure of pyhf.infer.hypotest with the
    addition of the return_expected_set keyword arg is as expected
    """
    tensorlib = pyhf.tensorlib
    result = pyhf.infer.hypotest(
        *hypotest_args,
        return_tail_probs=True,
        return_expected=True,
        return_expected_set=True,
    )
    # Expect (CLs_obs, [CLsb, CLb], CLs_exp, CLs_exp @ [-2, -1, 0, +1, +2] sigma)
    assert len(list(result)) == 4
    cls_obs, tail_probs, cls_exp, cls_exp_set = (
        result[0],
        result[1],
        result[2],
        result[3],
    )
    assert isinstance(cls_obs, type(tensorlib.astensor(cls_obs)))
    assert len(tail_probs) == 2
    assert check_uniform_type(tail_probs)
    assert isinstance(cls_exp, type(tensorlib.astensor(cls_exp)))
    # One expected CLs value per sigma band
    assert len(cls_exp_set) == 5
    assert check_uniform_type(cls_exp_set)
def test_inferapi_pyhf_independence():
    """
    pyhf.infer should eventually be factored out so it should be
    independent from pyhf internals. This is testing that
    a much simpler model still can run through pyhf.infer.hypotest
    """
    from pyhf import get_backend

    class _NonPyhfConfig(object):
        # Minimal stand-in for a pyhf model config: a single POI plus one
        # nuisance parameter, with fixed suggested initial values and bounds.
        def __init__(self):
            self.poi_index = 0
            self.npars = 2

        def suggested_init(self):
            return [1.0, 1.0]

        def suggested_bounds(self):
            return [[0.0, 10.0], [0.0, 10.0]]

    class NonPyhfModel(object):
        # Single-bin counting model: Poisson main measurement with a
        # Poisson-constrained background normalization (gamma).
        def __init__(self, spec):
            # spec = (signal count, nominal background count, background uncertainty)
            self.sig, self.nominal, self.uncert = spec
            # factor converts the relative uncertainty into an effective
            # auxiliary-measurement count for the Poisson constraint
            self.factor = (self.nominal / self.uncert) ** 2
            self.aux = 1.0 * self.factor
            self.config = _NonPyhfConfig()

        def _make_main_pdf(self, pars):
            # Main-measurement pdf: Poisson(gamma * nominal + mu * signal)
            mu, gamma = pars
            expected_main = gamma * self.nominal + mu * self.sig
            return pyhf.probability.Poisson(expected_main)

        def _make_constraint_pdf(self, pars):
            # Constraint pdf on the background scale: Poisson(gamma * factor)
            mu, gamma = pars
            return pyhf.probability.Poisson(gamma * self.factor)

        def expected_data(self, pars, include_auxdata=True):
            # Expected observations for the given parameters; optionally
            # concatenates the expected auxiliary data, mirroring pyhf models.
            tensorlib, _ = get_backend()
            expected_main = tensorlib.astensor(
                [self._make_main_pdf(pars).expected_data()]
            )
            aux_data = tensorlib.astensor(
                [self._make_constraint_pdf(pars).expected_data()]
            )
            if not include_auxdata:
                return expected_main
            return tensorlib.concatenate([expected_main, aux_data])

        def logpdf(self, pars, data):
            # Joint log-likelihood = main log-prob + constraint log-prob,
            # returned as a length-1 tensor as pyhf.infer expects.
            tensorlib, _ = get_backend()
            maindata, auxdata = data
            main = self._make_main_pdf(pars).log_prob(maindata)
            constraint = self._make_constraint_pdf(pars).log_prob(auxdata)
            return tensorlib.astensor([main + constraint])

    model = NonPyhfModel([5, 50, 7])
    cls = pyhf.infer.hypotest(
        1.0, model.expected_data(model.config.suggested_init()), model
    )
    # Pinned reference value for this fixed toy model
    assert np.isclose(cls, 0.7267836451638846)
@pytest.mark.parametrize("qtilde", [True, False])
def test_calculator_distributions_without_teststatistic(qtilde):
    """Requesting distributions before a test statistic is computed must raise."""
    data, dummy_pdf = [0.0], {}
    init_pars, par_bounds = [1.0], [(0.0, 10.0)]
    calc = pyhf.infer.AsymptoticCalculator(
        data, dummy_pdf, init_pars, par_bounds, qtilde=qtilde
    )
    with pytest.raises(RuntimeError):
        calc.distributions(1.0)
@pytest.mark.parametrize(
    "nsigma,expected_pval",
    [
        # values tabulated using ROOT.RooStats.SignificanceToPValue
        # they are consistent with relative difference < 1e-14 with scipy.stats.norm.sf
        (5, 2.866515718791945e-07),
        (6, 9.865876450377018e-10),
        (7, 1.279812543885835e-12),
        (8, 6.220960574271829e-16),
        (9, 1.1285884059538408e-19),
    ],
)
def test_asymptotic_dist_low_pvalues(backend, nsigma, expected_pval):
    """Check far-tail p-values against independently tabulated reference values."""
    # 32-bit backends cannot resolve the tails as tightly as 64-bit ones
    rtol = 1e-8 if backend[0].precision == '64b' else 1e-5
    dist = pyhf.infer.calculators.AsymptoticTestStatDistribution(0)
    pval = np.array(dist.pvalue(nsigma))
    assert np.isclose(pval, expected_pval, rtol=rtol, atol=0)
def test_significance_to_pvalue_roundtrip(backend):
    """Inverting pvalue through the normal quantile should recover the significance."""
    # 32-bit backends lose precision in the round trip
    rtol = 1e-15 if backend[0].precision == '64b' else 1e-6
    sigma = np.arange(0, 10, 0.1)
    dist = pyhf.infer.calculators.AsymptoticTestStatDistribution(0)
    pvalue = dist.pvalue(pyhf.tensorlib.astensor(sigma))
    recovered_sigma = -scipy.stats.norm.ppf(np.array(pvalue))
    assert np.allclose(sigma, recovered_sigma, atol=0, rtol=rtol)