From bf10783aa3577d1583b1f2cfd48faf697df88d64 Mon Sep 17 00:00:00 2001 From: Matthew Feickert Date: Wed, 26 Aug 2020 13:26:26 -0500 Subject: [PATCH 01/12] Apply new black to src --- src/pyhf/constraints.py | 8 ++++---- src/pyhf/modifiers/histosys.py | 4 ++-- src/pyhf/modifiers/lumi.py | 4 ++-- src/pyhf/modifiers/normfactor.py | 4 ++-- src/pyhf/modifiers/normsys.py | 4 ++-- src/pyhf/modifiers/shapefactor.py | 4 ++-- src/pyhf/modifiers/shapesys.py | 4 ++-- src/pyhf/tensor/common.py | 4 ++-- 8 files changed, 18 insertions(+), 18 deletions(-) diff --git a/src/pyhf/constraints.py b/src/pyhf/constraints.py index b7663c6673..81091c2419 100644 --- a/src/pyhf/constraints.py +++ b/src/pyhf/constraints.py @@ -82,10 +82,10 @@ def _precompute(self): self.access_field = tensorlib.astensor(self._access_field, dtype='int') def has_pdf(self): - ''' + """ Returns: flag (`bool`): Whether the model has a Gaussian Constraint - ''' + """ return bool(self.param_viewer.index_selection) def make_pdf(self, pars): @@ -213,10 +213,10 @@ def _precompute(self): self.batched_factors = tensorlib.astensor(self._batched_factors) def has_pdf(self): - ''' + """ Returns: flag (`bool`): Whether the model has a Gaussian Constraint - ''' + """ return bool(self.param_viewer.index_selection) def make_pdf(self, pars): diff --git a/src/pyhf/modifiers/histosys.py b/src/pyhf/modifiers/histosys.py index fa8ebef183..bc802fc462 100644 --- a/src/pyhf/modifiers/histosys.py +++ b/src/pyhf/modifiers/histosys.py @@ -80,10 +80,10 @@ def _precompute(self): ) def apply(self, pars): - ''' + """ Returns: modification tensor: Shape (n_modifiers, n_global_samples, n_alphas, n_global_bin) - ''' + """ if not self.param_viewer.index_selection: return diff --git a/src/pyhf/modifiers/lumi.py b/src/pyhf/modifiers/lumi.py index 9bd8fc8131..9235bc4a3c 100644 --- a/src/pyhf/modifiers/lumi.py +++ b/src/pyhf/modifiers/lumi.py @@ -57,10 +57,10 @@ def _precompute(self): self.lumi_default = tensorlib.ones(self.lumi_mask.shape) def apply(self, pars): - ''' + """ Returns: modification tensor: Shape (n_modifiers, n_global_samples, n_alphas, n_global_bin) - ''' + """ if not self.param_viewer.index_selection: return diff --git a/src/pyhf/modifiers/normfactor.py b/src/pyhf/modifiers/normfactor.py index 15770d7739..0f0552de5a 100644 --- a/src/pyhf/modifiers/normfactor.py +++ b/src/pyhf/modifiers/normfactor.py @@ -58,10 +58,10 @@ def _precompute(self): self.normfactor_default = tensorlib.ones(self.normfactor_mask.shape) def apply(self, pars): - ''' + """ Returns: modification tensor: Shape (n_modifiers, n_global_samples, n_alphas, n_global_bin) - ''' + """ if not self.param_viewer.index_selection: return tensorlib, _ = get_backend() diff --git a/src/pyhf/modifiers/normsys.py b/src/pyhf/modifiers/normsys.py index 601254fd15..4c454c7155 100644 --- a/src/pyhf/modifiers/normsys.py +++ b/src/pyhf/modifiers/normsys.py @@ -81,10 +81,10 @@ def _precompute(self): ) def apply(self, pars): - ''' + """ Returns: modification tensor: Shape (n_modifiers, n_global_samples, n_alphas, n_global_bin) - ''' + """ if not self.param_viewer.index_selection: return diff --git a/src/pyhf/modifiers/shapefactor.py b/src/pyhf/modifiers/shapefactor.py index 29bf1e2798..9469819218 100644 --- a/src/pyhf/modifiers/shapefactor.py +++ b/src/pyhf/modifiers/shapefactor.py @@ -131,10 +131,10 @@ def _precompute(self): self.sample_ones = tensorlib.ones(tensorlib.shape(self.shapefactor_mask)[1]) def apply(self, pars): - ''' + """ Returns: modification tensor: Shape (n_modifiers, n_global_samples, n_alphas, 
n_global_bin) - ''' + """ if not self.param_viewer.index_selection: return diff --git a/src/pyhf/modifiers/shapesys.py b/src/pyhf/modifiers/shapesys.py index 956f375a50..16a7d7dacd 100644 --- a/src/pyhf/modifiers/shapesys.py +++ b/src/pyhf/modifiers/shapesys.py @@ -154,10 +154,10 @@ def finalize(self, pdfconfig): pdfconfig.param_set(pname).auxdata = default_backend.tolist(factors) def apply(self, pars): - ''' + """ Returns: modification tensor: Shape (n_modifiers, n_global_samples, n_alphas, n_global_bin) - ''' + """ tensorlib, _ = get_backend() if not self.param_viewer.index_selection: return diff --git a/src/pyhf/tensor/common.py b/src/pyhf/tensor/common.py index 838aadec59..a300ef8851 100644 --- a/src/pyhf/tensor/common.py +++ b/src/pyhf/tensor/common.py @@ -74,7 +74,7 @@ def _tensorviewer_from_slices(target_slices, names, batch_size): def _tensorviewer_from_sizes(sizes, names, batch_size): - ''' + """ Creates a _Tensorviewer based on tensor sizes. the TV will be able to stitch together data with @@ -83,7 +83,7 @@ def _tensorviewer_from_sizes(sizes, names, batch_size): tv.stitch([foo[slice1],foo[slice2],foo[slice3]) and split them again accordingly. - ''' + """ target_slices = [] start = 0 for sz in sizes: From 0e50cd18a55bd4c6d24bc0dc216e5a6fab6eb0b5 Mon Sep 17 00:00:00 2001 From: Matthew Feickert Date: Wed, 26 Aug 2020 13:29:53 -0500 Subject: [PATCH 02/12] Apply black to tests --- tests/conftest.py | 4 +- tests/test_backend_consistency.py | 6 +- tests/test_combined_modifiers.py | 30 ++++++++-- tests/test_infer.py | 4 +- tests/test_optim.py | 12 +++- tests/test_patchset.py | 5 +- tests/test_pdf.py | 5 +- tests/test_schema.py | 3 +- tests/test_scripts.py | 12 ++-- tests/test_tensor.py | 94 +++++++++++++++++++------------ tests/test_tensorviewer.py | 18 +++++- 11 files changed, 133 insertions(+), 60 deletions(-) diff --git a/tests/conftest.py b/tests/conftest.py index f090cab60d..e9047008de 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -144,11 +144,11 @@ def interpcode(request): @pytest.fixture(scope='function') def datadir(tmpdir, request): - ''' + """ Fixture responsible for searching a folder with the same name of test module and, if available, moving all contents to a temporary directory so tests can use them freely. - ''' + """ # this gets the module name (e.g. /path/to/pyhf/tests/test_schema.py) # and then gets the directory by removing the suffix (e.g. 
/path/to/pyhf/tests/test_schema) test_dir = pathlib.Path(request.module.__file__).with_suffix('') diff --git a/tests/test_backend_consistency.py b/tests/test_backend_consistency.py index e0ea595214..654c591d5e 100644 --- a/tests/test_backend_consistency.py +++ b/tests/test_backend_consistency.py @@ -115,7 +115,11 @@ def test_hypotest_qmu_tilde( pyhf.set_backend(backend) qmu_tilde = pyhf.infer.test_statistics.qmu_tilde( - 1.0, data, pdf, pdf.config.suggested_init(), pdf.config.suggested_bounds(), + 1.0, + data, + pdf, + pdf.config.suggested_init(), + pdf.config.suggested_bounds(), ) test_statistic.append(qmu_tilde) diff --git a/tests/test_combined_modifiers.py b/tests/test_combined_modifiers.py index c8dde543db..dadc3109e0 100644 --- a/tests/test_combined_modifiers.py +++ b/tests/test_combined_modifiers.py @@ -396,7 +396,10 @@ def test_shapesys(backend): par_map={ 'dummy1': { 'paramset': paramset( - n_parameters=1, inits=[0], bounds=[[0, 10]], fixed=False, + n_parameters=1, + inits=[0], + bounds=[[0, 10]], + fixed=False, ), 'slice': slice(0, 1), }, @@ -424,7 +427,10 @@ def test_shapesys(backend): }, 'dummy2': { 'paramset': paramset( - n_parameters=1, inits=[0], bounds=[[0, 10]], fixed=False, + n_parameters=1, + inits=[0], + bounds=[[0, 10]], + fixed=False, ), 'slice': slice(4, 5), }, @@ -495,13 +501,19 @@ def test_normfactor(backend): par_map={ 'mu1': { 'paramset': unconstrained( - n_parameters=1, inits=[0], bounds=[[0, 10]], fixed=False, + n_parameters=1, + inits=[0], + bounds=[[0, 10]], + fixed=False, ), 'slice': slice(0, 1), }, 'mu2': { 'paramset': unconstrained( - n_parameters=1, inits=[0], bounds=[[0, 10]], fixed=False, + n_parameters=1, + inits=[0], + bounds=[[0, 10]], + fixed=False, ), 'slice': slice(1, 2), }, @@ -575,7 +587,10 @@ def test_shapesys_zero(backend): par_map={ 'SigXsecOverSM': { 'paramset': paramset( - n_parameters=1, inits=[0], bounds=[[0, 10]], fixed=False, + n_parameters=1, + inits=[0], + bounds=[[0, 10]], + fixed=False, ), 'slice': slice(0, 1), }, @@ -669,7 +684,10 @@ def test_shapefactor(backend): par_map={ 'shapefac1': { 'paramset': unconstrained( - n_parameters=1, inits=[0], bounds=[[0, 10]], fixed=False, + n_parameters=1, + inits=[0], + bounds=[[0, 10]], + fixed=False, ), 'slice': slice(0, 1), }, diff --git a/tests/test_infer.py b/tests/test_infer.py index a734dce93d..527ec60f1c 100644 --- a/tests/test_infer.py +++ b/tests/test_infer.py @@ -120,11 +120,11 @@ def test_hypotest_return_expected_set(tmpdir, hypotest_args): def test_inferapi_pyhf_independence(): - ''' + """ pyhf.infer should eventually be factored out so it should be infependent from pyhf internals. 
This is testing that a much simpler model still can run through pyhf.infer.hypotest - ''' + """ from pyhf import get_backend class _NonPyhfConfig(object): diff --git a/tests/test_optim.py b/tests/test_optim.py index 2850a26686..f0e0464b8c 100644 --- a/tests/test_optim.py +++ b/tests/test_optim.py @@ -28,7 +28,9 @@ def rosen(x): @pytest.mark.parametrize('do_stitch', [False, True], ids=['no_stitch', 'do_stitch']) @pytest.mark.parametrize( - 'precision', ['32b', '64b'], ids=['32b', '64b'], + 'precision', + ['32b', '64b'], + ids=['32b', '64b'], ) @pytest.mark.parametrize( 'tensorlib', @@ -141,7 +143,13 @@ def test_optimizer_mixin_extra_kwargs(optimizer): @pytest.mark.parametrize( 'backend,backend_new', itertools.permutations( - [('numpy', False), ('pytorch', True), ('tensorflow', True), ('jax', True),], 2 + [ + ('numpy', False), + ('pytorch', True), + ('tensorflow', True), + ('jax', True), + ], + 2, ), ids=lambda pair: f'{pair[0]}', ) diff --git a/tests/test_patchset.py b/tests/test_patchset.py index afe5dacb82..ff21b2d9f4 100644 --- a/tests/test_patchset.py +++ b/tests/test_patchset.py @@ -25,7 +25,10 @@ def patch(): @pytest.mark.parametrize( 'patchset_file', - ['patchset_bad_empty_patches.json', 'patchset_bad_no_version.json',], + [ + 'patchset_bad_empty_patches.json', + 'patchset_bad_no_version.json', + ], ) def test_patchset_invalid_spec(datadir, patchset_file): patchsetspec = json.load(open(datadir.join(patchset_file))) diff --git a/tests/test_pdf.py b/tests/test_pdf.py index 1e47cfdb2b..d74f62af79 100644 --- a/tests/test_pdf.py +++ b/tests/test_pdf.py @@ -753,7 +753,10 @@ def test_sample_wrong_bins(): [ ( None, - {'normsys': {'interpcode': 'code4'}, 'histosys': {'interpcode': 'code4p'},}, + { + 'normsys': {'interpcode': 'code4'}, + 'histosys': {'interpcode': 'code4p'}, + }, ) ], ) diff --git a/tests/test_schema.py b/tests/test_schema.py index 94af25db1c..7f09a30daf 100644 --- a/tests/test_schema.py +++ b/tests/test_schema.py @@ -420,7 +420,8 @@ def test_jsonpatch_fail(patch): @pytest.mark.parametrize( - 'patchset_file', ['patchset_good.json'], + 'patchset_file', + ['patchset_good.json'], ) def test_patchset(datadir, patchset_file): patchset = json.load(open(datadir.join(patchset_file))) diff --git a/tests/test_scripts.py b/tests/test_scripts.py index 40105d72d2..b6ef1e8745 100644 --- a/tests/test_scripts.py +++ b/tests/test_scripts.py @@ -97,7 +97,8 @@ def test_import_prepHistFactory_and_cls(tmpdir, script_runner): @pytest.mark.parametrize( - "backend", ["numpy", "tensorflow", "pytorch", "jax"], + "backend", + ["numpy", "tensorflow", "pytorch", "jax"], ) def test_cls_backend_option(tmpdir, script_runner, backend): temp = tmpdir.join("parsed_output.json") @@ -240,7 +241,8 @@ def test_testpoi(tmpdir, script_runner): 'optimizer', ['scipy', 'minuit', 'scipy_optimizer', 'minuit_optimizer'] ) @pytest.mark.parametrize( - 'opts,success', [(['maxiter=1000'], True), (['maxiter=10'], False)], + 'opts,success', + [(['maxiter=1000'], True), (['maxiter=10'], False)], ) def test_cls_optimizer(tmpdir, script_runner, optimizer, opts, success): temp = tmpdir.join("parsed_output.json") @@ -306,8 +308,10 @@ def test_prune(tmpdir, script_runner): ) ret = script_runner.run(*shlex.split(command)) - command = 'pyhf prune -m staterror_channel1 --measurement GammaExample {0:s}'.format( - temp.strpath + command = ( + 'pyhf prune -m staterror_channel1 --measurement GammaExample {0:s}'.format( + temp.strpath + ) ) ret = script_runner.run(*shlex.split(command)) assert ret.success diff --git a/tests/test_tensor.py 
b/tests/test_tensor.py index c493f43b6f..8cbfdead76 100644 --- a/tests/test_tensor.py +++ b/tests/test_tensor.py @@ -77,13 +77,16 @@ def test_complex_tensor_ops(backend): 1, 1, ] - assert tb.tolist( - tb.where( - tb.astensor([1, 0, 1], dtype="bool"), - tb.astensor([1, 1, 1]), - tb.astensor([2, 2, 2]), + assert ( + tb.tolist( + tb.where( + tb.astensor([1, 0, 1], dtype="bool"), + tb.astensor([1, 1, 1]), + tb.astensor([2, 2, 2]), + ) ) - ) == [1, 2, 1] + == [1, 2, 1] + ) def test_ones(backend): @@ -106,30 +109,39 @@ def test_zeros(backend): def test_broadcasting(backend): tb = pyhf.tensorlib - assert list( - map( - tb.tolist, - tb.simple_broadcast( - tb.astensor([1, 1, 1]), tb.astensor([2]), tb.astensor([3, 3, 3]) - ), + assert ( + list( + map( + tb.tolist, + tb.simple_broadcast( + tb.astensor([1, 1, 1]), tb.astensor([2]), tb.astensor([3, 3, 3]) + ), + ) ) - ) == [[1, 1, 1], [2, 2, 2], [3, 3, 3]] - assert list( - map( - tb.tolist, - tb.simple_broadcast( - tb.astensor(1), tb.astensor([2, 3, 4]), tb.astensor([5, 6, 7]) - ), + == [[1, 1, 1], [2, 2, 2], [3, 3, 3]] + ) + assert ( + list( + map( + tb.tolist, + tb.simple_broadcast( + tb.astensor(1), tb.astensor([2, 3, 4]), tb.astensor([5, 6, 7]) + ), + ) ) - ) == [[1, 1, 1], [2, 3, 4], [5, 6, 7]] - assert list( - map( - tb.tolist, - tb.simple_broadcast( - tb.astensor([1]), tb.astensor([2, 3, 4]), tb.astensor([5, 6, 7]) - ), + == [[1, 1, 1], [2, 3, 4], [5, 6, 7]] + ) + assert ( + list( + map( + tb.tolist, + tb.simple_broadcast( + tb.astensor([1]), tb.astensor([2, 3, 4]), tb.astensor([5, 6, 7]) + ), + ) ) - ) == [[1, 1, 1], [2, 3, 4], [5, 6, 7]] + == [[1, 1, 1], [2, 3, 4], [5, 6, 7]] + ) with pytest.raises(Exception): tb.simple_broadcast( tb.astensor([1]), tb.astensor([2, 3]), tb.astensor([5, 6, 7]) @@ -231,18 +243,26 @@ def test_pdf_calculations(backend): def test_boolean_mask(backend): tb = pyhf.tensorlib - assert tb.tolist( - tb.boolean_mask( - tb.astensor([1, 2, 3, 4, 5, 6]), - tb.astensor([True, True, False, True, False, False], dtype='bool'), + assert ( + tb.tolist( + tb.boolean_mask( + tb.astensor([1, 2, 3, 4, 5, 6]), + tb.astensor([True, True, False, True, False, False], dtype='bool'), + ) ) - ) == [1, 2, 4] - assert tb.tolist( - tb.boolean_mask( - tb.astensor([[1, 2], [3, 4], [5, 6]]), - tb.astensor([[True, True], [False, True], [False, False]], dtype='bool'), + == [1, 2, 4] + ) + assert ( + tb.tolist( + tb.boolean_mask( + tb.astensor([[1, 2], [3, 4], [5, 6]]), + tb.astensor( + [[True, True], [False, True], [False, False]], dtype='bool' + ), + ) ) - ) == [1, 2, 4] + == [1, 2, 4] + ) def test_tensor_tile(backend): diff --git a/tests/test_tensorviewer.py b/tests/test_tensorviewer.py index 789e71f6af..1c1dcc1e64 100644 --- a/tests/test_tensorviewer.py +++ b/tests/test_tensorviewer.py @@ -4,7 +4,11 @@ def test_tensorviewer(backend): tb, _ = backend tv = _TensorViewer( - [tb.astensor([0, 4, 5]), tb.astensor([1, 2, 3]), tb.astensor([6]),], + [ + tb.astensor([0, 4, 5]), + tb.astensor([1, 2, 3]), + tb.astensor([6]), + ], names=['zzz', 'aaa', 'x'], ) @@ -29,11 +33,19 @@ def test_tensorviewer(backend): assert a == [[0, 40, 50], [10, 20, 30], [60]] subviewer = _TensorViewer( - [tb.astensor([0]), tb.astensor([1, 2, 3]),], names=['x', 'aaa'] + [ + tb.astensor([0]), + tb.astensor([1, 2, 3]), + ], + names=['x', 'aaa'], ) assert tb.tolist(subviewer.stitch(tv.split(data, ['x', 'aaa']))) == [60, 10, 20, 30] subviewer = _TensorViewer( - [tb.astensor([0, 1, 2]), tb.astensor([3]),], names=['aaa', 'x'] + [ + tb.astensor([0, 1, 2]), + tb.astensor([3]), + ], + 
names=['aaa', 'x'], ) assert tb.tolist(subviewer.stitch(tv.split(data, ['aaa', 'x']))) == [10, 20, 30, 60] From 0193627fec5a59c52f754b28cdff15e7751f3421 Mon Sep 17 00:00:00 2001 From: Matthew Feickert Date: Wed, 26 Aug 2020 13:30:50 -0500 Subject: [PATCH 03/12] Reapply Black to src --- src/pyhf/cli/patchset.py | 8 ++++++-- src/pyhf/cli/spec.py | 6 +++++- src/pyhf/optimize/mixins.py | 8 +++++++- src/pyhf/parameters/paramview.py | 13 +++++++++++-- src/pyhf/pdf.py | 7 ++++++- src/pyhf/tensor/tensorflow_backend.py | 3 ++- src/pyhf/writexml.py | 3 ++- 7 files changed, 39 insertions(+), 9 deletions(-) diff --git a/src/pyhf/cli/patchset.py b/src/pyhf/cli/patchset.py index 33c349a113..a586b3db17 100644 --- a/src/pyhf/cli/patchset.py +++ b/src/pyhf/cli/patchset.py @@ -19,7 +19,9 @@ def cli(): @cli.command() @click.argument('patchset', default='-') @click.option( - '--name', help='The name of the patch to extract.', default=None, + '--name', + help='The name of the patch to extract.', + default=None, ) @click.option( '--output-file', @@ -65,7 +67,9 @@ def extract(patchset, name, output_file, with_metadata): @click.argument('background-only', default='-') @click.argument('patchset', default='-') @click.option( - '--name', help='The name of the patch to extract.', default=None, + '--name', + help='The name of the patch to extract.', + default=None, ) @click.option( '--output-file', diff --git a/src/pyhf/cli/spec.py b/src/pyhf/cli/spec.py index eb5b4d4671..0e45bb9901 100644 --- a/src/pyhf/cli/spec.py +++ b/src/pyhf/cli/spec.py @@ -346,7 +346,11 @@ def digest(workspace, algorithm, output_json): } if output_json: - output = json.dumps(digests, indent=4, sort_keys=True,) + output = json.dumps( + digests, + indent=4, + sort_keys=True, + ) else: output = '\n'.join( f"{hash_alg}:{digest}" for hash_alg, digest in digests.items() diff --git a/src/pyhf/optimize/mixins.py b/src/pyhf/optimize/mixins.py index daff31f2ba..3497addfbf 100644 --- a/src/pyhf/optimize/mixins.py +++ b/src/pyhf/optimize/mixins.py @@ -29,7 +29,13 @@ def __init__(self, **kwargs): ) def _internal_minimize( - self, func, x0, do_grad=False, bounds=None, fixed_vals=None, options={}, + self, + func, + x0, + do_grad=False, + bounds=None, + fixed_vals=None, + options={}, ): minimizer = self._get_minimizer( diff --git a/src/pyhf/parameters/paramview.py b/src/pyhf/parameters/paramview.py index 2210b632c9..5b33815106 100644 --- a/src/pyhf/parameters/paramview.py +++ b/src/pyhf/parameters/paramview.py @@ -9,7 +9,14 @@ def _tensorviewer_from_parmap(par_map, batch_size): names, slices, _ = list( zip( *sorted( - [(k, v['slice'], v['slice'].start,) for k, v in par_map.items()], + [ + ( + k, + v['slice'], + v['slice'].start, + ) + for k, v in par_map.items() + ], key=lambda x: x[2], ) ) @@ -18,7 +25,9 @@ def _tensorviewer_from_parmap(par_map, batch_size): def extract_index_access( - baseviewer, subviewer, indices, + baseviewer, + subviewer, + indices, ): tensorlib, _ = get_backend() diff --git a/src/pyhf/pdf.py b/src/pyhf/pdf.py index f22205c8fb..c5e41256e4 100644 --- a/src/pyhf/pdf.py +++ b/src/pyhf/pdf.py @@ -99,7 +99,12 @@ def _paramset_requirements_from_modelspec(spec, channel_nbins): def _nominal_and_modifiers_from_spec(config, spec): default_data_makers = { - 'histosys': lambda: {'hi_data': [], 'lo_data': [], 'nom_data': [], 'mask': [],}, + 'histosys': lambda: { + 'hi_data': [], + 'lo_data': [], + 'nom_data': [], + 'mask': [], + }, 'lumi': lambda: {'mask': []}, 'normsys': lambda: {'hi': [], 'lo': [], 'nom_data': [], 'mask': []}, 'normfactor': lambda: 
{'mask': []}, diff --git a/src/pyhf/tensor/tensorflow_backend.py b/src/pyhf/tensor/tensorflow_backend.py index 6725bbaed1..ab9431a80c 100644 --- a/src/pyhf/tensor/tensorflow_backend.py +++ b/src/pyhf/tensor/tensorflow_backend.py @@ -460,7 +460,8 @@ def normal_cdf(self, x, mu=0.0, sigma=1): TensorFlow Tensor: The CDF """ normal = tfp.distributions.Normal( - self.astensor(mu, dtype='float'), self.astensor(sigma, dtype='float'), + self.astensor(mu, dtype='float'), + self.astensor(sigma, dtype='float'), ) return normal.cdf(x) diff --git a/src/pyhf/writexml.py b/src/pyhf/writexml.py index 3d8c10c071..bdb64bc14b 100644 --- a/src/pyhf/writexml.py +++ b/src/pyhf/writexml.py @@ -245,7 +245,8 @@ def writexml(spec, specdir, data_rootdir, resultprefix): Path(specdir).parent.joinpath('HistFactorySchema.dtd'), ) combination = ET.Element( - "Combination", OutputFilePrefix=str(Path(specdir).joinpath(resultprefix)), + "Combination", + OutputFilePrefix=str(Path(specdir).joinpath(resultprefix)), ) with uproot.recreate( From 535d2382d83f1ac3e905dbe864f1aea7d53511e1 Mon Sep 17 00:00:00 2001 From: Matthew Feickert Date: Thu, 27 Aug 2020 22:08:53 -0500 Subject: [PATCH 04/12] Update rev tag in pre-commit --- .pre-commit-config.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 0e47ae761a..209a7e5883 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -1,6 +1,6 @@ repos: - repo: https://github.com/psf/black - rev: stable + rev: 20.8b1 hooks: - id: black language_version: python3 From 776b8c71be9a9f46b33142d7bfccc43e79ec9f3b Mon Sep 17 00:00:00 2001 From: Matthew Feickert Date: Thu, 27 Aug 2020 22:23:04 -0500 Subject: [PATCH 05/12] Revert "Update rev tag in pre-commit" This reverts commit 535d2382d83f1ac3e905dbe864f1aea7d53511e1. --- .pre-commit-config.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 209a7e5883..0e47ae761a 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -1,6 +1,6 @@ repos: - repo: https://github.com/psf/black - rev: 20.8b1 + rev: stable hooks: - id: black language_version: python3 From 6faf1677f2039367f6eafff5b2a06a5c469042fb Mon Sep 17 00:00:00 2001 From: Matthew Feickert Date: Sat, 29 Aug 2020 01:41:04 -0500 Subject: [PATCH 06/12] Use pre-commit autoupdate to bump black rev --- .pre-commit-config.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 0e47ae761a..209a7e5883 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -1,6 +1,6 @@ repos: - repo: https://github.com/psf/black - rev: stable + rev: 20.8b1 hooks: - id: black language_version: python3 From 7f55eb410a4e130e8ebdd24f7a8a33927476ffce Mon Sep 17 00:00:00 2001 From: Matthew Feickert Date: Sat, 29 Aug 2020 01:59:34 -0500 Subject: [PATCH 07/12] Update dev docs to explain pre-commit autoupdate --- docs/development.rst | 9 +++++++++ 1 file changed, 9 insertions(+) diff --git a/docs/development.rst b/docs/development.rst index 833313be33..7370ea000a 100644 --- a/docs/development.rst +++ b/docs/development.rst @@ -19,6 +19,15 @@ Then setup the Git pre-commit hook for `Black `__ pre-commit install +as the ``rev`` gets updated through time to track changes of different hooks, +simply run + +.. code-block:: console + + pre-commit autoupdate + +to have pre-commit install the new version. 
+ Testing ------- From d5e8e6ca1aacc284d5509a45b0f47c5ebe104207 Mon Sep 17 00:00:00 2001 From: Matthew Feickert Date: Sat, 29 Aug 2020 02:08:47 -0500 Subject: [PATCH 08/12] Fix issues introduced by trailing commas --- src/pyhf/cli/patchset.py | 6 +----- src/pyhf/cli/spec.py | 6 +----- src/pyhf/optimize/mixins.py | 8 +------- src/pyhf/parameters/paramview.py | 15 ++------------- src/pyhf/pdf.py | 7 +------ src/pyhf/tensor/tensorflow_backend.py | 3 +-- src/pyhf/writexml.py | 3 +-- 7 files changed, 8 insertions(+), 40 deletions(-) diff --git a/src/pyhf/cli/patchset.py b/src/pyhf/cli/patchset.py index a586b3db17..89e732c998 100644 --- a/src/pyhf/cli/patchset.py +++ b/src/pyhf/cli/patchset.py @@ -18,11 +18,7 @@ def cli(): @cli.command() @click.argument('patchset', default='-') -@click.option( - '--name', - help='The name of the patch to extract.', - default=None, -) +@click.option('--name', help='The name of the patch to extract.', default=None) @click.option( '--output-file', help='The location of the output json file. If not specified, prints to screen.', diff --git a/src/pyhf/cli/spec.py b/src/pyhf/cli/spec.py index 0e45bb9901..e439b82e30 100644 --- a/src/pyhf/cli/spec.py +++ b/src/pyhf/cli/spec.py @@ -346,11 +346,7 @@ def digest(workspace, algorithm, output_json): } if output_json: - output = json.dumps( - digests, - indent=4, - sort_keys=True, - ) + output = json.dumps(digests, indent=4, sort_keys=True) else: output = '\n'.join( f"{hash_alg}:{digest}" for hash_alg, digest in digests.items() diff --git a/src/pyhf/optimize/mixins.py b/src/pyhf/optimize/mixins.py index 3497addfbf..a5c52088bf 100644 --- a/src/pyhf/optimize/mixins.py +++ b/src/pyhf/optimize/mixins.py @@ -29,13 +29,7 @@ def __init__(self, **kwargs): ) def _internal_minimize( - self, - func, - x0, - do_grad=False, - bounds=None, - fixed_vals=None, - options={}, + self, func, x0, do_grad=False, bounds=None, fixed_vals=None, options={} ): minimizer = self._get_minimizer( diff --git a/src/pyhf/parameters/paramview.py b/src/pyhf/parameters/paramview.py index 5b33815106..13c76e3257 100644 --- a/src/pyhf/parameters/paramview.py +++ b/src/pyhf/parameters/paramview.py @@ -9,14 +9,7 @@ def _tensorviewer_from_parmap(par_map, batch_size): names, slices, _ = list( zip( *sorted( - [ - ( - k, - v['slice'], - v['slice'].start, - ) - for k, v in par_map.items() - ], + [(k, v['slice'], v['slice'].start) for k, v in par_map.items()], key=lambda x: x[2], ) ) @@ -24,11 +17,7 @@ def _tensorviewer_from_parmap(par_map, batch_size): return _tensorviewer_from_slices(slices, names, batch_size) -def extract_index_access( - baseviewer, - subviewer, - indices, -): +def extract_index_access(baseviewer, subviewer, indices): tensorlib, _ = get_backend() index_selection = [] diff --git a/src/pyhf/pdf.py b/src/pyhf/pdf.py index c5e41256e4..66ea30231d 100644 --- a/src/pyhf/pdf.py +++ b/src/pyhf/pdf.py @@ -99,12 +99,7 @@ def _paramset_requirements_from_modelspec(spec, channel_nbins): def _nominal_and_modifiers_from_spec(config, spec): default_data_makers = { - 'histosys': lambda: { - 'hi_data': [], - 'lo_data': [], - 'nom_data': [], - 'mask': [], - }, + 'histosys': lambda: {'hi_data': [], 'lo_data': [], 'nom_data': [], 'mask': []}, 'lumi': lambda: {'mask': []}, 'normsys': lambda: {'hi': [], 'lo': [], 'nom_data': [], 'mask': []}, 'normfactor': lambda: {'mask': []}, diff --git a/src/pyhf/tensor/tensorflow_backend.py b/src/pyhf/tensor/tensorflow_backend.py index ab9431a80c..e427a6a15a 100644 --- a/src/pyhf/tensor/tensorflow_backend.py +++ 
b/src/pyhf/tensor/tensorflow_backend.py @@ -460,8 +460,7 @@ def normal_cdf(self, x, mu=0.0, sigma=1): TensorFlow Tensor: The CDF """ normal = tfp.distributions.Normal( - self.astensor(mu, dtype='float'), - self.astensor(sigma, dtype='float'), + self.astensor(mu, dtype='float'), self.astensor(sigma, dtype='float') ) return normal.cdf(x) diff --git a/src/pyhf/writexml.py b/src/pyhf/writexml.py index bdb64bc14b..8c3d95072c 100644 --- a/src/pyhf/writexml.py +++ b/src/pyhf/writexml.py @@ -245,8 +245,7 @@ def writexml(spec, specdir, data_rootdir, resultprefix): Path(specdir).parent.joinpath('HistFactorySchema.dtd'), ) combination = ET.Element( - "Combination", - OutputFilePrefix=str(Path(specdir).joinpath(resultprefix)), + "Combination", OutputFilePrefix=str(Path(specdir).joinpath(resultprefix)) ) with uproot.recreate( From f1242b52dec18fed7a8b2b356b4eb4c3088799d2 Mon Sep 17 00:00:00 2001 From: Matthew Feickert Date: Sat, 29 Aug 2020 02:10:57 -0500 Subject: [PATCH 09/12] Also grab this one --- src/pyhf/cli/patchset.py | 6 +----- 1 file changed, 1 insertion(+), 5 deletions(-) diff --git a/src/pyhf/cli/patchset.py b/src/pyhf/cli/patchset.py index 89e732c998..9bcc7a7eb6 100644 --- a/src/pyhf/cli/patchset.py +++ b/src/pyhf/cli/patchset.py @@ -62,11 +62,7 @@ def extract(patchset, name, output_file, with_metadata): @cli.command() @click.argument('background-only', default='-') @click.argument('patchset', default='-') -@click.option( - '--name', - help='The name of the patch to extract.', - default=None, -) +@click.option('--name', help='The name of the patch to extract.', default=None) @click.option( '--output-file', help='The location of the output json file. If not specified, prints to screen.', From e8d34edbba7ef65219e7aaae4eb2cd148427123c Mon Sep 17 00:00:00 2001 From: Matthew Feickert Date: Sat, 29 Aug 2020 02:26:19 -0500 Subject: [PATCH 10/12] Fix issues introduced by trailing commas for tests --- tests/test_backend_consistency.py | 6 +----- tests/test_combined_modifiers.py | 35 +++++++------------------------ tests/test_optim.py | 13 ++---------- tests/test_patchset.py | 5 +---- tests/test_pdf.py | 5 +---- tests/test_schema.py | 5 +---- tests/test_scripts.py | 12 +++-------- 7 files changed, 16 insertions(+), 65 deletions(-) diff --git a/tests/test_backend_consistency.py b/tests/test_backend_consistency.py index 654c591d5e..61f9b8579c 100644 --- a/tests/test_backend_consistency.py +++ b/tests/test_backend_consistency.py @@ -115,11 +115,7 @@ def test_hypotest_qmu_tilde( pyhf.set_backend(backend) qmu_tilde = pyhf.infer.test_statistics.qmu_tilde( - 1.0, - data, - pdf, - pdf.config.suggested_init(), - pdf.config.suggested_bounds(), + 1.0, data, pdf, pdf.config.suggested_init(), pdf.config.suggested_bounds() ) test_statistic.append(qmu_tilde) diff --git a/tests/test_combined_modifiers.py b/tests/test_combined_modifiers.py index dadc3109e0..af53bcc01d 100644 --- a/tests/test_combined_modifiers.py +++ b/tests/test_combined_modifiers.py @@ -396,10 +396,7 @@ def test_shapesys(backend): par_map={ 'dummy1': { 'paramset': paramset( - n_parameters=1, - inits=[0], - bounds=[[0, 10]], - fixed=False, + n_parameters=1, inits=[0], bounds=[[0, 10]], fixed=False ), 'slice': slice(0, 1), }, @@ -427,10 +424,7 @@ def test_shapesys(backend): }, 'dummy2': { 'paramset': paramset( - n_parameters=1, - inits=[0], - bounds=[[0, 10]], - fixed=False, + n_parameters=1, inits=[0], bounds=[[0, 10]], fixed=False ), 'slice': slice(4, 5), }, @@ -501,19 +495,13 @@ def test_normfactor(backend): par_map={ 'mu1': { 'paramset': 
unconstrained( - n_parameters=1, - inits=[0], - bounds=[[0, 10]], - fixed=False, + n_parameters=1, inits=[0], bounds=[[0, 10]], fixed=False ), 'slice': slice(0, 1), }, 'mu2': { 'paramset': unconstrained( - n_parameters=1, - inits=[0], - bounds=[[0, 10]], - fixed=False, + n_parameters=1, inits=[0], bounds=[[0, 10]], fixed=False ), 'slice': slice(1, 2), }, @@ -587,10 +575,7 @@ def test_shapesys_zero(backend): par_map={ 'SigXsecOverSM': { 'paramset': paramset( - n_parameters=1, - inits=[0], - bounds=[[0, 10]], - fixed=False, + n_parameters=1, inits=[0], bounds=[[0, 10]], fixed=False ), 'slice': slice(0, 1), }, @@ -684,19 +669,13 @@ def test_shapefactor(backend): par_map={ 'shapefac1': { 'paramset': unconstrained( - n_parameters=1, - inits=[0], - bounds=[[0, 10]], - fixed=False, + n_parameters=1, inits=[0], bounds=[[0, 10]], fixed=False ), 'slice': slice(0, 1), }, 'shapefac2': { 'paramset': unconstrained( - n_parameters=2, - inits=[0, 0], - bounds=[[0, 10], [0, 10]], - fixed=False, + n_parameters=2, inits=[0, 0], bounds=[[0, 10], [0, 10]], fixed=False ), 'slice': slice(1, 3), }, diff --git a/tests/test_optim.py b/tests/test_optim.py index f0e0464b8c..20da421bf5 100644 --- a/tests/test_optim.py +++ b/tests/test_optim.py @@ -27,11 +27,7 @@ def rosen(x): @pytest.mark.parametrize('do_stitch', [False, True], ids=['no_stitch', 'do_stitch']) -@pytest.mark.parametrize( - 'precision', - ['32b', '64b'], - ids=['32b', '64b'], -) +@pytest.mark.parametrize('precision', ['32b', '64b'], ids=['32b', '64b']) @pytest.mark.parametrize( 'tensorlib', [ @@ -143,12 +139,7 @@ def test_optimizer_mixin_extra_kwargs(optimizer): @pytest.mark.parametrize( 'backend,backend_new', itertools.permutations( - [ - ('numpy', False), - ('pytorch', True), - ('tensorflow', True), - ('jax', True), - ], + [('numpy', False), ('pytorch', True), ('tensorflow', True), ('jax', True)], 2, ), ids=lambda pair: f'{pair[0]}', diff --git a/tests/test_patchset.py b/tests/test_patchset.py index ff21b2d9f4..d7bfbcf468 100644 --- a/tests/test_patchset.py +++ b/tests/test_patchset.py @@ -25,10 +25,7 @@ def patch(): @pytest.mark.parametrize( 'patchset_file', - [ - 'patchset_bad_empty_patches.json', - 'patchset_bad_no_version.json', - ], + ['patchset_bad_empty_patches.json', 'patchset_bad_no_version.json'], ) def test_patchset_invalid_spec(datadir, patchset_file): patchsetspec = json.load(open(datadir.join(patchset_file))) diff --git a/tests/test_pdf.py b/tests/test_pdf.py index d74f62af79..3b5307571b 100644 --- a/tests/test_pdf.py +++ b/tests/test_pdf.py @@ -753,10 +753,7 @@ def test_sample_wrong_bins(): [ ( None, - { - 'normsys': {'interpcode': 'code4'}, - 'histosys': {'interpcode': 'code4p'}, - }, + {'normsys': {'interpcode': 'code4'}, 'histosys': {'interpcode': 'code4p'}}, ) ], ) diff --git a/tests/test_schema.py b/tests/test_schema.py index 7f09a30daf..7757309a30 100644 --- a/tests/test_schema.py +++ b/tests/test_schema.py @@ -419,10 +419,7 @@ def test_jsonpatch_fail(patch): pyhf.utils.validate([patch], 'jsonpatch.json') -@pytest.mark.parametrize( - 'patchset_file', - ['patchset_good.json'], -) +@pytest.mark.parametrize('patchset_file', ['patchset_good.json']) def test_patchset(datadir, patchset_file): patchset = json.load(open(datadir.join(patchset_file))) pyhf.utils.validate(patchset, 'patchset.json') diff --git a/tests/test_scripts.py b/tests/test_scripts.py index b6ef1e8745..ce74887d1b 100644 --- a/tests/test_scripts.py +++ b/tests/test_scripts.py @@ -96,10 +96,7 @@ def test_import_prepHistFactory_and_cls(tmpdir, script_runner): assert 
'CLs_exp' in d -@pytest.mark.parametrize( - "backend", - ["numpy", "tensorflow", "pytorch", "jax"], -) +@pytest.mark.parametrize("backend", ["numpy", "tensorflow", "pytorch", "jax"]) def test_cls_backend_option(tmpdir, script_runner, backend): temp = tmpdir.join("parsed_output.json") command = 'pyhf xml2json validation/xmlimport_input/config/example.xml --basedir validation/xmlimport_input/ --output-file {0:s}'.format( @@ -241,8 +238,7 @@ def test_testpoi(tmpdir, script_runner): 'optimizer', ['scipy', 'minuit', 'scipy_optimizer', 'minuit_optimizer'] ) @pytest.mark.parametrize( - 'opts,success', - [(['maxiter=1000'], True), (['maxiter=10'], False)], + 'opts,success', [(['maxiter=1000'], True), (['maxiter=10'], False)] ) def test_cls_optimizer(tmpdir, script_runner, optimizer, opts, success): temp = tmpdir.join("parsed_output.json") @@ -309,9 +305,7 @@ def test_prune(tmpdir, script_runner): ret = script_runner.run(*shlex.split(command)) command = ( - 'pyhf prune -m staterror_channel1 --measurement GammaExample {0:s}'.format( - temp.strpath - ) + f"pyhf prune -m staterror_channel1 --measurement GammaExample {temp.strpath:s}" ) ret = script_runner.run(*shlex.split(command)) assert ret.success From 8e823e199eb57da05c99ab891473b0cb112c1ea5 Mon Sep 17 00:00:00 2001 From: Matthew Feickert Date: Sat, 29 Aug 2020 02:45:58 -0500 Subject: [PATCH 11/12] Also get tensorviewer --- tests/test_tensorviewer.py | 18 +++--------------- 1 file changed, 3 insertions(+), 15 deletions(-) diff --git a/tests/test_tensorviewer.py b/tests/test_tensorviewer.py index 1c1dcc1e64..1acc80af71 100644 --- a/tests/test_tensorviewer.py +++ b/tests/test_tensorviewer.py @@ -4,11 +4,7 @@ def test_tensorviewer(backend): tb, _ = backend tv = _TensorViewer( - [ - tb.astensor([0, 4, 5]), - tb.astensor([1, 2, 3]), - tb.astensor([6]), - ], + [tb.astensor([0, 4, 5]), tb.astensor([1, 2, 3]), tb.astensor([6])], names=['zzz', 'aaa', 'x'], ) @@ -33,19 +29,11 @@ def test_tensorviewer(backend): assert a == [[0, 40, 50], [10, 20, 30], [60]] subviewer = _TensorViewer( - [ - tb.astensor([0]), - tb.astensor([1, 2, 3]), - ], - names=['x', 'aaa'], + [tb.astensor([0]), tb.astensor([1, 2, 3])], names=['x', 'aaa'] ) assert tb.tolist(subviewer.stitch(tv.split(data, ['x', 'aaa']))) == [60, 10, 20, 30] subviewer = _TensorViewer( - [ - tb.astensor([0, 1, 2]), - tb.astensor([3]), - ], - names=['aaa', 'x'], + [tb.astensor([0, 1, 2]), tb.astensor([3])], names=['aaa', 'x'] ) assert tb.tolist(subviewer.stitch(tv.split(data, ['aaa', 'x']))) == [10, 20, 30, 60] From 0b6afc0af487dc26197b6564f0de01a6522ae531 Mon Sep 17 00:00:00 2001 From: Matthew Feickert Date: Sat, 29 Aug 2020 02:52:08 -0500 Subject: [PATCH 12/12] Get missed test_optim --- tests/test_optim.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/tests/test_optim.py b/tests/test_optim.py index 20da421bf5..7d0cc6b1ac 100644 --- a/tests/test_optim.py +++ b/tests/test_optim.py @@ -139,8 +139,7 @@ def test_optimizer_mixin_extra_kwargs(optimizer): @pytest.mark.parametrize( 'backend,backend_new', itertools.permutations( - [('numpy', False), ('pytorch', True), ('tensorflow', True), ('jax', True)], - 2, + [('numpy', False), ('pytorch', True), ('tensorflow', True), ('jax', True)], 2 ), ids=lambda pair: f'{pair[0]}', )
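
A note on why the series ends with the "Fix issues introduced by trailing commas" commits (patches 08-12): the Black release pinned in patch 06 (rev 20.8b1) introduced the "magic trailing comma" — a call or literal whose last argument is followed by a comma is kept exploded, one argument per line, even when it would fit on a single line. That is why patches 02 and 03 blow up so many previously one-line calls, and why patches 08-12 simply delete the trailing commas so Black can collapse those calls again. Below is a minimal, self-contained sketch of that behaviour; the json.dumps call mirrors the one touched in src/pyhf/cli/spec.py, and the digests value is a stand-in for illustration only, not taken from pyhf.

    import json

    digests = {"sha256": "..."}  # stand-in value for illustration

    # With a trailing comma after the last argument, Black >= 20.8 keeps the
    # call exploded onto one line per argument, even though it fits on one line:
    output = json.dumps(
        digests,
        indent=4,
        sort_keys=True,
    )

    # Removing the trailing comma lets Black collapse the call again, which is
    # exactly what the "Fix issues introduced by trailing commas" commits do:
    output = json.dumps(digests, indent=4, sort_keys=True)

The maintenance loop documented by patch 07 then amounts to running pre-commit install once per clone and pre-commit autoupdate whenever the pinned rev should move to a newer Black tag.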