style: Update to Black v20.8 formatting #1048

Merged (12 commits, Aug 31, 2020)
2 changes: 1 addition & 1 deletion .pre-commit-config.yaml
@@ -1,6 +1,6 @@
 repos:
 - repo: https://github.com/psf/black
-  rev: stable
+  rev: 20.8b1
   hooks:
   - id: black
     language_version: python3
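
With the hook pinned to the tagged release ``20.8b1`` instead of the moving ``stable`` ref, the new formatting can be reproduced locally in one pass. A minimal sketch, assuming the hook id ``black`` from the config above:

    pre-commit autoupdate             # refresh pinned hook revisions in .pre-commit-config.yaml
    pre-commit run black --all-files  # apply the updated hook to every tracked file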
9 changes: 9 additions & 0 deletions docs/development.rst
@@ -19,6 +19,15 @@ Then setup the Git pre-commit hook for `Black <https://github.com/psf/black>`__

     pre-commit install
 
+as the ``rev`` gets updated through time to track changes of different hooks,
+simply run
+
+.. code-block:: console
+
+    pre-commit autoupdate
+
+to have pre-commit install the new version.
+
 Testing
 -------

8 changes: 2 additions & 6 deletions src/pyhf/cli/patchset.py
@@ -18,9 +18,7 @@ def cli():

 @cli.command()
 @click.argument('patchset', default='-')
-@click.option(
-    '--name', help='The name of the patch to extract.', default=None,
-)
+@click.option('--name', help='The name of the patch to extract.', default=None)
 @click.option(
     '--output-file',
     help='The location of the output json file. If not specified, prints to screen.',
@@ -64,9 +62,7 @@ def extract(patchset, name, output_file, with_metadata):
 @cli.command()
 @click.argument('background-only', default='-')
 @click.argument('patchset', default='-')
-@click.option(
-    '--name', help='The name of the patch to extract.', default=None,
-)
+@click.option('--name', help='The name of the patch to extract.', default=None)
 @click.option(
     '--output-file',
     help='The location of the output json file. If not specified, prints to screen.',
2 changes: 1 addition & 1 deletion src/pyhf/cli/spec.py
@@ -346,7 +346,7 @@ def digest(workspace, algorithm, output_json):
     }
 
     if output_json:
-        output = json.dumps(digests, indent=4, sort_keys=True,)
+        output = json.dumps(digests, indent=4, sort_keys=True)
     else:
         output = '\n'.join(
             f"{hash_alg}:{digest}" for hash_alg, digest in digests.items()
8 changes: 4 additions & 4 deletions src/pyhf/constraints.py
@@ -82,10 +82,10 @@ def _precompute(self):
         self.access_field = tensorlib.astensor(self._access_field, dtype='int')
 
     def has_pdf(self):
-        '''
+        """
         Returns:
             flag (`bool`): Whether the model has a Gaussian Constraint
-        '''
+        """
         return bool(self.param_viewer.index_selection)
 
     def make_pdf(self, pars):
@@ -213,10 +213,10 @@ def _precompute(self):
         self.batched_factors = tensorlib.astensor(self._batched_factors)
 
     def has_pdf(self):
-        '''
+        """
         Returns:
             flag (`bool`): Whether the model has a Gaussian Constraint
-        '''
+        """
         return bool(self.param_viewer.index_selection)
 
     def make_pdf(self, pars):
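The quote flips in this file and in the modifier files that follow are Black's string normalization applied to docstrings: triple-single-quoted docstrings are rewritten to the triple-double-quoted form recommended by PEP 257, with the docstring text left untouched. A minimal sketch of the rule on a hypothetical method:

    # Before the Black 20.8 run:
    def has_pdf(self):
        '''Whether the model has a Gaussian constraint.'''
        ...

    # After: only the quote characters change.
    def has_pdf(self):
        """Whether the model has a Gaussian constraint."""
        ...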
4 changes: 2 additions & 2 deletions src/pyhf/modifiers/histosys.py
@@ -80,10 +80,10 @@ def _precompute(self):
         )
 
     def apply(self, pars):
-        '''
+        """
         Returns:
             modification tensor: Shape (n_modifiers, n_global_samples, n_alphas, n_global_bin)
-        '''
+        """
         if not self.param_viewer.index_selection:
             return

4 changes: 2 additions & 2 deletions src/pyhf/modifiers/lumi.py
@@ -57,10 +57,10 @@ def _precompute(self):
         self.lumi_default = tensorlib.ones(self.lumi_mask.shape)
 
     def apply(self, pars):
-        '''
+        """
         Returns:
             modification tensor: Shape (n_modifiers, n_global_samples, n_alphas, n_global_bin)
-        '''
+        """
         if not self.param_viewer.index_selection:
             return

4 changes: 2 additions & 2 deletions src/pyhf/modifiers/normfactor.py
@@ -58,10 +58,10 @@ def _precompute(self):
         self.normfactor_default = tensorlib.ones(self.normfactor_mask.shape)
 
     def apply(self, pars):
-        '''
+        """
         Returns:
             modification tensor: Shape (n_modifiers, n_global_samples, n_alphas, n_global_bin)
-        '''
+        """
         if not self.param_viewer.index_selection:
             return
         tensorlib, _ = get_backend()
4 changes: 2 additions & 2 deletions src/pyhf/modifiers/normsys.py
@@ -81,10 +81,10 @@ def _precompute(self):
         )
 
     def apply(self, pars):
-        '''
+        """
         Returns:
             modification tensor: Shape (n_modifiers, n_global_samples, n_alphas, n_global_bin)
-        '''
+        """
         if not self.param_viewer.index_selection:
             return

4 changes: 2 additions & 2 deletions src/pyhf/modifiers/shapefactor.py
@@ -131,10 +131,10 @@ def _precompute(self):
         self.sample_ones = tensorlib.ones(tensorlib.shape(self.shapefactor_mask)[1])
 
     def apply(self, pars):
-        '''
+        """
         Returns:
             modification tensor: Shape (n_modifiers, n_global_samples, n_alphas, n_global_bin)
-        '''
+        """
         if not self.param_viewer.index_selection:
             return

4 changes: 2 additions & 2 deletions src/pyhf/modifiers/shapesys.py
@@ -154,10 +154,10 @@ def finalize(self, pdfconfig):
             pdfconfig.param_set(pname).auxdata = default_backend.tolist(factors)
 
     def apply(self, pars):
-        '''
+        """
         Returns:
             modification tensor: Shape (n_modifiers, n_global_samples, n_alphas, n_global_bin)
-        '''
+        """
         tensorlib, _ = get_backend()
         if not self.param_viewer.index_selection:
             return
2 changes: 1 addition & 1 deletion src/pyhf/optimize/mixins.py
@@ -29,7 +29,7 @@ def __init__(self, **kwargs):
         )
 
     def _internal_minimize(
-        self, func, x0, do_grad=False, bounds=None, fixed_vals=None, options={},
+        self, func, x0, do_grad=False, bounds=None, fixed_vals=None, options={}
     ):
 
         minimizer = self._get_minimizer(
6 changes: 2 additions & 4 deletions src/pyhf/parameters/paramview.py
@@ -9,17 +9,15 @@ def _tensorviewer_from_parmap(par_map, batch_size):
     names, slices, _ = list(
         zip(
             *sorted(
-                [(k, v['slice'], v['slice'].start,) for k, v in par_map.items()],
+                [(k, v['slice'], v['slice'].start) for k, v in par_map.items()],
[Review comment from the PR author]
Note that the trailing comma is being removed here:

(k, v['slice'], v['slice'].start)

                 key=lambda x: x[2],
             )
         )
     )
     return _tensorviewer_from_slices(slices, names, batch_size)
 
 
-def extract_index_access(
-    baseviewer, subviewer, indices,
-):
+def extract_index_access(baseviewer, subviewer, indices):
     tensorlib, _ = get_backend()
 
     index_selection = []
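The review comment above points at the behavior change behind most of this diff: Black 20.8 introduced the "magic trailing comma". When the trailing comma is removed and the call or collection fits within the line length, Black collapses it onto a single line; a trailing comma that is kept is treated as a request to leave the elements exploded, one per line. A minimal sketch of the two forms, reusing the ``json.dumps`` call from ``spec.py`` above with a hypothetical ``digests`` value:

    import json

    digests = {'sha256': 'abc123'}  # hypothetical stand-in

    # No trailing comma: Black 20.8 collapses the call onto one line.
    output = json.dumps(digests, indent=4, sort_keys=True)

    # A kept ("magic") trailing comma keeps it exploded, one argument per line.
    output = json.dumps(
        digests,
        indent=4,
        sort_keys=True,
    )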
2 changes: 1 addition & 1 deletion src/pyhf/pdf.py
@@ -99,7 +99,7 @@ def _paramset_requirements_from_modelspec(spec, channel_nbins):

 def _nominal_and_modifiers_from_spec(config, spec):
     default_data_makers = {
-        'histosys': lambda: {'hi_data': [], 'lo_data': [], 'nom_data': [], 'mask': [],},
+        'histosys': lambda: {'hi_data': [], 'lo_data': [], 'nom_data': [], 'mask': []},
         'lumi': lambda: {'mask': []},
         'normsys': lambda: {'hi': [], 'lo': [], 'nom_data': [], 'mask': []},
         'normfactor': lambda: {'mask': []},
4 changes: 2 additions & 2 deletions src/pyhf/tensor/common.py
@@ -74,7 +74,7 @@ def _tensorviewer_from_slices(target_slices, names, batch_size):


 def _tensorviewer_from_sizes(sizes, names, batch_size):
-    '''
+    """
     Creates a _Tensorviewer based on tensor sizes.
 
     the TV will be able to stitch together data with
@@ -83,7 +83,7 @@ def _tensorviewer_from_sizes(sizes, names, batch_size):
     tv.stitch([foo[slice1],foo[slice2],foo[slice3])
 
     and split them again accordingly.
-    '''
+    """
     target_slices = []
     start = 0
     for sz in sizes:
2 changes: 1 addition & 1 deletion src/pyhf/tensor/tensorflow_backend.py
@@ -460,7 +460,7 @@ def normal_cdf(self, x, mu=0.0, sigma=1):
             TensorFlow Tensor: The CDF
         """
         normal = tfp.distributions.Normal(
-            self.astensor(mu, dtype='float'), self.astensor(sigma, dtype='float'),
+            self.astensor(mu, dtype='float'), self.astensor(sigma, dtype='float')
         )
         return normal.cdf(x)

2 changes: 1 addition & 1 deletion src/pyhf/writexml.py
@@ -245,7 +245,7 @@ def writexml(spec, specdir, data_rootdir, resultprefix):
         Path(specdir).parent.joinpath('HistFactorySchema.dtd'),
     )
     combination = ET.Element(
-        "Combination", OutputFilePrefix=str(Path(specdir).joinpath(resultprefix)),
+        "Combination", OutputFilePrefix=str(Path(specdir).joinpath(resultprefix))
     )
 
     with uproot.recreate(
4 changes: 2 additions & 2 deletions tests/conftest.py
@@ -144,11 +144,11 @@ def interpcode(request):

 @pytest.fixture(scope='function')
 def datadir(tmpdir, request):
-    '''
+    """
     Fixture responsible for searching a folder with the same name of test
     module and, if available, moving all contents to a temporary directory so
     tests can use them freely.
-    '''
+    """
     # this gets the module name (e.g. /path/to/pyhf/tests/test_schema.py)
     # and then gets the directory by removing the suffix (e.g. /path/to/pyhf/tests/test_schema)
     test_dir = pathlib.Path(request.module.__file__).with_suffix('')
2 changes: 1 addition & 1 deletion tests/test_backend_consistency.py
@@ -115,7 +115,7 @@ def test_hypotest_qmu_tilde(
         pyhf.set_backend(backend)
 
         qmu_tilde = pyhf.infer.test_statistics.qmu_tilde(
-            1.0, data, pdf, pdf.config.suggested_init(), pdf.config.suggested_bounds(),
+            1.0, data, pdf, pdf.config.suggested_init(), pdf.config.suggested_bounds()
         )
         test_statistic.append(qmu_tilde)

17 changes: 7 additions & 10 deletions tests/test_combined_modifiers.py
@@ -396,7 +396,7 @@ def test_shapesys(backend):
         par_map={
             'dummy1': {
                 'paramset': paramset(
-                    n_parameters=1, inits=[0], bounds=[[0, 10]], fixed=False,
+                    n_parameters=1, inits=[0], bounds=[[0, 10]], fixed=False
                 ),
                 'slice': slice(0, 1),
             },
@@ -424,7 +424,7 @@ def test_shapesys(backend):
             },
             'dummy2': {
                 'paramset': paramset(
-                    n_parameters=1, inits=[0], bounds=[[0, 10]], fixed=False,
+                    n_parameters=1, inits=[0], bounds=[[0, 10]], fixed=False
                 ),
                 'slice': slice(4, 5),
             },
@@ -495,13 +495,13 @@ def test_normfactor(backend):
         par_map={
             'mu1': {
                 'paramset': unconstrained(
-                    n_parameters=1, inits=[0], bounds=[[0, 10]], fixed=False,
+                    n_parameters=1, inits=[0], bounds=[[0, 10]], fixed=False
                 ),
                 'slice': slice(0, 1),
             },
             'mu2': {
                 'paramset': unconstrained(
-                    n_parameters=1, inits=[0], bounds=[[0, 10]], fixed=False,
+                    n_parameters=1, inits=[0], bounds=[[0, 10]], fixed=False
                 ),
                 'slice': slice(1, 2),
             },
@@ -575,7 +575,7 @@ def test_shapesys_zero(backend):
         par_map={
             'SigXsecOverSM': {
                 'paramset': paramset(
-                    n_parameters=1, inits=[0], bounds=[[0, 10]], fixed=False,
+                    n_parameters=1, inits=[0], bounds=[[0, 10]], fixed=False
                 ),
                 'slice': slice(0, 1),
             },
@@ -669,16 +669,13 @@ def test_shapefactor(backend):
         par_map={
             'shapefac1': {
                 'paramset': unconstrained(
-                    n_parameters=1, inits=[0], bounds=[[0, 10]], fixed=False,
+                    n_parameters=1, inits=[0], bounds=[[0, 10]], fixed=False
                 ),
                 'slice': slice(0, 1),
             },
             'shapefac2': {
                 'paramset': unconstrained(
-                    n_parameters=2,
-                    inits=[0, 0],
-                    bounds=[[0, 10], [0, 10]],
-                    fixed=False,
+                    n_parameters=2, inits=[0, 0], bounds=[[0, 10], [0, 10]], fixed=False
                 ),
                 'slice': slice(1, 3),
             },
4 changes: 2 additions & 2 deletions tests/test_infer.py
@@ -120,11 +120,11 @@ def test_hypotest_return_expected_set(tmpdir, hypotest_args):


 def test_inferapi_pyhf_independence():
-    '''
+    """
     pyhf.infer should eventually be factored out so it should be
     infependent from pyhf internals. This is testing that
     a much simpler model still can run through pyhf.infer.hypotest
-    '''
+    """
     from pyhf import get_backend
 
     class _NonPyhfConfig(object):
6 changes: 2 additions & 4 deletions tests/test_optim.py
@@ -27,9 +27,7 @@ def rosen(x):


 @pytest.mark.parametrize('do_stitch', [False, True], ids=['no_stitch', 'do_stitch'])
-@pytest.mark.parametrize(
-    'precision', ['32b', '64b'], ids=['32b', '64b'],
-)
+@pytest.mark.parametrize('precision', ['32b', '64b'], ids=['32b', '64b'])
 @pytest.mark.parametrize(
     'tensorlib',
     [
@@ -141,7 +139,7 @@ def test_optimizer_mixin_extra_kwargs(optimizer):
 @pytest.mark.parametrize(
     'backend,backend_new',
     itertools.permutations(
-        [('numpy', False), ('pytorch', True), ('tensorflow', True), ('jax', True),], 2
+        [('numpy', False), ('pytorch', True), ('tensorflow', True), ('jax', True)], 2
     ),
     ids=lambda pair: f'{pair[0]}',
 )
2 changes: 1 addition & 1 deletion tests/test_patchset.py
@@ -25,7 +25,7 @@ def patch():

 @pytest.mark.parametrize(
     'patchset_file',
-    ['patchset_bad_empty_patches.json', 'patchset_bad_no_version.json',],
+    ['patchset_bad_empty_patches.json', 'patchset_bad_no_version.json'],
 )
 def test_patchset_invalid_spec(datadir, patchset_file):
     patchsetspec = json.load(open(datadir.join(patchset_file)))
2 changes: 1 addition & 1 deletion tests/test_pdf.py
@@ -753,7 +753,7 @@ def test_sample_wrong_bins():
     [
         (
             None,
-            {'normsys': {'interpcode': 'code4'}, 'histosys': {'interpcode': 'code4p'},},
+            {'normsys': {'interpcode': 'code4'}, 'histosys': {'interpcode': 'code4p'}},
         )
     ],
 )
4 changes: 1 addition & 3 deletions tests/test_schema.py
@@ -419,9 +419,7 @@ def test_jsonpatch_fail(patch):
         pyhf.utils.validate([patch], 'jsonpatch.json')
 
 
-@pytest.mark.parametrize(
-    'patchset_file', ['patchset_good.json'],
-)
+@pytest.mark.parametrize('patchset_file', ['patchset_good.json'])
 def test_patchset(datadir, patchset_file):
     patchset = json.load(open(datadir.join(patchset_file)))
     pyhf.utils.validate(patchset, 'patchset.json')