
Commit e461eea

twiecki authored and michaelosthege committed
⬆️ UPGRADE: Autoupdate pre-commit config
1 parent: a90457a

24 files changed: +56 -70 lines

.pre-commit-config.yaml

+4 -4

@@ -1,7 +1,7 @@
 exclude: ^(docs/logos|pymc3/tests/data)/
 repos:
 - repo: https://github.com/pre-commit/pre-commit-hooks
-  rev: v3.4.0
+  rev: v4.0.1
   hooks:
   - id: check-merge-conflict
   - id: check-toml
@@ -19,16 +19,16 @@ repos:
   - id: isort
     name: isort
 - repo: https://github.com/asottile/pyupgrade
-  rev: v2.11.0
+  rev: v2.19.4
   hooks:
   - id: pyupgrade
     args: [--py37-plus]
 - repo: https://github.com/psf/black
-  rev: 20.8b1
+  rev: 21.6b0
   hooks:
   - id: black
 - repo: https://github.com/PyCQA/pylint
-  rev: pylint-2.7.4
+  rev: v2.8.3
   hooks:
   - id: pylint
     args: [--rcfile=.pylintrc]

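The four rev bumps above are exactly what pre-commit autoupdate writes: it looks up the newest tag of each hook repository and rewrites the rev field in place. The source changes in the rest of the commit keep the code clean under the newer hooks: pyupgrade converts remaining str.format() calls to f-strings, black normalizes docstring whitespace, and the list-comprehension-to-generator changes satisfy pylint's consider-using-generator / use-a-generator checks. A minimal sketch of the two dominant rewrite families, in plain Python with made-up values rather than code from the diff:

    # str.format() with placeholders becomes an f-string (pyupgrade, --py37-plus)
    name, n = "mu", 3
    old = "Wrong shape for {}: expected {}".format(name, n)
    new = f"Wrong shape for {name}: expected {n}"
    assert old == new

    # A list built only to be consumed by sum() becomes a generator expression.
    vals = [1, 2, 3]
    assert sum([v * v for v in vals]) == sum(v * v for v in vals)
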
docs/source/sphinxext/gallery_generator.py

+1 -1

@@ -187,7 +187,7 @@ def build_gallery(srcdir, gallery):
     with open(table_of_contents_file) as toc:
         table_of_contents = toc.read()

-    js_contents = "Gallery.examples = {}\n{}".format(json.dumps(data), table_of_contents)
+    js_contents = f"Gallery.examples = {json.dumps(data)}\n{table_of_contents}"

     with open(js_file, "w") as js:
         js.write(js_contents)

pymc3/aesaraf.py

+1 -1

@@ -543,7 +543,7 @@ def grad(self, inp, grads):
         return grads

     def c_code(self, node, name, inp, out, sub):
-        return "{z} = {x};".format(x=inp[0], z=out[0])
+        return f"{out[0]} = {inp[0]};"

     def __eq__(self, other):
         return isinstance(self, type(other))

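The c_code rewrite above also inlines the keyword placeholders {z} and {x} as direct index expressions. A quick check that the two spellings emit the same C snippet, using illustrative stand-ins for inp and out:

    inp = ["x_var"]  # stand-in for the node's input name
    out = ["z_var"]  # stand-in for the node's output name
    old = "{z} = {x};".format(x=inp[0], z=out[0])
    new = f"{out[0]} = {inp[0]};"
    assert old == new == "z_var = x_var;"
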
pymc3/backends/base.py

+1 -1

@@ -365,7 +365,7 @@ def __getattr__(self, name):
             return self.get_values(name)
         if name in self.stat_names:
             return self.get_sampler_stats(name)
-        raise AttributeError("'{}' object has no attribute '{}'".format(type(self).__name__, name))
+        raise AttributeError(f"'{type(self).__name__}' object has no attribute '{name}'")

     def __len__(self):
         chain = self.chains[-1]

pymc3/distributions/bart.py

+1 -1

@@ -164,7 +164,7 @@ def get_residuals_loo(self, tree):
         return R_j

     def draw_leaf_value(self, idx_data_points):
-        """ Draw the residual mean."""
+        """Draw the residual mean."""
         R_j = self.get_residuals()[idx_data_points]
         draw = self.mean(R_j)
         return draw

pymc3/distributions/mixture.py

+1 -1

@@ -187,7 +187,7 @@ def comp_dists(self, comp_dists):
         # All component distributions must broadcast with each other
         try:
             self._broadcast_shape = np.broadcast(
-                *[np.empty(shape) for shape in self._comp_dist_shapes]
+                *(np.empty(shape) for shape in self._comp_dist_shapes)
             ).shape
         except Exception:
             raise TypeError(

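np.broadcast takes its arrays as separate positional arguments, so a generator expression can be star-unpacked into it just like a list comprehension; the generator version merely skips materializing the intermediate list. A self-contained illustration with assumed shapes:

    import numpy as np

    shapes = [(2, 1), (1, 3)]
    # Star-unpacking passes each dummy array as its own argument.
    assert np.broadcast(*(np.empty(shape) for shape in shapes)).shape == (2, 3)
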
pymc3/distributions/shape_utils.py

+3 -3

@@ -426,7 +426,7 @@ def broadcast_dist_samples_to(to_shape, samples, size=None):


 def convert_dims(dims: Dims) -> Optional[WeakDims]:
-    """ Process a user-provided dims variable into None or a valid dims tuple. """
+    """Process a user-provided dims variable into None or a valid dims tuple."""
     if dims is None:
         return None

@@ -444,7 +444,7 @@ def convert_dims(dims: Dims) -> Optional[WeakDims]:


 def convert_shape(shape: Shape) -> Optional[WeakShape]:
-    """ Process a user-provided shape variable into None or a valid shape object. """
+    """Process a user-provided shape variable into None or a valid shape object."""
     if shape is None:
         return None

@@ -466,7 +466,7 @@ def convert_shape(shape: Shape) -> Optional[WeakShape]:


 def convert_size(size: Size) -> Optional[StrongSize]:
-    """ Process a user-provided size variable into None or a valid size object. """
+    """Process a user-provided size variable into None or a valid size object."""
     if size is None:
         return None

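These docstring edits (and the similar ones in bart.py, exceptions.py, and gp/util.py) appear to come from black's docstring normalization: the padding just inside the triple quotes is dropped, so __doc__ changes only by its surrounding whitespace. A small check:

    def before():
        """ Process a user-provided dims variable. """

    def after():
        """Process a user-provided dims variable."""

    # Only the whitespace stored in __doc__ differs.
    assert before.__doc__.strip() == after.__doc__
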
pymc3/distributions/timeseries.py

+2 -2

@@ -158,15 +158,15 @@ def logp(self, value):
         """
         if self.constant:
             x = at.add(
-                *[self.rho[i + 1] * value[self.p - (i + 1) : -(i + 1)] for i in range(self.p)]
+                *(self.rho[i + 1] * value[self.p - (i + 1) : -(i + 1)] for i in range(self.p))
             )
             eps = value[self.p :] - self.rho[0] - x
         else:
             if self.p == 1:
                 x = self.rho * value[:-1]
             else:
                 x = at.add(
-                    *[self.rho[i] * value[self.p - (i + 1) : -(i + 1)] for i in range(self.p)]
+                    *(self.rho[i] * value[self.p - (i + 1) : -(i + 1)] for i in range(self.p))
                 )
             eps = value[self.p :] - x

pymc3/exceptions.py

+1 -1

@@ -43,7 +43,7 @@ class ImputationWarning(UserWarning):


 class ShapeWarning(UserWarning):
-    """ Something that could lead to shape problems down the line. """
+    """Something that could lead to shape problems down the line."""

     pass

pymc3/gp/cov.py

+1 -1

@@ -158,7 +158,7 @@ def __array_wrap__(self, result):
 class Combination(Covariance):
     def __init__(self, factor_list):
         input_dim = max(
-            [factor.input_dim for factor in factor_list if isinstance(factor, Covariance)]
+            factor.input_dim for factor in factor_list if isinstance(factor, Covariance)
         )
         super().__init__(input_dim=input_dim)
         self.factor_list = []

pymc3/gp/util.py

+2 -2

@@ -36,7 +36,7 @@ def infer_shape(X, n_points=None):


 def stabilize(K):
-    """ adds small diagonal to a covariance matrix """
+    """adds small diagonal to a covariance matrix"""
     return K + 1e-6 * at.identity_like(K)


@@ -62,7 +62,7 @@ def kmeans_inducing_points(n_inducing, X):


 def conditioned_vars(varnames):
-    """ Decorator for validating attrs that are conditioned on. """
+    """Decorator for validating attrs that are conditioned on."""

     def gp_wrapper(cls):
         def make_getter(name):

pymc3/model_graph.py

+1 -1

@@ -84,7 +84,7 @@ def _filter_parents(self, var, parents) -> Set[VarName]:
                 if self.transform_map[p] != var.name:
                     keep.add(self.transform_map[p])
             else:
-                raise AssertionError("Do not know what to do with {}".format(get_var_name(p)))
+                raise AssertionError(f"Do not know what to do with {get_var_name(p)}")
         return keep

     def get_parents(self, var: TensorVariable) -> Set[VarName]:

pymc3/ode/ode.py

+2 -2

@@ -137,7 +137,7 @@ def _simulate(self, y0, theta):

     def make_node(self, y0, theta):
         inputs = (y0, theta)
-        _log.debug("make_node for inputs {}".format(hash(inputs)))
+        _log.debug(f"make_node for inputs {hash(inputs)}")
         states = self._otypes[0]()
         sens = self._otypes[1]()

@@ -221,7 +221,7 @@ def infer_shape(self, fgraph, node, input_shapes):
         return output_shapes

     def grad(self, inputs, output_grads):
-        _log.debug("grad w.r.t. inputs {}".format(hash(tuple(inputs))))
+        _log.debug(f"grad w.r.t. inputs {hash(tuple(inputs))}")

         # fetch symbolic sensitivity output node from cache
         ihash = hash(tuple(inputs))

pymc3/sampling.py

+1 -1

@@ -2010,7 +2010,7 @@ def sample_prior_predictive(
         inputs, vars_to_sample, allow_input_downcast=True, accept_inplace=True, mode=mode
     )

-    values = zip(*[sampler_fn() for i in range(samples)])
+    values = zip(*(sampler_fn() for i in range(samples)))

     data = {k: np.stack(v) for k, v in zip(names, values)}
     if data is None:

pymc3/sampling_jax.py

+4 -4

@@ -5,7 +5,7 @@

 xla_flags = os.getenv("XLA_FLAGS", "").lstrip("--")
 xla_flags = re.sub(r"xla_force_host_platform_device_count=.+\s", "", xla_flags).split()
-os.environ["XLA_FLAGS"] = " ".join(["--xla_force_host_platform_device_count={}".format(100)])
+os.environ["XLA_FLAGS"] = " ".join([f"--xla_force_host_platform_device_count={100}"])

 import aesara.tensor as at
 import arviz as az
@@ -47,8 +47,8 @@ def __init__(
         self.seed = seed

         self.inputs, self.outputs = clone(inputs, outputs, copy_inputs=False)
-        self.inputs_type = tuple([input.type for input in inputs])
-        self.outputs_type = tuple([output.type for output in outputs])
+        self.inputs_type = tuple(input.type for input in inputs)
+        self.outputs_type = tuple(output.type for output in outputs)
         self.nin = len(inputs)
         self.nout = len(outputs)
         self.nshared = len([v for v in inputs if isinstance(v, SharedVariable)])
@@ -174,7 +174,7 @@ def sample_numpyro_nuts(
     init_state_batched_at = [at.as_tensor(v) for v in init_state_batched]

     nuts_inputs = sorted(
-        [v for v in graph_inputs([model.logpt]) if not isinstance(v, Constant)],
+        (v for v in graph_inputs([model.logpt]) if not isinstance(v, Constant)),
         key=lambda x: isinstance(x, SharedVariable),
     )
     map_seed = jax.random.split(seed, chains)

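The first hunk shows the limits of a mechanical rewrite: pyupgrade turns "...{}".format(100) into an f-string that interpolates the literal 100, and the single-element " ".join([...]) survives untouched. Both are equivalent to writing the flag as one plain string:

    flag = " ".join([f"--xla_force_host_platform_device_count={100}"])
    # Interpolating a literal equals writing it inline, and joining a
    # one-element list returns that element unchanged.
    assert flag == "--xla_force_host_platform_device_count=100"
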
pymc3/step_methods/hmc/quadpotential.py

+3 -9

@@ -161,13 +161,9 @@ def __init__(
         if initial_mean.ndim != 1:
             raise ValueError("Initial mean must be one-dimensional.")
         if initial_diag is not None and len(initial_diag) != n:
-            raise ValueError(
-                "Wrong shape for initial_diag: expected {} got {}".format(n, len(initial_diag))
-            )
+            raise ValueError(f"Wrong shape for initial_diag: expected {n} got {len(initial_diag)}")
         if len(initial_mean) != n:
-            raise ValueError(
-                "Wrong shape for initial_mean: expected {} got {}".format(n, len(initial_mean))
-            )
+            raise ValueError(f"Wrong shape for initial_mean: expected {n} got {len(initial_mean)}")

         if dtype is None:
             dtype = aesara.config.floatX
@@ -512,9 +508,7 @@ def __init__(
         if initial_cov is not None and initial_cov.shape != (n, n):
             raise ValueError(f"Wrong shape for initial_cov: expected {n} got {initial_cov.shape}")
         if len(initial_mean) != n:
-            raise ValueError(
-                "Wrong shape for initial_mean: expected {} got {}".format(n, len(initial_mean))
-            )
+            raise ValueError(f"Wrong shape for initial_mean: expected {n} got {len(initial_mean)}")

         if dtype is None:
             dtype = aesara.config.floatX

pymc3/step_methods/mlda.py

+6 -14

@@ -896,24 +896,16 @@ def update_error_estimate(self, accepted, skipped_logp):
             pm.set_data(
                 {
                     "mu_B": sum(
-                        [
-                            bias.get_mu()
-                            for bias in self.bias_all[
-                                : len(self.bias_all) - self.num_levels + 2
-                            ]
-                        ]
+                        bias.get_mu()
+                        for bias in self.bias_all[: len(self.bias_all) - self.num_levels + 2]
                     )
                 }
             )
             pm.set_data(
                 {
                     "Sigma_B": sum(
-                        [
-                            bias.get_sigma()
-                            for bias in self.bias_all[
-                                : len(self.bias_all) - self.num_levels + 2
-                            ]
-                        ]
+                        bias.get_sigma()
+                        for bias in self.bias_all[: len(self.bias_all) - self.num_levels + 2]
                     )
                 }
             )
@@ -992,7 +984,7 @@ def extract_Q_estimate(trace, levels):

     Q_0_raw = trace.get_sampler_stats("Q_0")
     # total number of base level samples from all iterations
-    total_base_level_samples = sum([it.shape[0] for it in Q_0_raw])
+    total_base_level_samples = sum(it.shape[0] for it in Q_0_raw)
     Q_0 = np.concatenate(Q_0_raw).reshape((1, total_base_level_samples))
     ess_Q_0 = az.ess(np.array(Q_0, np.float64))
     Q_0_var = Q_0.var() / ess_Q_0
@@ -1002,7 +994,7 @@ def extract_Q_estimate(trace, levels):
     for l in range(1, levels):
         Q_diff_raw = trace.get_sampler_stats(f"Q_{l}_{l-1}")
         # total number of samples from all iterations
-        total_level_samples = sum([it.shape[0] for it in Q_diff_raw])
+        total_level_samples = sum(it.shape[0] for it in Q_diff_raw)
         Q_diff = np.concatenate(Q_diff_raw).reshape((1, total_level_samples))
         ess_diff = az.ess(np.array(Q_diff, np.float64))
10081000
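A generator expression needs no parentheses of its own when it is the sole argument of a call, which is why sum(it.shape[0] for it in Q_0_raw) is legal as written; with any additional argument the parentheses become mandatory. A sketch with stand-in arrays:

    import numpy as np

    chunks = [np.zeros(3), np.zeros(5)]  # stand-in for Q_0_raw

    total = sum(it.shape[0] for it in chunks)        # bare genexpr: sole argument
    total2 = sum((it.shape[0] for it in chunks), 0)  # parens required with a start value
    assert total == total2 == 8
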

pymc3/tests/test_distributions.py

+11-11
Original file line numberDiff line numberDiff line change
@@ -191,7 +191,7 @@ def product(domains, n_samples=-1):
191191
names, domains = zip(*domains.items())
192192
except ValueError: # domains.items() is empty
193193
return [{}]
194-
all_vals = [zip(names, val) for val in itertools.product(*[d.vals for d in domains])]
194+
all_vals = [zip(names, val) for val in itertools.product(*(d.vals for d in domains))]
195195
if n_samples > 0 and len(all_vals) > n_samples:
196196
return (all_vals[j] for j in nr.choice(len(all_vals), n_samples, replace=False))
197197
return all_vals
@@ -294,7 +294,7 @@ def multinomial_logpdf(value, n, p):
294294

295295

296296
def dirichlet_multinomial_logpmf(value, n, a):
297-
value, n, a = [np.asarray(x) for x in [value, n, a]]
297+
value, n, a = (np.asarray(x) for x in [value, n, a])
298298
assert value.ndim == 1
299299
assert n.ndim == 0
300300
assert a.shape == value.shape
@@ -318,7 +318,7 @@ def beta_mu_sigma(value, mu, sigma):
318318

319319
class ProductDomain:
320320
def __init__(self, domains):
321-
self.vals = list(itertools.product(*[d.vals for d in domains]))
321+
self.vals = list(itertools.product(*(d.vals for d in domains)))
322322
self.shape = (len(domains),) + domains[0].shape
323323
self.lower = [d.lower for d in domains]
324324
self.upper = [d.upper for d in domains]
@@ -2187,7 +2187,7 @@ def test_multinomial_vec(self):
21872187
)
21882188

21892189
assert_almost_equal(
2190-
sum([model_single.fastlogp({"m": val}) for val in vals]),
2190+
sum(model_single.fastlogp({"m": val}) for val in vals),
21912191
model_many.fastlogp({"m": vals}),
21922192
decimal=4,
21932193
)
@@ -2201,7 +2201,7 @@ def test_multinomial_vec_1d_n(self):
22012201
Multinomial("m", n=ns, p=p)
22022202

22032203
assert_almost_equal(
2204-
sum([multinomial_logpdf(val, n, p) for val, n in zip(vals, ns)]),
2204+
sum(multinomial_logpdf(val, n, p) for val, n in zip(vals, ns)),
22052205
model.fastlogp({"m": vals}),
22062206
decimal=4,
22072207
)
@@ -2215,7 +2215,7 @@ def test_multinomial_vec_1d_n_2d_p(self):
22152215
Multinomial("m", n=ns, p=ps)
22162216

22172217
assert_almost_equal(
2218-
sum([multinomial_logpdf(val, n, p) for val, n, p in zip(vals, ns, ps)]),
2218+
sum(multinomial_logpdf(val, n, p) for val, n, p in zip(vals, ns, ps)),
22192219
model.fastlogp({"m": vals}),
22202220
decimal=4,
22212221
)
@@ -2229,7 +2229,7 @@ def test_multinomial_vec_2d_p(self):
22292229
Multinomial("m", n=n, p=ps)
22302230

22312231
assert_almost_equal(
2232-
sum([multinomial_logpdf(val, n, p) for val, p in zip(vals, ps)]),
2232+
sum(multinomial_logpdf(val, n, p) for val, p in zip(vals, ps)),
22332233
model.fastlogp({"m": vals}),
22342234
decimal=4,
22352235
)
@@ -2309,7 +2309,7 @@ def test_dirichlet_multinomial_vec(self):
23092309
)
23102310

23112311
assert_almost_equal(
2312-
sum([model_single.fastlogp({"m": val}) for val in vals]),
2312+
sum(model_single.fastlogp({"m": val}) for val in vals),
23132313
model_many.fastlogp({"m": vals}),
23142314
decimal=4,
23152315
)
@@ -2324,7 +2324,7 @@ def test_dirichlet_multinomial_vec_1d_n(self):
23242324
DirichletMultinomial("m", n=ns, a=a, size=vals.shape)
23252325

23262326
assert_almost_equal(
2327-
sum([dirichlet_multinomial_logpmf(val, n, a) for val, n in zip(vals, ns)]),
2327+
sum(dirichlet_multinomial_logpmf(val, n, a) for val, n in zip(vals, ns)),
23282328
model.fastlogp({"m": vals}),
23292329
decimal=4,
23302330
)
@@ -2339,7 +2339,7 @@ def test_dirichlet_multinomial_vec_1d_n_2d_a(self):
23392339
DirichletMultinomial("m", n=ns, a=as_, size=vals.shape)
23402340

23412341
assert_almost_equal(
2342-
sum([dirichlet_multinomial_logpmf(val, n, a) for val, n, a in zip(vals, ns, as_)]),
2342+
sum(dirichlet_multinomial_logpmf(val, n, a) for val, n, a in zip(vals, ns, as_)),
23432343
model.fastlogp({"m": vals}),
23442344
decimal=4,
23452345
)
@@ -2354,7 +2354,7 @@ def test_dirichlet_multinomial_vec_2d_a(self):
23542354
DirichletMultinomial("m", n=n, a=as_, size=vals.shape)
23552355

23562356
assert_almost_equal(
2357-
sum([dirichlet_multinomial_logpmf(val, n, a) for val, a in zip(vals, as_)]),
2357+
sum(dirichlet_multinomial_logpmf(val, n, a) for val, a in zip(vals, as_)),
23582358
model.fastlogp({"m": vals}),
23592359
decimal=4,
23602360
)

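One subtlety in the dirichlet_multinomial_logpmf change: the rewritten line unpacks a generator expression into three targets. Tuple unpacking accepts any iterable of matching length, so this behaves exactly like unpacking the old list comprehension. A self-contained example with assumed values:

    import numpy as np

    value, n, a = (np.asarray(x) for x in [[1, 2], 3, [0.5, 0.5]])
    assert value.shape == (2,) and n.ndim == 0 and a.shape == (2,)
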