
Commit c89ce70
bump nbqa and pyupgrade
1 parent: 5255505

14 files changed: +26, -28 lines

.pre-commit-config.yaml

Lines changed: 2 additions & 2 deletions

@@ -1,13 +1,13 @@
 repos:
   - repo: https://github.com/nbQA-dev/nbQA
-    rev: 0.6.0
+    rev: 0.10.0
    hooks:
      - id: nbqa-black
        additional_dependencies: [black==20.8b1]
      - id: nbqa-isort
        additional_dependencies: [isort==5.6.4]
      - id: nbqa-pyupgrade
-        additional_dependencies: [pyupgrade==2.7.4]
+        additional_dependencies: [pyupgrade==2.19.0]
        args: [--py37-plus]
  - repo: https://github.com/MarcoGorelli/check-execution-order
    rev: v0.1.1

examples/case_studies/blackbox_external_likelihood.ipynb

Lines changed: 1 addition & 1 deletion

@@ -688,7 +688,7 @@
 "for i, samples in enumerate([samples_pymc3, samples_pymc3_2, samples_pymc3_3]):\n",
 "    # get maximum chain autocorrelartion length\n",
 "    autocorrlen = int(np.max(emcee.autocorr.integrated_time(samples, c=3)))\n",
-"    print(\"Auto-correlation length ({}): {}\".format(labels[i], autocorrlen))\n",
+"    print(f\"Auto-correlation length ({labels[i]}): {autocorrlen}\")\n",
 "\n",
 "    if i == 0:\n",
 "        fig = corner.corner(\n",

examples/case_studies/conditional-autoregressive-model.ipynb

Lines changed: 2 additions & 2 deletions

@@ -215,7 +215,7 @@
 "    dtype=theano.config.floatX,\n",
 ")\n",
 "\n",
-"maxwz = max([sum(w) for w in weights])\n",
+"maxwz = max(sum(w) for w in weights)\n",
 "N = len(weights)\n",
 "wmat = np.zeros((N, maxwz))\n",
 "amat = np.zeros((N, maxwz), dtype=\"int32\")\n",
@@ -1556,7 +1556,7 @@
 }
 ],
 "source": [
-"maxwz = max([sum(w) for w in weights])\n",
+"maxwz = max(sum(w) for w in weights)\n",
 "N = len(weights)\n",
 "wmat2 = np.zeros((N, N))\n",
 "amat2 = np.zeros((N, N), dtype=\"int32\")\n",

examples/case_studies/factor_analysis.ipynb

Lines changed: 3 additions & 3 deletions

@@ -256,7 +256,7 @@
 "source": [
 "for i in trace.posterior.chain.values:\n",
 "    samples = trace.posterior[\"W\"].sel(chain=i, observed_columns=3, latent_columns=1)\n",
-"    plt.plot(samples, label=\"Chain {}\".format(i + 1))\n",
+"    plt.plot(samples, label=f\"Chain {i + 1}\")\n",
 "    plt.axhline(samples.mean(), color=f\"C{i}\")\n",
 "plt.legend(ncol=4, loc=\"upper center\", fontsize=12, frameon=True), plt.xlabel(\"Sample\");"
 ]
@@ -417,7 +417,7 @@
 "\n",
 "for i in range(4):\n",
 "    samples = trace.posterior[\"W\"].sel(chain=i, observed_columns=3, latent_columns=1)\n",
-"    plt.plot(samples, label=\"Chain {}\".format(i + 1))\n",
+"    plt.plot(samples, label=f\"Chain {i + 1}\")\n",
 "\n",
 "plt.legend(ncol=4, loc=\"lower center\", fontsize=8), plt.xlabel(\"Sample\");"
 ]
@@ -532,7 +532,7 @@
 "    mcmc_samples = trace.posterior[\"W\"].sel(chain=i, observed_columns=3, latent_columns=1)\n",
 "    az.plot_kde(\n",
 "        mcmc_samples,\n",
-"        label=\"MCMC posterior for chain {}\".format(i + 1),\n",
+"        label=f\"MCMC posterior for chain {i + 1}\",\n",
 "        plot_kwargs={\"color\": f\"C{i}\"},\n",
 "    )\n",
 "az.plot_kde(\n",

examples/case_studies/probabilistic_matrix_factorization.ipynb

Lines changed: 1 addition & 1 deletion

@@ -1078,7 +1078,7 @@
 "    Method = baseline_methods[name]\n",
 "    method = Method(train)\n",
 "    baselines[name] = method.rmse(test)\n",
-"    print(\"{} RMSE:\\t{:.5f}\".format(method, baselines[name]))"
+"    print(f\"{method} RMSE:\\t{baselines[name]:.5f}\")"
 ]
 },
 {

examples/gaussian_processes/GP-MaunaLoa.ipynb

Lines changed: 1 addition & 1 deletion

@@ -1137,7 +1137,7 @@
 ],
 "source": [
 "# display the results, dont show transformed parameter values\n",
-"sorted([name + \":\" + str(mp[name]) for name in mp.keys() if not name.endswith(\"_\")])"
+"sorted(name + \":\" + str(mp[name]) for name in mp.keys() if not name.endswith(\"_\"))"
 ]
 },
 {

examples/generalized_linear_models/GLM-hierarchical-binominal-model.ipynb

Lines changed: 1 addition & 1 deletion

@@ -214,7 +214,7 @@
 "ix_z, ix_x = np.unravel_index(np.argmax(surface, axis=None), surface.shape)\n",
 "ax.scatter([X[0, ix_x]], [Z[ix_z, 0]], color=\"red\")\n",
 "\n",
-"text = r\"$({a},{b})$\".format(a=np.round(X[0, ix_x], 2), b=np.round(Z[ix_z, 0], 2))\n",
+"text = fr\"$({np.round(X[0, ix_x], 2)},{np.round(Z[ix_z, 0], 2)})$\"\n",
 "\n",
 "ax.annotate(\n",
 "    text,\n",

examples/generalized_linear_models/GLM-logistic.ipynb

Lines changed: 3 additions & 3 deletions

@@ -107,7 +107,7 @@
 "    with model:\n",
 "        ax = az.plot_trace(\n",
 "            traces[-retain:],\n",
-"            lines=tuple([(k, {}, v[\"mean\"]) for k, v in az.summary(traces[-retain:]).iterrows()]),\n",
+"            lines=tuple((k, {}, v[\"mean\"]) for k, v in az.summary(traces[-retain:]).iterrows()),\n",
 "        )\n",
 "\n",
 "        for i, mn in enumerate(az.summary(traces[-retain:])[\"mean\"]):\n",
@@ -130,7 +130,7 @@
 "    Create a polynomial modelspec string for patsy\n",
 "    \"\"\"\n",
 "    return (\n",
-"        \"income ~ educ + hours + age \" + \" \".join([f\"+ np.power(age,{j})\" for j in range(2, k + 1)])\n",
+"        \"income ~ educ + hours + age \" + \" \".join(f\"+ np.power(age,{j})\" for j in range(2, k + 1))\n",
 "    ).strip()"
 ]
 },
@@ -1088,7 +1088,7 @@
 "source": [
 "lb, ub = np.percentile(b, 2.5), np.percentile(b, 97.5)\n",
 "\n",
-"print(\"P({:.3f} < O.R. < {:.3f}) = 0.95\".format(np.exp(lb), np.exp(ub)))"
+"print(f\"P({np.exp(lb):.3f} < O.R. < {np.exp(ub):.3f}) = 0.95\")"
 ]
 },
 {

examples/generalized_linear_models/GLM-model-selection.ipynb

Lines changed: 2 additions & 2 deletions

@@ -192,7 +192,7 @@
 "\n",
 "    ax = az.plot_trace(\n",
 "        traces[-retain:],\n",
-"        lines=tuple([(k, {}, v[\"mean\"]) for k, v in az.summary(traces[-retain:]).iterrows()]),\n",
+"        lines=tuple((k, {}, v[\"mean\"]) for k, v in az.summary(traces[-retain:]).iterrows()),\n",
 "    )\n",
 "\n",
 "    for i, mn in enumerate(az.summary(traces[-retain:])[\"mean\"]):\n",
@@ -214,7 +214,7 @@
 "    Convenience function:\n",
 "    Create a polynomial modelspec string for patsy\n",
 "    \"\"\"\n",
-"    return (\"y ~ 1 + x \" + \" \".join([f\"+ np.power(x,{j})\" for j in range(2, k + 1)])).strip()\n",
+"    return (\"y ~ 1 + x \" + \" \".join(f\"+ np.power(x,{j})\" for j in range(2, k + 1))).strip()\n",
 "\n",
 "\n",
 "def run_models(df, upper_order=5):\n",

examples/generalized_linear_models/GLM-robust-with-outlier-detection.ipynb

Lines changed: 1 addition & 1 deletion

@@ -297,7 +297,7 @@
 "    columns=[\"id\", \"x\", \"y\", \"sigma_y\", \"sigma_x\", \"rho_xy\"],\n",
 ")\n",
 "\n",
-"dfhogg[\"id\"] = dfhogg[\"id\"].apply(lambda x: \"p{}\".format(int(x)))\n",
+"dfhogg[\"id\"] = dfhogg[\"id\"].apply(lambda x: f\"p{int(x)}\")\n",
 "dfhogg.set_index(\"id\", inplace=True)\n",
 "dfhogg.head()"
 ]

examples/ode_models/ODE_with_manual_gradients.ipynb

Lines changed: 3 additions & 3 deletions

@@ -100,7 +100,7 @@
 "        self._y0 = y0\n",
 "\n",
 "    def simulate(self, parameters, times):\n",
-"        alpha, beta, gamma, delta, Xt0, Yt0 = [x for x in parameters]\n",
+"        alpha, beta, gamma, delta, Xt0, Yt0 = (x for x in parameters)\n",
 "\n",
 "        def rhs(y, t, p):\n",
 "            X, Y = y\n",
@@ -165,7 +165,7 @@
 "        return self._simulate(parameters, times, True)\n",
 "\n",
 "    def _simulate(self, parameters, times, sensitivities):\n",
-"        alpha, beta, gamma, delta, Xt0, Yt0 = [x for x in parameters]\n",
+"        alpha, beta, gamma, delta, Xt0, Yt0 = (x for x in parameters)\n",
 "\n",
 "        def r(y, t, p):\n",
 "            X, Y = y\n",
@@ -954,7 +954,7 @@
 "        self._times = times\n",
 "\n",
 "    def _simulate(self, parameters, times):\n",
-"        a, b, c = [float(x) for x in parameters]\n",
+"        a, b, c = (float(x) for x in parameters)\n",
 "\n",
 "        def rhs(y, t, p):\n",
 "            V, R = y\n",

examples/samplers/MLDA_gravity_surveying.ipynb

Lines changed: 1 addition & 1 deletion

@@ -2369,7 +2369,7 @@
 "    y_offset = trace.get_values(\"theta\").max() + 0.25 * (\n",
 "        trace.get_values(\"theta\").max() - trace.get_values(\"theta\").min()\n",
 "    )\n",
-"    plt.text(x_offset, y_offset, \"{} Sampler\".format(method_names[i]))"
+"    plt.text(x_offset, y_offset, f\"{method_names[i]} Sampler\")"
 ]
 },
 {

examples/variational_inference/bayesian_neural_network_advi.ipynb

Lines changed: 1 addition & 1 deletion

@@ -533,7 +533,7 @@
 }
 ],
 "source": [
-"print(\"Accuracy = {}%\".format((Y_test == pred).mean() * 100))"
+"print(f\"Accuracy = {(Y_test == pred).mean() * 100}%\")"
 ]
 },
 {

examples/variational_inference/lda-advi-aevb.ipynb

Lines changed: 4 additions & 6 deletions

@@ -154,14 +154,12 @@
 "n_samples_te = tf.shape[0] - n_samples_tr\n",
 "docs_tr = tf[:n_samples_tr, :]\n",
 "docs_te = tf[n_samples_tr:, :]\n",
-"print(\"Number of docs for training = {}\".format(docs_tr.shape[0]))\n",
-"print(\"Number of docs for test = {}\".format(docs_te.shape[0]))\n",
+"print(f\"Number of docs for training = {docs_tr.shape[0]}\")\n",
+"print(f\"Number of docs for test = {docs_te.shape[0]}\")\n",
 "\n",
 "n_tokens = np.sum(docs_tr[docs_tr.nonzero()])\n",
 "print(f\"Number of tokens in training set = {n_tokens}\")\n",
-"print(\n",
-"    \"Sparsity = {}\".format(len(docs_tr.nonzero()[0]) / float(docs_tr.shape[0] * docs_tr.shape[1]))\n",
-")"
+"print(f\"Sparsity = {len(docs_tr.nonzero()[0]) / float(docs_tr.shape[0] * docs_tr.shape[1])}\")"
 ]
 },
 {
@@ -546,7 +544,7 @@
 "    for i in range(len(beta)):\n",
 "        print(\n",
 "            (\"Topic #%d: \" % i)\n",
-"            + \" \".join([feature_names[j] for j in beta[i].argsort()[: -n_top_words - 1 : -1]])\n",
+"            + \" \".join(feature_names[j] for j in beta[i].argsort()[: -n_top_words - 1 : -1])\n",
 "        )\n",
 "\n",
 "\n",
