From 5d4d2b51233b55f276693076d825da696f4c9c49 Mon Sep 17 00:00:00 2001
From: Michael Osthege
Date: Sat, 10 Dec 2022 23:11:21 +0100
Subject: [PATCH 1/2] Run `pydocstyle` in pre-commit

Includes docstring formatting to reduce the number of error codes that had to be silenced for pydocstyle to be happy.

Closes #6378
---
 .pre-commit-config.yaml | 8 ++++++++
 pymc/blocking.py | 2 +-
 pymc/distributions/continuous.py | 1 -
 pymc/distributions/discrete.py | 1 -
 pymc/distributions/dist_math.py | 24 +++++++++++++-----------
 pymc/distributions/distribution.py | 3 +--
 pymc/distributions/multivariate.py | 1 -
 pymc/distributions/transforms.py | 1 -
 pymc/distributions/truncated.py | 6 ++++--
 pymc/gp/util.py | 3 +--
 pymc/logprob/abstract.py | 2 +-
 pymc/logprob/joint_logprob.py | 4 ++--
 pymc/logprob/rewriting.py | 6 +++---
 pymc/logprob/scan.py | 6 +++---
 pymc/logprob/transforms.py | 6 +++---
 pymc/logprob/utils.py | 8 ++++----
 pymc/math.py | 2 +-
 pymc/model.py | 8 +++-----
 pymc/model_graph.py | 1 +
 pymc/ode/__init__.py | 2 +-
 pymc/ode/ode.py | 4 +---
 pymc/pytensorf.py | 8 ++++----
 pymc/sampling/population.py | 4 ++--
 pymc/step_methods/hmc/hmc.py | 3 ++-
 pymc/step_methods/slicer.py | 2 +-
 pymc/util.py | 2 +-
 pymc/variational/approximations.py | 2 +-
 pymc/variational/opvi.py | 1 -
 pymc/variational/updates.py | 2 +-
 29 files changed, 63 insertions(+), 60 deletions(-)

diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
index dffc2ecb3c..27a09051f5 100644
--- a/.pre-commit-config.yaml
+++ b/.pre-commit-config.yaml
@@ -35,6 +35,14 @@ repos:
       args: [--rcfile=.pylintrc]
       files: ^pymc/
       exclude: (?x)(pymc/_version.py)
+- repo: https://github.com/PyCQA/pydocstyle
+  rev: 6.1.1
+  hooks:
+    - id: pydocstyle
+      args:
+        - --ignore=D100,D101,D102,D103,D104,D105,D107,D200,D202,D203,D204,D205,D209,D212,D213,D301,D400,D401,D403,D413,D415,D417
+      files: ^pymc/
+      exclude: ^pymc/tests/
 - repo: https://github.com/MarcoGorelli/madforhooks
   rev: 0.3.0
   hooks:
diff --git a/pymc/blocking.py b/pymc/blocking.py
index 88c71e79ec..9d5c29849a 100644
--- a/pymc/blocking.py
+++ b/pymc/blocking.py
@@ -79,7 +79,7 @@ def rmap(
         """Map 1D concatenated array to a dictionary of variables in their original spaces.
 
         Parameters
-        ==========
+        ----------
         array
             The array to map.
         start_point
diff --git a/pymc/distributions/continuous.py b/pymc/distributions/continuous.py
index 8441858470..dae67808d7 100644
--- a/pymc/distributions/continuous.py
+++ b/pymc/distributions/continuous.py
@@ -1593,7 +1593,6 @@ class LogNormal(PositiveContinuous):
 
     Examples
     --------
-
     .. code-block:: python
 
         # Example to show that we pass in only ``sigma`` or ``tau`` but not both.
diff --git a/pymc/distributions/discrete.py b/pymc/distributions/discrete.py
index 881ce2f097..7b50a9506c 100644
--- a/pymc/distributions/discrete.py
+++ b/pymc/distributions/discrete.py
@@ -1549,7 +1549,6 @@ class OrderedLogistic:
 
     Examples
     --------
-
     .. code-block:: python
 
         # Generate data for a simple 1 dimensional example problem
diff --git a/pymc/distributions/dist_math.py b/pymc/distributions/dist_math.py
index a17cd588d3..a7fa8103e4 100644
--- a/pymc/distributions/dist_math.py
+++ b/pymc/distributions/dist_math.py
@@ -355,17 +355,19 @@ def grad(self, inp, grads):
 
 def random_choice(p, size):
     """Return draws from categorical probability functions
 
-    Args:
-        p: array
-            Probability of each class. If p.ndim > 1, the last axis is
-            interpreted as the probability of each class, and numpy.random.choice
-            is iterated for every other axis element.
-        size: int or tuple
-            Shape of the desired output array. If p is multidimensional, size
-            should broadcast with p.shape[:-1].
-
-    Returns:
-        random sample: array
+    Parameters
+    ----------
+    p : array
+        Probability of each class. If p.ndim > 1, the last axis is
+        interpreted as the probability of each class, and numpy.random.choice
+        is iterated for every other axis element.
+    size : int or tuple
+        Shape of the desired output array. If p is multidimensional, size
+        should broadcast with p.shape[:-1].
+
+    Returns
+    -------
+    random_sample : array
     """
     k = p.shape[-1]
diff --git a/pymc/distributions/distribution.py b/pymc/distributions/distribution.py
index c96316a8cd..f59f5b633c 100644
--- a/pymc/distributions/distribution.py
+++ b/pymc/distributions/distribution.py
@@ -413,7 +413,7 @@ def _get_measurable_outputs_symbolic_random_variable(op, node):
 @node_rewriter([SymbolicRandomVariable])
 def inline_symbolic_random_variable(fgraph, node):
     """
-    This optimization expands the internal graph of a SymbolicRV when obtaining the logp
+    Optimization that expands the internal graph of a SymbolicRV when obtaining the logp
     graph, if the flag `inline_logprob` is True.
     """
     op = node.op
@@ -828,7 +828,6 @@ class CustomDist:
 
     Examples
     --------
-
     Create a CustomDist that wraps a black-box logp function. This variable cannot be
     used in prior or posterior predictive sampling because no random function was provided
diff --git a/pymc/distributions/multivariate.py b/pymc/distributions/multivariate.py
index 1805e83e1f..2ac9884037 100644
--- a/pymc/distributions/multivariate.py
+++ b/pymc/distributions/multivariate.py
@@ -796,7 +796,6 @@ class OrderedMultinomial:
 
     Examples
     --------
-
     .. code-block:: python
 
         # Generate data for a simple 1 dimensional example problem
diff --git a/pymc/distributions/transforms.py b/pymc/distributions/transforms.py
index 199af64720..023ff57679 100644
--- a/pymc/distributions/transforms.py
+++ b/pymc/distributions/transforms.py
@@ -240,7 +240,6 @@ class Interval(IntervalTransform):
 
     Examples
     --------
-
     Create an interval transform between -1 and +1
 
     .. code-block:: python
diff --git a/pymc/distributions/truncated.py b/pymc/distributions/truncated.py
index 3e5eec4066..e10410a0b5 100644
--- a/pymc/distributions/truncated.py
+++ b/pymc/distributions/truncated.py
@@ -30,8 +30,10 @@ class TruncatedRV(SymbolicRandomVariable):
-    """An `Op` constructed from an PyTensor graph that represents a truncated univariate
-    random variable."""
+    """
+    An `Op` constructed from an PyTensor graph
+    that represents a truncated univariate random variable.
+    """
 
     default_output = 1
     base_rv_op = None
diff --git a/pymc/gp/util.py b/pymc/gp/util.py
index 9f13df8360..0ebf728849 100644
--- a/pymc/gp/util.py
+++ b/pymc/gp/util.py
@@ -188,7 +188,7 @@ def plot_gp_dist(
 ):
     """A helper function for plotting 1D GP posteriors from trace
 
-    Parameters 
+    Parameters
     ----------
     ax: axes
         Matplotlib axes.
@@ -213,7 +213,6 @@ def plot_gp_dist(
 
     Returns
    -------
-
     ax: Matplotlib axes
     """
     import matplotlib.pyplot as plt
diff --git a/pymc/logprob/abstract.py b/pymc/logprob/abstract.py
index 070f28a95e..11794161ce 100644
--- a/pymc/logprob/abstract.py
+++ b/pymc/logprob/abstract.py
@@ -194,7 +194,7 @@ def assign_custom_measurable_outputs(
     `factorized_joint_logprob`.
 
     Parameters
-    ==========
+    ----------
     node
         The node to recreate with a new cloned `Op`.
     measurable_outputs_fn
diff --git a/pymc/logprob/joint_logprob.py b/pymc/logprob/joint_logprob.py
index fbf9d6fc5f..75b6d5452a 100644
--- a/pymc/logprob/joint_logprob.py
+++ b/pymc/logprob/joint_logprob.py
@@ -95,7 +95,7 @@ def factorized_joint_logprob(
 
     Parameters
-    ==========
+    ----------
     rv_values
         A ``dict`` of variables that maps stochastic elements
         (e.g. `RandomVariable`\s) to symbolic `Variable`\s representing their
@@ -111,7 +111,7 @@ def factorized_joint_logprob(
         etc.)
 
     Returns
-    =======
+    -------
     A ``dict`` that maps each value variable to the log-probability factor derived
     from the respective `RandomVariable`.
diff --git a/pymc/logprob/rewriting.py b/pymc/logprob/rewriting.py
index 59e380dd56..9fa8a948df 100644
--- a/pymc/logprob/rewriting.py
+++ b/pymc/logprob/rewriting.py
@@ -67,7 +67,7 @@ class NoCallbackEquilibriumDB(EquilibriumDB):
-    r"""This `EquilibriumDB` doesn't hide its exceptions.
+    r"""An `EquilibriumDB` that doesn't hide its exceptions.
 
     By setting `failure_callback` to ``None`` in the `EquilibriumGraphRewriter`\s
     that `EquilibriumDB` generates, we're able to directly emit the desired
@@ -102,7 +102,7 @@ class PreserveRVMappings(Feature):
     def __init__(self, rv_values: Dict[TensorVariable, TensorVariable]):
         """
         Parameters
-        ==========
+        ----------
         rv_values
             Mappings between random variables and their value variables.
             The keys of this map are what this `Feature` keeps updated.
@@ -130,7 +130,7 @@ def update_rv_maps(
         original value variables.
 
         Parameters
-        ==========
+        ----------
         old_rv
             The random variable whose mappings will be updated.
         new_value
diff --git a/pymc/logprob/scan.py b/pymc/logprob/scan.py
index 32669db412..31bd397556 100644
--- a/pymc/logprob/scan.py
+++ b/pymc/logprob/scan.py
@@ -79,7 +79,7 @@ def convert_outer_out_to_in(
     r"""Convert outer-graph outputs into outer-graph inputs.
 
     Parameters
-    ==========
+    ----------
     input_scan_args:
         The source `Scan` arguments.
     outer_out_vars:
@@ -253,7 +253,7 @@ def get_random_outer_outputs(
     """Get the `MeasurableVariable` outputs of a `Scan` (well, its `ScanArgs`).
 
     Returns
-    =======
+    -------
     A tuple of tuples containing the index of each outer-output variable, the
     outer-output variable itself, and the inner-output variable that is an
     instance of `MeasurableVariable`.
@@ -329,7 +329,7 @@ def create_inner_out_logp(value_map: Dict[TensorVariable, TensorVariable]) -> Te
 
 @node_rewriter([Scan])
 def find_measurable_scans(fgraph, node):
-    r"""Finds `Scan`\s for which a `logprob` can be computed.
+    r"""Find `Scan`\s for which a `logprob` can be computed.
 
     This will convert said `Scan`\s into `MeasurableScan`\s. It also updates
     random variable and value variable mappings that have been specified for
diff --git a/pymc/logprob/transforms.py b/pymc/logprob/transforms.py
index 0e89635af6..8ae8b43240 100644
--- a/pymc/logprob/transforms.py
+++ b/pymc/logprob/transforms.py
@@ -295,7 +295,7 @@ def __init__(
     ):
         """
         Parameters
-        ==========
+        ----------
         values_to_transforms
             Mapping between value variables and their transformations. Each
             value variable can be assigned one of `RVTransform`, or ``None``.
@@ -514,7 +514,7 @@ def __init__(self, args_fn: Callable[..., Tuple[Optional[Variable], Optional[Var
         """
 
         Parameters
-        ==========
+        ----------
         args_fn
             Function that expects inputs of RandomVariable and returns the lower
             and upper bounds for the interval transformation. If one of these is
@@ -660,7 +660,7 @@ def _create_transformed_rv_op(
     also behaving exactly as it did before.
 
     Parameters
-    ==========
+    ----------
     rv_op
         The `RandomVariable` for which we want to construct a `TransformedRV`.
     transform
diff --git a/pymc/logprob/utils.py b/pymc/logprob/utils.py
index 2887a03182..688934de33 100644
--- a/pymc/logprob/utils.py
+++ b/pymc/logprob/utils.py
@@ -62,7 +62,7 @@ def walk_model(
     By default, these walks will not go past ``MeasurableVariable`` nodes.
 
     Parameters
-    ==========
+    ----------
     graphs
         The graphs to walk.
     walk_past_rvs
@@ -104,12 +104,12 @@ def replace_rvs_in_graphs(
     This will *not* recompute test values.
 
     Parameters
-    ==========
+    ----------
     graphs
         The graphs in which random variables are to be replaced.
 
     Returns
-    =======
+    -------
     A ``tuple`` containing the transformed graphs and a ``dict`` of the
     replacements that were made.
     """
@@ -154,7 +154,7 @@ def rvs_to_value_vars(
     This will *not* recompute test values in the resulting graphs.
 
     Parameters
-    ==========
+    ----------
     graphs
         The graphs in which to perform the replacements.
     initial_replacements
diff --git a/pymc/math.py b/pymc/math.py
index 384e385576..798220da05 100644
--- a/pymc/math.py
+++ b/pymc/math.py
@@ -196,7 +196,7 @@ def kron_matrix_op(krons, m, op):
     r"""Apply op to krons and m in a way that reproduces ``op(kronecker(*krons), m)``
 
     Parameters
-    -----------
+    ----------
     krons : list of square 2D array-like objects
         D square matrices :math:`[A_1, A_2, ..., A_D]` to be Kronecker'ed
         :math:`A = A_1 \otimes A_2 \otimes ... \otimes A_D`
diff --git a/pymc/model.py b/pymc/model.py
index fa8b4fcbc5..7d3972a4c6 100644
--- a/pymc/model.py
+++ b/pymc/model.py
@@ -153,7 +153,7 @@ class ContextMeta(type):
     """
 
     def __new__(cls, name, bases, dct, **kwargs):  # pylint: disable=unused-argument
-        "Add __enter__ and __exit__ methods to the class."
+        """Add __enter__ and __exit__ methods to the class."""
 
         def __enter__(self):
             self.__class__.context_class.get_contexts().append(self)
@@ -457,7 +457,6 @@ class Model(WithMemoization, metaclass=ContextMeta):
 
     Examples
     --------
-
     How to define a custom model
 
     .. code-block:: python
@@ -1356,7 +1355,7 @@ def make_obs_var(
         """Create a `TensorVariable` for an observed random variable.
 
         Parameters
-        ==========
+        ----------
         rv_var
             The random variable that is observed.
             Its dimensionality must be compatible with the data already.
@@ -1808,7 +1807,7 @@ def point_logps(self, point=None, round_vals=2):
 
 
 class BlockModelAccess(Model):
-    """This class can be used to prevent user access to Model contexts"""
+    """Can be used to prevent user access to Model contexts"""
 
     def __init__(self, *args, error_msg_on_access="Model access is blocked", **kwargs):
         self.error_msg_on_access = error_msg_on_access
@@ -1829,7 +1828,6 @@ def set_data(new_data, model=None, *, coords=None):
 
     Examples
     --------
-
     This example shows how to change the shape of the likelihood to correspond
     automatically with `x`, the predictor in a regression model.
diff --git a/pymc/model_graph.py b/pymc/model_graph.py
index 48c0318aca..28d0746f62 100644
--- a/pymc/model_graph.py
+++ b/pymc/model_graph.py
@@ -199,6 +199,7 @@ def get_plates(self, var_names: Optional[Iterable[VarName]] = None) -> Dict[str,
         Just groups by the shape of the underlying distribution. Will be wrong
         if there are two plates with the same shape.
+
         Returns
         -------
         dict
diff --git a/pymc/ode/__init__.py b/pymc/ode/__init__.py
index 4e70355888..48506c0c2e 100644
--- a/pymc/ode/__init__.py
+++ b/pymc/ode/__init__.py
@@ -1,5 +1,5 @@
 """
-This submodule contains tools used to perform inference on ordinary differential equations.
+Contains tools used to perform inference on ordinary differential equations.
 Due to the nature of the model (as well as included solvers), ODE solution may perform slowly.
 Another library based on PyMC--sunode--has implemented Adams' method and BDF (backward
 differentation formula) using the very fast SUNDIALS suite of ODE and PDE solvers.
diff --git a/pymc/ode/ode.py b/pymc/ode/ode.py
index 4d67942b05..c2c03ce0ed 100644
--- a/pymc/ode/ode.py
+++ b/pymc/ode/ode.py
@@ -42,7 +42,6 @@ class DifferentialEquation(Op):
 
     Parameters
     ----------
-
     func : callable
         Function specifying the differential equation. Must take arguments y (n_states,), t (scalar), p (n_theta,)
     times : array
@@ -57,7 +56,6 @@ class DifferentialEquation(Op):
 
     Examples
     --------
-
     .. code-block:: python
 
         def odefunc(y, t, p):
@@ -108,7 +106,7 @@ def __init__(self, func, times, *, n_states, n_theta, t0=0):
         self._output_sensitivities = {}
 
     def _system(self, Y, t, p):
-        r"""This is the function that will be passed to odeint. Solves both ODE and sensitivities.
+        r"""The function that will be passed to odeint. Solves both ODE and sensitivities.
 
         Parameters
         ----------
diff --git a/pymc/pytensorf.py b/pymc/pytensorf.py
index d186f95f22..211c4a99a6 100644
--- a/pymc/pytensorf.py
+++ b/pymc/pytensorf.py
@@ -203,7 +203,7 @@ def walk_model(
     """Walk model graphs and yield their nodes.
 
     Parameters
-    ==========
+    ----------
     graphs
         The graphs to walk.
     stop_at_vars
@@ -235,12 +235,12 @@ def _replace_rvs_in_graphs(
     This will *not* recompute test values.
 
     Parameters
-    ==========
+    ----------
     graphs
         The graphs in which random variables are to be replaced.
 
     Returns
-    =======
+    -------
     Tuple containing the transformed graphs and a ``dict`` of the replacements
     that were made.
     """
@@ -296,7 +296,7 @@ def rvs_to_value_vars(
     This will *not* recompute test values in the resulting graphs.
 
     Parameters
-    ==========
+    ----------
     graphs
         The graphs in which to perform the replacements.
     apply_transforms
diff --git a/pymc/sampling/population.py b/pymc/sampling/population.py
index 07fbd6d340..ba5cd8540a 100644
--- a/pymc/sampling/population.py
+++ b/pymc/sampling/population.py
@@ -12,7 +12,7 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-"""This module specializes on running MCMCs with population step methods."""
+"""Specializes on running MCMCs with population step methods."""
 
 import logging
 
@@ -187,7 +187,7 @@ def __exit__(self, exc_type, exc_val, exc_tb):
 
     @staticmethod
     def _run_secondary(c, stepper_dumps, secondary_end):
-        """This method is started on a separate process to perform stepping of a chain.
+        """The method is started on a separate process to perform stepping of a chain.
 
         Parameters
         ----------
diff --git a/pymc/step_methods/hmc/hmc.py b/pymc/step_methods/hmc/hmc.py
index bf3a11d4cf..e3ffbf4d77 100644
--- a/pymc/step_methods/hmc/hmc.py
+++ b/pymc/step_methods/hmc/hmc.py
@@ -62,7 +62,8 @@ class HamiltonianMC(BaseHMC):
     ]
 
     def __init__(self, vars=None, path_length=2.0, max_steps=1024, **kwargs):
-        """Set up the Hamiltonian Monte Carlo sampler.
+        """
+        Set up the Hamiltonian Monte Carlo sampler.
 
         Parameters
         ----------
diff --git a/pymc/step_methods/slicer.py b/pymc/step_methods/slicer.py
index bf72cadfd7..ccde3d3f82 100644
--- a/pymc/step_methods/slicer.py
+++ b/pymc/step_methods/slicer.py
@@ -32,7 +32,7 @@ class Slice(ArrayStep):
     """
-    Univariate slice sampler step method
+    Univariate slice sampler step method.
 
     Parameters
     ----------
diff --git a/pymc/util.py b/pymc/util.py
index fc24554b71..00d47a4c42 100644
--- a/pymc/util.py
+++ b/pymc/util.py
@@ -449,7 +449,7 @@ def _get_unique_seeds_per_chain(integers_fn):
 def get_value_vars_from_user_vars(
     vars: Union[Variable, Sequence[Variable]], model
 ) -> List[Variable]:
-    """This function converts user "vars" input into value variables
+    """Converts user "vars" input into value variables.
 
     More often than not, users will pass random variables, and we will extract the
     respective value variables, but we also allow for the input to already be value
diff --git a/pymc/variational/approximations.py b/pymc/variational/approximations.py
index 9ebc02ef40..363276b661 100644
--- a/pymc/variational/approximations.py
+++ b/pymc/variational/approximations.py
@@ -377,7 +377,7 @@ def __init__(self, trace=None, size=None, **kwargs):
 
     def evaluate_over_trace(self, node):
         R"""
-        This allows to statically evaluate any symbolic expression over the trace.
+        Allows to statically evaluate any symbolic expression over the trace.
 
         Parameters
         ----------
diff --git a/pymc/variational/opvi.py b/pymc/variational/opvi.py
index 018d65b010..80733bf1e5 100644
--- a/pymc/variational/opvi.py
+++ b/pymc/variational/opvi.py
@@ -593,7 +593,6 @@ class Group(WithMemoization):
     Examples
     --------
-
     **Basic Initialization**
 
     :class:`Group` is a factory class. You do not need to call every ApproximationGroup
     explicitly.
diff --git a/pymc/variational/updates.py b/pymc/variational/updates.py
index e712ab2592..5de07059f9 100644
--- a/pymc/variational/updates.py
+++ b/pymc/variational/updates.py
@@ -665,7 +665,7 @@ def rmsprop(loss_or_grads=None, params=None, learning_rate=1.0, rho=0.9, epsilon
 
 
 def adadelta(loss_or_grads=None, params=None, learning_rate=1.0, rho=0.95, epsilon=1e-6):
-    r""" Adadelta updates
+    r"""Adadelta updates
 
     Scale learning rates by the ratio of accumulated gradients to accumulated
     updates, see [1]_ and notes for further description.

From 944b85d95d1b7dd966af4d0c3a1e4ca19b0a2665 Mon Sep 17 00:00:00 2001
From: Michael Osthege
Date: Mon, 12 Dec 2022 18:29:21 +0100
Subject: [PATCH 2/2] Remove pydocstyle settings from `setup.cfg`

---
 setup.cfg | 4 ----
 1 file changed, 4 deletions(-)

diff --git a/setup.cfg b/setup.cfg
index b2cb3e5033..889555c1ec 100644
--- a/setup.cfg
+++ b/setup.cfg
@@ -4,10 +4,6 @@ testpaths = pymc/tests
 
 [coverage:run]
 omit = *examples*
 
-[pydocstyle]
-add-ignore = D100,D104
-convention = numpy
-
 [isort]
 lines_between_types = 1
 profile = black