diff --git a/spec/API_specification/dataframe_api/column_object.py b/spec/API_specification/dataframe_api/column_object.py
index 11b96ef2..10ba9a35 100644
--- a/spec/API_specification/dataframe_api/column_object.py
+++ b/spec/API_specification/dataframe_api/column_object.py
@@ -641,3 +641,16 @@ def fill_nan(self, value: float | 'null', /) -> Column:
 
         """
         ...
+
+    def fill_null(self, value: Scalar, /) -> Column:
+        """
+        Fill null values with the given fill value.
+
+        Parameters
+        ----------
+        value : Scalar
+            Value used to replace any ``null`` values in the column with.
+            Must be of the Python scalar type matching the dtype of the column.
+
+        """
+        ...
diff --git a/spec/API_specification/dataframe_api/dataframe_object.py b/spec/API_specification/dataframe_api/dataframe_object.py
index c6fbb6f5..f56dae67 100644
--- a/spec/API_specification/dataframe_api/dataframe_object.py
+++ b/spec/API_specification/dataframe_api/dataframe_object.py
@@ -774,3 +774,35 @@ def fill_nan(self, value: float | 'null', /) -> DataFrame:
 
         """
         ...
+
+    def fill_null(
+        self, value: Scalar, /, *, column_names: list[str] | None = None
+    ) -> DataFrame:
+        """
+        Fill null values with the given fill value.
+
+        This method can only be used if all columns that are to be filled are
+        of the same dtype (e.g., all of ``Float64`` or all of string dtype).
+        If that is not the case, it is not possible to use a single Python
+        scalar type that matches the dtype of all columns to which
+        ``fill_null`` is being applied, and hence an exception will be raised.
+
+        Parameters
+        ----------
+        value : Scalar
+            Value used to replace any ``null`` values in the dataframe with.
+            Must be of the Python scalar type matching the dtype(s) of the dataframe.
+        column_names : list[str] | None
+            A list of column names for which to replace nulls with the given
+            scalar value. If ``None``, nulls will be replaced in all columns.
+
+        Raises
+        ------
+        TypeError
+            If the columns of the dataframe are not all of the same dtype.
+        KeyError
+            If ``column_names`` contains a column name that is not present in
+            the dataframe.
+
+        """
+        ...