Skip to content

Commit b385544

Browse files
committed
fixed resample.Resampler docstring errors in min, max, mean, prod, std, var as indicated by validate_docstrings.py
1 parent 6b16ae5 commit b385544

File tree

1 file changed

+47
-63
lines changed

1 file changed

+47
-63
lines changed

Diff for: pandas/core/resample.py

+47-63
Original file line number | Diff line number | Diff line change
@@ -1,7 +1,6 @@
11
from __future__ import annotations
22

33
import copy
4-
import warnings
54
from textwrap import dedent
65
from typing import (
76
TYPE_CHECKING,
@@ -11,10 +10,10 @@
1110
no_type_check,
1211
overload,
1312
)
13+
import warnings
1414

1515
import numpy as np
1616

17-
import pandas.core.algorithms as algos
1817
from pandas._libs import lib
1918
from pandas._libs.tslibs import (
2019
BaseOffset,
@@ -26,12 +25,17 @@
2625
to_offset,
2726
)
2827
from pandas._typing import NDFrameT
29-
from pandas.core.apply import ResamplerWindowApply
30-
from pandas.core.arrays import ArrowExtensionArray
31-
from pandas.core.base import (
32-
PandasObject,
33-
SelectionMixin,
28+
from pandas.errors import AbstractMethodError
29+
from pandas.util._decorators import (
30+
Appender,
31+
Substitution,
32+
doc,
3433
)
34+
from pandas.util._exceptions import (
35+
find_stack_level,
36+
rewrite_warning,
37+
)
38+
3539
from pandas.core.dtypes.dtypes import (
3640
ArrowDtype,
3741
PeriodDtype,
@@ -40,6 +44,14 @@
4044
ABCDataFrame,
4145
ABCSeries,
4246
)
47+
48+
import pandas.core.algorithms as algos
49+
from pandas.core.apply import ResamplerWindowApply
50+
from pandas.core.arrays import ArrowExtensionArray
51+
from pandas.core.base import (
52+
PandasObject,
53+
SelectionMixin,
54+
)
4355
from pandas.core.generic import (
4456
NDFrame,
4557
_shared_docs,
@@ -68,7 +80,7 @@
6880
timedelta_range,
6981
)
7082
from pandas.core.reshape.concat import concat
71-
from pandas.errors import AbstractMethodError
83+
7284
from pandas.tseries.frequencies import (
7385
is_subperiod,
7486
is_superperiod,
@@ -77,26 +89,13 @@
7789
Day,
7890
Tick,
7991
)
80-
from pandas.util._decorators import (
81-
Appender,
82-
Substitution,
83-
doc,
84-
)
85-
from pandas.util._exceptions import (
86-
find_stack_level,
87-
rewrite_warning,
88-
)
8992

9093
if TYPE_CHECKING:
9194
from collections.abc import (
9295
Callable,
9396
Hashable,
9497
)
9598

96-
from pandas import (
97-
DataFrame,
98-
Series,
99-
)
10099
from pandas._typing import (
101100
Any,
102101
AnyArrayLike,
@@ -115,6 +114,11 @@
115114
npt,
116115
)
117116

117+
from pandas import (
118+
DataFrame,
119+
Series,
120+
)
121+
118122
_shared_docs_kwargs: dict[str, str] = {}
119123

120124

@@ -356,8 +360,7 @@ def pipe(
356360
axis="",
357361
)
358362
def aggregate(self, func=None, *args, **kwargs):
359-
result = ResamplerWindowApply(
360-
self, func, args=args, kwargs=kwargs).agg()
363+
result = ResamplerWindowApply(self, func, args=args, kwargs=kwargs).agg()
361364
if result is None:
362365
how = func
363366
result = self._groupby_and_aggregate(how, *args, **kwargs)
@@ -444,14 +447,13 @@ def _groupby_and_aggregate(self, how, *args, **kwargs):
444447
# Excludes `on` column when provided
445448
obj = self._obj_with_exclusions
446449

447-
grouped = get_groupby(obj, by=None, grouper=grouper,
448-
group_keys=self.group_keys)
450+
grouped = get_groupby(obj, by=None, grouper=grouper, group_keys=self.group_keys)
449451

450452
try:
451453
if callable(how):
452454
# TODO: test_resample_apply_with_additional_args fails if we go
453455
# through the non-lambda path, not clear that it should.
454-
def func(x): return how(x, *args, **kwargs)
456+
func = lambda x: how(x, *args, **kwargs)
455457
result = grouped.aggregate(func)
456458
else:
457459
result = grouped.aggregate(how, *args, **kwargs)
@@ -1647,16 +1649,14 @@ def _apply(self, f, *args, **kwargs):
16471649
"""
16481650

16491651
def func(x):
1650-
x = self._resampler_cls(
1651-
x, timegrouper=self._timegrouper, gpr_index=self.ax)
1652+
x = self._resampler_cls(x, timegrouper=self._timegrouper, gpr_index=self.ax)
16521653

16531654
if isinstance(f, str):
16541655
return getattr(x, f)(**kwargs)
16551656

16561657
return x.apply(f, *args, **kwargs)
16571658

1658-
result = _apply(self._groupby, func,
1659-
include_groups=self.include_groups)
1659+
result = _apply(self._groupby, func, include_groups=self.include_groups)
16601660
return self._wrap_result(result)
16611661

16621662
_upsample = _apply
@@ -2074,17 +2074,14 @@ def __init__(
20742074
if closed not in {None, "left", "right"}:
20752075
raise ValueError(f"Unsupported value {closed} for `closed`")
20762076
if convention not in {None, "start", "end", "e", "s"}:
2077-
raise ValueError(
2078-
f"Unsupported value {convention} for `convention`")
2077+
raise ValueError(f"Unsupported value {convention} for `convention`")
20792078

20802079
if (
2081-
(key is None and obj is not None and isinstance(
2082-
obj.index, PeriodIndex)) # type: ignore[attr-defined]
2080+
(key is None and obj is not None and isinstance(obj.index, PeriodIndex)) # type: ignore[attr-defined]
20832081
or (
20842082
key is not None
20852083
and obj is not None
2086-
# type: ignore[index]
2087-
and getattr(obj[key], "dtype", None) == "period"
2084+
and getattr(obj[key], "dtype", None) == "period" # type: ignore[index]
20882085
)
20892086
):
20902087
freq = to_offset(freq, is_period=True)
@@ -2304,8 +2301,7 @@ def _adjust_bin_edges(
23042301
edges_dti = binner.tz_localize(None)
23052302
edges_dti = (
23062303
edges_dti
2307-
+ Timedelta(days=1,
2308-
unit=edges_dti.unit).as_unit(edges_dti.unit)
2304+
+ Timedelta(days=1, unit=edges_dti.unit).as_unit(edges_dti.unit)
23092305
- Timedelta(1, unit=edges_dti.unit).as_unit(edges_dti.unit)
23102306
)
23112307
bin_edges = edges_dti.tz_localize(binner.tz).asi8
@@ -2335,8 +2331,7 @@ def _get_time_delta_bins(self, ax: TimedeltaIndex):
23352331
)
23362332

23372333
if not len(ax):
2338-
binner = labels = TimedeltaIndex(
2339-
data=[], freq=self.freq, name=ax.name)
2334+
binner = labels = TimedeltaIndex(data=[], freq=self.freq, name=ax.name)
23402335
return binner, [], labels
23412336

23422337
start, end = ax.min(), ax.max()
@@ -2375,8 +2370,7 @@ def _get_time_period_bins(self, ax: DatetimeIndex):
23752370
)
23762371
return binner, [], labels
23772372

2378-
labels = binner = period_range(
2379-
start=ax[0], end=ax[-1], freq=freq, name=ax.name)
2373+
labels = binner = period_range(start=ax[0], end=ax[-1], freq=freq, name=ax.name)
23802374

23812375
end_stamps = (labels + freq).asfreq(freq, "s").to_timestamp()
23822376
if ax.tz:
@@ -2405,12 +2399,10 @@ def _get_period_bins(self, ax: PeriodIndex):
24052399
if not len(memb):
24062400
# index contains no valid (non-NaT) values
24072401
bins = np.array([], dtype=np.int64)
2408-
binner = labels = PeriodIndex(
2409-
data=[], freq=self.freq, name=ax.name)
2402+
binner = labels = PeriodIndex(data=[], freq=self.freq, name=ax.name)
24102403
if len(ax) > 0:
24112404
# index is all NaT
2412-
binner, bins, labels = _insert_nat_bin(
2413-
binner, bins, labels, len(ax))
2405+
binner, bins, labels = _insert_nat_bin(binner, bins, labels, len(ax))
24142406
return binner, bins, labels
24152407

24162408
freq_mult = self.freq.n
@@ -2434,8 +2426,7 @@ def _get_period_bins(self, ax: PeriodIndex):
24342426
)
24352427

24362428
# Get offset for bin edge (not label edge) adjustment
2437-
start_offset = Period(start, self.freq) - \
2438-
Period(p_start, self.freq)
2429+
start_offset = Period(start, self.freq) - Period(p_start, self.freq)
24392430
# error: Item "Period" of "Union[Period, Any]" has no attribute "n"
24402431
bin_shift = start_offset.n % freq_mult # type: ignore[union-attr]
24412432
start = p_start
@@ -2459,8 +2450,7 @@ def _get_period_bins(self, ax: PeriodIndex):
24592450
bins = memb.searchsorted(prng, side="left")
24602451

24612452
if nat_count > 0:
2462-
binner, bins, labels = _insert_nat_bin(
2463-
binner, bins, labels, nat_count)
2453+
binner, bins, labels = _insert_nat_bin(binner, bins, labels, nat_count)
24642454

24652455
return binner, bins, labels
24662456

@@ -2497,8 +2487,7 @@ def _take_new_index(
24972487
new_values = algos.take_nd(obj._values, indexer)
24982488
return obj._constructor(new_values, index=new_index, name=obj.name)
24992489
elif isinstance(obj, ABCDataFrame):
2500-
new_mgr = obj._mgr.reindex_indexer(
2501-
new_axis=new_index, indexer=indexer, axis=1)
2490+
new_mgr = obj._mgr.reindex_indexer(new_axis=new_index, indexer=indexer, axis=1)
25022491
return obj._constructor_from_mgr(new_mgr, axes=new_mgr.axes)
25032492
else:
25042493
raise ValueError("'obj' should be either a Series or a DataFrame")
@@ -2548,8 +2537,7 @@ def _get_timestamp_range_edges(
25482537
if isinstance(freq, Tick):
25492538
index_tz = first.tz
25502539
if isinstance(origin, Timestamp) and (origin.tz is None) != (index_tz is None):
2551-
raise ValueError(
2552-
"The origin must have the same timezone as the index.")
2540+
raise ValueError("The origin must have the same timezone as the index.")
25532541
if origin == "epoch":
25542542
# set the epoch based on the timezone to have similar bins results when
25552543
# resampling on the same kind of indexes on different timezones
@@ -2778,8 +2766,7 @@ def asfreq(
27782766
if isinstance(obj.index, DatetimeIndex):
27792767
# TODO: should we disallow non-DatetimeIndex?
27802768
unit = obj.index.unit
2781-
dti = date_range(obj.index.min(), obj.index.max(),
2782-
freq=freq, unit=unit)
2769+
dti = date_range(obj.index.min(), obj.index.max(), freq=freq, unit=unit)
27832770
dti.name = obj.index.name
27842771
new_obj = obj.reindex(dti, method=method, fill_value=fill_value)
27852772
if normalize:
@@ -2809,11 +2796,9 @@ def _asfreq_compat(index: FreqIndexT, freq) -> FreqIndexT:
28092796
if isinstance(index, PeriodIndex):
28102797
new_index = index.asfreq(freq=freq)
28112798
elif isinstance(index, DatetimeIndex):
2812-
new_index = DatetimeIndex(
2813-
[], dtype=index.dtype, freq=freq, name=index.name)
2799+
new_index = DatetimeIndex([], dtype=index.dtype, freq=freq, name=index.name)
28142800
elif isinstance(index, TimedeltaIndex):
2815-
new_index = TimedeltaIndex(
2816-
[], dtype=index.dtype, freq=freq, name=index.name)
2801+
new_index = TimedeltaIndex([], dtype=index.dtype, freq=freq, name=index.name)
28172802
else: # pragma: no cover
28182803
raise TypeError(type(index))
28192804
return new_index
@@ -2830,6 +2815,5 @@ def _apply(
28302815
target_category=DeprecationWarning,
28312816
new_message=new_message,
28322817
):
2833-
result = grouped.apply(
2834-
how, *args, include_groups=include_groups, **kwargs)
2835-
return result
2818+
result = grouped.apply(how, *args, include_groups=include_groups, **kwargs)
2819+
return result

0 commit comments

Comments
 (0)