Skip to content

Commit fd92fb7

Browse files
authored
SPEC 0: Bump minimum supported version to pandas>=2.1 (#3895)
* SPEC 0: Bump minimum supported version to pandas>=2.1 * Remove compatibility code for pandas<2.1
1 parent 723cac6 commit fd92fb7

File tree

6 files changed

+6
-25
lines changed

6 files changed

+6
-25
lines changed

.github/workflows/ci_tests.yaml

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -73,7 +73,7 @@ jobs:
7373
# Python 3.11 + core packages (minimum supported versions) + optional packages (minimum supported versions if any)
7474
- python-version: '3.11'
7575
numpy-version: '1.25'
76-
pandas-version: '=2.0'
76+
pandas-version: '=2.1'
7777
xarray-version: '=2023.04'
7878
optional-packages: ' contextily geopandas<1 ipython pyarrow-core rioxarray sphinx-gallery'
7979
# Python 3.13 + core packages (latest versions) + optional packages

environment.yml

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -8,7 +8,7 @@ dependencies:
88
- gmt=6.5.0
99
- ghostscript=10.04.0
1010
- numpy>=1.25
11-
- pandas>=2.0
11+
- pandas>=2.1
1212
- xarray>=2023.04
1313
- netCDF4
1414
- packaging

pygmt/clib/conversion.py

Lines changed: 0 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -198,10 +198,6 @@ def _to_numpy(data: Any) -> np.ndarray:
198198
elif isinstance(dtype, pd.ArrowDtype) and hasattr(dtype.pyarrow_dtype, "tz"):
199199
# pd.ArrowDtype[pa.Timestamp]
200200
numpy_dtype = getattr(dtype, "numpy_dtype", None)
201-
# TODO(pandas>=2.1): Remove the workaround for pandas<2.1.
202-
if Version(pd.__version__) < Version("2.1"):
203-
# In pandas 2.0, dtype.numpy_type is dtype("O").
204-
numpy_dtype = np.dtype(f"M8[{dtype.pyarrow_dtype.unit}]") # type: ignore[assignment, attr-defined]
205201

206202
array = np.ascontiguousarray(data, dtype=numpy_dtype)
207203

pygmt/tests/test_clib_to_numpy.py

Lines changed: 2 additions & 17 deletions
Original file line numberDiff line numberDiff line change
@@ -391,17 +391,7 @@ def test_to_numpy_pandas_numeric_with_na(dtype, expected_dtype):
391391
"U10",
392392
"string[python]",
393393
pytest.param("string[pyarrow]", marks=skip_if_no(package="pyarrow")),
394-
pytest.param(
395-
"string[pyarrow_numpy]",
396-
marks=[
397-
skip_if_no(package="pyarrow"),
398-
# TODO(pandas>=2.1): Remove the skipif marker for pandas<2.1.
399-
pytest.mark.skipif(
400-
Version(pd.__version__) < Version("2.1"),
401-
reason="string[pyarrow_numpy] was added since pandas 2.1",
402-
),
403-
],
404-
),
394+
pytest.param("string[pyarrow_numpy]", marks=skip_if_no(package="pyarrow")),
405395
],
406396
)
407397
def test_to_numpy_pandas_string(dtype):
@@ -536,12 +526,7 @@ def test_to_numpy_pandas_datetime(dtype, expected_dtype):
536526

537527
# Convert to UTC if the dtype is timezone-aware
538528
if "," in str(dtype): # A hacky way to decide if the dtype is timezone-aware.
539-
# TODO(pandas>=2.1): Simplify the if-else statement.
540-
if Version(pd.__version__) < Version("2.1") and dtype.startswith("timestamp"):
541-
# pandas 2.0 doesn't have the dt.tz_convert method for pyarrow.Timestamp.
542-
series = pd.to_datetime(series, utc=True)
543-
else:
544-
series = series.dt.tz_convert("UTC")
529+
series = series.dt.tz_convert("UTC")
545530
# Remove time zone information and preserve local time.
546531
expected_series = series.dt.tz_localize(tz=None)
547532
npt.assert_array_equal(result, np.array(expected_series, dtype=expected_dtype))

pyproject.toml

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -36,7 +36,7 @@ classifiers = [
3636
]
3737
dependencies = [
3838
"numpy>=1.25",
39-
"pandas>=2.0",
39+
"pandas>=2.1",
4040
"xarray>=2023.04",
4141
"netCDF4",
4242
"packaging",

requirements.txt

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1,6 +1,6 @@
11
# Required packages
22
numpy>=1.25
3-
pandas>=2.0
3+
pandas>=2.1
44
xarray>=2023.04
55
netCDF4
66
packaging

0 commit comments

Comments (0)