diff --git a/docs/sphinx/source/reference/iotools.rst b/docs/sphinx/source/reference/iotools.rst
index 481f46ddb5..183f7b7bae 100644
--- a/docs/sphinx/source/reference/iotools.rst
+++ b/docs/sphinx/source/reference/iotools.rst
@@ -36,6 +36,11 @@ of sources and file formats relevant to solar energy modeling.
iotools.get_cams
iotools.read_cams
iotools.parse_cams
+ iotools.get_acis_prism
+ iotools.get_acis_nrcc
+ iotools.get_acis_mpe
+ iotools.get_acis_station_data
+ iotools.get_acis_available_stations
A :py:class:`~pvlib.location.Location` object may be created from metadata
in some files.
diff --git a/docs/sphinx/source/whatsnew/v0.9.6.rst b/docs/sphinx/source/whatsnew/v0.9.6.rst
index 729de3e457..6adbc3b8a2 100644
--- a/docs/sphinx/source/whatsnew/v0.9.6.rst
+++ b/docs/sphinx/source/whatsnew/v0.9.6.rst
@@ -48,6 +48,11 @@ Enhancements
* :py:func:`pvlib.iotools.get_psm3` now uses the new NSRDB 3.2.2 endpoint for
hourly and half-hourly single-year datasets. (:issue:`1591`, :pull:`1736`)
* The default solar position algorithm (NREL SPA) is now 50-100% faster. (:pull:`1748`)
+* Added functions to retrieve daily precipitation, temperature, and snowfall data
+ from the NOAA's ACIS service: :py:func:`~pvlib.iotools.get_acis_prism`,
+ :py:func:`~pvlib.iotools.get_acis_nrcc`, :py:func:`~pvlib.iotools.get_acis_mpe`,
+ :py:func:`~pvlib.iotools.get_acis_station_data`, and
+ :py:func:`~pvlib.iotools.get_acis_available_stations`. (:issue:`1293`, :pull:`1767`)
Bug fixes
~~~~~~~~~
diff --git a/pvlib/iotools/__init__.py b/pvlib/iotools/__init__.py
index 0a94e79f53..cfb49e1414 100644
--- a/pvlib/iotools/__init__.py
+++ b/pvlib/iotools/__init__.py
@@ -20,3 +20,8 @@
from pvlib.iotools.sodapro import get_cams # noqa: F401
from pvlib.iotools.sodapro import read_cams # noqa: F401
from pvlib.iotools.sodapro import parse_cams # noqa: F401
+from pvlib.iotools.acis import get_acis_prism # noqa: F401
+from pvlib.iotools.acis import get_acis_nrcc # noqa: F401
+from pvlib.iotools.acis import get_acis_mpe # noqa: F401
+from pvlib.iotools.acis import get_acis_station_data # noqa: F401
+from pvlib.iotools.acis import get_acis_available_stations # noqa: F401
diff --git a/pvlib/iotools/acis.py b/pvlib/iotools/acis.py
new file mode 100644
index 0000000000..3be16cfa4c
--- /dev/null
+++ b/pvlib/iotools/acis.py
@@ -0,0 +1,516 @@
+import requests
+import pandas as pd
+import numpy as np
+
+
+VARIABLE_MAP = {
+ # time series names
+ 'pcpn': 'precipitation',
+ 'maxt': 'temp_air_max',
+ 'avgt': 'temp_air_average',
+ 'obst': 'temp_air_observation',
+ 'mint': 'temp_air_min',
+ 'cdd': 'cooling_degree_days',
+ 'hdd': 'heating_degree_days',
+ 'gdd': 'growing_degree_days',
+ 'snow': 'snowfall',
+ 'snwd': 'snowdepth',
+
+ # metadata names
+ 'lat': 'latitude',
+ 'lon': 'longitude',
+ 'elev': 'altitude',
+}
+
+
+def _get_acis(start, end, params, map_variables, url, **kwargs):
+ """
+ generic helper for the public get_acis_X functions
+ """
+ params = {
+ # use pd.to_datetime so that strings (e.g. '2021-01-01') are accepted
+ 'sdate': pd.to_datetime(start).strftime('%Y-%m-%d'),
+ 'edate': pd.to_datetime(end).strftime('%Y-%m-%d'),
+ 'output': 'json',
+ **params, # endpoint-specific parameters
+ }
+ response = requests.post(url,
+ json=params,
+ headers={"Content-Type": "application/json"},
+ **kwargs)
+ response.raise_for_status()
+ payload = response.json()
+
+ # somewhat inconveniently, the ACIS API tends to return errors as "valid"
+ # responses instead of using proper HTTP error codes:
+ if "error" in payload:
+ raise requests.HTTPError(payload['error'], response=response)
+
+ columns = ['date'] + [e['name'] for e in params['elems']]
+ df = pd.DataFrame(payload['data'], columns=columns)
+ df = df.set_index('date')
+ df.index = pd.to_datetime(df.index)
+ df.index.name = None
+
+ metadata = payload['meta']
+
+ try:
+ # for StnData endpoint, unpack combination "ll" into lat, lon
+ metadata['lon'], metadata['lat'] = metadata.pop('ll')
+ except KeyError:
+ pass
+
+ try:
+ metadata['elev'] = metadata['elev'] * 0.3048 # feet to meters
+ except KeyError:
+ # some queries don't return elevation
+ pass
+
+ if map_variables:
+ df = df.rename(columns=VARIABLE_MAP)
+
+ for key in list(metadata.keys()):
+ if key in VARIABLE_MAP:
+ metadata[VARIABLE_MAP[key]] = metadata.pop(key)
+
+ return df, metadata
+
+
+def get_acis_prism(latitude, longitude, start, end, map_variables=True,
+ url="https://data.rcc-acis.org/GridData", **kwargs):
+ """
+ Retrieve estimated daily precipitation and temperature data from PRISM
+ via the Applied Climate Information System (ACIS).
+
+ ACIS [2]_, [3]_ aggregates and provides access to climate data
+ from many underlying sources. This function retrieves daily data from
+ the Parameter-elevation Regressions on Independent Slopes Model
+ (PRISM) [1]_, a gridded precipitation and temperature model
+ from Oregon State University.
+
+ Geographical coverage: US, Central America, and part of South America.
+ Approximately 0° to 50° in latitude and -130° to -65° in longitude.
+
+ Parameters
+ ----------
+ latitude : float
+ in decimal degrees, between -90 and 90, north is positive
+ longitude : float
+ in decimal degrees, between -180 and 180, east is positive
+ start : datetime-like
+ First day of the requested period
+ end : datetime-like
+ Last day of the requested period
+ map_variables : bool, default True
+ When True, rename data columns and metadata keys to pvlib variable
+ names where applicable. See variable :const:`VARIABLE_MAP`.
+ url : str, default: 'https://data.rcc-acis.org/GridData'
+ API endpoint URL
+ kwargs:
+ Optional parameters passed to ``requests.post``.
+
+ Returns
+ -------
+ data : pandas.DataFrame
+ Daily precipitation [mm], temperature [Celsius], and degree day
+ [Celsius-days] data
+ metadata : dict
+ Metadata of the selected grid cell
+
+ Raises
+ ------
+ requests.HTTPError
+ A message from the ACIS server if the request is rejected
+
+ Notes
+ -----
+    PRISM data is aggregated from 12:00 to 12:00 UTC, meaning data labeled
+    May 26 corresponds to the 24 hours ending at 7:00am Eastern Standard Time
+    on May 26.
+
+ References
+ ----------
+    .. [1] `PRISM <https://prism.oregonstate.edu/>`_
+    .. [2] `ACIS Gridded Data <http://www.rcc-acis.org/docs_gridded.html>`_
+    .. [3] `ACIS Web Services <http://www.rcc-acis.org/docs_webservices.html>`_
+
+ Examples
+ --------
+ >>> from pvlib.iotools import get_acis_prism
+    >>> df, meta = get_acis_prism(40, -80, '2020-01-01', '2020-12-31')
+ """
+ elems = [
+ {"name": "pcpn", "interval": "dly", "units": "mm"},
+ {"name": "maxt", "interval": "dly", "units": "degreeC"},
+ {"name": "mint", "interval": "dly", "units": "degreeC"},
+ {"name": "avgt", "interval": "dly", "units": "degreeC"},
+ {"name": "cdd", "interval": "dly", "units": "degreeC"},
+ {"name": "hdd", "interval": "dly", "units": "degreeC"},
+ {"name": "gdd", "interval": "dly", "units": "degreeC"},
+ ]
+ params = {
+ 'loc': f"{longitude},{latitude}",
+ 'grid': "21",
+ 'elems': elems,
+ 'meta': ["ll", "elev"],
+ }
+ df, meta = _get_acis(start, end, params, map_variables, url, **kwargs)
+ df = df.replace(-999, np.nan)
+ return df, meta
+
+
+def get_acis_nrcc(latitude, longitude, start, end, grid, map_variables=True,
+ url="https://data.rcc-acis.org/GridData", **kwargs):
+ """
+ Retrieve estimated daily precipitation and temperature data from the
+ Northeast Regional Climate Center via the Applied Climate
+ Information System (ACIS).
+
+ ACIS [2]_, [3]_ aggregates and provides access to climate data
+ from many underlying sources. This function retrieves daily data from
+ Cornell's Northeast Regional Climate Center (NRCC) [1]_.
+
+ Geographical coverage: US, Central America, and part of South America.
+ Approximately 0° to 50° in latitude and -130° to -65° in longitude.
+
+ Parameters
+ ----------
+ latitude : float
+ in decimal degrees, between -90 and 90, north is positive
+ longitude : float
+ in decimal degrees, between -180 and 180, east is positive
+ start : datetime-like
+ First day of the requested period
+ end : datetime-like
+ Last day of the requested period
+ grid : int
+ Options are either 1 (for "NRCC Interpolated") or 3
+ (for "NRCC Hi-Resolution"). See [2]_ for details.
+ map_variables : bool, default True
+ When True, rename data columns and metadata keys to pvlib variable
+ names where applicable. See variable :const:`VARIABLE_MAP`.
+ url : str, default: 'https://data.rcc-acis.org/GridData'
+ API endpoint URL
+ kwargs:
+ Optional parameters passed to ``requests.post``.
+
+ Returns
+ -------
+ data : pandas.DataFrame
+ Daily precipitation [mm], temperature [Celsius], and degree day
+ [Celsius-days] data
+ metadata : dict
+ Metadata of the selected grid cell
+
+ Raises
+ ------
+ requests.HTTPError
+ A message from the ACIS server if the request is rejected
+
+ Notes
+ -----
+ The returned values are 24-hour aggregates, but
+ the aggregation period may not be midnight to midnight in local time.
+ Check the ACIS and NRCC documentation for details.
+
+ References
+ ----------
+    .. [1] `NRCC <http://www.nrcc.cornell.edu/>`_
+    .. [2] `ACIS Gridded Data <http://www.rcc-acis.org/docs_gridded.html>`_
+    .. [3] `ACIS Web Services <http://www.rcc-acis.org/docs_webservices.html>`_
+
+ Examples
+ --------
+ >>> from pvlib.iotools import get_acis_nrcc
+ >>> df, meta = get_acis_nrcc(40, -80, '2020-01-01', '2020-12-31', grid=1)
+ """
+ elems = [
+ {"name": "pcpn", "interval": "dly", "units": "mm"},
+ {"name": "maxt", "interval": "dly", "units": "degreeC"},
+ {"name": "mint", "interval": "dly", "units": "degreeC"},
+ {"name": "avgt", "interval": "dly", "units": "degreeC"},
+ {"name": "cdd", "interval": "dly", "units": "degreeC"},
+ {"name": "hdd", "interval": "dly", "units": "degreeC"},
+ {"name": "gdd", "interval": "dly", "units": "degreeC"},
+ ]
+ params = {
+ 'loc': f"{longitude},{latitude}",
+ 'grid': grid,
+ 'elems': elems,
+ 'meta': ["ll", "elev"],
+ }
+ df, meta = _get_acis(start, end, params, map_variables, url, **kwargs)
+ df = df.replace(-999, np.nan)
+ return df, meta
+
+
+
+def get_acis_mpe(latitude, longitude, start, end, map_variables=True,
+ url="https://data.rcc-acis.org/GridData", **kwargs):
+ """
+ Retrieve estimated daily Multi-sensor Precipitation Estimates
+ via the Applied Climate Information System (ACIS).
+
+ ACIS [2]_, [3]_ aggregates and provides access to climate data
+ from many underlying sources. This function retrieves daily data from
+ the National Weather Service's Multi-sensor Precipitation Estimates
+ (MPE) [1]_, a gridded precipitation model.
+
+ This dataset covers the contiguous United States, Mexico, and parts of
+ Central America.
+
+ Parameters
+ ----------
+ latitude : float
+ in decimal degrees, between -90 and 90, north is positive
+ longitude : float
+ in decimal degrees, between -180 and 180, east is positive
+ start : datetime-like
+ First day of the requested period
+ end : datetime-like
+ Last day of the requested period
+ map_variables : bool, default True
+ When True, rename data columns and metadata keys to pvlib variable
+ names where applicable. See variable :const:`VARIABLE_MAP`.
+ url : str, default: 'https://data.rcc-acis.org/GridData'
+ API endpoint URL
+ kwargs:
+ Optional parameters passed to ``requests.post``.
+
+ Returns
+ -------
+ data : pandas.DataFrame
+ Daily precipitation [mm] data
+ metadata : dict
+ Coordinates of the selected grid cell
+
+ Raises
+ ------
+ requests.HTTPError
+ A message from the ACIS server if the request is rejected
+
+ Notes
+ -----
+ The returned values are 24-hour aggregates, but
+ the aggregation period may not be midnight to midnight in local time.
+ Check the ACIS and MPE documentation for details.
+
+ References
+ ----------
+    .. [1] `Multisensor Precipitation Estimates
+       <https://www.weather.gov/marfc/Multisensor_Precipitation>`_
+    .. [2] `ACIS Gridded Data <http://www.rcc-acis.org/docs_gridded.html>`_
+    .. [3] `ACIS Web Services <http://www.rcc-acis.org/docs_webservices.html>`_
+
+ Examples
+ --------
+ >>> from pvlib.iotools import get_acis_mpe
+ >>> df, meta = get_acis_mpe(40, -80, '2020-01-01', '2020-12-31')
+ """
+ elems = [
+ # only precipitation is supported in this dataset
+ {"name": "pcpn", "interval": "dly", "units": "mm"},
+ ]
+ params = {
+ 'loc': f"{longitude},{latitude}",
+ 'grid': "2",
+ 'elems': elems,
+ 'meta': ["ll"], # "elev" is not supported for this dataset
+ }
+ df, meta = _get_acis(start, end, params, map_variables, url, **kwargs)
+ df = df.replace(-999, np.nan)
+ return df, meta
+
+
+def get_acis_station_data(station, start, end, trace_val=0.001,
+ map_variables=True,
+ url="https://data.rcc-acis.org/StnData", **kwargs):
+ """
+ Retrieve weather station climate records via the Applied Climate
+ Information System (ACIS).
+
+ ACIS [1]_, [2]_ aggregates and provides access to climate data
+ from many underlying sources. This function retrieves measurements
+ from ground stations belonging to various global networks.
+
+ This function can query data from stations all over the world.
+ The stations available in a given area can be listed using
+ :py:func:`get_acis_available_stations`.
+
+ Parameters
+ ----------
+ station : str
+ Identifier code for the station to query. Identifiers from many
+ station networks are accepted, including WBAN, COOP, FAA, WMO, GHCN,
+ and others. See [1]_ and [2]_ for details.
+ start : datetime-like
+ First day of the requested period
+ end : datetime-like
+ Last day of the requested period
+    trace_val : float, default 0.001
+        Value to replace "trace" values in the precipitation data
+    map_variables : bool, default True
+        When True, rename data columns and metadata keys to pvlib variable
+        names where applicable. See variable :const:`VARIABLE_MAP`.
+    url : str, default: 'https://data.rcc-acis.org/StnData'
+ API endpoint URL
+ kwargs:
+ Optional parameters passed to ``requests.post``.
+
+ Returns
+ -------
+ data : pandas.DataFrame
+ Daily precipitation [mm], temperature [Celsius], snow [mm], and
+ degree day [Celsius-days] data
+ metadata : dict
+ station metadata
+
+ Raises
+ ------
+ requests.HTTPError
+ A message from the ACIS server if the request is rejected
+
+ See Also
+ --------
+ get_acis_available_stations
+
+ References
+ ----------
+    .. [1] `ACIS Web Services <http://www.rcc-acis.org/docs_webservices.html>`_
+    .. [2] `ACIS Metadata <http://www.rcc-acis.org/docs_metadata.html>`_
+
+ Examples
+ --------
+ >>> # Using an FAA code (Chicago O'Hare airport)
+ >>> from pvlib.iotools import get_acis_station_data
+ >>> df, meta = get_acis_station_data('ORD', '2020-01-01', '2020-12-31')
+ >>>
+ >>> # Look up available stations in a lat/lon rectangle, with data
+ >>> # available in the specified date range:
+ >>> from pvlib.iotools import get_acis_available_stations
+ >>> stations = get_acis_available_stations([39.5, 40.5], [-80.5, -79.5],
+ ... '2020-01-01', '2020-01-03')
+ >>> stations['sids'][0]
+ ['369367 2', 'USC00369367 6', 'WYNP1 7']
+ >>> df, meta = get_acis_station_data('369367', '2020-01-01', '2020-01-03')
+ """
+ elems = [
+ {"name": "maxt", "interval": "dly", "units": "degreeC"},
+ {"name": "mint", "interval": "dly", "units": "degreeC"},
+ {"name": "avgt", "interval": "dly", "units": "degreeC"},
+ {"name": "obst", "interval": "dly", "units": "degreeC"},
+ {"name": "pcpn", "interval": "dly", "units": "mm"},
+ {"name": "snow", "interval": "dly", "units": "cm"},
+ {"name": "snwd", "interval": "dly", "units": "cm"},
+ {"name": "cdd", "interval": "dly", "units": "degreeC"},
+ {"name": "hdd", "interval": "dly", "units": "degreeC"},
+ {"name": "gdd", "interval": "dly", "units": "degreeC"},
+ ]
+ params = {
+ 'sid': str(station),
+ 'elems': elems,
+ 'meta': ('name,state,sids,sid_dates,ll,elev,uid,county,'
+ 'climdiv,valid_daterange,tzo,network')
+ }
+ df, metadata = _get_acis(start, end, params, map_variables, url, **kwargs)
+ df = df.replace("M", np.nan)
+ df = df.replace("T", trace_val)
+ df = df.astype(float)
+ return df, metadata
+
+
+def get_acis_available_stations(latitude_range, longitude_range,
+ start=None, end=None,
+ url="https://data.rcc-acis.org/StnMeta",
+ **kwargs):
+ """
+ List weather stations in a given area available from the
+ Applied Climate Information System (ACIS).
+
+ The ``sids`` returned by this function can be used with
+ :py:func:`get_acis_station_data` to retrieve weather measurements
+ from the station.
+
+ Parameters
+ ----------
+ latitude_range : list
+ A 2-element list of [southern bound, northern bound]
+ in decimal degrees, between -90 and 90, north is positive
+ longitude_range : list
+ A 2-element list of [western bound, eastern bound]
+ in decimal degrees, between -180 and 180, east is positive
+ start : datetime-like, optional
+ If specified, return only stations that have data between ``start`` and
+ ``end``. If not specified, all stations in the region are returned.
+ end : datetime-like, optional
+ See ``start``
+ url : str, default: 'https://data.rcc-acis.org/StnMeta'
+ API endpoint URL
+ kwargs:
+ Optional parameters passed to ``requests.post``.
+
+ Returns
+ -------
+ stations : pandas.DataFrame
+ A dataframe of station metadata, one row per station.
+ The ``sids`` column contains IDs that can be used with
+ :py:func:`get_acis_station_data`.
+
+ Raises
+ ------
+ requests.HTTPError
+ A message from the ACIS server if the request is rejected
+
+ See Also
+ --------
+ get_acis_station_data
+
+ References
+ ----------
+    .. [1] `ACIS Web Services <http://www.rcc-acis.org/docs_webservices.html>`_
+    .. [2] `ACIS Metadata <http://www.rcc-acis.org/docs_metadata.html>`_
+
+ Examples
+ --------
+ >>> # Look up available stations in a lat/lon rectangle, with data
+ >>> # available in the specified date range:
+ >>> from pvlib.iotools import get_acis_available_stations
+ >>> stations = get_acis_available_stations([39.5, 40.5], [-80.5, -79.5],
+ ... '2020-01-01', '2020-01-03')
+ >>> stations['sids'][0]
+ ['369367 2', 'USC00369367 6', 'WYNP1 7']
+ """
+ bbox = "{},{},{},{}".format(
+ longitude_range[0],
+ latitude_range[0],
+ longitude_range[1],
+ latitude_range[1],
+ )
+ params = {
+ "bbox": bbox,
+ "meta": ("name,state,sids,sid_dates,ll,elev,"
+ "uid,county,climdiv,tzo,network"),
+ }
+ if start is not None and end is not None:
+ params['elems'] = ['maxt', 'mint', 'avgt', 'obst',
+ 'pcpn', 'snow', 'snwd']
+ params['sdate'] = pd.to_datetime(start).strftime('%Y-%m-%d')
+ params['edate'] = pd.to_datetime(end).strftime('%Y-%m-%d')
+
+ response = requests.post(url,
+ json=params,
+ headers={"Content-Type": "application/json"},
+ **kwargs)
+ response.raise_for_status()
+ payload = response.json()
+ if "error" in payload:
+ raise requests.HTTPError(payload['error'], response=response)
+
+ metadata = payload['meta']
+ for station_record in metadata:
+ station_record['altitude'] = station_record.pop('elev')
+ station_record['longitude'], station_record['latitude'] = \
+ station_record.pop('ll')
+
+ df = pd.DataFrame(metadata)
+ return df
diff --git a/pvlib/tests/iotools/test_acis.py b/pvlib/tests/iotools/test_acis.py
new file mode 100644
index 0000000000..8458e9b930
--- /dev/null
+++ b/pvlib/tests/iotools/test_acis.py
@@ -0,0 +1,213 @@
+"""
+tests for :mod:`pvlib.iotools.acis`
+"""
+
+import pandas as pd
+import numpy as np
+import pytest
+from pvlib.iotools import (
+ get_acis_prism, get_acis_nrcc, get_acis_mpe,
+ get_acis_station_data, get_acis_available_stations
+)
+from ..conftest import RERUNS, RERUNS_DELAY, assert_frame_equal
+from requests import HTTPError
+
+
+@pytest.mark.remote_data
+@pytest.mark.flaky(reruns=RERUNS, reruns_delay=RERUNS_DELAY)
+def test_get_acis_prism():
+ # map_variables=True
+ df, meta = get_acis_prism(40.001, -80.001, '2020-01-01', '2020-01-02')
+ expected = pd.DataFrame(
+ [
+ [0.5, 5, 0, 2.5, 0, 62, 0],
+ [0, 5, -3, 1, 0, 64, 0]
+ ],
+ columns=['precipitation', 'temp_air_max', 'temp_air_min',
+ 'temp_air_average', 'cooling_degree_days',
+ 'heating_degree_days', 'growing_degree_days'],
+ index=pd.to_datetime(['2020-01-01', '2020-01-02']),
+ )
+ assert_frame_equal(df, expected)
+ expected_meta = {'latitude': 40, 'longitude': -80, 'altitude': 298.0944}
+ assert meta == expected_meta
+
+ # map_variables=False
+ df, meta = get_acis_prism(40.001, -80.001, '2020-01-01', '2020-01-02',
+ map_variables=False)
+ expected.columns = ['pcpn', 'maxt', 'mint', 'avgt', 'cdd', 'hdd', 'gdd']
+ assert_frame_equal(df, expected)
+ expected_meta = {'lat': 40, 'lon': -80, 'elev': 298.0944}
+ assert meta == expected_meta
+
+
+@pytest.mark.remote_data
+@pytest.mark.flaky(reruns=RERUNS, reruns_delay=RERUNS_DELAY)
+@pytest.mark.parametrize('grid, expected', [
+ (1, [[0.51, 5, 0, 2.5, 0, 62, 0]]),
+ (3, [[0.51, 5, -1, 2.0, 0, 63, 0]])
+])
+def test_get_acis_nrcc(grid, expected):
+ # map_variables=True
+ df, meta = get_acis_nrcc(40.001, -80.001, '2020-01-01', '2020-01-01', grid)
+ expected = pd.DataFrame(
+ expected,
+ columns=['precipitation', 'temp_air_max', 'temp_air_min',
+ 'temp_air_average', 'cooling_degree_days',
+ 'heating_degree_days', 'growing_degree_days'],
+ index=pd.to_datetime(['2020-01-01']),
+ )
+ assert_frame_equal(df, expected)
+ expected_meta = {'latitude': 40., 'longitude': -80., 'altitude': 356.9208}
+ assert meta == pytest.approx(expected_meta)
+
+ # map_variables=False
+ df, meta = get_acis_nrcc(40.001, -80.001, '2020-01-01', '2020-01-01', grid,
+ map_variables=False)
+ expected.columns = ['pcpn', 'maxt', 'mint', 'avgt', 'cdd', 'hdd', 'gdd']
+ assert_frame_equal(df, expected)
+ expected_meta = {'lat': 40., 'lon': -80., 'elev': 356.9208}
+ assert meta == pytest.approx(expected_meta)
+
+
+@pytest.mark.remote_data
+@pytest.mark.flaky(reruns=RERUNS, reruns_delay=RERUNS_DELAY)
+def test_get_acis_nrcc_error():
+ with pytest.raises(HTTPError, match='invalid grid'):
+ # 50 is not a valid dataset (or "grid", in ACIS lingo)
+ _ = get_acis_nrcc(40, -80, '2012-01-01', '2012-01-01', 50)
+
+
+@pytest.mark.remote_data
+@pytest.mark.flaky(reruns=RERUNS, reruns_delay=RERUNS_DELAY)
+def test_get_acis_mpe():
+ # map_variables=True
+ df, meta = get_acis_mpe(40.001, -80.001, '2020-01-01', '2020-01-02')
+ expected = pd.DataFrame(
+ {'precipitation': [0.4, 0.0]},
+ index=pd.to_datetime(['2020-01-01', '2020-01-02']),
+ )
+ assert_frame_equal(df, expected)
+ expected_meta = {'latitude': 40.0083, 'longitude': -79.9653}
+ assert meta == expected_meta
+
+ # map_variables=False
+ df, meta = get_acis_mpe(40.001, -80.001, '2020-01-01', '2020-01-02',
+ map_variables=False)
+ expected.columns = ['pcpn']
+ assert_frame_equal(df, expected)
+ expected_meta = {'lat': 40.0083, 'lon': -79.9653}
+ assert meta == expected_meta
+
+
+@pytest.mark.remote_data
+@pytest.mark.flaky(reruns=RERUNS, reruns_delay=RERUNS_DELAY)
+def test_get_acis_station_data():
+ # map_variables=True
+ df, meta = get_acis_station_data('ORD', '2020-01-10', '2020-01-12',
+ trace_val=-99)
+ expected = pd.DataFrame(
+ [[10., 2., 6., np.nan, 21.34, 0., 0., 0., 59., 0.],
+ [3., -4., -0.5, np.nan, 9.4, 5.3, 0., 0., 65., 0.],
+ [-1., -5., -3., np.nan, -99, -99, 5., 0., 68., 0.]],
+ columns=['temp_air_max', 'temp_air_min', 'temp_air_average',
+ 'temp_air_observation', 'precipitation', 'snowfall',
+ 'snowdepth', 'cooling_degree_days',
+ 'heating_degree_days', 'growing_degree_days'],
+ index=pd.to_datetime(['2020-01-10', '2020-01-11', '2020-01-12']),
+ )
+ assert_frame_equal(df, expected)
+ expected_meta = {
+ 'uid': 48,
+ 'state': 'IL',
+ 'name': 'CHICAGO OHARE INTL AP',
+ 'altitude': 204.8256,
+ 'latitude': 41.96017,
+ 'longitude': -87.93164
+ }
+ expected_meta = {
+ 'valid_daterange': [
+ ['1958-11-01', '2023-06-15'],
+ ['1958-11-01', '2023-06-15'],
+ ['1958-11-01', '2023-06-15'],
+ [],
+ ['1958-11-01', '2023-06-15'],
+ ['1958-11-01', '2023-06-15'],
+ ['1958-11-01', '2023-06-15'],
+ ['1958-11-01', '2023-06-15'],
+ ['1958-11-01', '2023-06-15'],
+ ['1958-11-01', '2023-06-15']
+ ],
+ 'name': 'CHICAGO OHARE INTL AP',
+ 'sids': ['94846 1', '111549 2', 'ORD 3', '72530 4', 'KORD 5',
+ 'USW00094846 6', 'ORD 7', 'USW00094846 32'],
+ 'county': '17031',
+ 'state': 'IL',
+ 'climdiv': 'IL02',
+ 'uid': 48,
+ 'tzo': -6.0,
+ 'sid_dates': [
+ ['94846 1', '1989-01-19', '9999-12-31'],
+ ['94846 1', '1958-10-30', '1989-01-01'],
+ ['111549 2', '1989-01-19', '9999-12-31'],
+ ['111549 2', '1958-10-30', '1989-01-01'],
+ ['ORD 3', '1989-01-19', '9999-12-31'],
+ ['ORD 3', '1958-10-30', '1989-01-01'],
+ ['72530 4', '1989-01-19', '9999-12-31'],
+ ['72530 4', '1958-10-30', '1989-01-01'],
+ ['KORD 5', '1989-01-19', '9999-12-31'],
+ ['KORD 5', '1958-10-30', '1989-01-01'],
+ ['USW00094846 6', '1989-01-19', '9999-12-31'],
+ ['USW00094846 6', '1958-10-30', '1989-01-01'],
+ ['ORD 7', '1989-01-19', '9999-12-31'],
+ ['ORD 7', '1958-10-30', '1989-01-01'],
+ ['USW00094846 32', '1989-01-19', '9999-12-31'],
+ ['USW00094846 32', '1958-10-30', '1989-01-01']],
+ 'altitude': 204.8256,
+ 'longitude': -87.93164,
+ 'latitude': 41.96017
+ }
+ # don't check valid dates since they get extended every day
+ meta.pop("valid_daterange")
+ expected_meta.pop("valid_daterange")
+ assert meta == expected_meta
+
+ # map_variables=False
+ df, meta = get_acis_station_data('ORD', '2020-01-10', '2020-01-12',
+ trace_val=-99, map_variables=False)
+ expected.columns = ['maxt', 'mint', 'avgt', 'obst', 'pcpn', 'snow',
+ 'snwd', 'cdd', 'hdd', 'gdd']
+ assert_frame_equal(df, expected)
+ expected_meta['lat'] = expected_meta.pop('latitude')
+ expected_meta['lon'] = expected_meta.pop('longitude')
+ expected_meta['elev'] = expected_meta.pop('altitude')
+ meta.pop("valid_daterange")
+ assert meta == expected_meta
+
+
+@pytest.mark.remote_data
+@pytest.mark.flaky(reruns=RERUNS, reruns_delay=RERUNS_DELAY)
+def test_get_acis_available_stations():
+ # use a very narrow bounding box to hopefully make this test less likely
+ # to fail due to new stations being added in the future
+ lat, lon = 39.8986, -80.1656
+ stations = get_acis_available_stations([lat - 0.0001, lat + 0.0001],
+ [lon - 0.0001, lon + 0.0001])
+ assert len(stations) == 1
+ station = stations.iloc[0]
+
+ # test the more relevant values
+ assert station['name'] == 'WAYNESBURG 1 E'
+ assert station['sids'] == ['369367 2', 'USC00369367 6', 'WYNP1 7']
+ assert station['state'] == 'PA'
+ assert station['altitude'] == 940.
+ assert station['tzo'] == -5.0
+ assert station['latitude'] == lat
+ assert station['longitude'] == lon
+
+ # check that start/end work as filters
+ stations = get_acis_available_stations([lat - 0.0001, lat + 0.0001],
+ [lon - 0.0001, lon + 0.0001],
+ start='1900-01-01',
+ end='1900-01-02')
+ assert stations.empty