
Commit b5ab200

mikofski authored and wholmgren committed
reorganize tests into subfolders and use pathlib for conftest DATA_DIR (pvlib#859)
* reorganize tests into subfolders
* closes pvlib#848
* create pvlib/tests/iotools and move all iotools tests into that subfolder
* use fixtures in test_ecmwf_macc.py for expected_test_data
* use conftest.data_dir instead of the redundant boilerplate of retrieving the test folder in each test, mostly for iotools
* fix test_ivtools.py: it was using the full path to conftest, but tests is not a python package, so that doesn't work
* change "pvlib/test" -> "pvlib/tests" to conform to popular convention

Signed-off-by: Mark Mikofski <[email protected]>

* in azure pipelines, pytest pvlib only, not pvlib/test
* Update v0.7.1.rst
* Update .codecov.yml
* Update docs/sphinx/source/whatsnew/v0.7.1.rst with suggestion from Will

Co-Authored-By: Will Holmgren <[email protected]>

* use pathlib, remove os, inspect
* some iotools expect a string to check for a url using startswith() - so stringify path objects first
* in midc, use the requests.get params arg to build the querystring instead of doing it manually
* a couple of places change constants to ALL CAPS, some other places make a fixture, sometimes nothing, not consistent
* also in test_midc, comment out unused URL, was it part of a test once?
* path objects only work seamlessly on Python 3.6 or later
* stringify a few more path objects
* psm3.read_psm3 still expects a string filename
* tmy3 and tmy2 also could be strings
* last two: stringify epw and surfrad
* fingers crossed!
* fix typo, mention pathlib in what's new

Co-authored-by: Will Holmgren <[email protected]>
1 parent 618fe26 commit b5ab200

40 files changed: +100 -138 lines
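
The core pattern behind the whole change is to build test-data paths with pathlib in conftest and to stringify only where a reader still expects a plain string (URL checks, open() on Python 3.5). A minimal sketch, with a hypothetical file name and constants mirroring conftest.py:

from pathlib import Path

# conftest-style constants: the tests folder and its sibling data folder
TEST_DIR = Path(__file__).parent
DATA_DIR = TEST_DIR.parent / 'data'

# tests build paths by joining with '/'
testfile = DATA_DIR / 'some_test_file.csv'  # hypothetical file name

# readers that check for a URL, or that must run on Python 3.5, get a plain string
filename = str(testfile)
if filename.startswith('http'):
    pass  # download branch
else:
    with open(filename, 'r') as f:
        contents = f.read()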

.codecov.yml

Lines changed: 1 addition & 1 deletion
@@ -22,6 +22,6 @@ coverage:
     tests:
       target: 95%
       paths:
-        - "pvlib/test/.*"
+        - "pvlib/tests/.*"
 
 comment: off

azure-pipelines.yml

Lines changed: 1 addition & 1 deletion
@@ -155,7 +155,7 @@ jobs:
   - script: |
       pip install pytest pytest-cov pytest-mock pytest-timeout pytest-azurepipelines
       pip install -e .
-      pytest pvlib/test --junitxml=junit/test-results.xml --cov=pvlib --cov-report=xml --cov-report=html
+      pytest pvlib --junitxml=junit/test-results.xml --cov=pvlib --cov-report=xml --cov-report=html
     displayName: 'Test with pytest'
 
   - task: PublishTestResults@2

docs/sphinx/source/whatsnew/v0.7.1.rst

Lines changed: 3 additions & 0 deletions
@@ -26,6 +26,9 @@ Testing
 ~~~~~~~
 * Added single-year PSM3 API test for `iotools.get_psm3`.
 * Added tests for `iotools.parse_psm3` and `iotools.read_psm3`.
+* Change `pvlib/test` folder to `pvlib/tests` and reorganize tests into
+  subfolders, *e.g.*: created `pvlib/tests/iotools` (:pull:`859`)
+* replace `os.path` with `pathlib` and stringify path objects for Python<=3.5
 
 Documentation
 ~~~~~~~~~~~~~

pvlib/iotools/crn.py

Lines changed: 1 addition & 1 deletion
@@ -46,7 +46,7 @@ def read_crn(filename):
 
     Parameters
     ----------
-    filename: str
+    filename: str, path object, or file-like
         filepath or url to read for the fixed-width file.
 
     Returns

pvlib/iotools/epw.py

Lines changed: 2 additions & 2 deletions
@@ -217,7 +217,7 @@ def read_epw(filename, coerce_year=None):
        <https://energyplus.net/documentation>`_
     '''
 
-    if filename.startswith('http'):
+    if str(filename).startswith('http'):
         # Attempts to download online EPW file
         # See comments above for possible online sources
         request = Request(filename, headers={'User-Agent': (
@@ -228,7 +228,7 @@ def read_epw(filename, coerce_year=None):
         csvdata = io.StringIO(response.read().decode(errors='ignore'))
     else:
         # Assume it's accessible via the file system
-        csvdata = open(filename, 'r')
+        csvdata = open(str(filename), 'r')
     try:
         data, meta = parse_epw(csvdata, coerce_year)
     finally:

pvlib/iotools/midc.py

Lines changed: 3 additions & 3 deletions
@@ -249,12 +249,12 @@ def read_midc_raw_data_from_nrel(site, start, end, variable_map={},
     args = {'site': site,
             'begin': start.strftime('%Y%m%d'),
             'end': end.strftime('%Y%m%d')}
-    endpoint = 'https://midcdmz.nrel.gov/apps/data_api.pl?'
-    url = endpoint + '&'.join(['{}={}'.format(k, v) for k, v in args.items()])
+    url = 'https://midcdmz.nrel.gov/apps/data_api.pl'
+    # NOTE: just use requests.get(url, params=args) to build querystring
     # number of header columns and data columns do not always match,
     # so first parse the header to determine the number of data columns
     # to parse
-    csv_request = requests.get(url, timeout=timeout)
+    csv_request = requests.get(url, timeout=timeout, params=args)
     csv_request.raise_for_status()
     raw_csv = io.StringIO(csv_request.text)
     first_row = pd.read_csv(raw_csv, nrows=0)
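
Passing params to requests.get lets the library build and percent-encode the querystring, which is why the manual '&'.join construction above could be dropped. A quick way to inspect the resulting URL without sending a request (the values here mirror the commented-out test URL in test_midc.py):

import requests

args = {'site': 'UAT', 'begin': '20181018', 'end': '20181019'}
url = 'https://midcdmz.nrel.gov/apps/data_api.pl'

# PreparedRequest exposes the final URL that requests.get(url, params=args) would hit
prepared = requests.Request('GET', url, params=args).prepare()
print(prepared.url)
# -> https://midcdmz.nrel.gov/apps/data_api.pl?site=UAT&begin=20181018&end=20181019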

pvlib/iotools/psm3.py

Lines changed: 1 addition & 1 deletion
@@ -295,6 +295,6 @@ def read_psm3(filename):
     .. [2] `Standard Time Series Data File Format
        <https://rredc.nrel.gov/solar/old_data/nsrdb/2005-2012/wfcsv.pdf>`_
     """
-    with open(filename, 'r') as fbuf:
+    with open(str(filename), 'r') as fbuf:
         content = parse_psm3(fbuf)
     return content
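
read_psm3 opens the file itself, and the built-in open() only accepts path objects on Python 3.6+ (PEP 519); on 3.5 it raises TypeError, hence the str() coercion kept here. A minimal sketch, assuming it is run from the repository root so the test file exists:

from pathlib import Path

filename = Path('pvlib') / 'data' / 'test_read_psm3.csv'

# str() keeps this working on Python 3.5, where open() rejects path objects
with open(str(filename), 'r') as fbuf:
    header = fbuf.readline()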

pvlib/iotools/solrad.py

Lines changed: 1 addition & 1 deletion
@@ -81,7 +81,7 @@ def read_solrad(filename):
        Program. Bull. Amer. Meteor. Soc., 77, 2857-2864.
        :doi:`10.1175/1520-0477(1996)077<2857:TNISIS>2.0.CO;2`
     """
-    if 'msn' in filename:
+    if 'msn' in str(filename):
         names = MADISON_HEADERS
         widths = MADISON_WIDTHS
         dtypes = MADISON_DTYPES

pvlib/iotools/surfrad.py

Lines changed: 2 additions & 2 deletions
@@ -122,12 +122,12 @@ def read_surfrad(filename, map_variables=True):
     .. [2] NOAA SURFRAD Data Archive
        `SURFRAD Archive <ftp://aftp.cmdl.noaa.gov/data/radiation/surfrad/>`_
     """
-    if filename.startswith('ftp'):
+    if str(filename).startswith('ftp'):
         req = Request(filename)
         response = urlopen(req)
         file_buffer = io.StringIO(response.read().decode(errors='ignore'))
     else:
-        file_buffer = open(filename, 'r')
+        file_buffer = open(str(filename), 'r')
 
     # Read and parse the first two lines to build the metadata dict.
     station = file_buffer.readline()

pvlib/iotools/tmy.py

Lines changed: 3 additions & 3 deletions
@@ -160,7 +160,7 @@ def read_tmy3(filename=None, coerce_year=None, recolumn=True):
 
     head = ['USAF', 'Name', 'State', 'TZ', 'latitude', 'longitude', 'altitude']
 
-    if filename.startswith('http'):
+    if str(filename).startswith('http'):
         request = Request(filename, headers={'User-Agent': (
             'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_13_5) '
             'AppleWebKit/537.36 (KHTML, like Gecko) Chrome/67.0.3396.87 '
@@ -169,7 +169,7 @@ def read_tmy3(filename=None, coerce_year=None, recolumn=True):
         csvdata = io.StringIO(response.read().decode(errors='ignore'))
     else:
         # assume it's accessible via the file system
-        csvdata = open(filename, 'r')
+        csvdata = open(str(filename), 'r')
 
     # read in file metadata, advance buffer to second line
     firstline = csvdata.readline()
@@ -409,7 +409,7 @@ def read_tmy2(filename):
     columns = 'year,month,day,hour,ETR,ETRN,GHI,GHISource,GHIUncertainty,DNI,DNISource,DNIUncertainty,DHI,DHISource,DHIUncertainty,GHillum,GHillumSource,GHillumUncertainty,DNillum,DNillumSource,DNillumUncertainty,DHillum,DHillumSource,DHillumUncertainty,Zenithlum,ZenithlumSource,ZenithlumUncertainty,TotCld,TotCldSource,TotCldUncertainty,OpqCld,OpqCldSource,OpqCldUncertainty,DryBulb,DryBulbSource,DryBulbUncertainty,DewPoint,DewPointSource,DewPointUncertainty,RHum,RHumSource,RHumUncertainty,Pressure,PressureSource,PressureUncertainty,Wdir,WdirSource,WdirUncertainty,Wspd,WspdSource,WspdUncertainty,Hvis,HvisSource,HvisUncertainty,CeilHgt,CeilHgtSource,CeilHgtUncertainty,PresentWeather,Pwat,PwatSource,PwatUncertainty,AOD,AODSource,AODUncertainty,SnowDepth,SnowDepthSource,SnowDepthUncertainty,LastSnowfall,LastSnowfallSource,LastSnowfallUncertaint' # noqa: E501
     hdr_columns = 'WBAN,City,State,TZ,latitude,longitude,altitude'
 
-    tmy2, tmy2_meta = _read_tmy2(string, columns, hdr_columns, filename)
+    tmy2, tmy2_meta = _read_tmy2(string, columns, hdr_columns, str(filename))
 
     return tmy2, tmy2_meta

pvlib/test/conftest.py renamed to pvlib/tests/conftest.py

Lines changed: 3 additions & 5 deletions
@@ -1,5 +1,4 @@
-import inspect
-import os
+from pathlib import Path
 import platform
 
 import numpy as np
@@ -36,9 +35,8 @@ def inner():
 
 
 # commonly used directories in the tests
-test_dir = os.path.dirname(
-    os.path.abspath(inspect.getfile(inspect.currentframe())))
-data_dir = os.path.join(test_dir, os.pardir, 'data')
+TEST_DIR = Path(__file__).parent
+DATA_DIR = TEST_DIR.parent / 'data'
 
 
 platform_is_windows = platform.system() == 'Windows'
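
For comparison, here is the removed idiom side by side with its pathlib replacement, as a standalone sketch; both point at the same pvlib/tests and pvlib/data folders:

import inspect
import os
from pathlib import Path

# old idiom: two modules plus frame introspection just to locate this file
test_dir_old = os.path.dirname(
    os.path.abspath(inspect.getfile(inspect.currentframe())))
data_dir_old = os.path.join(test_dir_old, os.pardir, 'data')

# new idiom: __file__ already identifies the module, so .parent is enough
TEST_DIR = Path(__file__).parent
DATA_DIR = TEST_DIR.parent / 'data'

print(os.path.normpath(data_dir_old))
print(DATA_DIR)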

pvlib/test/test_crn.py renamed to pvlib/tests/iotools/test_crn.py

Lines changed: 6 additions & 9 deletions
@@ -1,21 +1,18 @@
-import inspect
-import os
-
 import pandas as pd
 from pandas.util.testing import assert_frame_equal
 import numpy as np
 from numpy import dtype, nan
-
+import pytest
 from pvlib.iotools import crn
+from conftest import DATA_DIR
 
 
-test_dir = os.path.dirname(
-    os.path.abspath(inspect.getfile(inspect.currentframe())))
-testfile = os.path.join(test_dir,
-                        '../data/CRNS0101-05-2019-AZ_Tucson_11_W.txt')
+@pytest.fixture
+def testfile():
+    return DATA_DIR / 'CRNS0101-05-2019-AZ_Tucson_11_W.txt'
 
 
-def test_read_crn():
+def test_read_crn(testfile):
     columns = [
         'WBANNO', 'UTC_DATE', 'UTC_TIME', 'LST_DATE', 'LST_TIME', 'CRX_VN',
         'longitude', 'latitude', 'temp_air', 'PRECIPITATION', 'ghi',

pvlib/test/test_ecmwf_macc.py renamed to pvlib/tests/iotools/test_ecmwf_macc.py

Lines changed: 14 additions & 11 deletions
@@ -5,12 +5,10 @@
 import os
 import datetime
 import numpy as np
-from conftest import requires_netCDF4
+import pytest
+from conftest import requires_netCDF4, DATA_DIR
 from pvlib.iotools import ecmwf_macc
 
-DIRNAME = os.path.dirname(__file__)
-PROJNAME = os.path.dirname(DIRNAME)
-DATADIR = os.path.join(PROJNAME, 'data')
 TESTDATA = 'aod550_tcwv_20121101_test.nc'
 
 # for creating test data
@@ -21,19 +19,24 @@
 LAT_BND = (90, -90)
 
 
+@pytest.fixture
+def expected_test_data():
+    return DATA_DIR / TESTDATA
+
+
 @requires_netCDF4
-def test_get_nearest_indices():
+def test_get_nearest_indices(expected_test_data):
     """Test getting indices given latitude, longitude from ECMWF_MACC data."""
-    data = ecmwf_macc.ECMWF_MACC(os.path.join(DATADIR, TESTDATA))
+    data = ecmwf_macc.ECMWF_MACC(expected_test_data)
     ilat, ilon = data.get_nearest_indices(38, -122)
     assert ilat == 17
     assert ilon == 79
 
 
 @requires_netCDF4
-def test_interp_data():
+def test_interp_data(expected_test_data):
     """Test interpolating UTC time from ECMWF_MACC data."""
-    data = ecmwf_macc.ECMWF_MACC(os.path.join(DATADIR, TESTDATA))
+    data = ecmwf_macc.ECMWF_MACC(expected_test_data)
     test9am = data.interp_data(
         38, -122, datetime.datetime(2012, 11, 1, 9, 0, 0), 'aod550')
     assert np.isclose(test9am, data.data.variables['aod550'][2, 17, 79])
@@ -47,10 +50,10 @@ def test_interp_data():
 
 
 @requires_netCDF4
-def test_read_ecmwf_macc():
+def test_read_ecmwf_macc(expected_test_data):
     """Test reading ECMWF_MACC data from netCDF4 file."""
     data = ecmwf_macc.read_ecmwf_macc(
-        os.path.join(DATADIR, TESTDATA), 38, -122)
+        expected_test_data, 38, -122)
     expected_times = [
         1351738800, 1351749600, 1351760400, 1351771200, 1351782000, 1351792800,
         1351803600, 1351814400]
@@ -67,7 +70,7 @@ def test_read_ecmwf_macc():
     datetimes = (datetime.datetime(2012, 11, 1, 9, 0, 0),
                  datetime.datetime(2012, 11, 1, 12, 0, 0))
     data_9am_12pm = ecmwf_macc.read_ecmwf_macc(
-        os.path.join(DATADIR, TESTDATA), 38, -122, datetimes)
+        expected_test_data, 38, -122, datetimes)
     assert np.allclose(data_9am_12pm.aod550.values, expected_aod[2:4])
     assert np.allclose(data_9am_12pm.tcwv.values, expected_tcwv[2:4])
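
The module-level DATADIR/TESTDATA constants become a pytest fixture here; pytest injects the fixture's return value into any test that lists the fixture name as an argument. A minimal, self-contained sketch of that pattern (the DATA_DIR line stands in for importing DATA_DIR from conftest, and the test itself is purely illustrative):

import pytest
from pathlib import Path

DATA_DIR = Path(__file__).parents[2] / 'data'  # stand-in for DATA_DIR from conftest
TESTDATA = 'aod550_tcwv_20121101_test.nc'

@pytest.fixture
def expected_test_data():
    # evaluated once per test that requests it, matched by argument name
    return DATA_DIR / TESTDATA

def test_fixture_returns_a_path(expected_test_data):  # hypothetical test
    assert expected_test_data.name == TESTDATA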

pvlib/test/test_epw.py renamed to pvlib/tests/iotools/test_epw.py

Lines changed: 2 additions & 4 deletions
@@ -1,11 +1,9 @@
-import os
-
 from pandas.util.testing import network
 
 from pvlib.iotools import epw
-from conftest import data_dir
+from conftest import DATA_DIR
 
-epw_testfile = os.path.join(data_dir, 'NLD_Amsterdam062400_IWEC.epw')
+epw_testfile = DATA_DIR / 'NLD_Amsterdam062400_IWEC.epw'
 
 
 def test_read_epw():

pvlib/test/test_midc.py renamed to pvlib/tests/iotools/test_midc.py

Lines changed: 14 additions & 16 deletions
@@ -1,12 +1,10 @@
-import inspect
-import os
-
 import pandas as pd
 from pandas.util.testing import network
 import pytest
 import pytz
 
 from pvlib.iotools import midc
+from conftest import DATA_DIR
 
 
 @pytest.fixture
@@ -20,18 +18,18 @@ def test_mapping():
     }
 
 
-test_dir = os.path.dirname(
-    os.path.abspath(inspect.getfile(inspect.currentframe())))
-midc_testfile = os.path.join(test_dir, '../data/midc_20181014.txt')
-midc_raw_testfile = os.path.join(test_dir, '../data/midc_raw_20181018.txt')
-midc_raw_short_header_testfile = os.path.join(
-    test_dir, '../data/midc_raw_short_header_20191115.txt')
-midc_network_testfile = ('https://midcdmz.nrel.gov/apps/data_api.pl'
-                         '?site=UAT&begin=20181018&end=20181019')
+MIDC_TESTFILE = DATA_DIR / 'midc_20181014.txt'
+MIDC_RAW_TESTFILE = DATA_DIR / 'midc_raw_20181018.txt'
+MIDC_RAW_SHORT_HEADER_TESTFILE = (
+    DATA_DIR / 'midc_raw_short_header_20191115.txt')
+
+# TODO: not used, remove?
+# midc_network_testfile = ('https://midcdmz.nrel.gov/apps/data_api.pl'
+#                          '?site=UAT&begin=20181018&end=20181019')
 
 
 def test_midc_format_index():
-    data = pd.read_csv(midc_testfile)
+    data = pd.read_csv(MIDC_TESTFILE)
     data = midc.format_index(data)
     start = pd.Timestamp("20181014 00:00")
     start = start.tz_localize("MST")
@@ -43,14 +41,14 @@ def test_midc_format_index():
 
 
 def test_midc_format_index_tz_conversion():
-    data = pd.read_csv(midc_testfile)
+    data = pd.read_csv(MIDC_TESTFILE)
     data = data.rename(columns={'MST': 'PST'})
     data = midc.format_index(data)
     assert data.index[0].tz == pytz.timezone('Etc/GMT+8')
 
 
 def test_midc_format_index_raw():
-    data = pd.read_csv(midc_raw_testfile)
+    data = pd.read_csv(MIDC_RAW_TESTFILE)
     data = midc.format_index_raw(data)
     start = pd.Timestamp('20181018 00:00')
     start = start.tz_localize('MST')
@@ -61,7 +59,7 @@ def test_midc_format_index_raw():
 
 
 def test_read_midc_var_mapping_as_arg(test_mapping):
-    data = midc.read_midc(midc_testfile, variable_map=test_mapping)
+    data = midc.read_midc(MIDC_TESTFILE, variable_map=test_mapping)
     assert 'ghi' in data.columns
     assert 'temp_air' in data.columns
 
@@ -79,7 +77,7 @@ def test_read_midc_raw_data_from_nrel():
 
 def test_read_midc_header_length_mismatch(mocker):
     mock_data = mocker.MagicMock()
-    with open(midc_raw_short_header_testfile, 'r') as f:
+    with MIDC_RAW_SHORT_HEADER_TESTFILE.open() as f:
         mock_data.text = f.read()
     mocker.patch('pvlib.iotools.midc.requests.get',
                  return_value=mock_data)
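
Where the test controls the call site, no str() coercion is needed at all: pathlib's own Path.open() predates PEP 519 and mirrors the open() builtin, which is what MIDC_RAW_SHORT_HEADER_TESTFILE.open() uses above. A tiny sketch, assuming a local copy of the file exists:

from pathlib import Path

p = Path('midc_raw_short_header_20191115.txt')  # hypothetical local copy

# Path.open() works on every Python that ships pathlib, no str() needed
with p.open() as f:
    text = f.read()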

pvlib/test/test_psm3.py renamed to pvlib/tests/iotools/test_psm3.py

Lines changed: 5 additions & 9 deletions
@@ -2,21 +2,17 @@
 test iotools for PSM3
 """
 
-import os
 from pvlib.iotools import psm3
-from conftest import needs_pandas_0_22
+from conftest import needs_pandas_0_22, DATA_DIR
 import numpy as np
 import pandas as pd
 import pytest
 from requests import HTTPError
 from io import StringIO
 
-BASEDIR = os.path.abspath(os.path.dirname(__file__))
-PROJDIR = os.path.dirname(BASEDIR)
-DATADIR = os.path.join(PROJDIR, 'data')
-TMY_TEST_DATA = os.path.join(DATADIR, 'test_psm3_tmy-2017.csv')
-YEAR_TEST_DATA = os.path.join(DATADIR, 'test_psm3_2017.csv')
-MANUAL_TEST_DATA = os.path.join(DATADIR, 'test_read_psm3.csv')
+TMY_TEST_DATA = DATA_DIR / 'test_psm3_tmy-2017.csv'
+YEAR_TEST_DATA = DATA_DIR / 'test_psm3_2017.csv'
+MANUAL_TEST_DATA = DATA_DIR / 'test_read_psm3.csv'
 LATITUDE, LONGITUDE = 40.5137, -108.5449
 HEADER_FIELDS = [
     'Source', 'Location ID', 'City', 'State', 'Country', 'Latitude',
@@ -100,7 +96,7 @@ def test_get_psm3_singleyear():
 @pytest.fixture
 def io_input(request):
     """file-like object for parse_psm3"""
-    with open(MANUAL_TEST_DATA, 'r') as f:
+    with MANUAL_TEST_DATA.open() as f:
         data = f.read()
     obj = StringIO(data)
     return obj
