Skip to content

Linke turbidity factor fixes #264

New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Merged
merged 8 commits into from
Nov 18, 2016
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
12 changes: 12 additions & 0 deletions docs/sphinx/source/whatsnew/v0.4.2.txt
Original file line number Diff line number Diff line change
Expand Up @@ -13,6 +13,9 @@ Bug fixes
* PVSystem.pvwatts_ac could not use the eta_inv_ref kwarg and
PVSystem.pvwatts_dc could not use the temp_ref kwarg. Fixed. (:issue:`252`)
* Fixed typo in ModelChain.infer_spectral_model error message. (:issue:`251`)
* Fixed Linke turbidity factor out of bounds error at 90-degree latitude or at
180-degree longitude (:issue:`262`)
* Fixed Linke turbidity factor grid spacing and centers (:issue:`263`)


API Changes
Expand Down Expand Up @@ -41,6 +44,14 @@ Enhancements
* Added name attribute to ModelChain and PVSystem. (:issue:`254`)
* Restructured API section of the documentation so that there are
separate pages for each function, class, or method. (:issue:`258`)
* Improved Linke turbidity factor time interpolation with Python `calendar`
month days and leap years (:issue:`265`)


Other
~~~~~
* Typical modeling results could change by ~1%, depending on location, if they
  depend on the turbidity table.


Code Contributors
Expand All @@ -49,3 +60,4 @@ Code Contributors
* Uwe Krien
* Will Holmgren
* Volker Beutner
* Mark Mikofski
89 changes: 68 additions & 21 deletions pvlib/clearsky.py
Original file line number Diff line number Diff line change
Expand Up @@ -7,6 +7,7 @@

import os
from collections import OrderedDict
import calendar

import numpy as np
import pandas as pd
Expand Down Expand Up @@ -186,6 +187,12 @@ def lookup_linke_turbidity(time, latitude, longitude, filepath=None,
# so divide the number from the file by 20 to get the
# turbidity.

# The nodes of the grid are 5' (1/12=0.0833[arcdeg]) apart.
# From Section 8 of Aerosol optical depth and Linke turbidity climatology
# http://www.meteonorm.com/images/uploads/downloads/ieashc36_report_TL_AOD_climatologies.pdf
# 1st row: 89.9583 S, 2nd row: 89.875 S
# 1st column: 179.9583 W, 2nd column: 179.875 W

try:
import scipy.io
except ImportError:
Expand All @@ -201,27 +208,37 @@ def lookup_linke_turbidity(time, latitude, longitude, filepath=None,
linke_turbidity_table = mat['LinkeTurbidity']

latitude_index = (
np.around(_linearly_scale(latitude, 90, -90, 1, 2160))
np.around(_linearly_scale(latitude, 90, -90, 0, 2160))
.astype(np.int64))
longitude_index = (
np.around(_linearly_scale(longitude, -180, 180, 1, 4320))
np.around(_linearly_scale(longitude, -180, 180, 0, 4320))
.astype(np.int64))

g = linke_turbidity_table[latitude_index][longitude_index]

if interp_turbidity:
# Data covers 1 year.
# Assume that data corresponds to the value at
# the middle of each month.
# This means that we need to add previous Dec and next Jan
# to the array so that the interpolation will work for
# Data covers 1 year. Assume that data corresponds to the value at the
# middle of each month. This means that we need to add previous Dec and
# next Jan to the array so that the interpolation will work for
# Jan 1 - Jan 15 and Dec 16 - Dec 31.
# Then we map the month value to the day of year value.
# This is approximate and could be made more accurate.
g2 = np.concatenate([[g[-1]], g, [g[0]]])
days = np.linspace(-15, 380, num=14)
linke_turbidity = pd.Series(np.interp(time.dayofyear, days, g2),
index=time)
# Then we map the month value to the day of year value.
isleap = [calendar.isleap(t.year) for t in time]
if all(isleap):
days = _calendar_month_middles(2016) # all years are leap
elif not any(isleap):
days = _calendar_month_middles(2015) # none of the years are leap
else:
days = None # some of the years are leap years and some are not
if days is None:
# Loop over different years; may be slow for large time series
linke_turbidity = pd.Series([
np.interp(t.dayofyear, _calendar_month_middles(t.year), g2)
for t in time
], index=time)
else:
linke_turbidity = pd.Series(np.interp(time.dayofyear, days, g2),
index=time)
else:
linke_turbidity = pd.DataFrame(time.month, index=time)
# apply monthly data
Expand All @@ -232,6 +249,45 @@ def lookup_linke_turbidity(time, latitude, longitude, filepath=None,
return linke_turbidity


def _calendar_month_middles(year):
"""list of middle day of each month, used by Linke turbidity lookup"""
# remove mdays[0] since January starts at mdays[1]
# make local copy of mdays since we need to change February for leap years
mdays = np.array(calendar.mdays[1:])
ydays = 365
# handle leap years
if calendar.isleap(year):
mdays[1] = mdays[1] + 1
ydays = 366
return np.concatenate([[-calendar.mdays[-1] / 2.0], # Dec last year
np.cumsum(mdays) - np.array(mdays) / 2., # this year
[ydays + calendar.mdays[1] / 2.0]]) # Jan next year


def _linearly_scale(inputmatrix, inputmin, inputmax, outputmin, outputmax):
"""linearly scale input to output, used by Linke turbidity lookup"""
inputrange = inputmax - inputmin
outputrange = outputmax - outputmin
delta = outputrange/inputrange # number of indices per input unit
inputmin = inputmin + 1.0 / delta / 2.0 # shift to center of index
outputmax = outputmax - 1 # shift index to zero indexing
outputmatrix = (inputmatrix - inputmin) * delta + outputmin
err = IndexError('Input, %g, is out of range (%g, %g).' %
(inputmatrix, inputmax - inputrange, inputmax))
# round down if input is within half an index or else raise index error
if outputmatrix > outputmax:
if np.around(outputmatrix - outputmax, 1) <= 0.5:
outputmatrix = outputmax
else:
raise err
elif outputmatrix < outputmin:
if np.around(outputmin - outputmatrix, 1) <= 0.5:
outputmatrix = outputmin
else:
raise err
return outputmatrix


def haurwitz(apparent_zenith):
'''
Determine clear sky GHI from Haurwitz model.
Expand Down Expand Up @@ -281,15 +337,6 @@ def haurwitz(apparent_zenith):
return df_out


def _linearly_scale(inputmatrix, inputmin, inputmax, outputmin, outputmax):
""" used by linke turbidity lookup function """

inputrange = inputmax - inputmin
outputrange = outputmax - outputmin
outputmatrix = (inputmatrix-inputmin) * outputrange/inputrange + outputmin
return outputmatrix


def simplified_solis(apparent_elevation, aod700=0.1, precipitable_water=1.,
pressure=101325., dni_extra=1364.):
"""
Expand Down
82 changes: 73 additions & 9 deletions pvlib/test/test_clearsky.py
Original file line number Diff line number Diff line change
Expand Up @@ -160,9 +160,23 @@ def test_lookup_linke_turbidity():
freq='12h', tz='America/Phoenix')
# expect same value on 2014-06-24 0000 and 1200, and
# diff value on 2014-06-25
expected = pd.Series(np.array([3.10126582, 3.10126582, 3.11443038]),
index=times)
out = clearsky.lookup_linke_turbidity(times, 32.2, -111)
expected = pd.Series(
np.array([3.11803278689, 3.11803278689, 3.13114754098]), index=times
)
out = clearsky.lookup_linke_turbidity(times, 32.125, -110.875)
assert_series_equal(expected, out)


@requires_scipy
def test_lookup_linke_turbidity_leapyear():
    # 12-hour samples spanning 2016-06-24 to 2016-06-25 in a leap year
    times = pd.date_range(start='2016-06-24', end='2016-06-25',
                          freq='12h', tz='America/Phoenix')
    # both samples on 2016-06-24 interpolate to the same turbidity;
    # the 2016-06-25 sample falls on the next interpolated value
    expected_values = [3.11803278689, 3.11803278689, 3.13114754098]
    expected = pd.Series(np.array(expected_values), index=times)
    actual = clearsky.lookup_linke_turbidity(times, 32.125, -110.875)
    assert_series_equal(expected, actual)


Expand All @@ -172,7 +186,7 @@ def test_lookup_linke_turbidity_nointerp():
freq='12h', tz='America/Phoenix')
# expect same value for all days
expected = pd.Series(np.array([3., 3., 3.]), index=times)
out = clearsky.lookup_linke_turbidity(times, 32.2, -111,
out = clearsky.lookup_linke_turbidity(times, 32.125, -110.875,
interp_turbidity=False)
assert_series_equal(expected, out)

Expand All @@ -181,9 +195,21 @@ def test_lookup_linke_turbidity_nointerp():
def test_lookup_linke_turbidity_months():
    # month-end timestamps spanning April through June 2014
    times = pd.date_range(start='2014-04-01', end='2014-07-01',
                          freq='1M', tz='America/Phoenix')
    # NOTE(review): the next two statements are immediately overwritten
    # below — they look like a diff/merge artifact (expectations for the
    # old grid registration kept alongside the new ones); only the later
    # assignments participate in the assertion.
    expected = pd.Series(np.array([2.8943038, 2.97316456, 3.18025316]),
                         index=times)
    out = clearsky.lookup_linke_turbidity(times, 32.2, -111)
    # expectations for the corrected grid (coordinates at cell centers)
    expected = pd.Series(
        np.array([2.89918032787, 2.97540983607, 3.19672131148]), index=times
    )
    out = clearsky.lookup_linke_turbidity(times, 32.125, -110.875)
    assert_series_equal(expected, out)


@requires_scipy
def test_lookup_linke_turbidity_months_leapyear():
    # month-end timestamps spanning April through June of leap year 2016
    times = pd.date_range(start='2016-04-01', end='2016-07-01',
                          freq='1M', tz='America/Phoenix')
    expected_values = [2.89918032787, 2.97540983607, 3.19672131148]
    expected = pd.Series(np.array(expected_values), index=times)
    actual = clearsky.lookup_linke_turbidity(times, 32.125, -110.875)
    assert_series_equal(expected, actual)


Expand All @@ -192,13 +218,13 @@ def test_lookup_linke_turbidity_nointerp_months():
times = pd.date_range(start='2014-04-10', end='2014-07-10',
freq='1M', tz='America/Phoenix')
expected = pd.Series(np.array([2.85, 2.95, 3.]), index=times)
out = clearsky.lookup_linke_turbidity(times, 32.2, -111,
out = clearsky.lookup_linke_turbidity(times, 32.125, -110.875,
interp_turbidity=False)
assert_series_equal(expected, out)
# changing the dates shouldn't matter if interp=False
times = pd.date_range(start='2014-04-05', end='2014-07-05',
freq='1M', tz='America/Phoenix')
out = clearsky.lookup_linke_turbidity(times, 32.2, -111,
out = clearsky.lookup_linke_turbidity(times, 32.125, -110.875,
interp_turbidity=False)
assert_series_equal(expected, out)

Expand Down Expand Up @@ -440,3 +466,41 @@ def test_simplified_solis_nans_series():
precipitable_water, pressure, dni_extra)

assert_frame_equal(expected, out)


@requires_scipy
def test_linke_turbidity_corners():
    """Test Linke turbidity corners out of bounds."""
    months = pd.DatetimeIndex('%d/1/2016' % (m + 1) for m in range(12))

    def monthly_lt_nointerp(lat, lon, time=months):
        """monthly Linke turbidity factor without time interpolation"""
        return clearsky.lookup_linke_turbidity(
            time, lat, lon, interp_turbidity=False
        )

    # expected monthly values at each of the four corners of the grid
    corner_cases = [
        # Northwest
        ((90, -180),
         [1.9, 1.9, 1.9, 2.0, 2.05, 2.05, 2.1, 2.1, 2.0, 1.95, 1.9, 1.9]),
        # Southwest
        ((-90, -180),
         [1.35, 1.3, 1.45, 1.35, 1.35, 1.35, 1.35, 1.35, 1.35, 1.4, 1.4,
          1.3]),
        # Northeast
        ((90, 180),
         [1.9, 1.9, 1.9, 2.0, 2.05, 2.05, 2.1, 2.1, 2.0, 1.95, 1.9, 1.9]),
        # Southeast
        ((-90, 180),
         [1.35, 1.7, 1.35, 1.35, 1.35, 1.35, 1.35, 1.35, 1.35, 1.35, 1.35,
          1.7]),
    ]
    for (lat, lon), expected in corner_cases:
        assert np.allclose(monthly_lt_nointerp(lat, lon), expected)
    # coordinates just past any edge of the grid raise IndexError
    out_of_range = [
        (91, -122),    # exceeds max latitude
        (38.2, 181),   # exceeds max longitude
        (-91, -122),   # exceeds min latitude
        (38.2, -181),  # exceeds min longitude
    ]
    for lat, lon in out_of_range:
        with pytest.raises(IndexError):
            monthly_lt_nointerp(lat, lon)
42 changes: 31 additions & 11 deletions pvlib/test/test_location.py
Original file line number Diff line number Diff line change
Expand Up @@ -42,12 +42,26 @@ def test_location_invalid_tz_type():

def test_location_print_all():
    # Location built from positional args; __str__ should list every field
    tus = Location(32.2, -111, 'US/Arizona', 700, 'Tucson')
    # NOTE(review): expected_str is assigned twice in a row — the
    # single-line literal below is dead code (a diff/merge artifact);
    # only the join()-built value participates in the assertion.
    expected_str = 'Location: \n name: Tucson\n latitude: 32.2\n longitude: -111\n altitude: 700\n tz: US/Arizona'
    expected_str = '\n'.join([
        'Location: ',
        ' name: Tucson',
        ' latitude: 32.2',
        ' longitude: -111',
        ' altitude: 700',
        ' tz: US/Arizona'
    ])
    assert tus.__str__() == expected_str

def test_location_print_pytz():
    # same as test_location_print_all but passing a pytz tzinfo object
    # (aztz) instead of a string; __str__ output should be identical
    tus = Location(32.2, -111, aztz, 700, 'Tucson')
    # NOTE(review): expected_str is assigned twice in a row — the
    # single-line literal below is dead code (a diff/merge artifact);
    # only the join()-built value participates in the assertion.
    expected_str = 'Location: \n name: Tucson\n latitude: 32.2\n longitude: -111\n altitude: 700\n tz: US/Arizona'
    expected_str = '\n'.join([
        'Location: ',
        ' name: Tucson',
        ' latitude: 32.2',
        ' longitude: -111',
        ' altitude: 700',
        ' tz: US/Arizona'
    ])
    assert tus.__str__() == expected_str


Expand All @@ -58,14 +72,13 @@ def test_get_clearsky():
end='20160101T1800-0700',
freq='3H')
clearsky = tus.get_clearsky(times)
expected = pd.DataFrame(data=np.
array([[ 0. , 0. , 0. ],
[ 258.60422702, 761.57329257, 50.1235982 ],
[ 611.96347869, 956.95353414, 70.8232806 ],
[ 415.10904044, 878.52649603, 59.07820922],
[ 0. , 0. , 0. ]]),
columns=['ghi', 'dni', 'dhi'],
index=times)
expected = pd.DataFrame(data=np.array([
( 0.0, 0.0, 0.0),
(262.77734276159333, 791.1972825869296, 46.18714900637892),
(616.764693938387, 974.9610353623959, 65.44157429054201),
(419.6512657626518, 901.6234995035793, 54.26016437839348),
( 0.0, 0.0, 0.0)],
dtype=[('ghi', '<f8'), ('dni', '<f8'), ('dhi', '<f8')]), index=times)
assert_frame_equal(expected, clearsky, check_less_precise=2)


Expand Down Expand Up @@ -284,5 +297,12 @@ def test_get_airmass_valueerror():
def test_Location___repr__():
    # __repr__ should produce the same multi-line summary as __str__
    tus = Location(32.2, -111, 'US/Arizona', 700, 'Tucson')

    # NOTE(review): expected is assigned twice in a row — the single-line
    # literal below is dead code (a diff/merge artifact); only the
    # join()-built value participates in the assertion.
    expected = 'Location: \n name: Tucson\n latitude: 32.2\n longitude: -111\n altitude: 700\n tz: US/Arizona'
    expected = '\n'.join([
        'Location: ',
        ' name: Tucson',
        ' latitude: 32.2',
        ' longitude: -111',
        ' altitude: 700',
        ' tz: US/Arizona'
    ])
    assert tus.__repr__() == expected
Loading