Skip to content

Correct examples in docstrings #703

New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Merged
merged 5 commits into from
May 4, 2022
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
3 changes: 2 additions & 1 deletion xrspatial/curvature.py
Original file line number Diff line number Diff line change
Expand Up @@ -121,6 +121,7 @@ def curvature(agg: xr.DataArray,
.. sourcecode:: python

>>> import numpy as np
>>> import dask.array as da
>>> import xarray as xr
>>> from xrspatial import curvature
>>> flat_data = np.zeros((5, 5), dtype=np.float32)
Expand Down Expand Up @@ -187,7 +188,7 @@ def curvature(agg: xr.DataArray,
cupy.asarray(concave_data),
attrs={'res': (10, 10)}, name='concave_cupy_raster')
>>> concave_curv = curvature(concave_raster)
>>> print(type(concave_curv))
>>> print(type(concave_curv.data))
<class 'cupy.core.core.ndarray'>
>>> print(concave_curv)
<xarray.DataArray 'curvature' (dim_0: 5, dim_1: 5)>
Expand Down
28 changes: 15 additions & 13 deletions xrspatial/focal.py
Original file line number Diff line number Diff line change
Expand Up @@ -193,11 +193,11 @@ def mean(agg, passes=1, excludes=[np.nan], name='mean'):
<class 'cupy.core.core.ndarray'>
>>> print(mean_cupy)
<xarray.DataArray 'mean' (dim_0: 5, dim_1: 5)>
array([[0.47928994, 0.47928994, 0.47928994, 0.47928994, 0.47928994],
[0.47928994, 0.47928994, 0.47928994, 0.47928994, 0.47928994],
[0.47928994, 0.47928994, 0.47928994, 0.47928994, 0.47928994],
[0.47928994, 0.47928994, 0.47928994, 0.47928994, 0.47928994],
[0.47928994, 0.47928994, 0.47928994, 0.47928994, 0.47928994]])
array([[0.47928995, 0.47928995, 0.47928995, 0.47928995, 0.47928995],
[0.47928995, 0.47928995, 0.47928995, 0.47928995, 0.47928995],
[0.47928995, 0.47928995, 0.47928995, 0.47928995, 0.47928995],
[0.47928995, 0.47928995, 0.47928995, 0.47928995, 0.47928995],
[0.47928995, 0.47928995, 0.47928995, 0.47928995, 0.47928995]])
Dimensions without coordinates: dim_0, dim_1
"""

Expand Down Expand Up @@ -358,6 +358,7 @@ def apply(raster, kernel, func=_calc_mean, name='focal_apply'):
[0., 1., 0.]])
>>> # apply kernel mean by default
>>> apply_mean_agg = apply(raster, kernel)
>>> apply_mean_agg
<xarray.DataArray 'focal_apply' (y: 4, x: 5)>
array([[ 2. , 2.25 , 3.25 , 4.25 , 5.33333333],
[ 5.25 , 6. , 7. , 8. , 8.75 ],
Expand Down Expand Up @@ -387,13 +388,14 @@ def apply(raster, kernel, func=_calc_mean, name='focal_apply'):
])
>>> @ngjit
>>> def func(kernel_data):
>>> weight = np.array([
[0, 0.5, 0],
[0, 1, 0.5],
[0, 0.5, 0],
])
>>> return np.nansum(kernel_data * weight)

... weight = np.array([
... [0, 0.5, 0],
... [0, 1, 0.5],
... [0, 0.5, 0],
... ])
... return np.nansum(kernel_data * weight)

>>> import dask.array as da
>>> data_da = da.from_array(np.ones((6, 4), dtype=np.float64), chunks=(3, 2))
>>> raster_da = xr.DataArray(data_da, dims=['y', 'x'], name='raster_da')
>>> print(raster_da)
Expand Down Expand Up @@ -713,7 +715,7 @@ def hotspots(raster, kernel):
>>> import numpy as np
>>> import xarray as xr
>>> from xrspatial.convolution import custom_kernel
>>> kernel = custom_kernel(np.array([1, 1, 0]))
>>> kernel = custom_kernel(np.array([[1, 1, 0]]))
>>> data = np.array([
... [0, 1000, 1000, 0, 0, 0],
... [0, 0, 0, -1000, -1000, 0],
Expand Down
6 changes: 3 additions & 3 deletions xrspatial/proximity.py
Original file line number Diff line number Diff line change
Expand Up @@ -112,14 +112,14 @@ def manhattan_distance(x1: float, x2: float, y1: float, y2: float) -> float:
>>> from xrspatial import manhattan_distance
>>> point_a = (142.32, 23.23)
>>> point_b = (312.54, 432.01)
>>> # Calculate Euclidean Distance
>>> # Calculate Manhattan Distance
>>> dist = manhattan_distance(
... point_a[0],
... point_b[0],
... point_a[1],
... point_b[1])
>>> print(dist)
196075.9368
579.0
"""

x = x1 - x2
Expand Down Expand Up @@ -165,7 +165,7 @@ def great_circle_distance(
>>> from xrspatial import great_circle_distance
>>> point_a = (123.2, 82.32)
>>> point_b = (178.0, 65.09)
>>> # Calculate Euclidean Distance
>>> # Calculate Great Circle Distance
>>> dist = great_circle_distance(
... point_a[0],
... point_b[0],
Expand Down
2 changes: 1 addition & 1 deletion xrspatial/viewshed.py
Original file line number Diff line number Diff line change
Expand Up @@ -1637,7 +1637,7 @@ def viewshed(raster: xarray.DataArray,
array([[ 0, 0, 1, 0, 0],
[ 1, 3, 0, 0, 0],
[10, 2, 5, 2, -1],
[20, 1, 2, 9, 0]])
[11, 1, 2, 9, 0]])
Coordinates:
* y (y) float64 1.0 2.0 3.0 4.0
* x (x) float64 1.0 2.0 3.0 4.0 5.0
Expand Down
37 changes: 14 additions & 23 deletions xrspatial/zonal.py
Original file line number Diff line number Diff line change
Expand Up @@ -517,9 +517,8 @@ def stats(
stats() works with Dask with NumPy backed DataArray
>>> import dask.array as da
>>> values_dask = xr.DataArray(da.from_array(values, chunks=(3, 3)))
>>> zones_dask = xr.DataArray(da.from_array(zones, chunks=(3, 3)))

>>> values_dask = xr.DataArray(da.from_array(values_data, chunks=(3, 3)))
>>> zones_dask = xr.DataArray(da.from_array(zones_data, chunks=(3, 3)))
>>> # Calculate Stats with dask backed xarray DataArrays
>>> dask_stats_df = stats(zones=zones_dask, values=values_dask)
>>> print(type(dask_stats_df))
Expand All @@ -530,20 +529,6 @@ def stats(
1 10 27.0 49 5 675 14.21267 202.0 25
2 20 72.0 94 50 1800 14.21267 202.0 25
3 30 77.0 99 55 1925 14.21267 202.0 25

>>> # Custom Stats with dask backed xarray DataArrays
>>> dask_custom_stats = {'double_sum': lambda val: val.sum()*2}
>>> dask_custom_stats_df = stats(
...     zones=zones_dask, values=values_dask, stats_funcs=dask_custom_stats
... )
>>> print(type(dask_custom_stats_df))
<class 'dask.dataframe.core.DataFrame'>
>>> print(dask_custom_stats_df.compute())
zone double_sum
0 0 1100
1 10 1350
2 20 3600
3 30 3850
"""

validate_arrays(zones, values)
Expand Down Expand Up @@ -968,10 +953,10 @@ def crosstab(
.. sourcecode:: python

>>> import dask.array as da
>>> values_dask = xr.DataArray(da.from_array(values, chunks=(3, 3)))
>>> zones_dask = xr.DataArray(da.from_array(zones, chunks=(3, 3)))
>>> df = crosstab(zones=zones_dask, values=values_dask)
>>> print(df)
>>> values_dask = xr.DataArray(da.from_array(values_data, chunks=(3, 3)))
>>> zones_dask = xr.DataArray(da.from_array(zones_data, chunks=(3, 3)))
>>> dask_df = crosstab(zones=zones_dask, values=values_dask)
>>> print(dask_df)
Dask DataFrame Structure:
zone 0.0 10.0 20.0 30.0 40.0 50.0
npartitions=5
Expand All @@ -981,7 +966,7 @@ def crosstab(
4 ... ... ... ... ... ... ...
5 ... ... ... ... ... ... ...
Dask Name: astype, 1186 tasks
>>> print(dask_df.compute)
>>> print(dask_df.compute())
zone 0.0 10.0 20.0 30.0 40.0 50.0
0 0 1 0 0 0 0 0
1 1 3 0 0 0 0 0
Expand Down Expand Up @@ -1120,6 +1105,7 @@ def apply(

>>> import numpy as np
>>> import xarray as xr
>>> from xrspatial.zonal import apply
>>> zones_val = np.array([
[1, 1, 0, 2],
[0, 2, 1, 2]])
Expand All @@ -1132,7 +1118,7 @@ def apply(
>>> apply(zones, agg, func)
>>> agg
array([[0, 0, 5, 0],
[3, 0, 0, 0]])
[3, np.nan, 0, 0]])
"""
if not isinstance(zones, xr.DataArray):
raise TypeError("zones must be instance of DataArray")
Expand Down Expand Up @@ -1276,6 +1262,7 @@ def suggest_zonal_canvas(
>>> from spatialpandas import GeoDataFrame
>>> import geopandas as gpd
>>> import datashader as ds
>>> from xrspatial.zonal import suggest_zonal_canvas

>>> df = gpd.read_file(gpd.datasets.get_path('naturalearth_lowres'))
>>> df = df.to_crs("EPSG:3857")
Expand All @@ -1298,12 +1285,16 @@ def suggest_zonal_canvas(
crs='Mercator',
min_pixels=min_pixels,
)
>>> height, width
(1537, 2376)
>>> cvs = ds.Canvas(x_range=x_range, y_range=y_range,
>>> plot_height=height, plot_width=width)
>>> spatial_df = GeoDataFrame(df, geometry='geometry')
>>> agg = cvs.polygons(spatial_df, 'geometry', agg=ds.max('id'))
>>> min_poly_id = df.area.argmin()
>>> actual_min_pixels = len(np.where(agg.data==min_poly_id)[0])
>>> actual_min_pixels
22
"""
full_xrange, full_yrange = get_full_extent(crs)
xmin, xmax = full_xrange
Expand Down