Skip to content

Commit da192a7

Browse files
9prady9 authored and syurkevi committed
Refactor cov, var, stdev APIs to reflect 3.8 release
Also adds set_cublas_mode utility function to explicitly enable tensor ops for blas functions
1 parent 8ac4a8d commit da192a7

File tree

5 files changed

+71
-32
lines changed

5 files changed

+71
-32
lines changed

Diff for: arrayfire/algorithm.py

+28
Original file line numberDiff line numberDiff line change
@@ -250,6 +250,34 @@ def max(a, dim=None):
250250
else:
251251
return _reduce_all(a, backend.get().af_max_all)
252252

253+
def maxRagged(vals, lens, dim):
    """
    Find the maximum value of a subset of elements along a specified dimension.

    The size of the subset of elements along the given dimension is decided based
    on the lengths provided in the `lens` array.

    Parameters
    ----------
    vals : af.Array
         Multi dimensional arrayfire array.
    lens : af.Array
         Multi dimensional arrayfire array containing number of elements to reduce along given `dim`.
    dim : int
         Dimension along which the maximum value is required.

    Returns
    -------
    (values, indices): A tuple of af.Array(s)
         `values` af.Array will have the maximum values along given dimension for
         subsets determined by lengths provided in `lens`.
         `indices` contains the locations of the maximum values as per the lengths provided in `lens`.
    """
    out_vals = Array()
    out_idx = Array()
    # `backend` is the module-level wrapper object, not a callable —
    # use backend.get() like every other binding in this module.
    safe_call(backend.get().af_max_ragged(c_pointer(out_vals.arr), c_pointer(out_idx.arr),
                                          c_pointer(vals.arr), c_pointer(lens.arr),
                                          c_int_t(dim)))
    return out_vals, out_idx
280+
253281
def maxByKey(keys, vals, dim=-1):
254282
"""
255283
Calculate the max of elements along a specified dimension according to a key.

Diff for: arrayfire/cuda.py

+7
Original file line numberDiff line numberDiff line change
@@ -85,3 +85,10 @@ def set_native_id(idx):
8585

8686
safe_call(backend.get().afcu_set_native_id(idx))
8787
return
88+
89+
def set_cublas_mode(mode=CUBLAS_MATH_MODE.DEFAULT):
    """
    Sets the cuBLAS math mode for the CUDA backend. In other backends, this has no effect.

    Parameters
    ----------
    mode : optional: af.CUBLAS_MATH_MODE. default: CUBLAS_MATH_MODE.DEFAULT.
         TENSOR_OP explicitly enables tensor-op acceleration where available;
         DEFAULT restores the standard cuBLAS behavior.
    """
    # `backend` is the module-level wrapper object, not a callable —
    # use backend.get() exactly as set_native_id() above does.
    safe_call(backend.get().afcu_cublasSetMathMode(mode.value))
    return

Diff for: arrayfire/library.py

+7
Original file line numberDiff line numberDiff line change
@@ -490,6 +490,13 @@ class VARIANCE(_Enum):
490490
SAMPLE = _Enum_Type(1)
491491
POPULATION = _Enum_Type(2)
492492

493+
class CUBLAS_MATH_MODE(_Enum):
    """
    Enable Tensor Core usage if available on CUDA backend GPUs
    """
    # DEFAULT = standard cuBLAS math mode; TENSOR_OP opts in to tensor-op
    # (Tensor Core) acceleration — presumably mirroring cublasMath_t, verify
    # against the afcu_cublasSetMathMode binding.
    DEFAULT = _Enum_Type(0)
    TENSOR_OP = _Enum_Type(1)
499+
493500
_VER_MAJOR_PLACEHOLDER = "__VER_MAJOR__"
494501

495502
def _setup():

Diff for: arrayfire/statistics.py

+25-28
Original file line numberDiff line numberDiff line change
@@ -59,7 +59,7 @@ def mean(a, weights=None, dim=None):
5959

6060
return real if imag == 0 else real + imag * 1j
6161

62-
def var(a, isbiased=False, weights=None, dim=None):
62+
def var(a, bias=VARIANCE.DEFAULT, weights=None, dim=None):
6363
"""
6464
Calculate variance along a given dimension.
6565
@@ -68,9 +68,9 @@ def var(a, isbiased=False, weights=None, dim=None):
6868
a: af.Array
6969
The input array.
7070
71-
isbiased: optional: Boolean. default: False.
72-
Boolean denoting population variance (false) or sample
73-
variance (true).
71+
bias: optional: af.VARIANCE. default: DEFAULT.
72+
population variance(VARIANCE.POPULATION) or sample variance(VARIANCE.SAMPLE).
73+
This is ignored if weights are provided.
7474
7575
weights: optional: af.Array. default: None.
7676
Array to calculate for the weighted mean. Must match size of
@@ -89,7 +89,7 @@ def var(a, isbiased=False, weights=None, dim=None):
8989
out = Array()
9090

9191
if weights is None:
92-
safe_call(backend.get().af_var(c_pointer(out.arr), a.arr, isbiased, c_int_t(dim)))
92+
safe_call(backend.get().af_var_v2(c_pointer(out.arr), a.arr, bias.value, c_int_t(dim)))
9393
else:
9494
safe_call(backend.get().af_var_weighted(c_pointer(out.arr), a.arr, weights.arr, c_int_t(dim)))
9595

@@ -99,7 +99,7 @@ def var(a, isbiased=False, weights=None, dim=None):
9999
imag = c_double_t(0)
100100

101101
if weights is None:
102-
safe_call(backend.get().af_var_all(c_pointer(real), c_pointer(imag), a.arr, isbiased))
102+
safe_call(backend.get().af_var_all_v2(c_pointer(real), c_pointer(imag), a.arr, bias.value))
103103
else:
104104
safe_call(backend.get().af_var_all_weighted(c_pointer(real), c_pointer(imag), a.arr, weights.arr))
105105

@@ -150,7 +150,7 @@ def meanvar(a, weights=None, bias=VARIANCE.DEFAULT, dim=-1):
150150
return mean_out, var_out
151151

152152

153-
def stdev(a, dim=None):
153+
def stdev(a, bias=VARIANCE.DEFAULT, dim=None):
154154
"""
155155
Calculate standard deviation along a given dimension.
156156
@@ -159,6 +159,10 @@ def stdev(a, dim=None):
159159
a: af.Array
160160
The input array.
161161
162+
bias: optional: af.VARIANCE. default: DEFAULT.
163+
population variance(VARIANCE.POPULATION) or sample variance(VARIANCE.SAMPLE).
164+
This is ignored if weights are provided.
165+
162166
dim: optional: int. default: None.
163167
The dimension for which to obtain the standard deviation from
164168
input data.
@@ -171,48 +175,41 @@ def stdev(a, dim=None):
171175
"""
172176
if dim is not None:
173177
out = Array()
174-
safe_call(backend.get().af_stdev(c_pointer(out.arr), a.arr, c_int_t(dim)))
178+
safe_call(backend.get().af_stdev_v2(c_pointer(out.arr), a.arr, bias.value,
179+
c_int_t(dim)))
175180
return out
176181
else:
177182
real = c_double_t(0)
178183
imag = c_double_t(0)
179-
safe_call(backend.get().af_stdev_all(c_pointer(real), c_pointer(imag), a.arr))
184+
safe_call(backend.get().af_stdev_all_v2(c_pointer(real), c_pointer(imag), a.arr,
185+
bias.value))
180186
real = real.value
181187
imag = imag.value
182188
return real if imag == 0 else real + imag * 1j
183189

184-
def cov(a, isbiased=False, dim=None):
190+
def cov(a, b, bias=VARIANCE.DEFAULT):
    """
    Calculate the covariance of two input arrays.

    Parameters
    ----------
    a: af.Array
         Input array.

    b: af.Array
         Input array.

    bias: optional: af.VARIANCE. default: DEFAULT.
         population variance(VARIANCE.POPULATION) or sample variance(VARIANCE.SAMPLE).

    Returns
    -------
    output: af.Array
         Array containing the covariance of the two input arrays.
    """
    out = Array()
    safe_call(backend.get().af_cov_v2(c_pointer(out.arr), a.arr, b.arr, bias.value))
    return out
216213

217214
def median(a, dim=None):
218215
"""

Diff for: tests/simple/statistics.py

+4-4
Original file line numberDiff line numberDiff line change
@@ -28,10 +28,10 @@ def simple_statistics(verbose=False):
2828
print_func(af.mean(a, weights=w))
2929

3030
display_func(af.var(a, dim=0))
31-
display_func(af.var(a, isbiased=True, dim=0))
31+
display_func(af.var(a, bias=af.VARIANCE.SAMPLE, dim=0))
3232
display_func(af.var(a, weights=w, dim=0))
3333
print_func(af.var(a))
34-
print_func(af.var(a, isbiased=True))
34+
print_func(af.var(a, bias=af.VARIANCE.SAMPLE))
3535
print_func(af.var(a, weights=w))
3636

3737
mean, var = af.meanvar(a, dim=0)
@@ -45,9 +45,9 @@ def simple_statistics(verbose=False):
4545
print_func(af.stdev(a))
4646

4747
display_func(af.var(a, dim=0))
48-
display_func(af.var(a, isbiased=True, dim=0))
48+
display_func(af.var(a, bias=af.VARIANCE.SAMPLE, dim=0))
4949
print_func(af.var(a))
50-
print_func(af.var(a, isbiased=True))
50+
print_func(af.var(a, bias=af.VARIANCE.SAMPLE))
5151

5252
display_func(af.median(a, dim=0))
5353
print_func(af.median(w))

0 commit comments

Comments
 (0)