from tensorflow_probability.python.internal import tensor_util
from tensorflow_probability.python.math.psd_kernels import positive_semidefinite_kernel as psd_kernel
from tensorflow_probability.python.math.psd_kernels.internal import util
-from tensorflow.python.util import deprecation  # pylint: disable=g-direct-tensorflow-import

__all__ = [
    'Constant',
@@ -55,14 +54,7 @@ class Polynomial(psd_kernel.AutoCompositeTensorPsdKernel):

  """

-  @deprecation.deprecated_args(
-      '2022-11-01',
-      '`bias_variance` and `slope_variance` are deprecated. Please use '
-      '`bias_amplitude` and `slope_amplitude` instead.',
-      'bias_variance', 'slope_variance')
  def __init__(self,
-               bias_variance=None,
-               slope_variance=None,
               bias_amplitude=None,
               slope_amplitude=None,
               shift=None,
@@ -74,18 +66,6 @@ def __init__(self,
    """Construct a Polynomial kernel instance.

    Args:
-      bias_variance: Deprecated. Non-negative floating point `Tensor` that
-        controls the variance from the origin. If bias = 0, there is no
-        variance and the fitted function goes through the origin. Must be
-        broadcastable with `slope_variance`, `shift`, `exponent`, and inputs
-        to `apply` and `matrix` methods. A value of `None` is treated like 0.
-        Default Value: `None`
-      slope_variance: Deprecated. Non-negative floating point `Tensor` that
-        controls the variance of the regression line slope that is the basis
-        for the polynomial. Must be broadcastable with `bias_variance`, `shift`,
-        `exponent`, and inputs to `apply` and `matrix` methods. A value of
-        `None` is treated like 1.
-        Default Value: `None`
      bias_amplitude: Non-negative floating point `Tensor` that controls the
        stddev from the origin. If bias = 0, there is no stddev and the
        fitted function goes through the origin. Must be broadcastable with
@@ -124,16 +104,10 @@ def __init__(self,
    parameters = dict(locals()) if parameters is None else parameters
    with tf.name_scope(name):
      dtype = util.maybe_get_common_dtype(
-          [bias_variance,
-           slope_variance,
-           bias_amplitude,
+          [bias_amplitude,
           slope_amplitude,
           shift,
           exponent])
-      self._bias_variance = tensor_util.convert_nonref_to_tensor(
-          bias_variance, name='bias_variance', dtype=dtype)
-      self._slope_variance = tensor_util.convert_nonref_to_tensor(
-          slope_variance, name='slope_variance', dtype=dtype)
      self._bias_amplitude = tensor_util.convert_nonref_to_tensor(
          bias_amplitude, name='bias_amplitude', dtype=dtype)
      self._slope_amplitude = tensor_util.convert_nonref_to_tensor(
@@ -156,30 +130,14 @@ def _parameter_properties(cls, dtype):
        bias_amplitude=parameter_properties.ParameterProperties(
            default_constraining_bijector_fn=(
                lambda: softplus.Softplus(low=dtype_util.eps(dtype)))),
-        bias_variance=parameter_properties.ParameterProperties(
-            default_constraining_bijector_fn=(
-                lambda: softplus.Softplus(low=dtype_util.eps(dtype)))),
        exponent=parameter_properties.ParameterProperties(
            default_constraining_bijector_fn=(
                lambda: softplus.Softplus(low=dtype_util.eps(dtype)))),
        slope_amplitude=parameter_properties.ParameterProperties(
            default_constraining_bijector_fn=(
                lambda: softplus.Softplus(low=dtype_util.eps(dtype)))),
-        slope_variance=parameter_properties.ParameterProperties(
-            default_constraining_bijector_fn=(
-                lambda: softplus.Softplus(low=dtype_util.eps(dtype)))),
        shift=parameter_properties.ParameterProperties())

-  @property
-  def bias_variance(self):
-    """Variance on bias parameter."""
-    return self._bias_variance
-
-  @property
-  def slope_variance(self):
-    """Variance on slope parameter."""
-    return self._slope_variance
-
  @property
  def bias_amplitude(self):
    """Stddev on bias parameter."""
@@ -200,16 +158,6 @@ def exponent(self):
    """Exponent of the polynomial term."""
    return self._exponent

-  def _get_bias_amplitude(self):
-    if self.bias_amplitude is not None:
-      return self.bias_amplitude
-    return self.bias_variance
-
-  def _get_slope_amplitude(self):
-    if self.slope_amplitude is not None:
-      return self.slope_amplitude
-    return self.slope_variance
-
  def _apply(self, x1, x2, example_ndims=0):
    if self.shift is None:
      dot_prod = util.sum_rightmost_ndims_preserving_shape(
@@ -226,13 +174,13 @@ def _apply(self, x1, x2, example_ndims=0):
      exponent = util.pad_shape_with_ones(exponent, example_ndims)
      dot_prod = dot_prod ** exponent

-    slope_amplitude = self._get_slope_amplitude()
+    slope_amplitude = self.slope_amplitude
    if slope_amplitude is not None:
      slope_amplitude = tf.convert_to_tensor(slope_amplitude)
      slope_amplitude = util.pad_shape_with_ones(slope_amplitude, example_ndims)
      dot_prod = dot_prod * slope_amplitude ** 2.

-    bias_amplitude = self._get_bias_amplitude()
+    bias_amplitude = self.bias_amplitude
    if bias_amplitude is not None:
      bias_amplitude = tf.convert_to_tensor(bias_amplitude)
      bias_amplitude = util.pad_shape_with_ones(bias_amplitude, example_ndims)
@@ -247,8 +195,8 @@ def _parameter_control_dependencies(self, is_init):
    ok_to_check = lambda x: (  # pylint:disable=g-long-lambda
        x is not None) and (is_init != tensor_util.is_ref(x))

-    bias_amplitude = self._get_bias_amplitude()
-    slope_amplitude = self._get_slope_amplitude()
+    bias_amplitude = self.bias_amplitude
+    slope_amplitude = self.slope_amplitude

    if ok_to_check(self.exponent):
      exponent = tf.convert_to_tensor(self.exponent)
@@ -296,14 +244,7 @@ class Linear(Polynomial):
  ```
  """

-  @deprecation.deprecated_args(
-      '2022-11-01',
-      '`bias_variance` and `slope_variance` are deprecated. Please use '
-      '`bias_amplitude` and `slope_amplitude` instead.',
-      'bias_variance', 'slope_variance')
  def __init__(self,
-               bias_variance=None,
-               slope_variance=None,
               bias_amplitude=None,
               slope_amplitude=None,
               shift=None,
@@ -314,17 +255,6 @@ def __init__(self,
    """Construct a Linear kernel instance.

    Args:
-      bias_variance: Positive floating point `Tensor` that controls the variance
-        from the origin. If bias = 0, there is no variance and the fitted
-        function goes through the origin (also known as the homogeneous linear
-        kernel). Must be broadcastable with `slope_variance`, `shift` and inputs
-        to `apply` and `matrix` methods. A value of `None` is treated like 0.
-        Default Value: `None`
-      slope_variance: Positive floating point `Tensor` that controls the
-        variance of the regression line slope. Must be broadcastable with
-        `bias_variance`, `shift`, and inputs to `apply` and `matrix` methods. A
-        value of `None` is treated like 1.
-        Default Value: `None`
      bias_amplitude: Non-negative floating point `Tensor` that controls the
        stddev from the origin. If bias = 0, there is no stddev and the
        fitted function goes through the origin. Must be broadcastable with
@@ -354,8 +284,6 @@ def __init__(self,
    """
    parameters = dict(locals()) if parameters is None else parameters
    super(Linear, self).__init__(
-        bias_variance=bias_variance,
-        slope_variance=slope_variance,
        bias_amplitude=bias_amplitude,
        slope_amplitude=slope_amplitude,
        shift=shift,
@@ -372,15 +300,9 @@ def _parameter_properties(cls, dtype):
        bias_amplitude=parameter_properties.ParameterProperties(
            default_constraining_bijector_fn=(
                lambda: softplus.Softplus(low=dtype_util.eps(dtype)))),
-        bias_variance=parameter_properties.ParameterProperties(
-            default_constraining_bijector_fn=(
-                lambda: softplus.Softplus(low=dtype_util.eps(dtype)))),
        slope_amplitude=parameter_properties.ParameterProperties(
            default_constraining_bijector_fn=(
                lambda: softplus.Softplus(low=dtype_util.eps(dtype)))),
-        slope_variance=parameter_properties.ParameterProperties(
-            default_constraining_bijector_fn=(
-                lambda: softplus.Softplus(low=dtype_util.eps(dtype)))),
        shift=parameter_properties.ParameterProperties())
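For context, a minimal usage sketch of the kernels after this change. It assumes the public `tfp.math.psd_kernels` aliases for `Polynomial` and `Linear`; with the deprecated arguments gone, only `bias_amplitude`, `slope_amplitude`, `shift`, and `exponent` remain, and the values shown below are arbitrary illustrative choices.

```python
# Sketch only; assumes tensorflow and tensorflow_probability are installed and
# that this build already includes the removal of bias_variance/slope_variance.
import tensorflow as tf
import tensorflow_probability as tfp

psd_kernels = tfp.math.psd_kernels

# Amplitudes are stddev-scale parameters; per the class docstring the kernel is
# roughly (bias_amplitude**2 + slope_amplitude**2 * dot(x - shift, y - shift))**exponent.
poly = psd_kernels.Polynomial(
    bias_amplitude=0.5,
    slope_amplitude=1.0,
    shift=0.0,
    exponent=2.0)

# Linear is the exponent=1 special case with its own constructor.
linear = psd_kernels.Linear(bias_amplitude=0.5, slope_amplitude=1.0)

x = tf.random.normal([5, 3])
y = tf.random.normal([4, 3])
print(poly.matrix(x, y).shape)    # (5, 4)
print(linear.matrix(x, y).shape)  # (5, 4)
```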