@@ -3102,6 +3102,21 @@ def logp(value, mu, kappa):
3102
3102
)
3103
3103
3104
3104
3105
class SkewNormalRV(RandomVariable):
    """Aesara ``RandomVariable`` Op backing the skew-normal distribution.

    Scalar (``ndim_supp = 0``) distribution with three scalar parameters
    ``mu``, ``sigma`` and ``alpha`` (in that order).
    """

    name = "skewnormal"
    ndim_supp = 0
    ndims_params = [0, 0, 0]
    dtype = "floatX"
    _print_name = ("SkewNormal", "\\operatorname{SkewNormal}")

    @classmethod
    def rng_fn(cls, rng, mu, sigma, alpha, size=None):
        # Sampling is delegated to scipy, whose shape parameter ``a``
        # is this distribution's skewness ``alpha``.
        return stats.skewnorm.rvs(
            a=alpha, loc=mu, scale=sigma, size=size, random_state=rng
        )


skewnormal = SkewNormalRV()
3105
3120
class SkewNormal (Continuous ):
3106
3121
r"""
3107
3122
Univariate skew-normal log-likelihood.
@@ -3160,51 +3175,25 @@ class SkewNormal(Continuous):
3160
3175
approaching plus/minus infinite we get a half-normal distribution.
3161
3176
3162
3177
"""
3178
+ rv_op = skewnormal
3163
3179
3164
- def __init__ (self , mu = 0.0 , sigma = None , tau = None , alpha = 1 , sd = None , * args , ** kwargs ):
3165
- super ().__init__ (* args , ** kwargs )
3166
-
3180
+ @classmethod
3181
+ def dist (cls , alpha = 1 , mu = 0.0 , sigma = None , tau = None , sd = None , * args , ** kwargs ):
3167
3182
if sd is not None :
3168
3183
sigma = sd
3169
3184
3170
3185
tau , sigma = get_tau_sigma (tau = tau , sigma = sigma )
3171
- self .mu = mu = at .as_tensor_variable (floatX (mu ))
3172
- self .tau = at .as_tensor_variable (tau )
3173
- self .sigma = self .sd = at .as_tensor_variable (sigma )
3174
-
3175
- self .alpha = alpha = at .as_tensor_variable (floatX (alpha ))
3176
-
3177
- self .mean = mu + self .sigma * (2 / np .pi ) ** 0.5 * alpha / (1 + alpha ** 2 ) ** 0.5
3178
- self .variance = self .sigma ** 2 * (1 - (2 * alpha ** 2 ) / ((1 + alpha ** 2 ) * np .pi ))
3186
+ alpha = at .as_tensor_variable (floatX (alpha ))
3187
+ mu = at .as_tensor_variable (floatX (mu ))
3188
+ tau = at .as_tensor_variable (tau )
3189
+ sigma = at .as_tensor_variable (sigma )
3179
3190
3180
3191
assert_negative_support (tau , "tau" , "SkewNormal" )
3181
3192
assert_negative_support (sigma , "sigma" , "SkewNormal" )
3182
3193
3183
- def random (self , point = None , size = None ):
3184
- """
3185
- Draw random values from SkewNormal distribution.
3186
-
3187
- Parameters
3188
- ----------
3189
- point: dict, optional
3190
- Dict of variable values on which random values are to be
3191
- conditioned (uses default point if not specified).
3192
- size: int, optional
3193
- Desired size of random sample (returns one sample if not
3194
- specified).
3195
-
3196
- Returns
3197
- -------
3198
- array
3199
- """
3200
- # mu, tau, _, alpha = draw_values(
3201
- # [self.mu, self.tau, self.sigma, self.alpha], point=point, size=size
3202
- # )
3203
- # return generate_samples(
3204
- # stats.skewnorm.rvs, a=alpha, loc=mu, scale=tau ** -0.5, dist_shape=self.shape, size=size
3205
- # )
3194
+ return super ().dist ([mu , sigma , alpha ], * args , ** kwargs )
3206
3195
3207
- def logp (self , value ):
3196
+ def logp (value , mu , sigma , alpha ):
3208
3197
"""
3209
3198
Calculate log-probability of SkewNormal distribution at specified value.
3210
3199
@@ -3218,20 +3207,14 @@ def logp(self, value):
3218
3207
-------
3219
3208
TensorVariable
3220
3209
"""
3221
- tau = self .tau
3222
- sigma = self .sigma
3223
- mu = self .mu
3224
- alpha = self .alpha
3210
+ tau , sigma = get_tau_sigma (sigma = sigma )
3225
3211
return bound (
3226
3212
at .log (1 + at .erf (((value - mu ) * at .sqrt (tau ) * alpha ) / at .sqrt (2 )))
3227
3213
+ (- tau * (value - mu ) ** 2 + at .log (tau / np .pi / 2.0 )) / 2.0 ,
3228
3214
tau > 0 ,
3229
3215
sigma > 0 ,
3230
3216
)
3231
3217
3232
- def _distr_parameters_for_repr (self ):
3233
- return ["mu" , "sigma" , "alpha" ]
3234
-
3235
3218
3236
3219
class Triangular (BoundedContinuous ):
3237
3220
r"""
@@ -3474,6 +3457,21 @@ def logcdf(
3474
3457
)
3475
3458
3476
3459
3460
class RiceRV(RandomVariable):
    """Aesara ``RandomVariable`` Op backing the Rice distribution.

    Scalar (``ndim_supp = 0``) distribution with two scalar parameters
    ``b`` and ``sigma`` (in that order).
    """

    name = "rice"
    ndim_supp = 0
    ndims_params = [0, 0]
    dtype = "floatX"
    _print_name = ("Rice", "\\operatorname{Rice}")

    @classmethod
    def rng_fn(cls, rng, b, sigma, size=None):
        # scipy's rice takes the same shape parameter b = nu / sigma.
        return stats.rice.rvs(b=b, scale=sigma, size=size, random_state=rng)


rice = RiceRV()
3477
3475
class Rice (PositiveContinuous ):
3478
3476
r"""
3479
3477
Rice distribution.
@@ -3533,42 +3531,21 @@ class Rice(PositiveContinuous):
3533
3531
b = \dfrac{\nu}{\sigma}
3534
3532
3535
3533
"""
3534
+ rv_op = rice
3536
3535
3537
- def __init__ ( self , nu = None , sigma = None , b = None , sd = None , * args , ** kwargs ):
3538
- super (). __init__ ( * args , ** kwargs )
3536
+ @ classmethod
3537
+ def dist ( cls , nu = None , sigma = None , b = None , sd = None , * args , ** kwargs ):
3539
3538
if sd is not None :
3540
3539
sigma = sd
3541
3540
3542
- nu , b , sigma = self .get_nu_b (nu , b , sigma )
3543
- self .nu = nu = at .as_tensor_variable (floatX (nu ))
3544
- self .sigma = self .sd = sigma = at .as_tensor_variable (floatX (sigma ))
3545
- self .b = b = at .as_tensor_variable (floatX (b ))
3546
-
3547
- nu_sigma_ratio = - (nu ** 2 ) / (2 * sigma ** 2 )
3548
- self .mean = (
3549
- sigma
3550
- * np .sqrt (np .pi / 2 )
3551
- * at .exp (nu_sigma_ratio / 2 )
3552
- * (
3553
- (1 - nu_sigma_ratio ) * at .i0 (- nu_sigma_ratio / 2 )
3554
- - nu_sigma_ratio * at .i1 (- nu_sigma_ratio / 2 )
3555
- )
3556
- )
3557
- self .variance = (
3558
- 2 * sigma ** 2
3559
- + nu ** 2
3560
- - (np .pi * sigma ** 2 / 2 )
3561
- * (
3562
- at .exp (nu_sigma_ratio / 2 )
3563
- * (
3564
- (1 - nu_sigma_ratio ) * at .i0 (- nu_sigma_ratio / 2 )
3565
- - nu_sigma_ratio * at .i1 (- nu_sigma_ratio / 2 )
3566
- )
3567
- )
3568
- ** 2
3569
- )
3541
+ nu , b , sigma = cls .get_nu_b (nu , b , sigma )
3542
+ b = at .as_tensor_variable (floatX (b ))
3543
+ sigma = at .as_tensor_variable (floatX (sigma ))
3570
3544
3571
- def get_nu_b (self , nu , b , sigma ):
3545
+ return super ().dist ([b , sigma ], * args , ** kwargs )
3546
+
3547
+ @classmethod
3548
+ def get_nu_b (cls , nu , b , sigma ):
3572
3549
if sigma is None :
3573
3550
sigma = 1.0
3574
3551
if nu is None and b is not None :
@@ -3579,35 +3556,7 @@ def get_nu_b(self, nu, b, sigma):
3579
3556
return nu , b , sigma
3580
3557
raise ValueError ("Rice distribution must specify either nu" " or b." )
3581
3558
3582
- def random (self , point = None , size = None ):
3583
- """
3584
- Draw random values from Rice distribution.
3585
-
3586
- Parameters
3587
- ----------
3588
- point: dict, optional
3589
- Dict of variable values on which random values are to be
3590
- conditioned (uses default point if not specified).
3591
- size: int, optional
3592
- Desired size of random sample (returns one sample if not
3593
- specified).
3594
-
3595
- Returns
3596
- -------
3597
- array
3598
- """
3599
- # nu, sigma = draw_values([self.nu, self.sigma], point=point, size=size)
3600
- # return generate_samples(self._random, nu=nu, sigma=sigma, dist_shape=self.shape, size=size)
3601
-
3602
- def _random (self , nu , sigma , size ):
3603
- """Wrapper around stats.rice.rvs that converts Rice's
3604
- parametrization to scipy.rice. All parameter arrays should have
3605
- been broadcasted properly by generate_samples at this point and size is
3606
- the scipy.rvs representation.
3607
- """
3608
- return stats .rice .rvs (b = nu / sigma , scale = sigma , size = size )
3609
-
3610
- def logp (self , value ):
3559
+ def logp (value , b , sigma ):
3611
3560
"""
3612
3561
Calculate log-probability of Rice distribution at specified value.
3613
3562
@@ -3621,20 +3570,13 @@ def logp(self, value):
3621
3570
-------
3622
3571
TensorVariable
3623
3572
"""
3624
- nu = self .nu
3625
- sigma = self .sigma
3626
- b = self .b
3627
3573
x = value / sigma
3628
3574
return bound (
3629
3575
at .log (x * at .exp ((- (x - b ) * (x - b )) / 2 ) * i0e (x * b ) / sigma ),
3630
3576
sigma >= 0 ,
3631
- nu >= 0 ,
3632
3577
value > 0 ,
3633
3578
)
3634
3579
3635
- def _distr_parameters_for_repr (self ):
3636
- return ["nu" , "sigma" ]
3637
-
3638
3580
3639
3581
class Logistic (Continuous ):
3640
3582
r"""
0 commit comments