@@ -265,6 +265,7 @@
 #include <linux/syscalls.h>
 #include <linux/completion.h>
 #include <linux/uuid.h>
+#include <linux/locallock.h>
 #include <crypto/chacha20.h>
 
 #include <asm/processor.h>
@@ -2188,6 +2189,7 @@ static rwlock_t batched_entropy_reset_lock = __RW_LOCK_UNLOCKED(batched_entropy_
  * at any point prior.
  */
 static DEFINE_PER_CPU(struct batched_entropy, batched_entropy_u64);
+static DEFINE_LOCAL_IRQ_LOCK(batched_entropy_u64_lock);
 u64 get_random_u64(void)
 {
 	u64 ret;
@@ -2208,7 +2210,7 @@ u64 get_random_u64(void)
 	warn_unseeded_randomness(&previous);
 
 	use_lock = READ_ONCE(crng_init) < 2;
-	batch = &get_cpu_var(batched_entropy_u64);
+	batch = &get_locked_var(batched_entropy_u64_lock, batched_entropy_u64);
 	if (use_lock)
 		read_lock_irqsave(&batched_entropy_reset_lock, flags);
 	if (batch->position % ARRAY_SIZE(batch->entropy_u64) == 0) {
@@ -2218,12 +2220,13 @@ u64 get_random_u64(void)
 	ret = batch->entropy_u64[batch->position++];
 	if (use_lock)
 		read_unlock_irqrestore(&batched_entropy_reset_lock, flags);
-	put_cpu_var(batched_entropy_u64);
+	put_locked_var(batched_entropy_u64_lock, batched_entropy_u64);
 	return ret;
 }
 EXPORT_SYMBOL(get_random_u64);
 
 static DEFINE_PER_CPU(struct batched_entropy, batched_entropy_u32);
+static DEFINE_LOCAL_IRQ_LOCK(batched_entropy_u32_lock);
 u32 get_random_u32(void)
 {
 	u32 ret;
@@ -2238,7 +2241,7 @@ u32 get_random_u32(void)
 	warn_unseeded_randomness(&previous);
 
 	use_lock = READ_ONCE(crng_init) < 2;
-	batch = &get_cpu_var(batched_entropy_u32);
+	batch = &get_locked_var(batched_entropy_u32_lock, batched_entropy_u32);
 	if (use_lock)
 		read_lock_irqsave(&batched_entropy_reset_lock, flags);
 	if (batch->position % ARRAY_SIZE(batch->entropy_u32) == 0) {
@@ -2248,7 +2251,7 @@ u32 get_random_u32(void)
 	ret = batch->entropy_u32[batch->position++];
 	if (use_lock)
 		read_unlock_irqrestore(&batched_entropy_reset_lock, flags);
-	put_cpu_var(batched_entropy_u32);
+	put_locked_var(batched_entropy_u32_lock, batched_entropy_u32);
 	return ret;
 }
 EXPORT_SYMBOL(get_random_u32);
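For reference, here is a minimal sketch of the local-lock pattern this change introduces, assuming the PREEMPT_RT locallock.h API used in the diff (DEFINE_LOCAL_IRQ_LOCK, get_locked_var(), put_locked_var()). The example_batch structure and example_take() helper are hypothetical, loosely modelled on struct batched_entropy, and only illustrate how the lock brackets access to per-CPU data in place of the preempt-disabling get_cpu_var()/put_cpu_var() pair.

#include <linux/kernel.h>
#include <linux/percpu.h>
#include <linux/locallock.h>

/* Hypothetical per-CPU batch, loosely modelled on struct batched_entropy. */
struct example_batch {
	u64 pool[16];
	unsigned int position;
};

static DEFINE_PER_CPU(struct example_batch, example_batch);
static DEFINE_LOCAL_IRQ_LOCK(example_batch_lock);

static u64 example_take(void)
{
	struct example_batch *batch;
	u64 ret;

	/*
	 * Take the per-CPU local lock and get this CPU's batch. On RT this
	 * is a sleeping per-CPU lock; on non-RT builds get_locked_var() is
	 * expected to fall back to get_cpu_var(), i.e. a preempt-disabled
	 * section, so behaviour there should be unchanged.
	 */
	batch = &get_locked_var(example_batch_lock, example_batch);
	ret = batch->pool[batch->position++ % ARRAY_SIZE(batch->pool)];
	put_locked_var(example_batch_lock, example_batch);

	return ret;
}

The point of the conversion is that the per-CPU entropy batches are no longer accessed inside a preempt_disable() region on RT, where such regions cannot hold sleeping locks; the local lock serialises access to the batch on each CPU instead.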