@@ -265,6 +265,7 @@
 #include <linux/syscalls.h>
 #include <linux/completion.h>
 #include <linux/uuid.h>
+#include <linux/locallock.h>
 #include <crypto/chacha20.h>
 
 #include <asm/processor.h>
@@ -2196,6 +2197,7 @@ static rwlock_t batched_entropy_reset_lock = __RW_LOCK_UNLOCKED(batched_entropy_
  * at any point prior.
  */
 static DEFINE_PER_CPU(struct batched_entropy, batched_entropy_u64);
+static DEFINE_LOCAL_IRQ_LOCK(batched_entropy_u64_lock);
 u64 get_random_u64(void)
 {
 	u64 ret;
@@ -2216,7 +2218,7 @@ u64 get_random_u64(void)
 		warn_unseeded_randomness(&previous);
 
 	use_lock = READ_ONCE(crng_init) < 2;
-	batch = &get_cpu_var(batched_entropy_u64);
+	batch = &get_locked_var(batched_entropy_u64_lock, batched_entropy_u64);
 	if (use_lock)
 		read_lock_irqsave(&batched_entropy_reset_lock, flags);
 	if (batch->position % ARRAY_SIZE(batch->entropy_u64) == 0) {
@@ -2226,12 +2228,13 @@ u64 get_random_u64(void)
 	ret = batch->entropy_u64[batch->position++];
 	if (use_lock)
 		read_unlock_irqrestore(&batched_entropy_reset_lock, flags);
-	put_cpu_var(batched_entropy_u64);
+	put_locked_var(batched_entropy_u64_lock, batched_entropy_u64);
 	return ret;
 }
 EXPORT_SYMBOL(get_random_u64);
 
 static DEFINE_PER_CPU(struct batched_entropy, batched_entropy_u32);
+static DEFINE_LOCAL_IRQ_LOCK(batched_entropy_u32_lock);
 u32 get_random_u32(void)
 {
 	u32 ret;
@@ -2246,7 +2249,7 @@ u32 get_random_u32(void)
 		warn_unseeded_randomness(&previous);
 
 	use_lock = READ_ONCE(crng_init) < 2;
-	batch = &get_cpu_var(batched_entropy_u32);
+	batch = &get_locked_var(batched_entropy_u32_lock, batched_entropy_u32);
 	if (use_lock)
 		read_lock_irqsave(&batched_entropy_reset_lock, flags);
 	if (batch->position % ARRAY_SIZE(batch->entropy_u32) == 0) {
@@ -2256,7 +2259,7 @@ u32 get_random_u32(void)
 	ret = batch->entropy_u32[batch->position++];
 	if (use_lock)
 		read_unlock_irqrestore(&batched_entropy_reset_lock, flags);
-	put_cpu_var(batched_entropy_u32);
+	put_locked_var(batched_entropy_u32_lock, batched_entropy_u32);
 	return ret;
 }
 EXPORT_SYMBOL(get_random_u32);
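
A minimal sketch (not part of the patch) of the access pattern the change introduces, assuming the usual -rt locallock.h semantics: on PREEMPT_RT the named local lock serializes access to the per-CPU data with a per-CPU lock that keeps the section preemptible, while on non-RT kernels get_locked_var()/put_locked_var() are expected to fall back to get_cpu_var()/put_cpu_var(). The struct and function names below are invented for illustration only.

#include <linux/kernel.h>
#include <linux/types.h>
#include <linux/percpu.h>
#include <linux/locallock.h>

/* Hypothetical per-CPU batch, mirroring the batched_entropy layout. */
struct example_batch {
	unsigned int position;
	u64 entropy[16];
};

static DEFINE_PER_CPU(struct example_batch, example_batch);
static DEFINE_LOCAL_IRQ_LOCK(example_batch_lock);

static u64 example_next(void)
{
	struct example_batch *batch;
	u64 ret;

	/*
	 * Pin the per-CPU batch: presumably plain preemption disabling
	 * on !RT, a per-CPU local lock on PREEMPT_RT.
	 */
	batch = &get_locked_var(example_batch_lock, example_batch);
	ret = batch->entropy[batch->position++ % ARRAY_SIZE(batch->entropy)];
	put_locked_var(example_batch_lock, example_batch);

	return ret;
}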