1use crate::impls::{fill_via_u32_chunks, fill_via_u64_chunks};
57use crate::{CryptoRng, Error, RngCore, SeedableRng};
58use core::convert::AsRef;
59use core::fmt;
60#[cfg(feature = "serde1")]
61use serde::{Deserialize, Serialize};
62
63pub trait BlockRngCore {
69 type Item;
71
72 type Results: AsRef<[Self::Item]> + AsMut<[Self::Item]> + Default;
75
76 fn generate(&mut self, results: &mut Self::Results);
78}
79
80#[derive(Clone)]
116#[cfg_attr(feature = "serde1", derive(Serialize, Deserialize))]
117#[cfg_attr(
118 feature = "serde1",
119 serde(
120 bound = "for<'x> R: Serialize + Deserialize<'x> + Sized, for<'x> R::Results: Serialize + Deserialize<'x>"
121 )
122)]
123pub struct BlockRng<R: BlockRngCore + ?Sized> {
124 results: R::Results,
125 index: usize,
126 pub core: R,
128}
129
130impl<R: BlockRngCore + fmt::Debug> fmt::Debug for BlockRng<R> {
132 fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
133 fmt.debug_struct("BlockRng")
134 .field("core", &self.core)
135 .field("result_len", &self.results.as_ref().len())
136 .field("index", &self.index)
137 .finish()
138 }
139}
140
141impl<R: BlockRngCore> BlockRng<R> {
142 #[inline]
145 pub fn new(core: R) -> BlockRng<R> {
146 let results_empty = R::Results::default();
147 BlockRng {
148 core,
149 index: results_empty.as_ref().len(),
150 results: results_empty,
151 }
152 }
153
154 #[inline(always)]
160 pub fn index(&self) -> usize {
161 self.index
162 }
163
164 #[inline]
167 pub fn reset(&mut self) {
168 self.index = self.results.as_ref().len();
169 }
170
171 #[inline]
174 pub fn generate_and_set(&mut self, index: usize) {
175 assert!(index < self.results.as_ref().len());
176 self.core.generate(&mut self.results);
177 self.index = index;
178 }
179}
180
181impl<R: BlockRngCore<Item = u32>> RngCore for BlockRng<R>
182where
183 <R as BlockRngCore>::Results: AsRef<[u32]> + AsMut<[u32]>,
184{
185 #[inline]
186 fn next_u32(&mut self) -> u32 {
187 if self.index >= self.results.as_ref().len() {
188 self.generate_and_set(0);
189 }
190
191 let value = self.results.as_ref()[self.index];
192 self.index += 1;
193 value
194 }
195
196 #[inline]
197 fn next_u64(&mut self) -> u64 {
198 let read_u64 = |results: &[u32], index| {
199 let data = &results[index..=index + 1];
200 u64::from(data[1]) << 32 | u64::from(data[0])
201 };
202
203 let len = self.results.as_ref().len();
204
205 let index = self.index;
206 if index < len - 1 {
207 self.index += 2;
208 read_u64(self.results.as_ref(), index)
210 } else if index >= len {
211 self.generate_and_set(2);
212 read_u64(self.results.as_ref(), 0)
213 } else {
214 let x = u64::from(self.results.as_ref()[len - 1]);
215 self.generate_and_set(1);
216 let y = u64::from(self.results.as_ref()[0]);
217 (y << 32) | x
218 }
219 }
220
221 #[inline]
222 fn fill_bytes(&mut self, dest: &mut [u8]) {
223 let mut read_len = 0;
224 while read_len < dest.len() {
225 if self.index >= self.results.as_ref().len() {
226 self.generate_and_set(0);
227 }
228 let (consumed_u32, filled_u8) =
229 fill_via_u32_chunks(&self.results.as_ref()[self.index..], &mut dest[read_len..]);
230
231 self.index += consumed_u32;
232 read_len += filled_u8;
233 }
234 }
235
236 #[inline(always)]
237 fn try_fill_bytes(&mut self, dest: &mut [u8]) -> Result<(), Error> {
238 self.fill_bytes(dest);
239 Ok(())
240 }
241}
242
243impl<R: BlockRngCore + SeedableRng> SeedableRng for BlockRng<R> {
244 type Seed = R::Seed;
245
246 #[inline(always)]
247 fn from_seed(seed: Self::Seed) -> Self {
248 Self::new(R::from_seed(seed))
249 }
250
251 #[inline(always)]
252 fn seed_from_u64(seed: u64) -> Self {
253 Self::new(R::seed_from_u64(seed))
254 }
255
256 #[inline(always)]
257 fn from_rng<S: RngCore>(rng: S) -> Result<Self, Error> {
258 Ok(Self::new(R::from_rng(rng)?))
259 }
260}
261
262#[derive(Clone)]
285#[cfg_attr(feature = "serde1", derive(Serialize, Deserialize))]
286pub struct BlockRng64<R: BlockRngCore + ?Sized> {
287 results: R::Results,
288 index: usize,
289 half_used: bool, pub core: R,
292}
293
294impl<R: BlockRngCore + fmt::Debug> fmt::Debug for BlockRng64<R> {
296 fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
297 fmt.debug_struct("BlockRng64")
298 .field("core", &self.core)
299 .field("result_len", &self.results.as_ref().len())
300 .field("index", &self.index)
301 .field("half_used", &self.half_used)
302 .finish()
303 }
304}
305
306impl<R: BlockRngCore> BlockRng64<R> {
307 #[inline]
310 pub fn new(core: R) -> BlockRng64<R> {
311 let results_empty = R::Results::default();
312 BlockRng64 {
313 core,
314 index: results_empty.as_ref().len(),
315 half_used: false,
316 results: results_empty,
317 }
318 }
319
320 #[inline(always)]
326 pub fn index(&self) -> usize {
327 self.index
328 }
329
330 #[inline]
333 pub fn reset(&mut self) {
334 self.index = self.results.as_ref().len();
335 self.half_used = false;
336 }
337
338 #[inline]
341 pub fn generate_and_set(&mut self, index: usize) {
342 assert!(index < self.results.as_ref().len());
343 self.core.generate(&mut self.results);
344 self.index = index;
345 self.half_used = false;
346 }
347}
348
349impl<R: BlockRngCore<Item = u64>> RngCore for BlockRng64<R>
350where
351 <R as BlockRngCore>::Results: AsRef<[u64]> + AsMut<[u64]>,
352{
353 #[inline]
354 fn next_u32(&mut self) -> u32 {
355 let mut index = self.index - self.half_used as usize;
356 if index >= self.results.as_ref().len() {
357 self.core.generate(&mut self.results);
358 self.index = 0;
359 index = 0;
360 self.half_used = false;
362 }
363
364 let shift = 32 * (self.half_used as usize);
365
366 self.half_used = !self.half_used;
367 self.index += self.half_used as usize;
368
369 (self.results.as_ref()[index] >> shift) as u32
370 }
371
372 #[inline]
373 fn next_u64(&mut self) -> u64 {
374 if self.index >= self.results.as_ref().len() {
375 self.core.generate(&mut self.results);
376 self.index = 0;
377 }
378
379 let value = self.results.as_ref()[self.index];
380 self.index += 1;
381 self.half_used = false;
382 value
383 }
384
385 #[inline]
386 fn fill_bytes(&mut self, dest: &mut [u8]) {
387 let mut read_len = 0;
388 self.half_used = false;
389 while read_len < dest.len() {
390 if self.index as usize >= self.results.as_ref().len() {
391 self.core.generate(&mut self.results);
392 self.index = 0;
393 }
394
395 let (consumed_u64, filled_u8) = fill_via_u64_chunks(
396 &self.results.as_ref()[self.index as usize..],
397 &mut dest[read_len..],
398 );
399
400 self.index += consumed_u64;
401 read_len += filled_u8;
402 }
403 }
404
405 #[inline(always)]
406 fn try_fill_bytes(&mut self, dest: &mut [u8]) -> Result<(), Error> {
407 self.fill_bytes(dest);
408 Ok(())
409 }
410}
411
412impl<R: BlockRngCore + SeedableRng> SeedableRng for BlockRng64<R> {
413 type Seed = R::Seed;
414
415 #[inline(always)]
416 fn from_seed(seed: Self::Seed) -> Self {
417 Self::new(R::from_seed(seed))
418 }
419
420 #[inline(always)]
421 fn seed_from_u64(seed: u64) -> Self {
422 Self::new(R::seed_from_u64(seed))
423 }
424
425 #[inline(always)]
426 fn from_rng<S: RngCore>(rng: S) -> Result<Self, Error> {
427 Ok(Self::new(R::from_rng(rng)?))
428 }
429}
430
431impl<R: BlockRngCore + CryptoRng> CryptoRng for BlockRng<R> {}
432
433#[cfg(test)]
434mod test {
435 use crate::{SeedableRng, RngCore};
436 use crate::block::{BlockRng, BlockRng64, BlockRngCore};
437
438 #[derive(Debug, Clone)]
439 struct DummyRng {
440 counter: u32,
441 }
442
443 impl BlockRngCore for DummyRng {
444 type Item = u32;
445
446 type Results = [u32; 16];
447
448 fn generate(&mut self, results: &mut Self::Results) {
449 for r in results {
450 *r = self.counter;
451 self.counter = self.counter.wrapping_add(3511615421);
452 }
453 }
454 }
455
456 impl SeedableRng for DummyRng {
457 type Seed = [u8; 4];
458
459 fn from_seed(seed: Self::Seed) -> Self {
460 DummyRng { counter: u32::from_le_bytes(seed) }
461 }
462 }
463
464 #[test]
465 fn blockrng_next_u32_vs_next_u64() {
466 let mut rng1 = BlockRng::<DummyRng>::from_seed([1, 2, 3, 4]);
467 let mut rng2 = rng1.clone();
468 let mut rng3 = rng1.clone();
469
470 let mut a = [0; 16];
471 (&mut a[..4]).copy_from_slice(&rng1.next_u32().to_le_bytes());
472 (&mut a[4..12]).copy_from_slice(&rng1.next_u64().to_le_bytes());
473 (&mut a[12..]).copy_from_slice(&rng1.next_u32().to_le_bytes());
474
475 let mut b = [0; 16];
476 (&mut b[..4]).copy_from_slice(&rng2.next_u32().to_le_bytes());
477 (&mut b[4..8]).copy_from_slice(&rng2.next_u32().to_le_bytes());
478 (&mut b[8..]).copy_from_slice(&rng2.next_u64().to_le_bytes());
479 assert_eq!(a, b);
480
481 let mut c = [0; 16];
482 (&mut c[..8]).copy_from_slice(&rng3.next_u64().to_le_bytes());
483 (&mut c[8..12]).copy_from_slice(&rng3.next_u32().to_le_bytes());
484 (&mut c[12..]).copy_from_slice(&rng3.next_u32().to_le_bytes());
485 assert_eq!(a, c);
486 }
487
488 #[derive(Debug, Clone)]
489 struct DummyRng64 {
490 counter: u64,
491 }
492
493 impl BlockRngCore for DummyRng64 {
494 type Item = u64;
495
496 type Results = [u64; 8];
497
498 fn generate(&mut self, results: &mut Self::Results) {
499 for r in results {
500 *r = self.counter;
501 self.counter = self.counter.wrapping_add(2781463553396133981);
502 }
503 }
504 }
505
506 impl SeedableRng for DummyRng64 {
507 type Seed = [u8; 8];
508
509 fn from_seed(seed: Self::Seed) -> Self {
510 DummyRng64 { counter: u64::from_le_bytes(seed) }
511 }
512 }
513
514 #[test]
515 fn blockrng64_next_u32_vs_next_u64() {
516 let mut rng1 = BlockRng64::<DummyRng64>::from_seed([1, 2, 3, 4, 5, 6, 7, 8]);
517 let mut rng2 = rng1.clone();
518 let mut rng3 = rng1.clone();
519
520 let mut a = [0; 16];
521 (&mut a[..4]).copy_from_slice(&rng1.next_u32().to_le_bytes());
522 (&mut a[4..12]).copy_from_slice(&rng1.next_u64().to_le_bytes());
523 (&mut a[12..]).copy_from_slice(&rng1.next_u32().to_le_bytes());
524
525 let mut b = [0; 16];
526 (&mut b[..4]).copy_from_slice(&rng2.next_u32().to_le_bytes());
527 (&mut b[4..8]).copy_from_slice(&rng2.next_u32().to_le_bytes());
528 (&mut b[8..]).copy_from_slice(&rng2.next_u64().to_le_bytes());
529 assert_ne!(a, b);
530 assert_eq!(&a[..4], &b[..4]);
531 assert_eq!(&a[4..12], &b[8..]);
532
533 let mut c = [0; 16];
534 (&mut c[..8]).copy_from_slice(&rng3.next_u64().to_le_bytes());
535 (&mut c[8..12]).copy_from_slice(&rng3.next_u32().to_le_bytes());
536 (&mut c[12..]).copy_from_slice(&rng3.next_u32().to_le_bytes());
537 assert_eq!(b, c);
538 }
539}