// Copyright 2018 Developers of the Rand project.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// https://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or https://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.

//! The `BlockRngCore` trait and implementation helpers
//!
//! The [`BlockRngCore`] trait exists to assist in the implementation of RNGs
//! which generate a block of data in a cache instead of returning generated
//! values directly.
//!
//! Usage of this trait is optional, but provides two advantages:
//! implementations only need to concern themselves with generation of the
//! block, not the various [`RngCore`] methods (especially [`fill_bytes`], where
//! the optimal implementations are not trivial), and this allows
//! `ReseedingRng` (see the [`rand`](https://docs.rs/rand) crate) to perform
//! periodic reseeding with very low overhead.
//!
//! # Example
//!
//! ```no_run
//! use rand_core::{RngCore, SeedableRng};
//! use rand_core::block::{BlockRngCore, BlockRng};
//!
//! struct MyRngCore;
//!
//! impl BlockRngCore for MyRngCore {
//!     type Item = u32;
//!     type Results = [u32; 16];
//!
//!     fn generate(&mut self, results: &mut Self::Results) {
//!         unimplemented!()
//!     }
//! }
//!
//! impl SeedableRng for MyRngCore {
//!     type Seed = [u8; 32];
//!     fn from_seed(seed: Self::Seed) -> Self {
//!         unimplemented!()
//!     }
//! }
//!
//! // optionally, also implement CryptoBlockRng for MyRngCore
//!
//! // Final RNG.
//! let mut rng = BlockRng::<MyRngCore>::seed_from_u64(0);
//! println!("First value: {}", rng.next_u32());
//! ```
//!
//! [`BlockRngCore`]: crate::block::BlockRngCore
//! [`fill_bytes`]: RngCore::fill_bytes

use crate::impls::fill_via_chunks;
use crate::{CryptoRng, RngCore, SeedableRng, TryRngCore};
use core::fmt;
#[cfg(feature = "serde")]
use serde::{Deserialize, Serialize};

/// A trait for RNGs which do not generate random numbers individually, but in
/// blocks (typically `[u32; N]`). This technique is commonly used by
/// cryptographic RNGs to improve performance.
///
/// See the [module][crate::block] documentation for details.
pub trait BlockRngCore {
    /// Results element type, e.g. `u32`.
    type Item;

    /// Results type. This is the 'block' an RNG implementing `BlockRngCore`
    /// generates, which will usually be an array like `[u32; 16]`.
    type Results: AsRef<[Self::Item]> + AsMut<[Self::Item]> + Default;

    /// Generate a new block of results.
    fn generate(&mut self, results: &mut Self::Results);
}

/// A marker trait used to indicate that an [`RngCore`] implementation is
/// supposed to be cryptographically secure.
///
/// See [`CryptoRng`] docs for more information.
pub trait CryptoBlockRng: BlockRngCore {}

/// A wrapper type implementing [`RngCore`] for some type implementing
/// [`BlockRngCore`] with a `u32` array buffer; i.e. this can be used to
/// implement a full RNG from just a `generate` function.
///
/// The `core` field may be accessed directly but the results buffer may not.
/// PRNG implementations can simply use a type alias
/// (`pub type MyRng = BlockRng<MyRngCore>;`) but might prefer to use a
/// wrapper type (`pub struct MyRng(BlockRng<MyRngCore>);`); the latter must
/// re-implement `RngCore` but hides the implementation details and allows
/// extra functionality to be defined on the RNG
/// (e.g. `impl MyRng { fn set_stream(...){...} }`).
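///
/// A minimal sketch of the wrapper approach (reusing the hypothetical
/// `MyRngCore` from the module-level example; not compiled here):
///
/// ```ignore
/// pub struct MyRng(BlockRng<MyRngCore>);
///
/// impl RngCore for MyRng {
///     fn next_u32(&mut self) -> u32 { self.0.next_u32() }
///     fn next_u64(&mut self) -> u64 { self.0.next_u64() }
///     fn fill_bytes(&mut self, dest: &mut [u8]) { self.0.fill_bytes(dest) }
/// }
/// ```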
///
/// `BlockRng` has heavily optimized implementations of the [`RngCore`] methods
/// reading values from the results buffer, as well as
/// calling [`BlockRngCore::generate`] directly on the output array when
/// [`fill_bytes`] is called on a large array. These methods also handle
/// the bookkeeping of when to generate a new batch of values.
///
/// No whole generated `u32` values are thrown away and all values are consumed
/// in-order. [`next_u32`] simply takes the next available `u32` value.
/// [`next_u64`] is implemented by combining two `u32` values, least
/// significant first. [`fill_bytes`] consumes a whole number of `u32` values,
/// converting each `u32` to a byte slice in little-endian order. If the
/// requested byte length is not a multiple of 4, some bytes will be discarded.
///
/// See also [`BlockRng64`] which uses `u64` array buffers. Currently there is
/// no direct support for other buffer types.
///
/// For easy initialization `BlockRng` also implements [`SeedableRng`].
///
/// [`next_u32`]: RngCore::next_u32
/// [`next_u64`]: RngCore::next_u64
/// [`fill_bytes`]: RngCore::fill_bytes
#[derive(Clone)]
#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))]
#[cfg_attr(
    feature = "serde",
    serde(
        bound = "for<'x> R: Serialize + Deserialize<'x>, for<'x> R::Results: Serialize + Deserialize<'x>"
    )
)]
pub struct BlockRng<R: BlockRngCore> {
    results: R::Results,
    index: usize,
    /// The *core* part of the RNG, implementing the `generate` function.
    pub core: R,
}

// Custom Debug implementation that does not expose the contents of `results`.
impl<R: BlockRngCore + fmt::Debug> fmt::Debug for BlockRng<R> {
    fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
        fmt.debug_struct("BlockRng")
            .field("core", &self.core)
            .field("result_len", &self.results.as_ref().len())
            .field("index", &self.index)
            .finish()
    }
}

impl<R: BlockRngCore> BlockRng<R> {
    /// Create a new `BlockRng` from an existing RNG implementing
    /// `BlockRngCore`. Results will be generated on first use.
    #[inline]
    pub fn new(core: R) -> BlockRng<R> {
        let results_empty = R::Results::default();
        BlockRng {
            core,
            index: results_empty.as_ref().len(),
            results: results_empty,
        }
    }

    /// Get the index into the result buffer.
    ///
    /// If this is equal to or larger than the size of the result buffer then
    /// the buffer is "empty" and `generate()` must be called to produce new
    /// results.
    #[inline(always)]
    pub fn index(&self) -> usize {
        self.index
    }

    /// Reset the number of available results.
    /// This will force a new set of results to be generated on next use.
    #[inline]
    pub fn reset(&mut self) {
        self.index = self.results.as_ref().len();
    }

    /// Generate a new set of results immediately, setting the index to the
    /// given value.
    #[inline]
    pub fn generate_and_set(&mut self, index: usize) {
        assert!(index < self.results.as_ref().len());
        self.core.generate(&mut self.results);
        self.index = index;
    }
}

impl<R: BlockRngCore<Item = u32>> RngCore for BlockRng<R> {
    #[inline]
    fn next_u32(&mut self) -> u32 {
        if self.index >= self.results.as_ref().len() {
            self.generate_and_set(0);
        }

        let value = self.results.as_ref()[self.index];
        self.index += 1;
        value
    }

    #[inline]
    fn next_u64(&mut self) -> u64 {
        let read_u64 = |results: &[u32], index| {
            let data = &results[index..=index + 1];
            (u64::from(data[1]) << 32) | u64::from(data[0])
        };

        let len = self.results.as_ref().len();

        let index = self.index;
        if index < len - 1 {
            self.index += 2;
            // Read a `u64` from the current index
            read_u64(self.results.as_ref(), index)
        } else if index >= len {
            self.generate_and_set(2);
            read_u64(self.results.as_ref(), 0)
        } else {
            let x = u64::from(self.results.as_ref()[len - 1]);
            self.generate_and_set(1);
            let y = u64::from(self.results.as_ref()[0]);
            (y << 32) | x
        }
    }

    #[inline]
    fn fill_bytes(&mut self, dest: &mut [u8]) {
        let mut read_len = 0;
        while read_len < dest.len() {
            if self.index >= self.results.as_ref().len() {
                self.generate_and_set(0);
            }
            let (consumed_u32, filled_u8) =
                fill_via_chunks(&self.results.as_mut()[self.index..], &mut dest[read_len..]);

            self.index += consumed_u32;
            read_len += filled_u8;
        }
    }
}

impl<R: BlockRngCore + SeedableRng> SeedableRng for BlockRng<R> {
    type Seed = R::Seed;

    #[inline(always)]
    fn from_seed(seed: Self::Seed) -> Self {
        Self::new(R::from_seed(seed))
    }

    #[inline(always)]
    fn seed_from_u64(seed: u64) -> Self {
        Self::new(R::seed_from_u64(seed))
    }

    #[inline(always)]
    fn from_rng(rng: &mut impl RngCore) -> Self {
        Self::new(R::from_rng(rng))
    }

    #[inline(always)]
    fn try_from_rng<S: TryRngCore>(rng: &mut S) -> Result<Self, S::Error> {
        R::try_from_rng(rng).map(Self::new)
    }
}

impl<R: CryptoBlockRng + BlockRngCore<Item = u32>> CryptoRng for BlockRng<R> {}

/// A wrapper type implementing [`RngCore`] for some type implementing
/// [`BlockRngCore`] with a `u64` array buffer; i.e. this can be used to
/// implement a full RNG from just a `generate` function.
///
/// This is similar to [`BlockRng`], but specialized for algorithms that operate
/// on `u64` values.
///
/// No whole generated `u64` values are thrown away and all values are consumed
/// in-order. [`next_u64`] simply takes the next available `u64` value.
/// [`next_u32`] is however a bit special: half of a `u64` is consumed, leaving
/// the other half in the buffer. If the next function called is [`next_u32`],
/// that other half is consumed; however, both [`next_u64`] and
/// [`fill_bytes`] discard the rest of any half-consumed `u64` when called.
///
/// [`fill_bytes`] consumes a whole number of `u64` values. If the requested
/// length is not a multiple of 8, some bytes will be discarded.
///
/// [`next_u32`]: RngCore::next_u32
/// [`next_u64`]: RngCore::next_u64
/// [`fill_bytes`]: RngCore::fill_bytes
#[derive(Clone)]
#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))]
pub struct BlockRng64<R: BlockRngCore + ?Sized> {
    results: R::Results,
    index: usize,
    half_used: bool, // true if only half of the previous result is used
    /// The *core* part of the RNG, implementing the `generate` function.
    pub core: R,
}

// Custom Debug implementation that does not expose the contents of `results`.
impl<R: BlockRngCore + fmt::Debug> fmt::Debug for BlockRng64<R> {
    fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
        fmt.debug_struct("BlockRng64")
            .field("core", &self.core)
            .field("result_len", &self.results.as_ref().len())
            .field("index", &self.index)
            .field("half_used", &self.half_used)
            .finish()
    }
}

impl<R: BlockRngCore> BlockRng64<R> {
    /// Create a new `BlockRng64` from an existing RNG implementing
    /// `BlockRngCore`. Results will be generated on first use.
    #[inline]
    pub fn new(core: R) -> BlockRng64<R> {
        let results_empty = R::Results::default();
        BlockRng64 {
            core,
            index: results_empty.as_ref().len(),
            half_used: false,
            results: results_empty,
        }
    }

    /// Get the index into the result buffer.
    ///
    /// If this is equal to or larger than the size of the result buffer then
    /// the buffer is "empty" and `generate()` must be called to produce new
    /// results.
    #[inline(always)]
    pub fn index(&self) -> usize {
        self.index
    }

    /// Reset the number of available results.
    /// This will force a new set of results to be generated on next use.
    #[inline]
    pub fn reset(&mut self) {
        self.index = self.results.as_ref().len();
        self.half_used = false;
    }

    /// Generate a new set of results immediately, setting the index to the
    /// given value.
    #[inline]
    pub fn generate_and_set(&mut self, index: usize) {
        assert!(index < self.results.as_ref().len());
        self.core.generate(&mut self.results);
        self.index = index;
        self.half_used = false;
    }
}

impl<R: BlockRngCore<Item = u64>> RngCore for BlockRng64<R> {
    #[inline]
    fn next_u32(&mut self) -> u32 {
        let mut index = self.index - self.half_used as usize;
        if index >= self.results.as_ref().len() {
            self.core.generate(&mut self.results);
            self.index = 0;
            index = 0;
            // `self.half_used` is by definition `false`
            self.half_used = false;
        }

        let shift = 32 * (self.half_used as usize);

        self.half_used = !self.half_used;
        self.index += self.half_used as usize;

        (self.results.as_ref()[index] >> shift) as u32
    }

    #[inline]
    fn next_u64(&mut self) -> u64 {
        if self.index >= self.results.as_ref().len() {
            self.core.generate(&mut self.results);
            self.index = 0;
        }

        let value = self.results.as_ref()[self.index];
        self.index += 1;
        self.half_used = false;
        value
    }

    #[inline]
    fn fill_bytes(&mut self, dest: &mut [u8]) {
        let mut read_len = 0;
        self.half_used = false;
        while read_len < dest.len() {
            if self.index >= self.results.as_ref().len() {
                self.core.generate(&mut self.results);
                self.index = 0;
            }

            let (consumed_u64, filled_u8) =
                fill_via_chunks(&self.results.as_mut()[self.index..], &mut dest[read_len..]);

            self.index += consumed_u64;
            read_len += filled_u8;
        }
    }
}

impl<R: BlockRngCore + SeedableRng> SeedableRng for BlockRng64<R> {
    type Seed = R::Seed;

    #[inline(always)]
    fn from_seed(seed: Self::Seed) -> Self {
        Self::new(R::from_seed(seed))
    }

    #[inline(always)]
    fn seed_from_u64(seed: u64) -> Self {
        Self::new(R::seed_from_u64(seed))
    }

    #[inline(always)]
    fn from_rng(rng: &mut impl RngCore) -> Self {
        Self::new(R::from_rng(rng))
    }

    #[inline(always)]
    fn try_from_rng<S: TryRngCore>(rng: &mut S) -> Result<Self, S::Error> {
        R::try_from_rng(rng).map(Self::new)
    }
}

impl<R: CryptoBlockRng + BlockRngCore<Item = u64>> CryptoRng for BlockRng64<R> {}

#[cfg(test)]
mod test {
    use crate::block::{BlockRng, BlockRng64, BlockRngCore};
    use crate::{RngCore, SeedableRng};

    #[derive(Debug, Clone)]
    struct DummyRng {
        counter: u32,
    }

    impl BlockRngCore for DummyRng {
        type Item = u32;
        type Results = [u32; 16];

        fn generate(&mut self, results: &mut Self::Results) {
            for r in results {
                *r = self.counter;
                self.counter = self.counter.wrapping_add(3511615421);
            }
        }
    }

    impl SeedableRng for DummyRng {
        type Seed = [u8; 4];

        fn from_seed(seed: Self::Seed) -> Self {
            DummyRng {
                counter: u32::from_le_bytes(seed),
            }
        }
    }

    #[test]
    fn blockrng_next_u32_vs_next_u64() {
        let mut rng1 = BlockRng::<DummyRng>::from_seed([1, 2, 3, 4]);
        let mut rng2 = rng1.clone();
        let mut rng3 = rng1.clone();

        let mut a = [0; 16];
        a[..4].copy_from_slice(&rng1.next_u32().to_le_bytes());
        a[4..12].copy_from_slice(&rng1.next_u64().to_le_bytes());
        a[12..].copy_from_slice(&rng1.next_u32().to_le_bytes());

        let mut b = [0; 16];
        b[..4].copy_from_slice(&rng2.next_u32().to_le_bytes());
        b[4..8].copy_from_slice(&rng2.next_u32().to_le_bytes());
        b[8..].copy_from_slice(&rng2.next_u64().to_le_bytes());
        assert_eq!(a, b);

        let mut c = [0; 16];
        c[..8].copy_from_slice(&rng3.next_u64().to_le_bytes());
        c[8..12].copy_from_slice(&rng3.next_u32().to_le_bytes());
        c[12..].copy_from_slice(&rng3.next_u32().to_le_bytes());
        assert_eq!(a, c);
    }
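
    // A sketch test added alongside the originals (not from the upstream
    // suite): per the `BlockRng` documentation, `fill_bytes` consumes whole
    // `u32` values in order, little-endian, and discards the trailing bytes
    // of a partially used value.
    #[test]
    fn blockrng_fill_bytes_consumes_whole_u32s() {
        let mut rng1 = BlockRng::<DummyRng>::from_seed([1, 2, 3, 4]);
        let mut rng2 = rng1.clone();

        // A 16-byte fill should match four `next_u32` outputs, little-endian.
        let mut buf = [0u8; 16];
        rng1.fill_bytes(&mut buf);
        for chunk in buf.chunks(4) {
            assert_eq!(chunk, &rng2.next_u32().to_le_bytes()[..]);
        }

        // A 5-byte fill consumes two whole `u32` values; the unused bytes of
        // the second value are discarded.
        let mut buf = [0u8; 5];
        rng1.fill_bytes(&mut buf);
        rng2.next_u32();
        rng2.next_u32();
        assert_eq!(rng1.next_u32(), rng2.next_u32());
    }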

    #[derive(Debug, Clone)]
    struct DummyRng64 {
        counter: u64,
    }

    impl BlockRngCore for DummyRng64 {
        type Item = u64;
        type Results = [u64; 8];

        fn generate(&mut self, results: &mut Self::Results) {
            for r in results {
                *r = self.counter;
                self.counter = self.counter.wrapping_add(2781463553396133981);
            }
        }
    }

    impl SeedableRng for DummyRng64 {
        type Seed = [u8; 8];

        fn from_seed(seed: Self::Seed) -> Self {
            DummyRng64 {
                counter: u64::from_le_bytes(seed),
            }
        }
    }

    #[test]
    fn blockrng64_next_u32_vs_next_u64() {
        let mut rng1 = BlockRng64::<DummyRng64>::from_seed([1, 2, 3, 4, 5, 6, 7, 8]);
        let mut rng2 = rng1.clone();
        let mut rng3 = rng1.clone();

        let mut a = [0; 16];
        a[..4].copy_from_slice(&rng1.next_u32().to_le_bytes());
        a[4..12].copy_from_slice(&rng1.next_u64().to_le_bytes());
        a[12..].copy_from_slice(&rng1.next_u32().to_le_bytes());

        let mut b = [0; 16];
        b[..4].copy_from_slice(&rng2.next_u32().to_le_bytes());
        b[4..8].copy_from_slice(&rng2.next_u32().to_le_bytes());
        b[8..].copy_from_slice(&rng2.next_u64().to_le_bytes());
        assert_ne!(a, b);
        assert_eq!(&a[..4], &b[..4]);
        assert_eq!(&a[4..12], &b[8..]);

        let mut c = [0; 16];
        c[..8].copy_from_slice(&rng3.next_u64().to_le_bytes());
        c[8..12].copy_from_slice(&rng3.next_u32().to_le_bytes());
        c[12..].copy_from_slice(&rng3.next_u32().to_le_bytes());
        assert_eq!(b, c);
    }
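
    // A sketch test added alongside the originals (not from the upstream
    // suite): per the `BlockRng64` documentation, `fill_bytes` consumes whole
    // `u64` values and discards the unused half of a value half-consumed by
    // `next_u32`.
    #[test]
    fn blockrng64_fill_bytes_discards_half_used() {
        let mut rng1 = BlockRng64::<DummyRng64>::from_seed([1, 2, 3, 4, 5, 6, 7, 8]);
        let mut rng2 = rng1.clone();

        // `next_u32` returns the low half of the first `u64`...
        let first = rng2.next_u64();
        assert_eq!(rng1.next_u32(), first as u32);

        // ...and a following `fill_bytes` skips the unused high half,
        // continuing from the next whole `u64`.
        let mut buf = [0u8; 8];
        rng1.fill_bytes(&mut buf);
        assert_eq!(u64::from_le_bytes(buf), rng2.next_u64());
    }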
}