// Copyright 2011 The Chromium Authors
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "base/rand_util.h"

#include <stddef.h>
#include <stdint.h>

#include <algorithm>
#include <cmath>
#include <limits>
#include <memory>
#include <vector>

#include "base/compiler_specific.h"
#include "base/containers/span.h"
#include "base/logging.h"
#include "base/time/time.h"
#include "testing/gtest/include/gtest/gtest.h"

namespace base {

namespace {

constexpr int kIntMin = std::numeric_limits<int>::min();
constexpr int kIntMax = std::numeric_limits<int>::max();

}  // namespace

TEST(RandUtilTest, RandInt) {
  EXPECT_EQ(RandInt(0, 0), 0);
  EXPECT_EQ(RandInt(kIntMin, kIntMin), kIntMin);
  EXPECT_EQ(RandInt(kIntMax, kIntMax), kIntMax);

  // Check that the DCHECKS in RandInt() don't fire due to internal overflow.
  // There was a 50% chance of that happening, so calling it 40 times means
  // the chances of this passing by accident are tiny (9e-13).
  for (int i = 0; i < 40; ++i) {
    RandInt(kIntMin, kIntMax);
  }
}

TEST(RandUtilTest, RandDouble) {
  // Force 64-bit precision, making sure we're not in an 80-bit FPU register.
  volatile double number = RandDouble();
  EXPECT_LT(number, 1.0);
  EXPECT_GE(number, 0.0);
}

TEST(RandUtilTest, RandFloat) {
  // Force 32-bit precision, making sure we're not in an 80-bit FPU register.
  volatile float number = RandFloat();
  EXPECT_LT(number, 1.0f);
  EXPECT_GE(number, 0.0f);
}

TEST(RandUtilTest, RandBool) {
  // This test should finish extremely quickly unless `RandBool()` can only
  // give one result value.
  for (bool seen_false = false, seen_true = false; !seen_false || !seen_true;) {
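    // The ternary yields a reference to whichever flag matches this result,
    // so assigning through it records that the value was observed.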
    (RandBool() ? seen_true : seen_false) = true;
  }
}

TEST(RandUtilTest, RandTimeDelta) {
  {
    const auto delta = RandTimeDelta(-Seconds(2), -Seconds(1));
    EXPECT_GE(delta, -Seconds(2));
    EXPECT_LT(delta, -Seconds(1));
  }

  {
    const auto delta = RandTimeDelta(-Seconds(2), Seconds(2));
    EXPECT_GE(delta, -Seconds(2));
    EXPECT_LT(delta, Seconds(2));
  }

  {
    const auto delta = RandTimeDelta(Seconds(1), Seconds(2));
    EXPECT_GE(delta, Seconds(1));
    EXPECT_LT(delta, Seconds(2));
  }
}

TEST(RandUtilTest, RandTimeDeltaUpTo) {
  const auto delta = RandTimeDeltaUpTo(Seconds(2));
  EXPECT_FALSE(delta.is_negative());
  EXPECT_LT(delta, Seconds(2));
}

TEST(RandUtilTest, RandomizeByPercentage) {
  EXPECT_EQ(0, RandomizeByPercentage(0, 100));
  EXPECT_EQ(100, RandomizeByPercentage(100, 0));

  // Check that 10 +/- 200% will eventually produce values in each range
  // [-10, 0), [0, 10), [10, 20), [20, 30).
  for (bool a = false, b = false, c = false, d = false; !a || !b || !c || !d;) {
    const int r = RandomizeByPercentage(10, 200);
    EXPECT_GE(r, -10);
    EXPECT_LT(r, 30);
    a |= (r < 0);
    b |= (r >= 0 && r < 10);
    c |= (r >= 10 && r < 20);
    d |= (r >= 20);
  }
}

TEST(RandUtilTest, BitsToOpenEndedUnitInterval) {
  // Force 64-bit precision, making sure we're not in an 80-bit FPU register.
  volatile double all_zeros = BitsToOpenEndedUnitInterval(0x0);
  EXPECT_EQ(0.0, all_zeros);

  // Force 64-bit precision, making sure we're not in an 80-bit FPU register.
  volatile double smallest_nonzero = BitsToOpenEndedUnitInterval(0x1);
  EXPECT_LT(0.0, smallest_nonzero);

  for (uint64_t i = 0x2; i < 0x10; ++i) {
    // Force 64-bit precision, making sure we're not in an 80-bit FPU register.
    volatile double number = BitsToOpenEndedUnitInterval(i);
    EXPECT_EQ(i * smallest_nonzero, number);
  }

  // Force 64-bit precision, making sure we're not in an 80-bit FPU register.
  volatile double all_ones = BitsToOpenEndedUnitInterval(UINT64_MAX);
  EXPECT_GT(1.0, all_ones);
}

TEST(RandUtilTest, BitsToOpenEndedUnitIntervalF) {
  // Force 32-bit precision, making sure we're not in an 80-bit FPU register.
  volatile float all_zeros = BitsToOpenEndedUnitIntervalF(0x0);
  EXPECT_EQ(0.f, all_zeros);

  // Force 32-bit precision, making sure we're not in an 80-bit FPU register.
  volatile float smallest_nonzero = BitsToOpenEndedUnitIntervalF(0x1);
  EXPECT_LT(0.f, smallest_nonzero);

  for (uint64_t i = 0x2; i < 0x10; ++i) {
    // Force 32-bit precision, making sure we're not in an 80-bit FPU register.
    volatile float number = BitsToOpenEndedUnitIntervalF(i);
    EXPECT_EQ(i * smallest_nonzero, number);
  }

  // Force 32-bit precision, making sure we're not in an 80-bit FPU register.
  volatile float all_ones = BitsToOpenEndedUnitIntervalF(UINT64_MAX);
  EXPECT_GT(1.f, all_ones);
}

TEST(RandUtilTest, RandBytes) {
  const size_t buffer_size = 50;
  uint8_t buffer[buffer_size];
  UNSAFE_TODO(memset(buffer, 0, buffer_size));
  RandBytes(buffer);
  std::sort(buffer, UNSAFE_TODO(buffer + buffer_size));
  // Probability of occurrence of less than 25 unique bytes in 50 random bytes
  // is below 10^-25.
  UNSAFE_TODO(
      EXPECT_GT(std::unique(buffer, buffer + buffer_size) - buffer, 25));
}

// Verify that calling RandBytes with an empty buffer doesn't fail.
TEST(RandUtilTest, RandBytes0) {
  RandBytes(span<uint8_t>());
}

TEST(RandUtilTest, RandBytesAsVector) {
  std::vector<uint8_t> random_vec = RandBytesAsVector(0);
  EXPECT_TRUE(random_vec.empty());
  random_vec = RandBytesAsVector(1);
  EXPECT_EQ(1U, random_vec.size());
  random_vec = RandBytesAsVector(145);
  EXPECT_EQ(145U, random_vec.size());
  char accumulator = 0;
  for (auto i : random_vec) {
    accumulator |= i;
  }
  // In theory this test can fail, but it won't before the universe dies of
  // heat death.
  EXPECT_NE(0, accumulator);
}

TEST(RandUtilTest, RandBytesAsString) {
  std::string random_string = RandBytesAsString(1);
  EXPECT_EQ(1U, random_string.size());
  random_string = RandBytesAsString(145);
  EXPECT_EQ(145U, random_string.size());
  char accumulator = 0;
  for (auto i : random_string) {
    accumulator |= i;
  }
  // In theory this test can fail, but it won't before the universe dies of
  // heat death.
  EXPECT_NE(0, accumulator);
}

// Make sure that it is still appropriate to use RandGenerator in conjunction
// with std::random_shuffle().
TEST(RandUtilTest, RandGeneratorForRandomShuffle) {
  EXPECT_EQ(RandGenerator(1), 0U);
  EXPECT_LE(std::numeric_limits<ptrdiff_t>::max(),
            std::numeric_limits<int64_t>::max());
}

TEST(RandUtilTest, RandGeneratorIsUniform) {
  // Verify that RandGenerator has a uniform distribution. This is a
  // regression test that consistently failed when RandGenerator was
  // implemented this way:
  //
  //   return RandUint64() % max;
  //
  // A degenerate case for such an implementation is e.g. a top of
  // range that is 2/3rds of the way to MAX_UINT64, in which case the
  // bottom half of the range would be twice as likely to occur as the
  // top half. A bit of calculus care of jar@ shows that the largest
  // measurable delta is when the top of the range is 3/4ths of the
  // way, so that's what we use in the test.
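  // (With a top of range 3/4ths of the way, the naive modulo implementation
  // above maps two source values onto each output in the bottom third of the
  // range, making those outputs twice as likely as the rest.)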
  constexpr uint64_t kTopOfRange =
      (std::numeric_limits<uint64_t>::max() / 4ULL) * 3ULL;
  constexpr double kExpectedAverage = static_cast<double>(kTopOfRange / 2);
  constexpr double kAllowedVariance = kExpectedAverage / 50.0;  // +/- 2%
  constexpr int kMinAttempts = 1000;
  constexpr int kMaxAttempts = 1000000;

  double cumulative_average = 0.0;
  int count = 0;
  while (count < kMaxAttempts) {
    uint64_t value = RandGenerator(kTopOfRange);
    cumulative_average = (count * cumulative_average + value) / (count + 1);

    // Don't quit too quickly for things to start converging, or we may have
    // a false positive.
    if (count > kMinAttempts &&
        kExpectedAverage - kAllowedVariance < cumulative_average &&
        cumulative_average < kExpectedAverage + kAllowedVariance) {
      break;
    }

    ++count;
  }

  ASSERT_LT(count, kMaxAttempts) << "Expected average was " << kExpectedAverage
                                 << ", average ended at " << cumulative_average;
}

TEST(RandUtilTest, RandUint64ProducesBothValuesOfAllBits) {
  // This tests to see that our underlying random generator is good
  // enough, for some value of good enough.
  uint64_t kAllZeros = 0ULL;
  uint64_t kAllOnes = ~kAllZeros;
  uint64_t found_ones = kAllZeros;
  uint64_t found_zeros = kAllOnes;

  for (size_t i = 0; i < 1000; ++i) {
    uint64_t value = RandUint64();
    found_ones |= value;
    found_zeros &= value;

    if (found_zeros == kAllZeros && found_ones == kAllOnes) {
      return;
    }
  }

  FAIL() << "Didn't achieve all bit values in maximum number of tries.";
}

TEST(RandUtilTest, RandBytesLonger) {
  // Fuchsia can only retrieve 256 bytes of entropy at a time, so make sure we
  // handle longer requests than that.
  std::string random_string0 = RandBytesAsString(255);
  EXPECT_EQ(255u, random_string0.size());
  std::string random_string1 = RandBytesAsString(1023);
  EXPECT_EQ(1023u, random_string1.size());
  std::string random_string2 = RandBytesAsString(4097);
  EXPECT_EQ(4097u, random_string2.size());
}

// Benchmark test for RandBytes(). Disabled since it's intentionally slow and
// does not test anything that isn't already tested by the existing RandBytes()
// tests.
TEST(RandUtilTest, DISABLED_RandBytesPerf) {
  // Benchmark the performance of |kTestIterations| of RandBytes() using a
  // buffer size of |kTestBufferSize|.
  const int kTestIterations = 10;
  const size_t kTestBufferSize = 1 * 1024 * 1024;

  std::array<uint8_t, kTestBufferSize> buffer;
  const TimeTicks now = TimeTicks::Now();
  for (int i = 0; i < kTestIterations; ++i) {
    RandBytes(buffer);
  }
  const TimeTicks end = TimeTicks::Now();

  LOG(INFO) << "RandBytes(" << kTestBufferSize
            << ") took: " << (end - now).InMicroseconds() << "µs";
}

TEST(RandUtilTest, InsecureRandomGeneratorProducesBothValuesOfAllBits) {
  // This tests to see that our underlying random generator is good
  // enough, for some value of good enough.
  uint64_t kAllZeros = 0ULL;
  uint64_t kAllOnes = ~kAllZeros;
  uint64_t found_ones = kAllZeros;
  uint64_t found_zeros = kAllOnes;

  InsecureRandomGenerator generator;

  for (size_t i = 0; i < 1000; ++i) {
    uint64_t value = generator.RandUint64();
    found_ones |= value;
    found_zeros &= value;

    if (found_zeros == kAllZeros && found_ones == kAllOnes) {
      return;
    }
  }

  FAIL() << "Didn't achieve all bit values in maximum number of tries.";
}

namespace {

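// Approximately the 1st and 99th percentiles of the standard normal
// distribution, used as x_p in ChiSquaredCriticalValue() below.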
constexpr double kXp1Percent = -2.33;
constexpr double kXp99Percent = 2.33;

double ChiSquaredCriticalValue(double nu, double x_p) {
  // From "The Art Of Computer Programming" (TAOCP), Volume 2, Section 3.3.1,
  // Table 1. This is the asymptotic value for nu > 30, up to O(1 / sqrt(nu)).
  return nu + sqrt(2. * nu) * x_p + 2. / 3. * (x_p * x_p) - 2. / 3.;
}

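// Returns the |num_bits| bits of |value| starting at |from_bit|, as an int.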
int ExtractBits(uint64_t value, int from_bit, int num_bits) {
  return (value >> from_bit) & ((1 << num_bits) - 1);
}

// Performs a Chi-Squared test on a subset of |num_bits| extracted starting from
// |from_bit| in the generated value.
//
// See TAOCP, Volume 2, Section 3.3.1, and
// https://en.wikipedia.org/wiki/Pearson%27s_chi-squared_test for details.
//
// This is only one of the many, many random number generator tests we could
// do, but they are cumbersome, as they are typically very slow, and expected
// to fail from time to time, due to their probabilistic nature.
//
// The generator we use has, however, been vetted with the BigCrush test suite
// from TestU01, so this should suffice as a smoke test that our
// implementation is not wrong.
bool ChiSquaredTest(InsecureRandomGenerator& gen,
                    size_t n,
                    int from_bit,
                    int num_bits) {
  const int range = 1 << num_bits;
  CHECK_EQ(static_cast<int>(n % range), 0) << "Makes computations simpler";
  std::vector<size_t> samples(range, 0);

  // Count how many samples of each value are found. All buckets should be
  // almost equal if the generator is suitably uniformly random.
  for (size_t i = 0; i < n; i++) {
    int sample = ExtractBits(gen.RandUint64(), from_bit, num_bits);
    samples[sample] += 1;
  }

  // Compute the Chi-Squared statistic, which is:
  // \Sum_{k=0}^{range-1} \frac{(count - expected)^2}{expected}
  double chi_squared = 0.;
  double expected_count = n / range;
  for (size_t sample_count : samples) {
    double deviation = sample_count - expected_count;
    chi_squared += (deviation * deviation) / expected_count;
  }

  // The generator should produce numbers that are not too far off (chi_squared
  // below a given quantile), but not too close to the ideal distribution
  // either (chi_squared too low).
  //
  // See The Art Of Computer Programming, Volume 2, Section 3.3.1 for details.
  return chi_squared > ChiSquaredCriticalValue(range - 1, kXp1Percent) &&
         chi_squared < ChiSquaredCriticalValue(range - 1, kXp99Percent);
}

}  // namespace

TEST(RandUtilTest, InsecureRandomGeneratorChiSquared) {
  constexpr int kIterations = 50;

  // Specifically test the low bits, which are usually weaker in random number
  // generators. We don't use them for the 32 bit number generation, but let's
  // make sure they are still suitable.
  for (int start_bit : {1, 2, 3, 8, 12, 20, 32, 48, 54}) {
    int pass_count = 0;
    for (int i = 0; i < kIterations; i++) {
      size_t samples = 1 << 16;
      InsecureRandomGenerator gen;
      // Fix the seed to make the test non-flaky.
      gen.ReseedForTesting(kIterations + 1);
      bool pass = ChiSquaredTest(gen, samples, start_bit, 8);
      pass_count += pass;
    }

    // We exclude 1% on each side, so we expect 98% of tests to pass, meaning 98
    // * kIterations / 100. However this is asymptotic, so add a bit of leeway.
    int expected_pass_count = (kIterations * 98) / 100;
    EXPECT_GE(pass_count, expected_pass_count - ((kIterations * 2) / 100))
        << "For start_bit = " << start_bit;
  }
}

TEST(RandUtilTest, InsecureRandomGeneratorRandDouble) {
  InsecureRandomGenerator gen;

  for (int i = 0; i < 1000; i++) {
    volatile double x = gen.RandDouble();
    EXPECT_GE(x, 0.);
    EXPECT_LT(x, 1.);
  }
}

TEST(RandUtilTest, MetricsSubSampler) {
  MetricsSubSampler sub_sampler;
  int true_count = 0;
  int false_count = 0;
  for (int i = 0; i < 1000; ++i) {
    if (sub_sampler.ShouldSample(0.5)) {
      ++true_count;
    } else {
      ++false_count;
    }
  }

  // Validate that during normal operation MetricsSubSampler::ShouldSample()
  // does not always give the same result. It's technically possible to fail
  // this test during normal operation but if the sampling is realistic it
  // should happen about once every 2^999 times (the likelihood of the [1,999]
  // results being the same as [0], which can be either). This should not make
  // this test flaky in the eyes of automated testing.
  EXPECT_GT(true_count, 0);
  EXPECT_GT(false_count, 0);
}

TEST(RandUtilTest, MetricsSubSamplerTestingSupport) {
  MetricsSubSampler sub_sampler;

  // ScopedAlwaysSampleForTesting makes ShouldSample() return true with
  // any probability.
  {
    MetricsSubSampler::ScopedAlwaysSampleForTesting always_sample;
    for (int i = 0; i < 100; ++i) {
      EXPECT_TRUE(sub_sampler.ShouldSample(0));
      EXPECT_TRUE(sub_sampler.ShouldSample(0.5));
      EXPECT_TRUE(sub_sampler.ShouldSample(1));
    }
  }

  // ScopedNeverSampleForTesting makes ShouldSample() return false with
  // any probability.
  {
    MetricsSubSampler::ScopedNeverSampleForTesting never_sample;
    for (int i = 0; i < 100; ++i) {
      EXPECT_FALSE(sub_sampler.ShouldSample(0));
      EXPECT_FALSE(sub_sampler.ShouldSample(0.5));
      EXPECT_FALSE(sub_sampler.ShouldSample(1));
    }
  }
}

}  // namespace base