From 92afe8b55aea04ec8f946b66a9ddbf18caec509c Mon Sep 17 00:00:00 2001 From: Jan Teske Date: Sun, 9 Nov 2025 10:23:15 +0100 Subject: [PATCH] Update to rand 0.9.2 --- Cargo.lock | 42 ++++++----- src/adapter/Cargo.toml | 3 +- src/adapter/src/coord/message_handler.rs | 2 +- src/adapter/src/coord/statement_logging.rs | 11 +-- src/adapter/src/session.rs | 3 +- src/avro/Cargo.toml | 2 +- src/catalog/Cargo.toml | 2 +- src/cloud-resources/Cargo.toml | 2 +- src/cloud-resources/src/crd/materialize.rs | 6 +- src/cluster/Cargo.toml | 2 +- src/cluster/src/communication.rs | 6 +- src/environmentd/Cargo.toml | 2 +- src/environmentd/tests/server.rs | 2 +- src/expr/Cargo.toml | 2 +- src/expr/benches/window_functions.rs | 6 +- src/kafka-util/Cargo.toml | 2 +- src/kafka-util/src/bin/kgen.rs | 31 +++++---- src/orchestratord/Cargo.toml | 2 +- .../controller/materialize/environmentd.rs | 5 +- src/ore/Cargo.toml | 2 +- src/ore/src/id_gen.rs | 4 +- src/persist/Cargo.toml | 2 +- src/persist/src/retry.rs | 2 +- src/persist/src/unreliable.rs | 4 +- src/repr/Cargo.toml | 2 +- src/repr/benches/row.rs | 69 +++++++++++-------- src/repr/benches/strconv.rs | 8 +-- src/service/Cargo.toml | 2 +- src/ssh-util/Cargo.toml | 2 +- src/ssh-util/src/tunnel.rs | 4 +- src/storage-types/Cargo.toml | 2 +- src/storage-types/benches/row.rs | 25 +++++-- src/storage/Cargo.toml | 2 +- src/storage/src/source/generator/auction.rs | 10 +-- src/storage/src/source/generator/marketing.rs | 30 ++++---- src/storage/src/source/generator/tpch.rs | 55 ++++++++------- src/testdrive/Cargo.toml | 2 +- src/testdrive/src/action.rs | 3 +- src/testdrive/src/action/sleep.rs | 4 +- src/testdrive/src/bin/testdrive.rs | 4 +- src/timestamp-oracle/Cargo.toml | 2 +- src/timestamp-oracle/src/retry.rs | 2 +- src/txn-wal/Cargo.toml | 2 +- src/workspace-hack/Cargo.toml | 6 +- test/test-util/Cargo.toml | 2 +- test/test-util/src/generator/bytes.rs | 4 +- 46 files changed, 204 insertions(+), 185 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 75b48dbd9f608..7dfaf691903f5 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -5281,8 +5281,7 @@ dependencies = [ "prometheus", "prost", "qcell", - "rand 0.8.5", - "rand_chacha 0.3.1", + "rand 0.9.2", "semver", "serde", "serde_json", @@ -5409,7 +5408,7 @@ dependencies = [ "flate2", "itertools 0.14.0", "mz-ore", - "rand 0.8.5", + "rand 0.9.2", "regex", "serde", "serde_json", @@ -5589,7 +5588,7 @@ dependencies = [ "proptest", "proptest-derive", "prost", - "rand 0.8.5", + "rand 0.9.2", "semver", "serde", "serde_json", @@ -5718,7 +5717,7 @@ dependencies = [ "mz-ore", "mz-repr", "mz-server-core", - "rand 0.8.5", + "rand 0.9.2", "schemars", "semver", "serde", @@ -5741,7 +5740,7 @@ dependencies = [ "mz-cluster-client", "mz-ore", "mz-service", - "rand 0.8.5", + "rand 0.9.2", "regex", "timely", "tokio", @@ -6162,7 +6161,7 @@ dependencies = [ "predicates 3.1.3", "prometheus", "proptest", - "rand 0.8.5", + "rand 0.9.2", "rdkafka", "rdkafka-sys", "regex", @@ -6246,7 +6245,7 @@ dependencies = [ "proptest-derive", "prost", "prost-build", - "rand 0.8.5", + "rand 0.9.2", "regex", "regex-syntax", "seahash", @@ -6503,7 +6502,7 @@ dependencies = [ "num_cpus", "prost", "prost-build", - "rand 0.8.5", + "rand 0.9.2", "rdkafka", "serde", "serde_json", @@ -6801,7 +6800,7 @@ dependencies = [ "mz-prof-http", "mz-server-core", "prometheus", - "rand 0.8.5", + "rand 0.9.2", "reqwest 0.12.23", "semver", "serde", @@ -6859,7 +6858,7 @@ dependencies = [ "prometheus", "proptest", "proptest-derive", - "rand 0.8.5", + "rand 0.9.2", "scopeguard", "sentry", 
"sentry-panic", @@ -6944,7 +6943,7 @@ dependencies = [ "proptest-derive", "prost", "prost-build", - "rand 0.8.5", + "rand 0.9.2", "reqwest 0.12.23", "serde", "serde_json", @@ -7345,7 +7344,7 @@ dependencies = [ "proptest-derive", "prost", "prost-build", - "rand 0.8.5", + "rand 0.9.2", "regex", "ryu", "serde", @@ -7503,7 +7502,7 @@ dependencies = [ "os_info", "pin-project", "prometheus", - "rand 0.8.5", + "rand 0.9.2", "semver", "sentry-tracing", "serde", @@ -7754,7 +7753,7 @@ dependencies = [ "openssh", "openssh-mux-client", "openssl", - "rand 0.8.5", + "rand 0.9.2", "scopeguard", "serde", "serde_json", @@ -7826,7 +7825,7 @@ dependencies = [ "postgres-replication", "prometheus", "prost", - "rand 0.8.5", + "rand 0.9.2", "rdkafka", "regex", "rocksdb", @@ -8034,7 +8033,7 @@ dependencies = [ "proptest-derive", "prost", "prost-build", - "rand 0.8.5", + "rand 0.9.2", "rdkafka", "regex", "reqwest 0.11.24", @@ -8059,7 +8058,7 @@ dependencies = [ "chrono", "mz-kafka-util", "mz-ore", - "rand 0.8.5", + "rand 0.9.2", "rdkafka", "tokio", "tokio-postgres", @@ -8121,7 +8120,7 @@ dependencies = [ "prost-build", "prost-reflect", "prost-types", - "rand 0.8.5", + "rand 0.9.2", "rdkafka", "regex", "reqwest 0.11.24", @@ -8187,7 +8186,7 @@ dependencies = [ "mz-postgres-client", "mz-repr", "postgres-protocol", - "rand 0.8.5", + "rand 0.9.2", "serde", "tokio", "tracing", @@ -8268,7 +8267,7 @@ dependencies = [ "prometheus", "prost", "prost-build", - "rand 0.8.5", + "rand 0.9.2", "serde", "timely", "tokio", @@ -13434,7 +13433,6 @@ dependencies = [ "quote", "rand 0.8.5", "rand 0.9.2", - "rand_chacha 0.3.1", "rand_chacha 0.9.0", "rand_core 0.6.4", "rdkafka-sys", diff --git a/src/adapter/Cargo.toml b/src/adapter/Cargo.toml index a1acc662b877e..87935f77d56cd 100644 --- a/src/adapter/Cargo.toml +++ b/src/adapter/Cargo.toml @@ -77,8 +77,7 @@ opentelemetry = { version = "0.24.0", features = ["trace"] } prometheus = { version = "0.14.0", default-features = false } prost = { version = "0.13.5", features = ["no-recursion-limit"] } qcell = "0.5" -rand = "0.8.5" -rand_chacha = "0.3" +rand = "0.9.2" semver = "1.0.27" serde = "1.0.219" serde_json = "1.0.145" diff --git a/src/adapter/src/coord/message_handler.rs b/src/adapter/src/coord/message_handler.rs index 804e3c762e717..b65dcc1f969a1 100644 --- a/src/adapter/src/coord/message_handler.rs +++ b/src/adapter/src/coord/message_handler.rs @@ -314,7 +314,7 @@ impl Coordinator { EpochMillis::try_from(self.storage_usage_collection_interval.as_millis()) .expect("storage usage collection interval must fit into u64"); let offset = - rngs::SmallRng::from_seed(seed).gen_range(0..storage_usage_collection_interval_ms); + rngs::SmallRng::from_seed(seed).random_range(0..storage_usage_collection_interval_ms); let now_ts: EpochMillis = self.peek_local_write_ts().await.into(); // 2) Determine the amount of ms between now and the next collection time. 
diff --git a/src/adapter/src/coord/statement_logging.rs b/src/adapter/src/coord/statement_logging.rs index 7385a57b2d77e..aa2d79dca1d2a 100644 --- a/src/adapter/src/coord/statement_logging.rs +++ b/src/adapter/src/coord/statement_logging.rs @@ -26,7 +26,8 @@ use mz_sql_parser::ast::{StatementKind, statement_kind_label_value}; use mz_storage_client::controller::IntrospectionType; use qcell::QCell; use rand::SeedableRng; -use rand::{distributions::Bernoulli, prelude::Distribution, thread_rng}; +use rand::distr::{Bernoulli, Distribution}; +use rand::rngs::SmallRng; use sha2::{Digest, Sha256}; use tokio::time::MissedTickBehavior; use tracing::debug; @@ -138,9 +139,9 @@ pub(crate) struct StatementLogging { unlogged_sessions: BTreeMap, /// A reproducible RNG for deciding whether to sample statement executions. - /// Only used by tests; otherwise, `rand::thread_rng()` is used. + /// Only used by tests; otherwise, `rand::rng()` is used. /// Controlled by the system var `statement_logging_use_reproducible_rng`. - reproducible_rng: rand_chacha::ChaCha8Rng, + reproducible_rng: SmallRng, pending_statement_execution_events: Vec<(Row, Diff)>, pending_prepared_statement_events: Vec, @@ -165,7 +166,7 @@ impl StatementLogging { Self { executions_begun: BTreeMap::new(), unlogged_sessions: BTreeMap::new(), - reproducible_rng: rand_chacha::ChaCha8Rng::seed_from_u64(42), + reproducible_rng: SmallRng::seed_from_u64(42), pending_statement_execution_events: Vec::new(), pending_prepared_statement_events: Vec::new(), pending_session_events: Vec::new(), @@ -697,7 +698,7 @@ impl Coordinator { { distribution.sample(&mut self.statement_logging.reproducible_rng) } else { - distribution.sample(&mut thread_rng()) + distribution.sample(&mut rand::rng()) }; // Track how many statements we're recording. 
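The statement_logging.rs changes above capture three migrations that recur throughout this patch: `rand::distributions` is now `rand::distr`, `rand::thread_rng()` is now `rand::rng()`, and the seeded `ChaCha8Rng` used for the test-only reproducible path is replaced with a seeded `SmallRng`. A minimal sketch under those assumptions (the `should_sample` helper and its `reproducible` flag are illustrative, not the coordinator's actual API):

use rand::SeedableRng;
use rand::distr::{Bernoulli, Distribution};
use rand::rngs::SmallRng;

// Decide whether to sample one statement execution at the given rate.
fn should_sample(rate: f64, reproducible: bool) -> bool {
    let coin = Bernoulli::new(rate).expect("rate must be within [0, 1]");
    if reproducible {
        // A seeded SmallRng stands in for the old rand_chacha::ChaCha8Rng path.
        coin.sample(&mut SmallRng::seed_from_u64(42))
    } else {
        // `rand::rng()` is the 0.9 replacement for `rand::thread_rng()`.
        coin.sample(&mut rand::rng())
    }
}

One caveat: unlike ChaCha8Rng, SmallRng does not promise a stable algorithm across rand versions or platforms, which is presumably acceptable here since the reproducible path is only exercised by tests.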
diff --git a/src/adapter/src/session.rs b/src/adapter/src/session.rs index 50a723a6151e2..4567381d3a7d6 100644 --- a/src/adapter/src/session.rs +++ b/src/adapter/src/session.rs @@ -47,7 +47,6 @@ use mz_sql_parser::ast::TransactionIsolationLevel; use mz_storage_client::client::TableData; use mz_storage_types::sources::Timeline; use qcell::{QCell, QCellOwner}; -use rand::Rng; use tokio::sync::mpsc::{self, UnboundedReceiver, UnboundedSender}; use tokio::sync::watch; use uuid::Uuid; @@ -346,7 +345,7 @@ impl Session { notices_tx, notices_rx, next_transaction_id: 0, - secret_key: rand::thread_rng().r#gen(), + secret_key: rand::random(), external_metadata_rx, qcell_owner: QCellOwner::new(), session_oracles: BTreeMap::new(), diff --git a/src/avro/Cargo.toml b/src/avro/Cargo.toml index 4d76f378d8987..71084cf9b6008 100644 --- a/src/avro/Cargo.toml +++ b/src/avro/Cargo.toml @@ -24,7 +24,7 @@ enum-kinds = "0.5.1" flate2 = "1.1.5" itertools = "0.14.0" mz-ore = { path = "../ore", features = ["test"] } -rand = "0.8.5" +rand = "0.9.2" regex = "1.12.2" serde = { version = "1.0.219", features = ["derive"] } serde_json = "1.0.145" diff --git a/src/catalog/Cargo.toml b/src/catalog/Cargo.toml index 4fbf8d421cedd..dda5da06bfc86 100644 --- a/src/catalog/Cargo.toml +++ b/src/catalog/Cargo.toml @@ -56,7 +56,7 @@ prometheus = { version = "0.14.0", default-features = false } proptest = { version = "1.7.0", default-features = false, features = ["std"] } proptest-derive = { version = "0.5.1", features = ["boxed_union"] } prost = "0.13.5" -rand = "0.8.5" +rand = "0.9.2" semver = { version = "1.0.27" } serde = "1.0.219" serde_json = "1.0.145" diff --git a/src/cloud-resources/Cargo.toml b/src/cloud-resources/Cargo.toml index cd7751a4e6617..87e633ba030e6 100644 --- a/src/cloud-resources/Cargo.toml +++ b/src/cloud-resources/Cargo.toml @@ -17,7 +17,7 @@ k8s-openapi = { version = "0.26.0", features = ["schemars", "v1_31"] } kube = { version = "2.0.1", default-features = false, features = ["client", "derive", "openssl-tls", "ws", "runtime"] } mz-ore = { path = "../ore", default-features = false, features = ["async"] } mz-server-core = { path = "../server-core", default-features = false } -rand = "0.8.5" +rand = "0.9.2" schemars = { version = "1.0.4", features = ["uuid1"] } semver = "1.0.27" serde = "1.0.219" diff --git a/src/cloud-resources/src/crd/materialize.rs b/src/cloud-resources/src/crd/materialize.rs index 21cf27b7cb82f..a0793695d6227 100644 --- a/src/cloud-resources/src/crd/materialize.rs +++ b/src/cloud-resources/src/crd/materialize.rs @@ -18,7 +18,7 @@ use k8s_openapi::{ }; use kube::{CustomResource, Resource, ResourceExt, api::ObjectMeta}; use rand::Rng; -use rand::distributions::Uniform; +use rand::distr::Uniform; use schemars::JsonSchema; use semver::Version; use serde::{Deserialize, Serialize}; @@ -491,8 +491,8 @@ pub mod v1alpha1 { // built-in Alphanumeric distribution from rand, which // includes both upper and lowercase letters. 
const CHARSET: &[u8] = b"abcdefghijklmnopqrstuvwxyz0123456789"; - status.resource_id = rand::thread_rng() - .sample_iter(Uniform::new(0, CHARSET.len())) + status.resource_id = rand::rng() + .sample_iter(Uniform::new(0, CHARSET.len()).expect("valid range")) .take(10) .map(|i| char::from(CHARSET[i])) .collect(); diff --git a/src/cluster/Cargo.toml b/src/cluster/Cargo.toml index 54b27db077acc..c30e16665a91e 100644 --- a/src/cluster/Cargo.toml +++ b/src/cluster/Cargo.toml @@ -19,7 +19,7 @@ lgalloc = "0.6.0" mz-cluster-client = { path = "../cluster-client" } mz-ore = { path = "../ore", features = ["async", "process", "tracing"] } mz-service = { path = "../service" } -rand = "0.8.5" +rand = "0.9.2" regex = "1.12.2" timely = "0.25.1" tokio = { version = "1.44.1", features = ["fs", "rt", "sync", "net"] } diff --git a/src/cluster/src/communication.rs b/src/cluster/src/communication.rs index ad86a3067c3d0..304106324cd11 100644 --- a/src/cluster/src/communication.rs +++ b/src/cluster/src/communication.rs @@ -537,7 +537,7 @@ mod turmoil_tests { let mut sim = turmoil::Builder::new() .enable_random_order() - .rng_seed(rng.r#gen()) + .rng_seed(rng.random()) .build(); let processes: Vec<_> = (0..NUM_PROCESSES).map(|i| format!("process-{i}")).collect(); @@ -618,12 +618,12 @@ mod turmoil_tests { // Let random processes crash at random times. for _ in 0..NUM_CRASHES { - let steps = rng.gen_range(1..100); + let steps = rng.random_range(1..100); for _ in 0..steps { sim.step().unwrap(); } - let i = rng.gen_range(0..NUM_PROCESSES); + let i = rng.random_range(0..NUM_PROCESSES); info!("bouncing process {i}"); sim.bounce(format!("process-{i}")); } diff --git a/src/environmentd/Cargo.toml b/src/environmentd/Cargo.toml index ba90f3f3f0563..b28b6e5d94519 100644 --- a/src/environmentd/Cargo.toml +++ b/src/environmentd/Cargo.toml @@ -94,7 +94,7 @@ rdkafka-sys = { version = "4.3.0", features = [ "libz-static", "zstd", ] } -rand = "0.8.5" +rand = "0.9.2" regex = { version = "1.12.2", optional = true } reqwest = { version = "0.11.13", features = ["json"] } rlimit = "0.10.2" diff --git a/src/environmentd/tests/server.rs b/src/environmentd/tests/server.rs index 7a5efafc41c1e..6f56b3c7f4bc4 100644 --- a/src/environmentd/tests/server.rs +++ b/src/environmentd/tests/server.rs @@ -2970,7 +2970,7 @@ fn test_invalid_webhook_body() { // No matter what is in the body, we should always succeed. 
let mut data = [0u8; 128]; - rand::thread_rng().fill_bytes(&mut data); + rand::rng().fill_bytes(&mut data); println!("Random bytes: {data:?}"); let resp = http_client .post(webhook_url) diff --git a/src/expr/Cargo.toml b/src/expr/Cargo.toml index 79841b5743ce2..ec0b92172f0f7 100644 --- a/src/expr/Cargo.toml +++ b/src/expr/Cargo.toml @@ -77,7 +77,7 @@ insta = "1.43" mz-expr-derive-impl = { path = "../expr-derive-impl", features = ["test"] } mz-expr-test-util = { path = "../expr-test-util" } mz-ore = { path = "../ore" } -rand = "0.8.5" +rand = "0.9.2" [build-dependencies] mz-build-tools = { path = "../build-tools", default-features = false, features = ["protobuf-src"] } diff --git a/src/expr/benches/window_functions.rs b/src/expr/benches/window_functions.rs index 4a8199dd5b47d..97b1f695ae4c9 100644 --- a/src/expr/benches/window_functions.rs +++ b/src/expr/benches/window_functions.rs @@ -14,7 +14,7 @@ use criterion::{Criterion, criterion_group, criterion_main}; use mz_expr::ColumnOrder; use mz_repr::adt::timestamp::CheckedTimestamp; use mz_repr::{Datum, RowArena}; -use rand::distributions::{Distribution, Uniform}; +use rand::distr::{Distribution, Uniform}; /// Microbenchmark to test an important part of window function evaluation. /// @@ -53,7 +53,7 @@ fn order_aggregate_datums_benchmark(c: &mut Criterion) { let scale = 1000000; group.bench_function("order_aggregate_datums", |b| { - let mut rng = rand::thread_rng(); + let mut rng = rand::rng(); let temp_storage = RowArena::new(); let order_by = vec![ColumnOrder { @@ -70,7 +70,7 @@ fn order_aggregate_datums_benchmark(c: &mut Criterion) { // ), // , // ) - let distr = Uniform::new(0, 1000000000); + let distr = Uniform::new(0, 1000000000).expect("valid range"); let mut datums = Vec::with_capacity(scale); for _i in 0..scale { datums.push(temp_storage.make_datum(|packer| { diff --git a/src/kafka-util/Cargo.toml b/src/kafka-util/Cargo.toml index 03d15328c191c..760218067ce4b 100644 --- a/src/kafka-util/Cargo.toml +++ b/src/kafka-util/Cargo.toml @@ -28,7 +28,7 @@ mz-ore = { path = "../ore", features = ["cli", "network", "async"] } mz-ssh-util = { path = "../ssh-util" } num_cpus = "1.17.0" prost = { version = "0.13.5", features = ["no-recursion-limit"] } -rand = "0.8.5" +rand = "0.9.2" rdkafka = { version = "0.29.0", features = ["cmake-build", "ssl-vendored", "libz-static", "zstd"] } serde = { version = "1.0.219", features = ["derive"] } serde_json = "1.0.145" diff --git a/src/kafka-util/src/bin/kgen.rs b/src/kafka-util/src/bin/kgen.rs index 9dafe7a5e3ff3..d1733de3f8ad5 100644 --- a/src/kafka-util/src/bin/kgen.rs +++ b/src/kafka-util/src/bin/kgen.rs @@ -23,10 +23,10 @@ use mz_kafka_util::client::MzClientContext; use mz_ore::cast::CastFrom; use mz_ore::cli::{self, CliConfig}; use mz_ore::retry::Retry; -use rand::distributions::uniform::SampleUniform; -use rand::distributions::{Alphanumeric, Bernoulli, Uniform, WeightedIndex}; +use rand::distr::uniform::SampleUniform; +use rand::distr::weighted::WeightedIndex; +use rand::distr::{Alphanumeric, Bernoulli, Uniform}; use rand::prelude::{Distribution, ThreadRng}; -use rand::thread_rng; use rdkafka::error::KafkaError; use rdkafka::producer::{BaseRecord, Producer, ThreadedProducer}; use rdkafka::types::RDKafkaErrorCode; @@ -251,7 +251,7 @@ impl<'a> RandomAvroGenerator<'a> { x[0].as_i64().unwrap().try_into().unwrap(), x[1].as_i64().unwrap().try_into().unwrap(), ); - let dist = Uniform::new_inclusive(min, max); + let dist = Uniform::new_inclusive(min, max).expect("valid range"); move |rng| dist.sample(rng) } fn 
float_dist( @@ -261,7 +261,7 @@ impl<'a> RandomAvroGenerator<'a> { // TODO(benesch): rewrite to avoid `as`. #[allow(clippy::as_conversions)] let (min, max) = (x[0].as_f64().unwrap() as f32, x[1].as_f64().unwrap() as f32); - let dist = Uniform::new_inclusive(min, max); + let dist = Uniform::new_inclusive(min, max).expect("valid range"); move |rng| dist.sample(rng) } fn double_dist( @@ -269,7 +269,7 @@ impl<'a> RandomAvroGenerator<'a> { ) -> impl FnMut(&mut ThreadRng) -> f64 + Clone + use<> { let x = json.as_array().unwrap(); let (min, max) = (x[0].as_f64().unwrap(), x[1].as_f64().unwrap()); - let dist = Uniform::new_inclusive(min, max); + let dist = Uniform::new_inclusive(min, max).expect("valid range"); move |rng| dist.sample(rng) } fn string_dist( @@ -288,7 +288,7 @@ impl<'a> RandomAvroGenerator<'a> { let mut len = integral_dist::(json); move |rng| { let len = len(rng); - let bd = Uniform::new_inclusive(0, 255); + let bd = Uniform::new_inclusive(0, 255).expect("valid range"); iter::repeat_with(|| bd.sample(rng)).take(len).collect() } } @@ -314,7 +314,7 @@ impl<'a> RandomAvroGenerator<'a> { min, precision ); - let dist = Uniform::::new_inclusive(min, max); + let dist = Uniform::::new_inclusive(min, max).expect("valid range"); move |rng| dist.sample(rng).to_be_bytes().to_vec() } // TODO(benesch): rewrite to avoid `as`. @@ -614,8 +614,9 @@ async fn main() -> anyhow::Result<()> { bail!("cannot specify --avro-distribution without --values=avro"); } let len = - Uniform::new_inclusive(args.min_value_size.unwrap(), args.max_value_size.unwrap()); - let bytes = Uniform::new_inclusive(0, 255); + Uniform::new_inclusive(args.min_value_size.unwrap(), args.max_value_size.unwrap()) + .expect("valid range"); + let bytes = Uniform::new_inclusive(0, 255).expect("valid range"); ValueGenerator::UniformBytes { len, bytes } } @@ -689,10 +690,10 @@ async fn main() -> anyhow::Result<()> { } }; let key_dist = if let KeyFormat::Random = args.key_format { - Some(Uniform::new_inclusive( - args.key_min.unwrap(), - args.key_max.unwrap(), - )) + Some( + Uniform::new_inclusive(args.key_min.unwrap(), args.key_max.unwrap()) + .expect("valid range"), + ) } else { None }; @@ -723,7 +724,7 @@ async fn main() -> anyhow::Result<()> { n += 1; } scope.spawn(move |_| { - let mut rng = thread_rng(); + let mut rng = rand::rng(); for _ in 0..n { let i = counter.fetch_add(1, Ordering::Relaxed); if !args.quiet && i % 100_000 == 0 { diff --git a/src/orchestratord/Cargo.toml b/src/orchestratord/Cargo.toml index f981d3249dd5c..860cf52601f0e 100644 --- a/src/orchestratord/Cargo.toml +++ b/src/orchestratord/Cargo.toml @@ -33,7 +33,7 @@ mz-orchestrator-tracing = { path = "../orchestrator-tracing", default-features = mz-prof-http = { path = "../prof-http", default-features = false } mz-server-core = { path = "../server-core", default-features = false } prometheus = { version = "0.14.0", default-features = false } -rand = "0.8.5" +rand = "0.9.2" reqwest = { version = "0.12", features = ["cookies", "json"] } semver = "1.0.27" serde = { version = "1.0.219", features = ["derive"] } diff --git a/src/orchestratord/src/controller/materialize/environmentd.rs b/src/orchestratord/src/controller/materialize/environmentd.rs index 90681e3b06ddb..35c26d91f2774 100644 --- a/src/orchestratord/src/controller/materialize/environmentd.rs +++ b/src/orchestratord/src/controller/materialize/environmentd.rs @@ -38,7 +38,6 @@ use mz_server_core::listeners::{ AllowedRoles, AuthenticatorKind, BaseListenerConfig, HttpListenerConfig, HttpRoutesEnabled, ListenersConfig, 
SqlListenerConfig, }; -use rand::{Rng, thread_rng}; use reqwest::{Client as HttpClient, StatusCode}; use semver::{BuildMetadata, Prerelease, Version}; use serde::{Deserialize, Serialize}; @@ -223,7 +222,7 @@ impl Resources { trace!("creating new environmentd statefulset"); apply_resource(&statefulset_api, &*self.environmentd_statefulset).await?; - let retry_action = Action::requeue(Duration::from_secs(thread_rng().gen_range(5..10))); + let retry_action = Action::requeue(Duration::from_secs(rand::random_range(5..10))); let statefulset = get_resource( &statefulset_api, @@ -366,7 +365,7 @@ impl Resources { namespace: &str, ) -> Result, Error> { let service_api: Api = Api::namespaced(client.clone(), namespace); - let retry_action = Action::requeue(Duration::from_secs(thread_rng().gen_range(5..10))); + let retry_action = Action::requeue(Duration::from_secs(rand::random_range(5..10))); let promote_url = reqwest::Url::parse(&format!( "{}/api/leader/promote", diff --git a/src/ore/Cargo.toml b/src/ore/Cargo.toml index d01d0a31f1a83..32d470a0c25b7 100644 --- a/src/ore/Cargo.toml +++ b/src/ore/Cargo.toml @@ -46,7 +46,7 @@ pin-project = "1.1.10" prometheus = { version = "0.14.0", default-features = false, optional = true } proptest = { version = "1.7.0", default-features = false, features = ["std"], optional = true } proptest-derive = { version = "0.5.1", optional = true } -rand = { version = "0.8.5", optional = true } +rand = { version = "0.9.2", optional = true } smallvec = { version = "1.15.1", optional = true } stacker = { version = "0.1.22", optional = true } sentry = { version = "0.38.1", optional = true, default-features = false, features = ["backtrace", "contexts", "debug-images", "transport"] } diff --git a/src/ore/src/id_gen.rs b/src/ore/src/id_gen.rs index 5d8e22c429adc..cd0651e11546e 100644 --- a/src/ore/src/id_gen.rs +++ b/src/ore/src/id_gen.rs @@ -146,7 +146,7 @@ impl IdAllocatorInner for IdAllocatorInnerBitSet { let total = usize::cast_from(max - min); assert!(total < BitSet::BITS_PER_USIZE.pow(4)); IdAllocatorInnerBitSet { - next: StdRng::from_entropy(), + next: StdRng::from_os_rng(), min, max, mask, @@ -156,7 +156,7 @@ impl IdAllocatorInner for IdAllocatorInnerBitSet { fn alloc(&mut self) -> Option { let range = self.min..=self.max; - let init = self.next.gen_range(range); + let init = self.next.random_range(range); let mut next = init; loop { // Because hibitset has a hard maximum of 64**4 (~16 million), subtract the min in case diff --git a/src/persist/Cargo.toml b/src/persist/Cargo.toml index 4ef3b356ec08d..a62e0c28b7609 100644 --- a/src/persist/Cargo.toml +++ b/src/persist/Cargo.toml @@ -57,7 +57,7 @@ prometheus = { version = "0.14.0", default-features = false } proptest = { version = "1.7.0", default-features = false, features = ["std"] } proptest-derive = { version = "0.5.1", features = ["boxed_union"] } prost = { version = "0.13.5", features = ["no-recursion-limit"] } -rand = { version = "0.8.5", features = ["small_rng"] } +rand = { version = "0.9.2", features = ["small_rng"] } reqwest = { version = "0.12", features = ["blocking", "json", "default-tls", "charset", "http2"], default-features = false } serde = { version = "1.0.219", features = ["derive"] } serde_json = { version = "1.0.145", optional = true } diff --git a/src/persist/src/retry.rs b/src/persist/src/retry.rs index abee1953af6d3..017e521efc059 100644 --- a/src/persist/src/retry.rs +++ b/src/persist/src/retry.rs @@ -94,7 +94,7 @@ impl RetryStream { /// accidental mis-use. 
pub async fn sleep(mut self) -> Self { // Should the jitter be configurable? - let jitter = self.rng.gen_range(0.9..=1.1); + let jitter = self.rng.random_range(0.9..=1.1); let sleep = self.next_sleep().mul_f64(jitter); tokio::time::sleep(sleep).await; self.advance() diff --git a/src/persist/src/unreliable.rs b/src/persist/src/unreliable.rs index efa32fa0cceee..07f83002b37d9 100644 --- a/src/persist/src/unreliable.rs +++ b/src/persist/src/unreliable.rs @@ -90,13 +90,13 @@ impl UnreliableHandle { fn should_happen(&self) -> bool { let mut core = self.core.lock().expect("mutex poisoned"); let should_happen = core.should_happen; - core.rng.gen_bool(should_happen) + core.rng.random_bool(should_happen) } fn should_timeout(&self) -> bool { let mut core = self.core.lock().expect("mutex poisoned"); let should_timeout = core.should_timeout; - core.rng.gen_bool(should_timeout) + core.rng.random_bool(should_timeout) } async fn run_op(&self, name: &str, work_fn: WorkFn) -> Result diff --git a/src/repr/Cargo.toml b/src/repr/Cargo.toml index f406f3b31d62b..ceaa5c2a120fc 100644 --- a/src/repr/Cargo.toml +++ b/src/repr/Cargo.toml @@ -87,7 +87,7 @@ bincode = "1.3.3" criterion = { version = "0.6.0" } insta = { version = "1.43.2", features = ["json"] } mz-persist = { path = "../persist" } -rand = "0.8.5" +rand = "0.9.2" [build-dependencies] mz-build-tools = { path = "../build-tools", default-features = false, features = ["protobuf-src"] } diff --git a/src/repr/benches/row.rs b/src/repr/benches/row.rs index 500fe9efe2d11..e066797e6e1d0 100644 --- a/src/repr/benches/row.rs +++ b/src/repr/benches/row.rs @@ -22,7 +22,7 @@ use mz_persist_types::part::{Part, PartBuilder}; use mz_repr::adt::date::Date; use mz_repr::adt::numeric::Numeric; use mz_repr::{Datum, ProtoRow, RelationDesc, Row, SqlColumnType, SqlScalarType}; -use rand::distributions::{Alphanumeric, DistString}; +use rand::distr::{Alphanumeric, Distribution, SampleString, StandardUniform}; use rand::rngs::StdRng; use rand::{Rng, SeedableRng}; @@ -116,24 +116,24 @@ pub fn bench_sort(c: &mut Criterion) { let int_rows = (0..num_rows) .map(|_| { vec![ - Datum::Int32(rng.r#gen()), - Datum::Int32(rng.r#gen()), - Datum::Int32(rng.r#gen()), - Datum::Int32(rng.r#gen()), - Datum::Int32(rng.r#gen()), - Datum::Int32(rng.r#gen()), + Datum::Int32(rng.random()), + Datum::Int32(rng.random()), + Datum::Int32(rng.random()), + Datum::Int32(rng.random()), + Datum::Int32(rng.random()), + Datum::Int32(rng.random()), ] }) .collect::>(); let numeric_rows = (0..num_rows) .map(|_| { vec![ - Datum::Numeric(rng.r#gen::().into()), - Datum::Numeric(rng.r#gen::().into()), - Datum::Numeric(rng.r#gen::().into()), - Datum::Numeric(rng.r#gen::().into()), - Datum::Numeric(rng.r#gen::().into()), - Datum::Numeric(rng.r#gen::().into()), + Datum::Numeric(rng.random::().into()), + Datum::Numeric(rng.random::().into()), + Datum::Numeric(rng.random::().into()), + Datum::Numeric(rng.random::().into()), + Datum::Numeric(rng.random::().into()), + Datum::Numeric(rng.random::().into()), ] }) .collect::>(); @@ -141,7 +141,7 @@ pub fn bench_sort(c: &mut Criterion) { let mut rng = seeded_rng(); let byte_data = (0..num_rows) .map(|_| { - let i: i32 = rng.r#gen(); + let i: i32 = rng.random(); format!("{} and then {} and then {}", i, i + 1, i + 2).into_bytes() }) .collect::>(); @@ -198,12 +198,12 @@ pub fn bench_pack(c: &mut Criterion) { let int_rows = (0..num_rows) .map(|_| { vec![ - Datum::Int32(rng.r#gen()), - Datum::Int32(rng.r#gen()), - Datum::Int32(rng.r#gen()), - Datum::Int32(rng.r#gen()), - 
Datum::Int32(rng.r#gen()), - Datum::Int32(rng.r#gen()), + Datum::Int32(rng.random()), + Datum::Int32(rng.random()), + Datum::Int32(rng.random()), + Datum::Int32(rng.random()), + Datum::Int32(rng.random()), + Datum::Int32(rng.random()), ] }) .collect::>(); @@ -211,7 +211,7 @@ pub fn bench_pack(c: &mut Criterion) { let mut rng = seeded_rng(); let byte_data = (0..num_rows) .map(|_| { - let i: i32 = rng.r#gen(); + let i: i32 = rng.random(); format!("{} and then {} and then {}", i, i + 1, i + 2).into_bytes() }) .collect::>(); @@ -229,15 +229,15 @@ fn bench_filter(c: &mut Criterion) { let num_rows = 10_000; let mut rng = seeded_rng(); let mut random_date = - || Date::from_pg_epoch(rng.gen_range(Date::LOW_DAYS..=Date::HIGH_DAYS)).unwrap(); + || Date::from_pg_epoch(rng.random_range(Date::LOW_DAYS..=Date::HIGH_DAYS)).unwrap(); let mut rng = seeded_rng(); let date_rows = (0..num_rows) .map(|_| { vec![ Datum::Date(random_date()), - Datum::Int32(rng.r#gen()), - Datum::Int32(rng.r#gen()), - Datum::Int32(rng.r#gen()), + Datum::Int32(rng.random()), + Datum::Int32(rng.random()), + Datum::Int32(rng.random()), ] }) .collect::>(); @@ -280,19 +280,30 @@ fn encode_structured2(schema: &RelationDesc, rows: &[Row]) -> Part { builder.finish() } +fn random_option(rng: &mut StdRng) -> Option +where + StandardUniform: Distribution, +{ + if rng.random::() { + Some(rng.random()) + } else { + None + } +} + fn bench_roundtrip(c: &mut Criterion) { let num_rows = 50_000; let mut rng = seeded_rng(); let rows = (0..num_rows) .map(|_| { - let str_len = rng.gen_range(0..10); + let str_len = rng.random_range(0..10); Row::pack(vec![ - Datum::from(rng.r#gen::()), - Datum::from(rng.r#gen::>()), + Datum::from(rng.random::()), + Datum::from(random_option::(&mut rng)), Datum::from(Alphanumeric.sample_string(&mut rng, str_len).as_str()), Datum::from( Some(Alphanumeric.sample_string(&mut rng, str_len).as_str()) - .filter(|_| rng.r#gen::()), + .filter(|_| rng.random::()), ), ]) }) diff --git a/src/repr/benches/strconv.rs b/src/repr/benches/strconv.rs index 1e583c7ebf192..d98ef6c649a04 100644 --- a/src/repr/benches/strconv.rs +++ b/src/repr/benches/strconv.rs @@ -12,7 +12,7 @@ use std::hint::black_box; use criterion::{BenchmarkId, Criterion, criterion_group, criterion_main}; use mz_repr::strconv; use rand::rngs::StdRng; -use rand::seq::SliceRandom; +use rand::seq::IndexedRandom; use rand::{Rng, SeedableRng}; fn bench_parse_float32(c: &mut Criterion) { @@ -40,7 +40,7 @@ fn bench_parse_jsonb(c: &mut Criterion) { fn bench_format_list_simple(c: &mut Criterion) { let mut rng = StdRng::from_seed([0; 32]); - let list: Vec = (0..(1 << 12)).map(|_| rng.r#gen()).collect(); + let list: Vec = (0..(1 << 12)).map(|_| rng.random()).collect(); c.bench_function("format_list simple", |b| { b.iter(|| { let mut buf = String::new(); @@ -62,9 +62,9 @@ fn bench_format_list_nested(c: &mut Criterion) { ]; let list: Vec>> = (0..8) .map(|_| { - (0..rng.gen_range(0..16)) + (0..rng.random_range(0..16)) .map(|_| { - (1..rng.gen_range(0..16)) + (1..rng.random_range(0..16)) .map(|_| STRINGS.choose(&mut rng).unwrap()) .map(|s| (*s).to_owned()) .collect() diff --git a/src/service/Cargo.toml b/src/service/Cargo.toml index 1c3700294d71f..5782d15442925 100644 --- a/src/service/Cargo.toml +++ b/src/service/Cargo.toml @@ -38,7 +38,7 @@ workspace-hack = { version = "0.0.0", path = "../workspace-hack", optional = tru [dev-dependencies] mz-ore = { path = "../ore", features = ["turmoil"] } -rand = "0.8.5" +rand = "0.9.2" tracing-subscriber = "0.3.19" turmoil = "0.7.0" diff --git 
a/src/ssh-util/Cargo.toml b/src/ssh-util/Cargo.toml index 2b36da6bf22cb..a95e2fe1301a0 100644 --- a/src/ssh-util/Cargo.toml +++ b/src/ssh-util/Cargo.toml @@ -15,7 +15,7 @@ mz-ore = { path = "../ore", features = ["test"] } openssh = { version = "0.11.5", default-features = false, features = ["native-mux"] } openssh-mux-client = "0.17.9" openssl = { version = "0.10.75", features = ["vendored"] } -rand = "0.8.5" +rand = "0.9.2" futures = "0.3.31" itertools = "0.14.0" scopeguard = "1.2.0" diff --git a/src/ssh-util/src/tunnel.rs b/src/ssh-util/src/tunnel.rs index ea9f237d36b3c..9eef325a0a05f 100644 --- a/src/ssh-util/src/tunnel.rs +++ b/src/ssh-util/src/tunnel.rs @@ -292,8 +292,8 @@ async fn port_forward(session: &Session, host: &str, port: u16) -> Result SourceData { data } +fn random_option(rng: &mut StdRng) -> Option +where + StandardUniform: Distribution, +{ + if rng.random::() { + Some(rng.random()) + } else { + None + } +} + fn bench_roundtrip(c: &mut Criterion, name: &str, schema: &RelationDesc, data: &[SourceData]) { c.bench_function(&format!("roundtrip_{}_encode_legacy", name), |b| { b.iter(|| std::hint::black_box(encode_legacy(data))); @@ -74,8 +85,8 @@ fn benches_roundtrip(c: &mut Criterion) { let data = (0..num_rows) .map(|_| { let row = Row::pack(vec![ - Datum::from(rng.r#gen::()), - Datum::from(rng.r#gen::>()), + Datum::from(rng.random::()), + Datum::from(random_option::(&mut rng)), ]); SourceData(Ok(row)) }) @@ -102,12 +113,12 @@ fn benches_roundtrip(c: &mut Criterion) { ]); let data = (0..num_rows) .map(|_| { - let str_len = rng.gen_range(0..10); + let str_len = rng.random_range(0..10); let row = Row::pack(vec![ Datum::from(Alphanumeric.sample_string(&mut rng, str_len).as_bytes()), Datum::from( Some(Alphanumeric.sample_string(&mut rng, str_len).as_bytes()) - .filter(|_| rng.r#gen::()), + .filter(|_| rng.random::()), ), ]); SourceData(Ok(row)) @@ -135,12 +146,12 @@ fn benches_roundtrip(c: &mut Criterion) { ]); let data = (0..num_rows) .map(|_| { - let str_len = rng.gen_range(0..10); + let str_len = rng.random_range(0..10); let row = Row::pack(vec![ Datum::from(Alphanumeric.sample_string(&mut rng, str_len).as_str()), Datum::from( Some(Alphanumeric.sample_string(&mut rng, str_len).as_str()) - .filter(|_| rng.r#gen::()), + .filter(|_| rng.random::()), ), ]); SourceData(Ok(row)) diff --git a/src/storage/Cargo.toml b/src/storage/Cargo.toml index ada746d2a3e1b..f1cb469f98dfe 100644 --- a/src/storage/Cargo.toml +++ b/src/storage/Cargo.toml @@ -64,7 +64,7 @@ pin-project = "1.1.10" postgres-replication = { version = "0.6.7" } prometheus = { version = "0.14.0", default-features = false } prost = { version = "0.13.5", features = ["no-recursion-limit"] } -rand = "0.8.5" +rand = "0.9.2" rdkafka = { version = "0.29.0", features = [ "cmake-build", "ssl-vendored", diff --git a/src/storage/src/source/generator/auction.rs b/src/storage/src/source/generator/auction.rs index c99bb0b586cf6..9a723b130ce64 100644 --- a/src/storage/src/source/generator/auction.rs +++ b/src/storage/src/source/generator/auction.rs @@ -18,7 +18,7 @@ use mz_storage_types::sources::load_generator::{ }; use rand::SeedableRng; use rand::prelude::{Rng, SmallRng}; -use rand::seq::SliceRandom; +use rand::seq::IndexedRandom; /// CREATE TABLE organizations /// ( @@ -125,7 +125,7 @@ impl Generator for Auction { packer.push(Datum::Int64(counter)); // auction id let max_seller_id = i64::try_from(CELEBRETIES.len()) .expect("demo entries less than i64::MAX"); - packer.push(Datum::Int64(rng.gen_range(1..=max_seller_id))); // seller + 
packer.push(Datum::Int64(rng.random_range(1..=max_seller_id))); // seller packer.push(Datum::String(AUCTIONS.choose(&mut rng).unwrap())); // item packer.push(Datum::TimestampTz( (now + chrono::Duration::try_seconds(10).unwrap()) @@ -134,15 +134,15 @@ impl Generator for Auction { )); // end time pending.push_back((AuctionView::Auctions, auction)); const MAX_BIDS: i64 = 10; - for i in 0..rng.gen_range(2..MAX_BIDS) { + for i in 0..rng.random_range(2..MAX_BIDS) { let bid_id = Datum::Int64(counter * MAX_BIDS + i); let bid = { let mut bid = Row::with_capacity(5); let mut packer = bid.packer(); packer.push(bid_id); - packer.push(Datum::Int64(rng.gen_range(1..=max_seller_id))); // buyer + packer.push(Datum::Int64(rng.random_range(1..=max_seller_id))); // buyer packer.push(Datum::Int64(counter)); // auction id - packer.push(Datum::Int32(rng.gen_range(1..100))); // amount + packer.push(Datum::Int32(rng.random_range(1..100))); // amount packer.push(Datum::TimestampTz( (now + chrono::Duration::try_seconds(i) .expect("time must fit")) diff --git a/src/storage/src/source/generator/marketing.rs b/src/storage/src/source/generator/marketing.rs index a62c4d7585320..984c43276ea06 100644 --- a/src/storage/src/source/generator/marketing.rs +++ b/src/storage/src/source/generator/marketing.rs @@ -18,7 +18,9 @@ use mz_storage_types::sources::MzOffset; use mz_storage_types::sources::load_generator::{ Event, Generator, LoadGeneratorOutput, MarketingView, }; -use rand::{Rng, SeedableRng, distributions::Standard, rngs::SmallRng}; +use rand::distr::StandardUniform; +use rand::rngs::SmallRng; +use rand::{Rng, SeedableRng}; const CONTROL: &str = "control"; const EXPERIMENT: &str = "experiment"; @@ -49,7 +51,7 @@ impl Generator for Marketing { packer.push(Datum::Int64(id.try_into().unwrap())); packer.push(Datum::String(email)); - packer.push(Datum::Int64(rng.gen_range(5_000_000..10_000_000i64))); + packer.push(Datum::Int64(rng.random_range(5_000_000..10_000_000i64))); (MarketingView::Customers, customer, Diff::ONE) }) @@ -67,9 +69,9 @@ impl Generator for Marketing { packer.push(Datum::Int64(impression_id)); packer.push(Datum::Int64( - rng.gen_range(0..CUSTOMERS.len()).try_into().unwrap(), + rng.random_range(0..CUSTOMERS.len()).try_into().unwrap(), )); - packer.push(Datum::Int64(rng.gen_range(0..20i64))); + packer.push(Datum::Int64(rng.random_range(0..20i64))); let impression_time = now(); packer.push(Datum::TimestampTz( to_datetime(impression_time) @@ -81,11 +83,11 @@ impl Generator for Marketing { // 1 in 10 impressions have a click. Making us the // most successful marketing organization in the world. 
- if rng.gen_range(0..10) == 1 { + if rng.random_range(0..10) == 1 { let mut click = Row::with_capacity(2); let mut packer = click.packer(); - let click_time = impression_time + rng.gen_range(20000..40000); + let click_time = impression_time + rng.random_range(20000..40000); packer.push(Datum::Int64(impression_id)); packer.push(Datum::TimestampTz( @@ -101,13 +103,13 @@ impl Generator for Marketing { let mut updates = future_updates.retrieve(now()); pending.append(&mut updates); - for _ in 0..rng.gen_range(1..2) { + for _ in 0..rng.random_range(1..2) { let id = counter; counter += 1; let mut lead = Lead { id, - customer_id: rng.gen_range(0..CUSTOMERS.len()).try_into().unwrap(), + customer_id: rng.random_range(0..CUSTOMERS.len()).try_into().unwrap(), created_at: now(), converted_at: None, conversion_amount: None, @@ -117,7 +119,7 @@ impl Generator for Marketing { // a highly scientific statistical model // predicting the likelyhood of a conversion - let score = rng.sample::(Standard); + let score = rng.sample::(StandardUniform); let label = score > 0.5f64; let bucket = if lead.id % 10 <= 1 { @@ -141,7 +143,7 @@ impl Generator for Marketing { let mut sent_coupon = false; if !label && bucket == EXPERIMENT { sent_coupon = true; - let amount = rng.gen_range(500..5000); + let amount = rng.random_range(500..5000); let mut coupon = Row::with_capacity(4); let mut packer = coupon.packer(); @@ -161,13 +163,13 @@ impl Generator for Marketing { // Decide if a lead will convert. We assume our model is fairly // accurate and correlates with conversions. We also assume // that coupons make leads a little more liekly to convert. - let mut converted = rng.sample::(Standard) < score; + let mut converted = rng.sample::(StandardUniform) < score; if sent_coupon && !converted { - converted = rng.sample::(Standard) < score; + converted = rng.sample::(StandardUniform) < score; } if converted { - let converted_at = now() + rng.gen_range(1..30); + let converted_at = now() + rng.random_range(1..30); future_updates.insert( converted_at, @@ -175,7 +177,7 @@ impl Generator for Marketing { ); lead.converted_at = Some(converted_at); - lead.conversion_amount = Some(rng.gen_range(1000..25000)); + lead.conversion_amount = Some(rng.random_range(1000..25000)); future_updates.insert( converted_at, diff --git a/src/storage/src/source/generator/tpch.rs b/src/storage/src/source/generator/tpch.rs index 5936bca648711..c546bd0a90bd4 100644 --- a/src/storage/src/source/generator/tpch.rs +++ b/src/storage/src/source/generator/tpch.rs @@ -22,9 +22,9 @@ use mz_repr::adt::numeric::{self, DecimalLike, Numeric}; use mz_repr::{Datum, Diff, Row}; use mz_storage_types::sources::MzOffset; use mz_storage_types::sources::load_generator::{Event, Generator, LoadGeneratorOutput, TpchView}; -use rand::distributions::{Alphanumeric, DistString}; +use rand::distr::{Alphanumeric, SampleString}; use rand::rngs::StdRng; -use rand::seq::SliceRandom; +use rand::seq::IndexedRandom; use rand::{Rng, SeedableRng}; #[derive(Clone, Debug)] @@ -85,7 +85,7 @@ impl Generator for Tpch { let key_usize = usize::try_from(key).expect("key known to be non-negative"); let row = match output { TpchView::Supplier => { - let nation = rng.gen_range(0..count_nation); + let nation = rng.random_range(0..count_nation); row.packer().extend([ Datum::Int64(key), Datum::String(&pad_nine("Supplier", key)), @@ -104,11 +104,11 @@ impl Generator for Tpch { .cloned() .collect::>() .join(" "); - let m = rng.gen_range(1..=5); - let n = rng.gen_range(1..=5); + let m = rng.random_range(1..=5); + 
let n = rng.random_range(1..=5); for _ in 1..=4 { let suppkey = (key - + (rng.gen_range(0..=3) + + (rng.random_range(0..=3) * ((ctx.tpch.count_supplier / 4) + (key - 1) / ctx.tpch.count_supplier))) % ctx.tpch.count_supplier @@ -116,7 +116,7 @@ impl Generator for Tpch { row.packer().extend([ Datum::Int64(key), Datum::Int64(suppkey), - Datum::Int32(rng.gen_range(1..=9_999)), // availqty + Datum::Int32(rng.random_range(1..=9_999)), // availqty Datum::Numeric(decimal(&mut rng, &mut ctx.cx, 1_00, 1_000_00, 100)), // supplycost Datum::String(text_string( &mut rng, @@ -136,7 +136,7 @@ impl Generator for Tpch { Datum::String(&format!("Manufacturer#{m}")), Datum::String(&format!("Brand#{m}{n}")), Datum::String(&syllables(&mut rng, TYPES)), - Datum::Int32(rng.gen_range(1..=50)), // size + Datum::Int32(rng.random_range(1..=50)), // size Datum::String(&syllables(&mut rng, CONTAINERS)), Datum::Numeric(partkey_retailprice(key)), Datum::String(text_string(&mut rng, &ctx.text_string_source, 49, 198)), @@ -144,7 +144,7 @@ impl Generator for Tpch { row.clone() } TpchView::Customer => { - let nation = rng.gen_range(0..count_nation); + let nation = rng.random_range(0..count_nation); row.packer().extend([ Datum::Int64(key), Datum::String(&pad_nine("Customer", key)), @@ -158,7 +158,7 @@ impl Generator for Tpch { row.clone() } TpchView::Orders => { - let seed = rng.r#gen(); + let seed = rng.random(); let (order, lineitems) = ctx.order_row(seed, key); for row in lineitems { pending.push_back(( @@ -204,7 +204,7 @@ impl Generator for Tpch { if ctx.tpch.tick.is_zero() { return None; } - let idx = rng.gen_range(0..active_orders.len()); + let idx = rng.random_range(0..active_orders.len()); let (key, old_seed) = active_orders.swap_remove(idx); let (old_order, old_lineitems) = ctx.order_row(old_seed, key); // Fill pending with old lineitem retractions, new lineitem @@ -216,7 +216,7 @@ impl Generator for Tpch { Event::Message(MzOffset::from(offset), (row, Diff::MINUS_ONE)), )); } - let new_seed = rng.r#gen(); + let new_seed = rng.random(); let (new_order, new_lineitems) = ctx.order_row(new_seed, key); for row in new_lineitems { pending.push_back(( @@ -263,7 +263,7 @@ impl Context { let mut rng = StdRng::seed_from_u64(seed); let key = order_key(key); let custkey = loop { - let custkey = rng.gen_range(1..=self.tpch.count_customer); + let custkey = rng.random_range(1..=self.tpch.count_customer); if custkey % 3 != 0 { break custkey; } @@ -271,17 +271,17 @@ impl Context { let orderdate = date(&mut rng, &*START_DATE, 1..=*ORDER_END_DAYS); let mut totalprice = Numeric::lossy_from(0); let mut orderstatus = None; - let lineitem_count = rng.gen_range(1..=7); + let lineitem_count = rng.random_range(1..=7); let mut lineitems = Vec::with_capacity(lineitem_count); for linenumber in 1..=lineitem_count { - let partkey = rng.gen_range(1..=self.tpch.count_part); + let partkey = rng.random_range(1..=self.tpch.count_part); let suppkey = (partkey - + (rng.gen_range(0..=3) + + (rng.random_range(0..=3) * ((self.tpch.count_supplier / 4) + (partkey - 1) / self.tpch.count_supplier))) % self.tpch.count_supplier + 1; - let quantity = Numeric::from(rng.gen_range(1..=50)); + let quantity = Numeric::from(rng.random_range(1..=50)); let mut extendedprice = quantity; self.cx .mul(&mut extendedprice, &partkey_retailprice(partkey).0); @@ -337,7 +337,10 @@ impl Context { Datum::Numeric(OrderedDecimal(totalprice)), Datum::Date(orderdate), Datum::String(PRIORITIES.choose(&mut rng).unwrap()), - Datum::String(&pad_nine("Clerk", 
rng.gen_range(1..=self.tpch.count_clerk))), + Datum::String(&pad_nine( + "Clerk", + rng.random_range(1..=self.tpch.count_clerk), + )), Datum::Int32(0), // shippriority Datum::String(text_string(&mut rng, &self.text_string_source, 19, 78)), ]); @@ -369,13 +372,13 @@ fn text_string<'a, R: Rng + ?Sized>( min: usize, max: usize, ) -> &'a str { - let start = rng.gen_range(0..=(source.len() - max)); - let len = rng.gen_range(min..=max); + let start = rng.random_range(0..=(source.len() - max)); + let len = rng.random_range(min..=max); &source[start..(start + len)] } fn date(rng: &mut R, start: &Date, days: RangeInclusive) -> Date { - let days = rng.gen_range(days); + let days = rng.random_range(days); start.checked_add(days).expect("must fit") } @@ -411,7 +414,7 @@ fn decimal( max: i64, div: i64, ) -> OrderedDecimal { - let n = rng.gen_range(min..=max); + let n = rng.random_range(min..=max); let mut n = Numeric::lossy_from(n); cx.div(&mut n, &Numeric::lossy_from(div)); OrderedDecimal(n) @@ -421,11 +424,11 @@ fn phone(rng: &mut R, nation: i64) -> String { let mut s = String::with_capacity(15); s.push_str(&(nation + 10).to_string()); s.push('-'); - s.push_str(&rng.gen_range(100..=999).to_string()); + s.push_str(&rng.random_range(100..=999).to_string()); s.push('-'); - s.push_str(&rng.gen_range(100..=999).to_string()); + s.push_str(&rng.random_range(100..=999).to_string()); s.push('-'); - s.push_str(&rng.gen_range(1000..=9999).to_string()); + s.push_str(&rng.random_range(1000..=9999).to_string()); s } @@ -436,7 +439,7 @@ fn v_string(rng: &mut R, min: usize, max: usize) -> String { 'K', 'L', 'M', 'N', 'O', 'P', 'Q', 'R', 'S', 'T', 'U', 'V', 'W', 'X', 'Y', 'Z', '1', '2', '3', '4', '5', '6', '7', '8', '9', '0', ',', ' ', ]; - let take = rng.gen_range(min..=max); + let take = rng.random_range(min..=max); let mut s = String::with_capacity(take); for _ in 0..take { s.push(*ALPHABET.choose(rng).unwrap()); diff --git a/src/testdrive/Cargo.toml b/src/testdrive/Cargo.toml index 670ebef7c696d..fbaaef7dd7d92 100644 --- a/src/testdrive/Cargo.toml +++ b/src/testdrive/Cargo.toml @@ -59,7 +59,7 @@ postgres-protocol = { version = "0.6.5" } prost = { version = "0.13.5", features = ["no-recursion-limit"] } prost-reflect = { version = "0.15.3", features = ["serde"] } prost-types = { version = "0.13.5" } -rand = "0.8.5" +rand = "0.9.2" rdkafka = { version = "0.29.0", features = ["cmake-build", "ssl-vendored", "libz-static", "zstd"] } regex = "1.12.2" reqwest = { version = "0.11.13", features = ["native-tls-vendored"] } diff --git a/src/testdrive/src/action.rs b/src/testdrive/src/action.rs index 95d1048202d81..dc7b30d5153dd 100644 --- a/src/testdrive/src/action.rs +++ b/src/testdrive/src/action.rs @@ -39,7 +39,6 @@ use mz_persist_client::rpc::PubSubClientConnection; use mz_persist_client::{PersistClient, PersistLocation}; use mz_sql::catalog::EnvironmentId; use mz_tls_util::make_tls; -use rand::Rng; use rdkafka::ClientConfig; use rdkafka::producer::Producer; use regex::{Captures, Regex}; @@ -937,7 +936,7 @@ fn substitute_vars( pub async fn create_state( config: &Config, ) -> Result<(State, impl Future>), anyhow::Error> { - let seed = config.seed.unwrap_or_else(|| rand::thread_rng().r#gen()); + let seed = config.seed.unwrap_or_else(|| rand::random()); let (_tempfile, temp_path) = match &config.temp_dir { Some(temp_dir) => { diff --git a/src/testdrive/src/action/sleep.rs b/src/testdrive/src/action/sleep.rs index 102cef83d7f26..c9437a30a63d5 100644 --- a/src/testdrive/src/action/sleep.rs +++ 
b/src/testdrive/src/action/sleep.rs @@ -11,7 +11,6 @@ use std::thread; use std::time::Duration; use anyhow::Context; -use rand::Rng; use crate::action::ControlFlow; use crate::parser::BuiltinCommand; @@ -29,8 +28,7 @@ fn run_sleep_inner(mut cmd: BuiltinCommand, random: bool) -> Result Self { // Should the jitter be configurable? - let jitter = self.rng.gen_range(0.9..=1.1); + let jitter = self.rng.random_range(0.9..=1.1); let sleep = self.backoff.mul_f64(jitter); tokio::time::sleep(sleep).await; self.attempt += 1; diff --git a/src/txn-wal/Cargo.toml b/src/txn-wal/Cargo.toml index 654434a19ce1f..a552b6109bb55 100644 --- a/src/txn-wal/Cargo.toml +++ b/src/txn-wal/Cargo.toml @@ -31,7 +31,7 @@ workspace-hack = { version = "0.0.0", path = "../workspace-hack", optional = tru [dev-dependencies] crossbeam-channel = "0.5.15" -rand = { version = "0.8.5", default-features = false, features = ["small_rng"] } +rand = { version = "0.9.2", default-features = false, features = ["small_rng"] } [build-dependencies] mz-build-tools = { path = "../build-tools", default-features = false, features = ["protobuf-src"] } diff --git a/src/workspace-hack/Cargo.toml b/src/workspace-hack/Cargo.toml index 8cdf724f5f4c4..a1effd8dd11a5 100644 --- a/src/workspace-hack/Cargo.toml +++ b/src/workspace-hack/Cargo.toml @@ -114,8 +114,7 @@ prost-types = { version = "0.13.5" } quote = { version = "1.0.42" } rand-274715c4dabd11b0 = { package = "rand", version = "0.9.2" } rand-c38e5c1d305a1b54 = { package = "rand", version = "0.8.5", features = ["small_rng"] } -rand_chacha-274715c4dabd11b0 = { package = "rand_chacha", version = "0.9.0", default-features = false, features = ["std"] } -rand_chacha-468e82937335b1c9 = { package = "rand_chacha", version = "0.3.1" } +rand_chacha = { version = "0.9.0", default-features = false, features = ["std"] } rand_core = { version = "0.6.4", default-features = false, features = ["std"] } rdkafka-sys = { git = "https://github.com/MaterializeInc/rust-rdkafka.git", features = ["cmake-build", "libz-static", "ssl-vendored", "zstd"] } regex = { version = "1.12.2" } @@ -262,8 +261,7 @@ prost-types = { version = "0.13.5" } quote = { version = "1.0.42" } rand-274715c4dabd11b0 = { package = "rand", version = "0.9.2" } rand-c38e5c1d305a1b54 = { package = "rand", version = "0.8.5", features = ["small_rng"] } -rand_chacha-274715c4dabd11b0 = { package = "rand_chacha", version = "0.9.0", default-features = false, features = ["std"] } -rand_chacha-468e82937335b1c9 = { package = "rand_chacha", version = "0.3.1" } +rand_chacha = { version = "0.9.0", default-features = false, features = ["std"] } rand_core = { version = "0.6.4", default-features = false, features = ["std"] } rdkafka-sys = { git = "https://github.com/MaterializeInc/rust-rdkafka.git", features = ["cmake-build", "libz-static", "ssl-vendored", "zstd"] } regex = { version = "1.12.2" } diff --git a/test/test-util/Cargo.toml b/test/test-util/Cargo.toml index f35c54c630d1c..b7535b95052c7 100644 --- a/test/test-util/Cargo.toml +++ b/test/test-util/Cargo.toml @@ -14,7 +14,7 @@ anyhow = "1.0.100" chrono = { version = "0.4.39", default-features = false, features = ["std"] } mz-kafka-util = { path = "../../src/kafka-util" } mz-ore = { path = "../../src/ore", features = ["async"] } -rand = "0.8.5" +rand = "0.9.2" rdkafka = { version = "0.29.0", features = ["cmake-build", "ssl-vendored", "libz-static", "zstd"] } tokio = "1.44.1" tokio-postgres = { version = "0.7.8" } diff --git a/test/test-util/src/generator/bytes.rs b/test/test-util/src/generator/bytes.rs index 
60e7324ebbe1e..d494bb69e6dfd 100644 --- a/test/test-util/src/generator/bytes.rs +++ b/test/test-util/src/generator/bytes.rs @@ -8,11 +8,11 @@ // by the Apache License, Version 2.0. use rand::Rng; -use rand::distributions::Alphanumeric; +use rand::distr::Alphanumeric; /// Generates and returns bytes of length `len`. pub fn generate_bytes(len: usize) -> Vec { - rand::thread_rng() + rand::rng() .sample_iter(&Alphanumeric) .take(len) .collect::>()
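A pattern that recurs across this patch (kgen.rs, window_functions.rs, materialize.rs) is the `.expect("valid range")` added after `Uniform::new` and `Uniform::new_inclusive`: in rand 0.9 these constructors return a `Result` instead of panicking on a bad range. A small illustrative sketch, with the `random_digits` helper invented for the example:

use rand::distr::{Distribution, Uniform};

// Build a string of `n` random decimal digits using the fallible Uniform constructor.
fn random_digits(n: usize) -> String {
    let digit = Uniform::new_inclusive(0u32, 9).expect("valid range");
    let mut rng = rand::rng();
    (0..n)
        .map(|_| char::from_digit(digit.sample(&mut rng), 10).expect("0..=9 is a digit"))
        .collect()
}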