42 changes: 20 additions & 22 deletions Cargo.lock

Some generated files are not rendered by default.

3 changes: 1 addition & 2 deletions src/adapter/Cargo.toml
@@ -77,8 +77,7 @@ opentelemetry = { version = "0.24.0", features = ["trace"] }
prometheus = { version = "0.14.0", default-features = false }
prost = { version = "0.13.5", features = ["no-recursion-limit"] }
qcell = "0.5"
rand = "0.8.5"
rand_chacha = "0.3"
rand = "0.9.2"
semver = "1.0.27"
serde = "1.0.219"
serde_json = "1.0.145"
2 changes: 1 addition & 1 deletion src/adapter/src/coord/message_handler.rs
@@ -314,7 +314,7 @@ impl Coordinator {
EpochMillis::try_from(self.storage_usage_collection_interval.as_millis())
.expect("storage usage collection interval must fit into u64");
let offset =
-rngs::SmallRng::from_seed(seed).gen_range(0..storage_usage_collection_interval_ms);
+rngs::SmallRng::from_seed(seed).random_range(0..storage_usage_collection_interval_ms);
let now_ts: EpochMillis = self.peek_local_write_ts().await.into();

// 2) Determine the amount of ms between now and the next collection time.
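rand 0.9 renames `Rng::gen_range` to `random_range` (the `gen`-prefixed names collide with the `gen` keyword reserved in Rust 2024); the half-open range semantics are unchanged. A minimal sketch of the seeded-offset pattern above, using a hypothetical `u64` seed rather than the coordinator's derived seed bytes:

```rust
use rand::rngs::SmallRng;
use rand::{Rng, SeedableRng};

/// Pick a deterministic offset in `0..interval_ms` from a fixed seed.
fn collection_offset(seed: u64, interval_ms: u64) -> u64 {
    // rand 0.9: `gen_range` is now `random_range`; bounds behave the same.
    SmallRng::seed_from_u64(seed).random_range(0..interval_ms)
}
```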
11 changes: 6 additions & 5 deletions src/adapter/src/coord/statement_logging.rs
@@ -26,7 +26,8 @@ use mz_sql_parser::ast::{StatementKind, statement_kind_label_value};
use mz_storage_client::controller::IntrospectionType;
use qcell::QCell;
use rand::SeedableRng;
-use rand::{distributions::Bernoulli, prelude::Distribution, thread_rng};
+use rand::distr::{Bernoulli, Distribution};
+use rand::rngs::SmallRng;
use sha2::{Digest, Sha256};
use tokio::time::MissedTickBehavior;
use tracing::debug;
@@ -138,9 +139,9 @@ pub(crate) struct StatementLogging {
unlogged_sessions: BTreeMap<Uuid, SessionHistoryEvent>,

/// A reproducible RNG for deciding whether to sample statement executions.
-/// Only used by tests; otherwise, `rand::thread_rng()` is used.
+/// Only used by tests; otherwise, `rand::rng()` is used.
/// Controlled by the system var `statement_logging_use_reproducible_rng`.
-reproducible_rng: rand_chacha::ChaCha8Rng,
+reproducible_rng: SmallRng,

pending_statement_execution_events: Vec<(Row, Diff)>,
pending_prepared_statement_events: Vec<PreparedStatementEvent>,
@@ -165,7 +166,7 @@ impl StatementLogging {
Self {
executions_begun: BTreeMap::new(),
unlogged_sessions: BTreeMap::new(),
-reproducible_rng: rand_chacha::ChaCha8Rng::seed_from_u64(42),
+reproducible_rng: SmallRng::seed_from_u64(42),
pending_statement_execution_events: Vec::new(),
pending_prepared_statement_events: Vec::new(),
pending_session_events: Vec::new(),
@@ -697,7 +698,7 @@ impl Coordinator {
{
distribution.sample(&mut self.statement_logging.reproducible_rng)
} else {
distribution.sample(&mut thread_rng())
distribution.sample(&mut rand::rng())
};

// Track how many statements we're recording.
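This hunk combines several rand 0.9 renames: the `distributions` module is now `distr`, `thread_rng()` is now `rand::rng()`, and the test-only reproducible generator switches from `rand_chacha::ChaCha8Rng` to `rand::rngs::SmallRng`, dropping the separate `rand_chacha` dependency. A rough sketch of the sampling decision, with a placeholder rate and a fresh RNG per call (the coordinator keeps one RNG in `StatementLogging` instead):

```rust
use rand::SeedableRng;
use rand::distr::{Bernoulli, Distribution};
use rand::rngs::SmallRng;

/// Decide whether to sample a statement execution at the given rate.
fn should_sample(rate: f64, reproducible: bool) -> bool {
    // `Bernoulli::new` rejects rates outside [0, 1].
    let coin = Bernoulli::new(rate).expect("rate must be within [0, 1]");
    if reproducible {
        // Fixed seed keeps test runs deterministic, mirroring the struct above.
        coin.sample(&mut SmallRng::seed_from_u64(42))
    } else {
        // rand 0.9: `rand::rng()` replaces `rand::thread_rng()`.
        coin.sample(&mut rand::rng())
    }
}
```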
3 changes: 1 addition & 2 deletions src/adapter/src/session.rs
@@ -47,7 +47,6 @@ use mz_sql_parser::ast::TransactionIsolationLevel;
use mz_storage_client::client::TableData;
use mz_storage_types::sources::Timeline;
use qcell::{QCell, QCellOwner};
-use rand::Rng;
use tokio::sync::mpsc::{self, UnboundedReceiver, UnboundedSender};
use tokio::sync::watch;
use uuid::Uuid;
@@ -346,7 +345,7 @@ impl<T: TimestampManipulation> Session<T> {
notices_tx,
notices_rx,
next_transaction_id: 0,
-secret_key: rand::thread_rng().r#gen(),
+secret_key: rand::random(),
external_metadata_rx,
qcell_owner: QCellOwner::new(),
session_oracles: BTreeMap::new(),
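With `rand::random()`, the session no longer needs the `Rng` trait in scope: the free function draws from the thread-local generator directly, replacing `rand::thread_rng().r#gen()` (the raw identifier was only needed because `gen` is reserved in Rust 2024). A sketch with a hypothetical `u32` key type:

```rust
/// Generate a fresh connection secret; the `u32` type here is only illustrative.
fn new_secret_key() -> u32 {
    // rand 0.9: `rand::random()` uses the thread-local RNG, so no explicit
    // `thread_rng()` handle or `Rng` import is required.
    rand::random()
}
```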
2 changes: 1 addition & 1 deletion src/avro/Cargo.toml
@@ -24,7 +24,7 @@ enum-kinds = "0.5.1"
flate2 = "1.1.5"
itertools = "0.14.0"
mz-ore = { path = "../ore", features = ["test"] }
rand = "0.8.5"
rand = "0.9.2"
regex = "1.12.2"
serde = { version = "1.0.219", features = ["derive"] }
serde_json = "1.0.145"
2 changes: 1 addition & 1 deletion src/catalog/Cargo.toml
@@ -56,7 +56,7 @@ prometheus = { version = "0.14.0", default-features = false }
proptest = { version = "1.7.0", default-features = false, features = ["std"] }
proptest-derive = { version = "0.5.1", features = ["boxed_union"] }
prost = "0.13.5"
rand = "0.8.5"
rand = "0.9.2"
semver = { version = "1.0.27" }
serde = "1.0.219"
serde_json = "1.0.145"
2 changes: 1 addition & 1 deletion src/cloud-resources/Cargo.toml
@@ -17,7 +17,7 @@ k8s-openapi = { version = "0.26.0", features = ["schemars", "v1_31"] }
kube = { version = "2.0.1", default-features = false, features = ["client", "derive", "openssl-tls", "ws", "runtime"] }
mz-ore = { path = "../ore", default-features = false, features = ["async"] }
mz-server-core = { path = "../server-core", default-features = false }
rand = "0.8.5"
rand = "0.9.2"
schemars = { version = "1.0.4", features = ["uuid1"] }
semver = "1.0.27"
serde = "1.0.219"
6 changes: 3 additions & 3 deletions src/cloud-resources/src/crd/materialize.rs
@@ -18,7 +18,7 @@ use k8s_openapi::{
};
use kube::{CustomResource, Resource, ResourceExt, api::ObjectMeta};
use rand::Rng;
-use rand::distributions::Uniform;
+use rand::distr::Uniform;
use schemars::JsonSchema;
use semver::Version;
use serde::{Deserialize, Serialize};
@@ -491,8 +491,8 @@ pub mod v1alpha1 {
// built-in Alphanumeric distribution from rand, which
// includes both upper and lowercase letters.
const CHARSET: &[u8] = b"abcdefghijklmnopqrstuvwxyz0123456789";
-status.resource_id = rand::thread_rng()
-.sample_iter(Uniform::new(0, CHARSET.len()))
+status.resource_id = rand::rng()
+.sample_iter(Uniform::new(0, CHARSET.len()).expect("valid range"))
.take(10)
.map(|i| char::from(CHARSET[i]))
.collect();
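Besides the `rng()` rename, rand 0.9 makes `Uniform::new` fallible (it validates the bounds and returns a `Result`), which is why the `.expect("valid range")` appears above. A self-contained sketch of the resource-ID generation, mirroring the charset and length from the code above:

```rust
use rand::Rng;
use rand::distr::Uniform;

/// Generate a 10-character lowercase alphanumeric resource ID.
fn random_resource_id() -> String {
    const CHARSET: &[u8] = b"abcdefghijklmnopqrstuvwxyz0123456789";
    // rand 0.9: `Uniform::new` returns `Err` for an invalid range instead of panicking.
    let dist = Uniform::new(0, CHARSET.len()).expect("valid range");
    rand::rng()
        .sample_iter(dist)
        .take(10)
        .map(|i| char::from(CHARSET[i]))
        .collect()
}
```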
2 changes: 1 addition & 1 deletion src/cluster/Cargo.toml
@@ -19,7 +19,7 @@ lgalloc = "0.6.0"
mz-cluster-client = { path = "../cluster-client" }
mz-ore = { path = "../ore", features = ["async", "process", "tracing"] }
mz-service = { path = "../service" }
rand = "0.8.5"
rand = "0.9.2"
regex = "1.12.2"
timely = "0.25.1"
tokio = { version = "1.44.1", features = ["fs", "rt", "sync", "net"] }
6 changes: 3 additions & 3 deletions src/cluster/src/communication.rs
@@ -537,7 +537,7 @@ mod turmoil_tests {

let mut sim = turmoil::Builder::new()
.enable_random_order()
-.rng_seed(rng.r#gen())
+.rng_seed(rng.random())
.build();

let processes: Vec<_> = (0..NUM_PROCESSES).map(|i| format!("process-{i}")).collect();
@@ -618,12 +618,12 @@

// Let random processes crash at random times.
for _ in 0..NUM_CRASHES {
-let steps = rng.gen_range(1..100);
+let steps = rng.random_range(1..100);
for _ in 0..steps {
sim.step().unwrap();
}

-let i = rng.gen_range(0..NUM_PROCESSES);
+let i = rng.random_range(0..NUM_PROCESSES);
info!("bouncing process {i}");
sim.bounce(format!("process-{i}"));
}
2 changes: 1 addition & 1 deletion src/environmentd/Cargo.toml
@@ -94,7 +94,7 @@ rdkafka-sys = { version = "4.3.0", features = [
"libz-static",
"zstd",
] }
rand = "0.8.5"
rand = "0.9.2"
regex = { version = "1.12.2", optional = true }
reqwest = { version = "0.11.13", features = ["json"] }
rlimit = "0.10.2"
2 changes: 1 addition & 1 deletion src/environmentd/tests/server.rs
@@ -2970,7 +2970,7 @@ fn test_invalid_webhook_body() {

// No matter what is in the body, we should always succeed.
let mut data = [0u8; 128];
-rand::thread_rng().fill_bytes(&mut data);
+rand::rng().fill_bytes(&mut data);
println!("Random bytes: {data:?}");
let resp = http_client
.post(webhook_url)
2 changes: 1 addition & 1 deletion src/expr/Cargo.toml
@@ -77,7 +77,7 @@ insta = "1.43"
mz-expr-derive-impl = { path = "../expr-derive-impl", features = ["test"] }
mz-expr-test-util = { path = "../expr-test-util" }
mz-ore = { path = "../ore" }
rand = "0.8.5"
rand = "0.9.2"

[build-dependencies]
mz-build-tools = { path = "../build-tools", default-features = false, features = ["protobuf-src"] }
6 changes: 3 additions & 3 deletions src/expr/benches/window_functions.rs
@@ -14,7 +14,7 @@ use criterion::{Criterion, criterion_group, criterion_main};
use mz_expr::ColumnOrder;
use mz_repr::adt::timestamp::CheckedTimestamp;
use mz_repr::{Datum, RowArena};
-use rand::distributions::{Distribution, Uniform};
+use rand::distr::{Distribution, Uniform};

/// Microbenchmark to test an important part of window function evaluation.
///
@@ -53,7 +53,7 @@ fn order_aggregate_datums_benchmark(c: &mut Criterion) {
let scale = 1000000;

group.bench_function("order_aggregate_datums", |b| {
-let mut rng = rand::thread_rng();
+let mut rng = rand::rng();
let temp_storage = RowArena::new();

let order_by = vec![ColumnOrder {
@@ -70,7 +70,7 @@
// ),
// <order_by_col_1>,
// )
-let distr = Uniform::new(0, 1000000000);
+let distr = Uniform::new(0, 1000000000).expect("valid range");
let mut datums = Vec::with_capacity(scale);
for _i in 0..scale {
datums.push(temp_storage.make_datum(|packer| {