Merged
Changes from all commits (18 commits)
9c8b107
Support async trait bounds in macros
compiler-errors Feb 20, 2024
5fb67e2
some type system cleanup
lcnr Feb 20, 2024
ae3f4f1
bootstrap: apply most of clippy's suggestions
GrigorenkoPV Feb 20, 2024
8f20a54
Merge `diagnostic_builder.rs` into `diagnostic.rs`.
nnethercote Feb 20, 2024
1407057
Remove some no-longer-needed `pub(crate)` markers.
nnethercote Feb 20, 2024
09ca866
Remove an `unchecked_error_guaranteed` call.
nnethercote Feb 21, 2024
780beda
Tweak block management
Nadrieril Feb 18, 2024
c1514a6
Test one or pattern at a time
Nadrieril Feb 18, 2024
9eabdc2
make it possible for outside crates to inspect a mir::ConstValue with…
RalfJung Feb 21, 2024
a2aa967
compiletest: support auxiliaries with auxiliaries
davidtwco Jan 31, 2024
082b97a
Rollup merge of #121044 - compiler-errors:mbe-async-trait-bounds, r=f…
fmease Feb 21, 2024
4daa43a
Rollup merge of #121175 - Nadrieril:simplify-or-selection, r=matthewj…
fmease Feb 21, 2024
216f9a4
Rollup merge of #121340 - GrigorenkoPV:bootstrap-clippy, r=onur-ozkan
fmease Feb 21, 2024
2d98f05
Rollup merge of #121347 - davidtwco:compiletest-aux-aux, r=oli-obk
fmease Feb 21, 2024
8d27fc8
Rollup merge of #121359 - lcnr:typesystem-cleanup, r=compiler-errors
fmease Feb 21, 2024
ef14c17
Rollup merge of #121366 - nnethercote:rm-diagnostic_builder.rs, r=com…
fmease Feb 21, 2024
ae01e99
Rollup merge of #121379 - nnethercote:rm-unchecked_error_guaranteed, …
fmease Feb 21, 2024
bd7ba27
Rollup merge of #121396 - RalfJung:mir-const-value-inspect, r=oli-obk
fmease Feb 21, 2024
7 changes: 4 additions & 3 deletions compiler/rustc_ast_passes/src/ast_validation.rs
@@ -881,9 +881,10 @@ impl<'a> Visitor<'a> for AstValidator<'a> {
&item.vis,
errors::VisibilityNotPermittedNote::TraitImpl,
);
// njn: use Dummy here
if let TyKind::Err(_) = self_ty.kind {
this.dcx().emit_err(errors::ObsoleteAuto { span: item.span });
if let TyKind::Dummy = self_ty.kind {
// Abort immediately otherwise the `TyKind::Dummy` will reach HIR lowering,
// which isn't allowed. Not a problem for this obscure, obsolete syntax.
this.dcx().emit_fatal(errors::ObsoleteAuto { span: item.span });
}
if let (&Unsafe::Yes(span), &ImplPolarity::Negative(sp)) = (unsafety, polarity)
{
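For context, a minimal compile-fail sketch of the obsolete `impl Trait for ..` syntax that reaches this branch (the trait name `MyAuto` is made up for illustration). The parser change further down in this PR turns the `..` into `TyKind::Dummy`, and the code above now aborts with a fatal `ObsoleteAuto` error instead of letting the dummy type reach HIR lowering:

trait MyAuto {}

// error (ObsoleteAuto): this obsolete default-impl syntax is rejected outright
impl MyAuto for .. {}

fn main() {}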
25 changes: 21 additions & 4 deletions compiler/rustc_const_eval/src/const_eval/eval_queries.rs
@@ -3,10 +3,11 @@ use either::{Left, Right};
use rustc_hir::def::DefKind;
use rustc_middle::mir::interpret::{AllocId, ErrorHandled, InterpErrorInfo};
use rustc_middle::mir::{self, ConstAlloc, ConstValue};
use rustc_middle::query::TyCtxtAt;
use rustc_middle::traits::Reveal;
use rustc_middle::ty::layout::LayoutOf;
use rustc_middle::ty::print::with_no_trimmed_paths;
use rustc_middle::ty::{self, TyCtxt};
use rustc_middle::ty::{self, Ty, TyCtxt};
use rustc_span::def_id::LocalDefId;
use rustc_span::Span;
use rustc_target::abi::{self, Abi};
@@ -87,13 +88,16 @@ fn eval_body_using_ecx<'mir, 'tcx>(
}

/// The `InterpCx` is only meant to be used to do field and index projections into constants for
/// `simd_shuffle` and const patterns in match arms. It never performs alignment checks.
/// `simd_shuffle` and const patterns in match arms.
///
/// This should *not* be used to do any actual interpretation. In particular, alignment checks are
/// turned off!
///
/// The function containing the `match` that is currently being analyzed may have generic bounds
/// that inform us about the generic bounds of the constant. E.g., using an associated constant
/// of a function's generic parameter will require knowledge about the bounds on the generic
/// parameter. These bounds are passed to `mk_eval_cx` via the `ParamEnv` argument.
pub(crate) fn mk_eval_cx<'mir, 'tcx>(
pub(crate) fn mk_eval_cx_to_read_const_val<'mir, 'tcx>(
tcx: TyCtxt<'tcx>,
root_span: Span,
param_env: ty::ParamEnv<'tcx>,
@@ -108,6 +112,19 @@ pub(crate) fn mk_eval_cx<'mir, 'tcx>(
)
}

/// Create an interpreter context to inspect the given `ConstValue`.
/// Returns both the context and an `OpTy` that represents the constant.
pub fn mk_eval_cx_for_const_val<'mir, 'tcx>(
tcx: TyCtxtAt<'tcx>,
param_env: ty::ParamEnv<'tcx>,
val: mir::ConstValue<'tcx>,
ty: Ty<'tcx>,
) -> Option<(CompileTimeEvalContext<'mir, 'tcx>, OpTy<'tcx>)> {
let ecx = mk_eval_cx_to_read_const_val(tcx.tcx, tcx.span, param_env, CanAccessMutGlobal::No);
let op = ecx.const_val_to_op(val, ty, None).ok()?;
Some((ecx, op))
}

/// This function converts an interpreter value into a MIR constant.
///
/// The `for_diagnostics` flag turns the usual rules for returning `ConstValue::Scalar` into a
@@ -203,7 +220,7 @@ pub(crate) fn turn_into_const_value<'tcx>(
let def_id = cid.instance.def.def_id();
let is_static = tcx.is_static(def_id);
// This is just accessing an already computed constant, so no need to check alignment here.
let ecx = mk_eval_cx(
let ecx = mk_eval_cx_to_read_const_val(
tcx,
tcx.def_span(key.value.instance.def_id()),
key.param_env,
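A hedged usage sketch of the new public `mk_eval_cx_for_const_val`, mirroring the call in `try_destructure_mir_constant_for_user_output` below; the wrapper name `inspect_first_field` and the `project_field` call are illustrative assumptions about the interpreter API rather than part of this diff:

fn inspect_first_field<'tcx>(
    tcx: TyCtxtAt<'tcx>,
    val: mir::ConstValue<'tcx>,
    ty: Ty<'tcx>,
) -> Option<()> {
    let param_env = ty::ParamEnv::reveal_all();
    // Build a read-only interpreter context plus an `OpTy` for the constant.
    let (ecx, op) = mk_eval_cx_for_const_val(tcx, param_env, val, ty)?;
    // Field/index projections are fine; actual interpretation (and alignment checks) is not.
    let _field0 = ecx.project_field(&op, 0).ok()?;
    Some(())
}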
3 changes: 1 addition & 2 deletions compiler/rustc_const_eval/src/const_eval/mod.rs
@@ -47,8 +47,7 @@ pub(crate) fn try_destructure_mir_constant_for_user_output<'tcx>(
ty: Ty<'tcx>,
) -> Option<mir::DestructuredConstant<'tcx>> {
let param_env = ty::ParamEnv::reveal_all();
let ecx = mk_eval_cx(tcx.tcx, tcx.span, param_env, CanAccessMutGlobal::No);
let op = ecx.const_val_to_op(val, ty, None).ok()?;
let (ecx, op) = mk_eval_cx_for_const_val(tcx, param_env, val, ty)?;

// We go to `usize` as we cannot allocate anything bigger anyway.
let (field_count, variant, down) = match ty.kind() {
10 changes: 6 additions & 4 deletions compiler/rustc_const_eval/src/const_eval/valtrees.rs
@@ -5,7 +5,7 @@ use rustc_middle::ty::{self, ScalarInt, Ty, TyCtxt};
use rustc_span::DUMMY_SP;
use rustc_target::abi::{Abi, VariantIdx};

use super::eval_queries::{mk_eval_cx, op_to_const};
use super::eval_queries::{mk_eval_cx_to_read_const_val, op_to_const};
use super::machine::CompileTimeEvalContext;
use super::{ValTreeCreationError, ValTreeCreationResult, VALTREE_MAX_NODES};
use crate::const_eval::CanAccessMutGlobal;
@@ -223,7 +223,7 @@ pub(crate) fn eval_to_valtree<'tcx>(
let const_alloc = tcx.eval_to_allocation_raw(param_env.and(cid))?;

// FIXME Need to provide a span to `eval_to_valtree`
let ecx = mk_eval_cx(
let ecx = mk_eval_cx_to_read_const_val(
tcx,
DUMMY_SP,
param_env,
@@ -287,7 +287,8 @@ pub fn valtree_to_const_value<'tcx>(
}
}
ty::Ref(_, inner_ty, _) => {
let mut ecx = mk_eval_cx(tcx, DUMMY_SP, param_env, CanAccessMutGlobal::No);
let mut ecx =
mk_eval_cx_to_read_const_val(tcx, DUMMY_SP, param_env, CanAccessMutGlobal::No);
let imm = valtree_to_ref(&mut ecx, valtree, *inner_ty);
let imm = ImmTy::from_immediate(imm, tcx.layout_of(param_env_ty).unwrap());
op_to_const(&ecx, &imm.into(), /* for diagnostics */ false)
@@ -314,7 +315,8 @@ pub fn valtree_to_const_value<'tcx>(
bug!("could not find non-ZST field during in {layout:#?}");
}

let mut ecx = mk_eval_cx(tcx, DUMMY_SP, param_env, CanAccessMutGlobal::No);
let mut ecx =
mk_eval_cx_to_read_const_val(tcx, DUMMY_SP, param_env, CanAccessMutGlobal::No);

// Need to create a place for this valtree.
let place = create_valtree_place(&mut ecx, layout, valtree);
9 changes: 7 additions & 2 deletions compiler/rustc_const_eval/src/util/caller_location.rs
@@ -6,7 +6,7 @@ use rustc_middle::ty::layout::LayoutOf;
use rustc_span::symbol::Symbol;
use rustc_type_ir::Mutability;

use crate::const_eval::{mk_eval_cx, CanAccessMutGlobal, CompileTimeEvalContext};
use crate::const_eval::{mk_eval_cx_to_read_const_val, CanAccessMutGlobal, CompileTimeEvalContext};
use crate::interpret::*;

/// Allocate a `const core::panic::Location` with the provided filename and line/column numbers.
@@ -57,7 +57,12 @@ pub(crate) fn const_caller_location_provider(
col: u32,
) -> mir::ConstValue<'_> {
trace!("const_caller_location: {}:{}:{}", file, line, col);
let mut ecx = mk_eval_cx(tcx.tcx, tcx.span, ty::ParamEnv::reveal_all(), CanAccessMutGlobal::No);
let mut ecx = mk_eval_cx_to_read_const_val(
tcx.tcx,
tcx.span,
ty::ParamEnv::reveal_all(),
CanAccessMutGlobal::No,
);

let loc_place = alloc_caller_location(&mut ecx, file, line, col);
if intern_const_alloc_recursive(&mut ecx, InternKind::Constant, &loc_place).is_err() {
424 changes: 344 additions & 80 deletions compiler/rustc_errors/src/diagnostic.rs

Large diffs are not rendered by default.

282 changes: 0 additions & 282 deletions compiler/rustc_errors/src/diagnostic_builder.rs

This file was deleted.

10 changes: 3 additions & 7 deletions compiler/rustc_errors/src/lib.rs
@@ -37,12 +37,9 @@ extern crate self as rustc_errors;

pub use codes::*;
pub use diagnostic::{
AddToDiagnostic, DecorateLint, Diagnostic, DiagnosticArg, DiagnosticArgName,
DiagnosticArgValue, DiagnosticStyledString, IntoDiagnosticArg, StringPart, SubDiagnostic,
SubdiagnosticMessageOp,
};
pub use diagnostic_builder::{
BugAbort, DiagnosticBuilder, EmissionGuarantee, FatalAbort, IntoDiagnostic,
AddToDiagnostic, BugAbort, DecorateLint, Diagnostic, DiagnosticArg, DiagnosticArgName,
DiagnosticArgValue, DiagnosticBuilder, DiagnosticStyledString, EmissionGuarantee, FatalAbort,
IntoDiagnostic, IntoDiagnosticArg, StringPart, SubDiagnostic, SubdiagnosticMessageOp,
};
pub use diagnostic_impls::{
DiagnosticArgFromDisplay, DiagnosticSymbolList, ExpectedLifetimeParameter,
@@ -87,7 +84,6 @@ use Level::*;
pub mod annotate_snippet_emitter_writer;
pub mod codes;
mod diagnostic;
mod diagnostic_builder;
mod diagnostic_impls;
pub mod emitter;
pub mod error;
24 changes: 12 additions & 12 deletions compiler/rustc_infer/src/infer/relate/combine.rs
@@ -194,15 +194,15 @@ impl<'tcx> InferCtxt<'tcx> {
ty::ConstKind::Infer(InferConst::Var(b_vid)),
) => {
self.inner.borrow_mut().const_unification_table().union(a_vid, b_vid);
return Ok(a);
Ok(a)
}

(
ty::ConstKind::Infer(InferConst::EffectVar(a_vid)),
ty::ConstKind::Infer(InferConst::EffectVar(b_vid)),
) => {
self.inner.borrow_mut().effect_unification_table().union(a_vid, b_vid);
return Ok(a);
Ok(a)
}

// All other cases of inference with other variables are errors.
@@ -220,42 +220,42 @@ impl<'tcx> InferCtxt<'tcx> {
}

(ty::ConstKind::Infer(InferConst::Var(vid)), _) => {
return self.instantiate_const_var(vid, b);
self.instantiate_const_var(relation, relation.a_is_expected(), vid, b)?;
Ok(b)
}

(_, ty::ConstKind::Infer(InferConst::Var(vid))) => {
return self.instantiate_const_var(vid, a);
self.instantiate_const_var(relation, !relation.a_is_expected(), vid, a)?;
Ok(a)
}

(ty::ConstKind::Infer(InferConst::EffectVar(vid)), _) => {
return Ok(self.unify_effect_variable(vid, b));
Ok(self.unify_effect_variable(vid, b))
}

(_, ty::ConstKind::Infer(InferConst::EffectVar(vid))) => {
return Ok(self.unify_effect_variable(vid, a));
Ok(self.unify_effect_variable(vid, a))
}

(ty::ConstKind::Unevaluated(..), _) | (_, ty::ConstKind::Unevaluated(..))
if self.tcx.features().generic_const_exprs || self.next_trait_solver() =>
{
let (a, b) = if relation.a_is_expected() { (a, b) } else { (b, a) };

relation.register_predicates([ty::Binder::dummy(if self.next_trait_solver() {
relation.register_predicates([if self.next_trait_solver() {
ty::PredicateKind::AliasRelate(
a.into(),
b.into(),
ty::AliasRelationDirection::Equate,
)
} else {
ty::PredicateKind::ConstEquate(a, b)
})]);
}]);

return Ok(b);
Ok(b)
}
_ => {}
_ => ty::relate::structurally_relate_consts(relation, a, b),
}

ty::relate::structurally_relate_consts(relation, a, b)
}

fn unify_integral_variable(
123 changes: 71 additions & 52 deletions compiler/rustc_infer/src/infer/relate/generalize.rs
@@ -22,7 +22,7 @@ impl<'tcx> InferCtxt<'tcx> {
/// subtyping could occur. This also does the occurs checks, detecting whether
/// instantiating `target_vid` would result in a cyclic type. We eagerly error
/// in this case.
#[instrument(skip(self, relation, target_is_expected), level = "debug")]
#[instrument(level = "debug", skip(self, relation, target_is_expected))]
pub(super) fn instantiate_ty_var<R: ObligationEmittingRelation<'tcx>>(
&self,
relation: &mut R,
@@ -158,36 +158,48 @@ impl<'tcx> InferCtxt<'tcx> {
/// As `3 + 4` contains `N` in its args, this must not succeed.
///
/// See `tests/ui/const-generics/occurs-check/` for more examples where this is relevant.
#[instrument(level = "debug", skip(self))]
pub(super) fn instantiate_const_var(
#[instrument(level = "debug", skip(self, relation))]
pub(super) fn instantiate_const_var<R: ObligationEmittingRelation<'tcx>>(
&self,
relation: &mut R,
target_is_expected: bool,
target_vid: ty::ConstVid,
source_ct: ty::Const<'tcx>,
) -> RelateResult<'tcx, ty::Const<'tcx>> {
let span = match self.inner.borrow_mut().const_unification_table().probe_value(target_vid) {
ConstVariableValue::Known { value } => {
bug!("instantiating a known const var: {target_vid:?} {value} {source_ct}")
}
ConstVariableValue::Unknown { origin, universe: _ } => origin.span,
};
) -> RelateResult<'tcx, ()> {
// FIXME(generic_const_exprs): Occurs check failures for unevaluated
// constants and generic expressions are not yet handled correctly.
let Generalization { value_may_be_infer: generalized_ct, has_unconstrained_ty_var } =
self.generalize(span, target_vid, ty::Variance::Invariant, source_ct)?;
self.generalize(relation.span(), target_vid, ty::Variance::Invariant, source_ct)?;

debug_assert!(!generalized_ct.is_ct_infer());
if has_unconstrained_ty_var {
span_bug!(span, "unconstrained ty var when generalizing `{source_ct:?}`");
bug!("unconstrained ty var when generalizing `{source_ct:?}`");
}

self.inner
.borrow_mut()
.const_unification_table()
.union_value(target_vid, ConstVariableValue::Known { value: generalized_ct });

// FIXME(generic_const_exprs): We have to make sure we actually equate
// `generalized_ct` and `source_ct` here.
Ok(generalized_ct)
// HACK: make sure that we `a_is_expected` continues to be
// correct when relating the generalized type with the source.
if target_is_expected == relation.a_is_expected() {
relation.relate_with_variance(
ty::Variance::Invariant,
ty::VarianceDiagInfo::default(),
generalized_ct,
source_ct,
)?;
} else {
relation.relate_with_variance(
ty::Variance::Invariant,
ty::VarianceDiagInfo::default(),
source_ct,
generalized_ct,
)?;
}

Ok(())
}

/// Attempts to generalize `source_term` for the type variable `target_vid`.
@@ -287,6 +299,49 @@ impl<'tcx> Generalizer<'_, 'tcx> {
ty::TermKind::Const(ct) => TypeError::CyclicConst(ct),
}
}

/// An occurs check failure inside of an alias does not mean
/// that the types definitely don't unify. We may be able
/// to normalize the alias after all.
///
/// We handle this by lazily equating the alias and generalizing
/// it to an inference variable.
///
/// This is incomplete and will hopefully soon get fixed by #119106.
fn generalize_alias_ty(
&mut self,
alias: ty::AliasTy<'tcx>,
) -> Result<Ty<'tcx>, TypeError<'tcx>> {
let is_nested_alias = mem::replace(&mut self.in_alias, true);
let result = match self.relate(alias, alias) {
Ok(alias) => Ok(alias.to_ty(self.tcx())),
Err(e) => {
if is_nested_alias {
return Err(e);
} else {
let mut visitor = MaxUniverse::new();
alias.visit_with(&mut visitor);
let infer_replacement_is_complete =
self.for_universe.can_name(visitor.max_universe())
&& !alias.has_escaping_bound_vars();
if !infer_replacement_is_complete {
warn!("may incompletely handle alias type: {alias:?}");
}

debug!("generalization failure in alias");
Ok(self.infcx.next_ty_var_in_universe(
TypeVariableOrigin {
kind: TypeVariableOriginKind::MiscVariable,
span: self.span,
},
self.for_universe,
))
}
}
};
self.in_alias = is_nested_alias;
result
}
}

impl<'tcx> TypeRelation<'tcx> for Generalizer<'_, 'tcx> {
@@ -433,43 +488,7 @@ impl<'tcx> TypeRelation<'tcx> for Generalizer<'_, 'tcx> {
}
}

ty::Alias(kind, data) => {
// An occurs check failure inside of an alias does not mean
// that the types definitely don't unify. We may be able
// to normalize the alias after all.
//
// We handle this by lazily equating the alias and generalizing
// it to an inference variable.
let is_nested_alias = mem::replace(&mut self.in_alias, true);
let result = match self.relate(data, data) {
Ok(data) => Ok(Ty::new_alias(self.tcx(), kind, data)),
Err(e) => {
if is_nested_alias {
return Err(e);
} else {
let mut visitor = MaxUniverse::new();
t.visit_with(&mut visitor);
let infer_replacement_is_complete =
self.for_universe.can_name(visitor.max_universe())
&& !t.has_escaping_bound_vars();
if !infer_replacement_is_complete {
warn!("may incompletely handle alias type: {t:?}");
}

debug!("generalization failure in alias");
Ok(self.infcx.next_ty_var_in_universe(
TypeVariableOrigin {
kind: TypeVariableOriginKind::MiscVariable,
span: self.span,
},
self.for_universe,
))
}
}
};
self.in_alias = is_nested_alias;
result
}
ty::Alias(_, data) => self.generalize_alias_ty(data),

_ => relate::structurally_relate_tys(self, t, t),
}?;
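The occurs-check situation referenced in the doc comment above, as a compile-fail sketch in the spirit of `tests/ui/const-generics/occurs-check/` (the function `bump` is made up for illustration): unifying `N` with `N + 1` must be rejected rather than accepted or recursed on.

#![feature(generic_const_exprs)]
#![allow(incomplete_features)]

fn bump<const N: usize>(_: [u8; N]) -> [u8; N + 1] {
    todo!()
}

fn main() {
    let mut arr = Default::default();
    // `arr: [u8; ?N]`; the call and the assignment together force `?N == ?N + 1`,
    // which the occurs check must reject.
    arr = bump(arr);
}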
91 changes: 51 additions & 40 deletions compiler/rustc_mir_build/src/build/matches/mod.rs
@@ -1052,7 +1052,7 @@ struct Ascription<'tcx> {
variance: ty::Variance,
}

#[derive(Debug)]
#[derive(Debug, Clone)]
pub(crate) struct MatchPair<'pat, 'tcx> {
// This place...
place: PlaceBuilder<'tcx>,
@@ -1408,69 +1408,85 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
span: Span,
scrutinee_span: Span,
candidates: &mut [&mut Candidate<'_, 'tcx>],
block: BasicBlock,
start_block: BasicBlock,
otherwise_block: BasicBlock,
fake_borrows: &mut Option<FxIndexSet<Place<'tcx>>>,
) {
let (first_candidate, remaining_candidates) = candidates.split_first_mut().unwrap();

// All of the or-patterns have been sorted to the end, so if the first
// pattern is an or-pattern we only have or-patterns.
match first_candidate.match_pairs[0].pattern.kind {
PatKind::Or { .. } => (),
_ => {
self.test_candidates(
span,
scrutinee_span,
candidates,
block,
otherwise_block,
fake_borrows,
);
return;
}
assert!(first_candidate.subcandidates.is_empty());
if !matches!(first_candidate.match_pairs[0].pattern.kind, PatKind::Or { .. }) {
self.test_candidates(
span,
scrutinee_span,
candidates,
start_block,
otherwise_block,
fake_borrows,
);
return;
}

let match_pairs = mem::take(&mut first_candidate.match_pairs);
first_candidate.pre_binding_block = Some(block);
let (first_match_pair, remaining_match_pairs) = match_pairs.split_first().unwrap();
let PatKind::Or { ref pats } = &first_match_pair.pattern.kind else { unreachable!() };

let remainder_start = self.cfg.start_new_block();
for match_pair in match_pairs {
let PatKind::Or { ref pats } = &match_pair.pattern.kind else {
bug!("Or-patterns should have been sorted to the end");
};
let or_span = match_pair.pattern.span;
let or_span = first_match_pair.pattern.span;
// Test the alternatives of this or-pattern.
self.test_or_pattern(
first_candidate,
start_block,
remainder_start,
pats,
or_span,
&first_match_pair.place,
fake_borrows,
);

if !remaining_match_pairs.is_empty() {
// If more match pairs remain, test them after each subcandidate.
// We could add them to the or-candidates before the call to `test_or_pattern` but this
// would make it impossible to detect simplifiable or-patterns. That would guarantee
// exponentially large CFGs for cases like `(1 | 2, 3 | 4, ...)`.
first_candidate.visit_leaves(|leaf_candidate| {
self.test_or_pattern(
leaf_candidate,
remainder_start,
pats,
or_span,
&match_pair.place,
assert!(leaf_candidate.match_pairs.is_empty());
leaf_candidate.match_pairs.extend(remaining_match_pairs.iter().cloned());
let or_start = leaf_candidate.pre_binding_block.unwrap();
// In a case like `(a | b, c | d)`, if `a` succeeds and `c | d` fails, we know `(b,
// c | d)` will fail too. If there is no guard, we skip testing of `b` by branching
// directly to `remainder_start`. If there is a guard, we have to try `(b, c | d)`.
let or_otherwise = leaf_candidate.otherwise_block.unwrap_or(remainder_start);
self.test_candidates_with_or(
span,
scrutinee_span,
&mut [leaf_candidate],
or_start,
or_otherwise,
fake_borrows,
);
});
}

// Test the remaining candidates.
self.match_candidates(
span,
scrutinee_span,
remainder_start,
otherwise_block,
remaining_candidates,
fake_borrows,
)
);
}

#[instrument(
skip(self, otherwise, or_span, place, fake_borrows, candidate, pats),
skip(self, start_block, otherwise_block, or_span, place, fake_borrows, candidate, pats),
level = "debug"
)]
fn test_or_pattern<'pat>(
&mut self,
candidate: &mut Candidate<'pat, 'tcx>,
otherwise: BasicBlock,
start_block: BasicBlock,
otherwise_block: BasicBlock,
pats: &'pat [Box<Pat<'tcx>>],
or_span: Span,
place: &PlaceBuilder<'tcx>,
@@ -1482,16 +1498,11 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
.map(|pat| Candidate::new(place.clone(), pat, candidate.has_guard, self))
.collect();
let mut or_candidate_refs: Vec<_> = or_candidates.iter_mut().collect();
let otherwise = if let Some(otherwise_block) = candidate.otherwise_block {
otherwise_block
} else {
otherwise
};
self.match_candidates(
or_span,
or_span,
candidate.pre_binding_block.unwrap(),
otherwise,
start_block,
otherwise_block,
&mut or_candidate_refs,
fake_borrows,
);
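A small runnable example of the match shape these comments are about: two or-patterns in one tuple pattern. With the new lowering, or-patterns are tested one at a time, so for `(1 | 2, 3 | 4)` a failure of `3 | 4` after `1` has matched branches straight to the remainder block (when there is no guard) instead of retrying `2`:

fn classify(pair: (u32, u32)) -> &'static str {
    match pair {
        (1 | 2, 3 | 4) => "both in range",
        _ => "other",
    }
}

fn main() {
    assert_eq!(classify((2, 3)), "both in range");
    assert_eq!(classify((2, 5)), "other");
}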
2 changes: 2 additions & 0 deletions compiler/rustc_parse/messages.ftl
@@ -27,6 +27,8 @@ parse_async_bound_modifier_in_2015 = `async` trait bounds are only allowed in Ru
parse_async_fn_in_2015 = `async fn` is not permitted in Rust 2015
.label = to use `async fn`, switch to Rust 2018 or later
parse_async_impl = `async` trait implementations are unsupported
parse_async_move_block_in_2015 = `async move` blocks are only allowed in Rust 2018 or later
parse_async_move_order_incorrect = the order of `move` and `async` is incorrect
7 changes: 7 additions & 0 deletions compiler/rustc_parse/src/errors.rs
@@ -2975,3 +2975,10 @@ pub(crate) struct ArrayIndexInOffsetOf(#[primary_span] pub Span);
#[derive(Diagnostic)]
#[diag(parse_invalid_offset_of)]
pub(crate) struct InvalidOffsetOf(#[primary_span] pub Span);

#[derive(Diagnostic)]
#[diag(parse_async_impl)]
pub(crate) struct AsyncImpl {
#[primary_span]
pub span: Span,
}
29 changes: 13 additions & 16 deletions compiler/rustc_parse/src/parser/item.rs
@@ -562,6 +562,15 @@ impl<'a> Parser<'a> {
self.sess.gated_spans.gate(sym::const_trait_impl, span);
}

// Parse stray `impl async Trait`
if (self.token.uninterpolated_span().at_least_rust_2018()
&& self.token.is_keyword(kw::Async))
|| self.is_kw_followed_by_ident(kw::Async)
{
self.bump();
self.dcx().emit_err(errors::AsyncImpl { span: self.prev_token.span });
}

let polarity = self.parse_polarity();

// Parse both types and traits as a type, then reinterpret if necessary.
@@ -592,22 +601,10 @@ impl<'a> Parser<'a> {
// We need to report this error after `cfg` expansion for compatibility reasons
self.bump(); // `..`, do not add it to expected tokens

// FIXME(nnethercote): AST validation later detects this
// `TyKind::Err` and emits an errors. So why the unchecked
// ErrorGuaranteed?
// - A `span_delayed_bug` doesn't work here, because rustfmt can
// hit this path but then not hit the follow-up path in the AST
// validator that issues the error, which results in ICEs.
// - `TyKind::Dummy` doesn't work, because it ends up reaching HIR
// lowering, which results in ICEs. Changing `TyKind::Dummy` to
// `TyKind::Err` during AST validation might fix that, but that's
// not possible because AST validation doesn't allow mutability.
//
// #121072 will hopefully remove all this special handling of the
// obsolete `impl Trait for ..` and then this can go away.
#[allow(deprecated)]
let guar = rustc_errors::ErrorGuaranteed::unchecked_error_guaranteed();
Some(self.mk_ty(self.prev_token.span, TyKind::Err(guar)))
// AST validation later detects this `TyKind::Dummy` and emits an
// error. (#121072 will hopefully remove all this special handling
// of the obsolete `impl Trait for ..` and then this can go away.)
Some(self.mk_ty(self.prev_token.span, TyKind::Dummy))
} else if has_for || self.token.can_begin_type() {
Some(self.parse_ty()?)
} else {
9 changes: 6 additions & 3 deletions compiler/rustc_parse/src/parser/ty.rs
@@ -778,9 +778,10 @@ impl<'a> Parser<'a> {
|| self.check(&token::Not)
|| self.check(&token::Question)
|| self.check(&token::Tilde)
|| self.check_keyword(kw::Const)
|| self.check_keyword(kw::For)
|| self.check(&token::OpenDelim(Delimiter::Parenthesis))
|| self.check_keyword(kw::Const)
|| self.check_keyword(kw::Async)
}

/// Parses a bound according to the grammar:
@@ -882,11 +883,13 @@ impl<'a> Parser<'a> {
BoundConstness::Never
};

let asyncness = if self.token.span.at_least_rust_2018() && self.eat_keyword(kw::Async) {
let asyncness = if self.token.uninterpolated_span().at_least_rust_2018()
&& self.eat_keyword(kw::Async)
{
self.sess.gated_spans.gate(sym::async_closure, self.prev_token.span);
BoundAsyncness::Async(self.prev_token.span)
} else if self.may_recover()
&& self.token.span.is_rust_2015()
&& self.token.uninterpolated_span().is_rust_2015()
&& self.is_kw_followed_by_ident(kw::Async)
{
self.bump(); // eat `async`
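A nightly-only sketch of the surface syntax these parser changes accept: `async` as a trait-bound modifier in edition 2018+, gated on the `async_closure` feature as the `gated_spans.gate` call above shows (the function name is made up for illustration). Conversely, the `item.rs` change above rejects a stray `impl async Trait for ..` header with the new `parse_async_impl` error.

#![feature(async_closure)] // nightly-only at the time of this PR

// `async` now parses as a bound modifier, e.g. in an `impl Trait` argument position.
fn takes_async_callback(_f: impl async Fn(u32)) {}

fn main() {}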
29 changes: 11 additions & 18 deletions compiler/rustc_trait_selection/src/solve/normalize.rs
@@ -85,25 +85,16 @@ impl<'tcx> NormalizationFolder<'_, 'tcx> {
),
);

// Do not emit an error if normalization is known to fail but instead
// keep the projection unnormalized. This is the case for projections
// with a `T: Trait` where-clause and opaque types outside of the defining
// scope.
let result = if infcx.predicate_may_hold(&obligation) {
self.fulfill_cx.register_predicate_obligation(infcx, obligation);
let errors = self.fulfill_cx.select_all_or_error(infcx);
if !errors.is_empty() {
return Err(errors);
}
let ty = infcx.resolve_vars_if_possible(new_infer_ty);

// Alias is guaranteed to be fully structurally resolved,
// so we can super fold here.
ty.try_super_fold_with(self)?
} else {
alias_ty.try_super_fold_with(self)?
};
self.fulfill_cx.register_predicate_obligation(infcx, obligation);
let errors = self.fulfill_cx.select_all_or_error(infcx);
if !errors.is_empty() {
return Err(errors);
}

// Alias is guaranteed to be fully structurally resolved,
// so we can super fold here.
let ty = infcx.resolve_vars_if_possible(new_infer_ty);
let result = ty.try_super_fold_with(self)?;
self.depth -= 1;
Ok(result)
}
@@ -178,6 +169,7 @@ impl<'tcx> FallibleTypeFolder<TyCtxt<'tcx>> for NormalizationFolder<'_, 'tcx> {
Ok(t)
}

#[instrument(level = "debug", skip(self), ret)]
fn try_fold_ty(&mut self, ty: Ty<'tcx>) -> Result<Ty<'tcx>, Self::Error> {
let infcx = self.at.infcx;
debug_assert_eq!(ty, infcx.shallow_resolve(ty));
@@ -204,6 +196,7 @@ impl<'tcx> FallibleTypeFolder<TyCtxt<'tcx>> for NormalizationFolder<'_, 'tcx> {
}
}

#[instrument(level = "debug", skip(self), ret)]
fn try_fold_const(&mut self, ct: ty::Const<'tcx>) -> Result<ty::Const<'tcx>, Self::Error> {
let infcx = self.at.infcx;
debug_assert_eq!(ct, infcx.shallow_resolve(ct));
25 changes: 25 additions & 0 deletions compiler/rustc_trait_selection/src/solve/normalizes_to/anon_const.rs
@@ -0,0 +1,25 @@
use crate::solve::EvalCtxt;
use rustc_middle::traits::solve::{Certainty, Goal, QueryResult};
use rustc_middle::ty;

impl<'tcx> EvalCtxt<'_, 'tcx> {
#[instrument(level = "debug", skip(self), ret)]
pub(super) fn normalize_anon_const(
&mut self,
goal: Goal<'tcx, ty::NormalizesTo<'tcx>>,
) -> QueryResult<'tcx> {
if let Some(normalized_const) = self.try_const_eval_resolve(
goal.param_env,
ty::UnevaluatedConst::new(goal.predicate.alias.def_id, goal.predicate.alias.args),
self.tcx()
.type_of(goal.predicate.alias.def_id)
.no_bound_vars()
.expect("const ty should not rely on other generics"),
) {
self.eq(goal.param_env, normalized_const, goal.predicate.term.ct().unwrap())?;
self.evaluate_added_goals_and_make_canonical_response(Certainty::Yes)
} else {
self.evaluate_added_goals_and_make_canonical_response(Certainty::AMBIGUOUS)
}
}
}
71 changes: 26 additions & 45 deletions compiler/rustc_trait_selection/src/solve/normalizes_to/mod.rs
@@ -18,8 +18,9 @@ use rustc_middle::ty::{self, Ty, TyCtxt};
use rustc_middle::ty::{ToPredicate, TypeVisitableExt};
use rustc_span::{sym, ErrorGuaranteed, DUMMY_SP};

mod anon_const;
mod inherent;
mod opaques;
mod opaque_types;
mod weak_types;

impl<'tcx> EvalCtxt<'_, 'tcx> {
@@ -31,34 +32,34 @@ impl<'tcx> EvalCtxt<'_, 'tcx> {
let def_id = goal.predicate.def_id();
match self.tcx().def_kind(def_id) {
DefKind::AssocTy | DefKind::AssocConst => {
// To only compute normalization once for each projection we only
// assemble normalization candidates if the expected term is an
// unconstrained inference variable.
//
// Why: For better cache hits, since if we have an unconstrained RHS then
// there are only as many cache keys as there are (canonicalized) alias
// types in each normalizes-to goal. This also weakens inference in a
// forwards-compatible way so we don't use the value of the RHS term to
// affect candidate assembly for projections.
//
// E.g. for `<T as Trait>::Assoc == u32` we recursively compute the goal
// `exists<U> <T as Trait>::Assoc == U` and then take the resulting type for
// `U` and equate it with `u32`. This means that we don't need a separate
// projection cache in the solver, since we're piggybacking off of regular
// goal caching.
if self.term_is_fully_unconstrained(goal) {
match self.tcx().associated_item(def_id).container {
ty::AssocItemContainer::TraitContainer => {
match self.tcx().associated_item(def_id).container {
ty::AssocItemContainer::TraitContainer => {
// To only compute normalization once for each projection we only
// assemble normalization candidates if the expected term is an
// unconstrained inference variable.
//
// Why: For better cache hits, since if we have an unconstrained RHS then
// there are only as many cache keys as there are (canonicalized) alias
// types in each normalizes-to goal. This also weakens inference in a
// forwards-compatible way so we don't use the value of the RHS term to
// affect candidate assembly for projections.
//
// E.g. for `<T as Trait>::Assoc == u32` we recursively compute the goal
// `exists<U> <T as Trait>::Assoc == U` and then take the resulting type for
// `U` and equate it with `u32`. This means that we don't need a separate
// projection cache in the solver, since we're piggybacking off of regular
// goal caching.
if self.term_is_fully_unconstrained(goal) {
let candidates = self.assemble_and_evaluate_candidates(goal);
self.merge_candidates(candidates)
} else {
self.set_normalizes_to_hack_goal(goal);
self.evaluate_added_goals_and_make_canonical_response(Certainty::Yes)
}
ty::AssocItemContainer::ImplContainer => {
self.normalize_inherent_associated_type(goal)
}
}
} else {
self.set_normalizes_to_hack_goal(goal);
self.evaluate_added_goals_and_make_canonical_response(Certainty::Yes)
ty::AssocItemContainer::ImplContainer => {
self.normalize_inherent_associated_type(goal)
}
}
}
DefKind::AnonConst => self.normalize_anon_const(goal),
@@ -67,26 +68,6 @@ impl<'tcx> EvalCtxt<'_, 'tcx> {
kind => bug!("unknown DefKind {} in projection goal: {goal:#?}", kind.descr(def_id)),
}
}

#[instrument(level = "debug", skip(self), ret)]
fn normalize_anon_const(
&mut self,
goal: Goal<'tcx, ty::NormalizesTo<'tcx>>,
) -> QueryResult<'tcx> {
if let Some(normalized_const) = self.try_const_eval_resolve(
goal.param_env,
ty::UnevaluatedConst::new(goal.predicate.alias.def_id, goal.predicate.alias.args),
self.tcx()
.type_of(goal.predicate.alias.def_id)
.no_bound_vars()
.expect("const ty should not rely on other generics"),
) {
self.eq(goal.param_env, normalized_const, goal.predicate.term.ct().unwrap())?;
self.evaluate_added_goals_and_make_canonical_response(Certainty::Yes)
} else {
self.evaluate_added_goals_and_make_canonical_response(Certainty::AMBIGUOUS)
}
}
}

impl<'tcx> assembly::GoalKind<'tcx> for NormalizesTo<'tcx> {
10 changes: 5 additions & 5 deletions src/bootstrap/src/bin/main.rs
@@ -39,14 +39,14 @@ fn main() {
.open(&lock_path)));
_build_lock_guard = match build_lock.try_write() {
Ok(mut lock) => {
t!(lock.write(&process::id().to_string().as_ref()));
t!(lock.write(process::id().to_string().as_ref()));
lock
}
err => {
drop(err);
println!("WARNING: build directory locked by process {pid}, waiting for lock");
let mut lock = t!(build_lock.write());
t!(lock.write(&process::id().to_string().as_ref()));
t!(lock.write(process::id().to_string().as_ref()));
lock
}
};
@@ -113,14 +113,14 @@ fn main() {
continue;
}

let file = t!(fs::File::open(&entry.path()));
let file = t!(fs::File::open(entry.path()));

// To ensure deterministic results we must sort the dump lines.
// This is necessary because the order of rustc invocations different
// almost all the time.
let mut lines: Vec<String> = t!(BufReader::new(&file).lines().collect());
lines.sort_by_key(|t| t.to_lowercase());
let mut file = t!(OpenOptions::new().write(true).truncate(true).open(&entry.path()));
let mut file = t!(OpenOptions::new().write(true).truncate(true).open(entry.path()));
t!(file.write_all(lines.join("\n").as_bytes()));
}
}
@@ -156,7 +156,7 @@ fn check_version(config: &Config) -> Option<String> {
msg.push_str("There have been changes to x.py since you last updated:\n");

for change in changes {
msg.push_str(&format!(" [{}] {}\n", change.severity.to_string(), change.summary));
msg.push_str(&format!(" [{}] {}\n", change.severity, change.summary));
msg.push_str(&format!(
" - PR Link https://github.com/rust-lang/rust/pull/{}\n",
change.change_id
4 changes: 2 additions & 2 deletions src/bootstrap/src/bin/rustc.rs
@@ -276,7 +276,7 @@ fn main() {
dur.as_secs(),
dur.subsec_millis(),
if rusage_data.is_some() { " " } else { "" },
rusage_data.unwrap_or(String::new()),
rusage_data.unwrap_or_default(),
);
}
}
@@ -440,5 +440,5 @@ fn format_rusage_data(_child: Child) -> Option<String> {
));
}

return Some(init_str);
Some(init_str)
}
4 changes: 2 additions & 2 deletions src/bootstrap/src/bin/sccache-plus-cl.rs
@@ -18,9 +18,9 @@ fn main() {

// Invoke sccache with said compiler
let sccache_path = env::var_os("SCCACHE_PATH").unwrap();
let mut cmd = Command::new(&sccache_path);
let mut cmd = Command::new(sccache_path);
cmd.arg(compiler.path());
for &(ref k, ref v) in compiler.env() {
for (k, v) in compiler.env() {
cmd.env(k, v);
}
for arg in env::args().skip(1) {
8 changes: 4 additions & 4 deletions src/bootstrap/src/core/build_steps/check.rs
@@ -34,7 +34,7 @@ fn args(builder: &Builder<'_>) -> Vec<String> {
&builder.config.cmd
{
// disable the most spammy clippy lints
let ignored_lints = vec![
let ignored_lints = [
"many_single_char_names", // there are a lot in stdarch
"collapsible_if",
"type_complexity",
@@ -150,7 +150,7 @@ impl Step for Std {
if compiler.stage == 0 {
let libdir = builder.sysroot_libdir(compiler, target);
let hostdir = builder.sysroot_libdir(compiler, compiler.host);
add_to_sysroot(&builder, &libdir, &hostdir, &libstd_stamp(builder, compiler, target));
add_to_sysroot(builder, &libdir, &hostdir, &libstd_stamp(builder, compiler, target));
}
drop(_guard);

@@ -301,7 +301,7 @@ impl Step for Rustc {

let libdir = builder.sysroot_libdir(compiler, target);
let hostdir = builder.sysroot_libdir(compiler, compiler.host);
add_to_sysroot(&builder, &libdir, &hostdir, &librustc_stamp(builder, compiler, target));
add_to_sysroot(builder, &libdir, &hostdir, &librustc_stamp(builder, compiler, target));
}
}

@@ -353,7 +353,7 @@ impl Step for CodegenBackend {
.arg(builder.src.join(format!("compiler/rustc_codegen_{backend}/Cargo.toml")));
rustc_cargo_env(builder, &mut cargo, target, compiler.stage);

let _guard = builder.msg_check(&backend, target);
let _guard = builder.msg_check(backend, target);

run_cargo(
builder,
58 changes: 29 additions & 29 deletions src/bootstrap/src/core/build_steps/compile.rs
@@ -107,8 +107,8 @@ impl Std {
) -> Vec<(PathBuf, DependencyType)> {
let mut deps = Vec::new();
if !self.is_for_mir_opt_tests {
deps.extend(copy_third_party_objects(builder, &compiler, target));
deps.extend(copy_self_contained_objects(builder, &compiler, target));
deps.extend(copy_third_party_objects(builder, compiler, target));
deps.extend(copy_self_contained_objects(builder, compiler, target));
}
deps
}
@@ -186,7 +186,7 @@ impl Step for Std {

// Profiler information requires LLVM's compiler-rt
if builder.config.profiler {
builder.update_submodule(&Path::new("src/llvm-project"));
builder.update_submodule(Path::new("src/llvm-project"));
}

let mut target_deps = builder.ensure(StartupObjects { compiler, target });
@@ -271,7 +271,7 @@ impl Step for Std {
if target.is_synthetic() {
cargo.env("RUSTC_BOOTSTRAP_SYNTHETIC_TARGET", "1");
}
for rustflag in self.extra_rust_args.into_iter() {
for rustflag in self.extra_rust_args.iter() {
cargo.rustflag(rustflag);
}

@@ -333,7 +333,7 @@ fn copy_third_party_objects(
// The sanitizers are only copied in stage1 or above,
// to avoid creating dependency on LLVM.
target_deps.extend(
copy_sanitizers(builder, &compiler, target)
copy_sanitizers(builder, compiler, target)
.into_iter()
.map(|d| (d, DependencyType::Target)),
);
@@ -487,7 +487,7 @@ pub fn std_cargo(builder: &Builder<'_>, target: TargetSelection, stage: u32, car

// for no-std targets we only compile a few no_std crates
cargo
.args(&["-p", "alloc"])
.args(["-p", "alloc"])
.arg("--manifest-path")
.arg(builder.src.join("library/alloc/Cargo.toml"))
.arg("--features")
@@ -626,20 +626,20 @@ impl Step for StdLink {
.build
.config
.initial_rustc
.starts_with(builder.out.join(&compiler.host.triple).join("stage0/bin"))
.starts_with(builder.out.join(compiler.host.triple).join("stage0/bin"))
{
// Copy bin files from stage0/bin to stage0-sysroot/bin
let sysroot = builder.out.join(&compiler.host.triple).join("stage0-sysroot");
let sysroot = builder.out.join(compiler.host.triple).join("stage0-sysroot");

let host = compiler.host.triple;
let stage0_bin_dir = builder.out.join(&host).join("stage0/bin");
let stage0_bin_dir = builder.out.join(host).join("stage0/bin");
let sysroot_bin_dir = sysroot.join("bin");
t!(fs::create_dir_all(&sysroot_bin_dir));
builder.cp_r(&stage0_bin_dir, &sysroot_bin_dir);

// Copy all *.so files from stage0/lib to stage0-sysroot/lib
let stage0_lib_dir = builder.out.join(&host).join("stage0/lib");
if let Ok(files) = fs::read_dir(&stage0_lib_dir) {
let stage0_lib_dir = builder.out.join(host).join("stage0/lib");
if let Ok(files) = fs::read_dir(stage0_lib_dir) {
for file in files {
let file = t!(file);
let path = file.path();
@@ -654,9 +654,9 @@ impl Step for StdLink {
t!(fs::create_dir_all(&sysroot_codegen_backends));
let stage0_codegen_backends = builder
.out
.join(&host)
.join(host)
.join("stage0/lib/rustlib")
.join(&host)
.join(host)
.join("codegen-backends");
if stage0_codegen_backends.exists() {
builder.cp_r(&stage0_codegen_backends, &sysroot_codegen_backends);
@@ -1179,7 +1179,7 @@ fn rustc_llvm_env(builder: &Builder<'_>, cargo: &mut Cargo, target: TargetSelect
// The config can also specify its own llvm linker flags.
if let Some(ref s) = builder.config.llvm_ldflags {
if !llvm_linker_flags.is_empty() {
llvm_linker_flags.push_str(" ");
llvm_linker_flags.push(' ');
}
llvm_linker_flags.push_str(s);
}
@@ -1270,7 +1270,7 @@ fn needs_codegen_config(run: &RunConfig<'_>) -> bool {
for path_set in &run.paths {
needs_codegen_cfg = match path_set {
PathSet::Set(set) => set.iter().any(|p| is_codegen_cfg_needed(p, run)),
PathSet::Suite(suite) => is_codegen_cfg_needed(&suite, run),
PathSet::Suite(suite) => is_codegen_cfg_needed(suite, run),
}
}
needs_codegen_cfg
@@ -1279,7 +1279,7 @@ fn needs_codegen_config(run: &RunConfig<'_>) -> bool {
pub(crate) const CODEGEN_BACKEND_PREFIX: &str = "rustc_codegen_";

fn is_codegen_cfg_needed(path: &TaskPath, run: &RunConfig<'_>) -> bool {
if path.path.to_str().unwrap().contains(&CODEGEN_BACKEND_PREFIX) {
if path.path.to_str().unwrap().contains(CODEGEN_BACKEND_PREFIX) {
let mut needs_codegen_backend_config = true;
for &backend in run.builder.config.codegen_backends(run.target) {
if path
@@ -1300,7 +1300,7 @@ fn is_codegen_cfg_needed(path: &TaskPath, run: &RunConfig<'_>) -> bool {
}
}

return false;
false
}

impl Step for CodegenBackend {
@@ -1393,7 +1393,7 @@ impl Step for CodegenBackend {
}
let stamp = codegen_backend_stamp(builder, compiler, target, backend);
let codegen_backend = codegen_backend.to_str().unwrap();
t!(fs::write(&stamp, &codegen_backend));
t!(fs::write(stamp, codegen_backend));
}
}

@@ -1441,7 +1441,7 @@ fn copy_codegen_backends_to_sysroot(
let dot = filename.find('.').unwrap();
format!("{}-{}{}", &filename[..dash], builder.rust_release(), &filename[dot..])
};
builder.copy(&file, &dst.join(target_filename));
builder.copy(file, &dst.join(target_filename));
}
}

@@ -1519,7 +1519,7 @@ impl Step for Sysroot {
/// 1-3.
fn run(self, builder: &Builder<'_>) -> Interned<PathBuf> {
let compiler = self.compiler;
let host_dir = builder.out.join(&compiler.host.triple);
let host_dir = builder.out.join(compiler.host.triple);

let sysroot_dir = |stage| {
if stage == 0 {
@@ -1578,7 +1578,7 @@ impl Step for Sysroot {
let mut add_filtered_files = |suffix, contents| {
for path in contents {
let path = Path::new(&path);
if path.parent().map_or(false, |parent| parent.ends_with(&suffix)) {
if path.parent().map_or(false, |parent| parent.ends_with(suffix)) {
filtered_files.push(path.file_name().unwrap().to_owned());
}
}
@@ -1802,7 +1802,7 @@ impl Step for Assemble {
if let Some(lld_install) = lld_install {
let src_exe = exe("lld", target_compiler.host);
let dst_exe = exe("rust-lld", target_compiler.host);
builder.copy(&lld_install.join("bin").join(&src_exe), &libdir_bin.join(&dst_exe));
builder.copy(&lld_install.join("bin").join(src_exe), &libdir_bin.join(dst_exe));
let self_contained_lld_dir = libdir_bin.join("gcc-ld");
t!(fs::create_dir_all(&self_contained_lld_dir));
let lld_wrapper_exe = builder.ensure(crate::core::build_steps::tool::LldWrapper {
@@ -1850,7 +1850,7 @@ impl Step for Assemble {
let out_dir = builder.cargo_out(build_compiler, Mode::Rustc, host);
let rustc = out_dir.join(exe("rustc-main", host));
let bindir = sysroot.join("bin");
t!(fs::create_dir_all(&bindir));
t!(fs::create_dir_all(bindir));
let compiler = builder.rustc(target_compiler);
builder.copy(&rustc, &compiler);

@@ -1869,9 +1869,9 @@ pub fn add_to_sysroot(
stamp: &Path,
) {
let self_contained_dst = &sysroot_dst.join("self-contained");
t!(fs::create_dir_all(&sysroot_dst));
t!(fs::create_dir_all(&sysroot_host_dst));
t!(fs::create_dir_all(&self_contained_dst));
t!(fs::create_dir_all(sysroot_dst));
t!(fs::create_dir_all(sysroot_host_dst));
t!(fs::create_dir_all(self_contained_dst));
for (path, dependency_type) in builder.read_stamp_file(stamp) {
let dst = match dependency_type {
DependencyType::Host => sysroot_host_dst,
@@ -2009,14 +2009,14 @@ pub fn run_cargo(
.map(|e| (e.path(), e.file_name().into_string().unwrap(), t!(e.metadata())))
.collect::<Vec<_>>();
for (prefix, extension, expected_len) in toplevel {
let candidates = contents.iter().filter(|&&(_, ref filename, ref meta)| {
let candidates = contents.iter().filter(|&(_, filename, meta)| {
meta.len() == expected_len
&& filename
.strip_prefix(&prefix[..])
.map(|s| s.starts_with('-') && s.ends_with(&extension[..]))
.unwrap_or(false)
});
let max = candidates.max_by_key(|&&(_, _, ref metadata)| {
let max = candidates.max_by_key(|&(_, _, metadata)| {
metadata.modified().expect("mtime should be available on all relevant OSes")
});
let path_to_add = match max {
@@ -2045,7 +2045,7 @@ pub fn run_cargo(
new_contents.extend(dep.to_str().unwrap().as_bytes());
new_contents.extend(b"\0");
}
t!(fs::write(&stamp, &new_contents));
t!(fs::write(stamp, &new_contents));
deps.into_iter().map(|(d, _)| d).collect()
}

76 changes: 38 additions & 38 deletions src/bootstrap/src/core/build_steps/dist.rs
@@ -78,7 +78,7 @@ impl Step for Docs {
let mut tarball = Tarball::new(builder, "rust-docs", &host.triple);
tarball.set_product_name("Rust Documentation");
tarball.add_bulk_dir(&builder.doc_out(host), dest);
tarball.add_file(&builder.src.join("src/doc/robots.txt"), dest, 0o644);
tarball.add_file(builder.src.join("src/doc/robots.txt"), dest, 0o644);
Some(tarball.generate())
}
}
@@ -342,7 +342,7 @@ impl Step for Mingw {
// thrown away (this contains the runtime DLLs included in the rustc package
// above) and the second argument is where to place all the MinGW components
// (which is what we want).
make_win_dist(&tmpdir(builder), tarball.image_dir(), host, &builder);
make_win_dist(&tmpdir(builder), tarball.image_dir(), host, builder);

Some(tarball.generate())
}
@@ -658,7 +658,7 @@ impl Step for Std {
let compiler_to_use = builder.compiler_for(compiler.stage, compiler.host, target);
let stamp = compile::libstd_stamp(builder, compiler_to_use, target);
verify_uefi_rlib_format(builder, target, &stamp);
copy_target_libs(builder, target, &tarball.image_dir(), &stamp);
copy_target_libs(builder, target, tarball.image_dir(), &stamp);

Some(tarball.generate())
}
@@ -734,7 +734,7 @@ impl Step for Analysis {
const DEFAULT: bool = true;

fn should_run(run: ShouldRun<'_>) -> ShouldRun<'_> {
let default = should_build_extended_tool(&run.builder, "analysis");
let default = should_build_extended_tool(run.builder, "analysis");
run.alias("rust-analysis").default_condition(default)
}

@@ -890,7 +890,7 @@ impl Step for Src {
/// Creates the `rust-src` installer component
fn run(self, builder: &Builder<'_>) -> GeneratedTarball {
if !builder.config.dry_run() {
builder.update_submodule(&Path::new("src/llvm-project"));
builder.update_submodule(Path::new("src/llvm-project"));
}

let tarball = Tarball::new_targetless(builder, "rust-src");
@@ -976,7 +976,7 @@ impl Step for PlainSourceTarball {
];
let src_dirs = ["src", "compiler", "library", "tests"];

copy_src_dirs(builder, &builder.src, &src_dirs, &[], &plain_dst_src);
copy_src_dirs(builder, &builder.src, &src_dirs, &[], plain_dst_src);

// Copy the files normally
for item in &src_files {
@@ -986,8 +986,8 @@ impl Step for PlainSourceTarball {
// Create the version file
builder.create(&plain_dst_src.join("version"), &builder.rust_version());
if let Some(info) = builder.rust_info().info() {
channel::write_commit_hash_file(&plain_dst_src, &info.sha);
channel::write_commit_info_file(&plain_dst_src, info);
channel::write_commit_hash_file(plain_dst_src, &info.sha);
channel::write_commit_info_file(plain_dst_src, info);
}

// If we're building from git or tarball sources, we need to vendor
@@ -1014,7 +1014,7 @@ impl Step for PlainSourceTarball {
// Will read the libstd Cargo.toml
// which uses the unstable `public-dependency` feature.
.env("RUSTC_BOOTSTRAP", "1")
.current_dir(&plain_dst_src);
.current_dir(plain_dst_src);

let config = if !builder.config.dry_run() {
t!(String::from_utf8(t!(cmd.output()).stdout))
@@ -1043,7 +1043,7 @@ impl Step for Cargo {
const ONLY_HOSTS: bool = true;

fn should_run(run: ShouldRun<'_>) -> ShouldRun<'_> {
let default = should_build_extended_tool(&run.builder, "cargo");
let default = should_build_extended_tool(run.builder, "cargo");
run.alias("cargo").default_condition(default)
}

@@ -1070,7 +1070,7 @@ impl Step for Cargo {
let mut tarball = Tarball::new(builder, "cargo", &target.triple);
tarball.set_overlay(OverlayKind::Cargo);

tarball.add_file(&cargo, "bin", 0o755);
tarball.add_file(cargo, "bin", 0o755);
tarball.add_file(etc.join("_cargo"), "share/zsh/site-functions", 0o644);
tarball.add_renamed_file(etc.join("cargo.bashcomp.sh"), "etc/bash_completion.d", "cargo");
tarball.add_dir(etc.join("man"), "share/man/man1");
@@ -1092,7 +1092,7 @@ impl Step for Rls {
const DEFAULT: bool = true;

fn should_run(run: ShouldRun<'_>) -> ShouldRun<'_> {
let default = should_build_extended_tool(&run.builder, "rls");
let default = should_build_extended_tool(run.builder, "rls");
run.alias("rls").default_condition(default)
}

@@ -1134,7 +1134,7 @@ impl Step for RustAnalyzer {
const ONLY_HOSTS: bool = true;

fn should_run(run: ShouldRun<'_>) -> ShouldRun<'_> {
let default = should_build_extended_tool(&run.builder, "rust-analyzer");
let default = should_build_extended_tool(run.builder, "rust-analyzer");
run.alias("rust-analyzer").default_condition(default)
}

@@ -1176,7 +1176,7 @@ impl Step for Clippy {
const ONLY_HOSTS: bool = true;

fn should_run(run: ShouldRun<'_>) -> ShouldRun<'_> {
let default = should_build_extended_tool(&run.builder, "clippy");
let default = should_build_extended_tool(run.builder, "clippy");
run.alias("clippy").default_condition(default)
}

@@ -1224,7 +1224,7 @@ impl Step for Miri {
const ONLY_HOSTS: bool = true;

fn should_run(run: ShouldRun<'_>) -> ShouldRun<'_> {
let default = should_build_extended_tool(&run.builder, "miri");
let default = should_build_extended_tool(run.builder, "miri");
run.alias("miri").default_condition(default)
}

@@ -1337,12 +1337,12 @@ impl Step for CodegenBackend {
let src = builder.sysroot(compiler);
let backends_src = builder.sysroot_codegen_backends(compiler);
let backends_rel = backends_src
.strip_prefix(&src)
.strip_prefix(src)
.unwrap()
.strip_prefix(builder.sysroot_libdir_relative(compiler))
.unwrap();
// Don't use custom libdir here because ^lib/ will be resolved again with installer
let backends_dst = PathBuf::from("lib").join(&backends_rel);
let backends_dst = PathBuf::from("lib").join(backends_rel);

let backend_name = format!("rustc_codegen_{}", backend);
let mut found_backend = false;
@@ -1371,7 +1371,7 @@ impl Step for Rustfmt {
const ONLY_HOSTS: bool = true;

fn should_run(run: ShouldRun<'_>) -> ShouldRun<'_> {
let default = should_build_extended_tool(&run.builder, "rustfmt");
let default = should_build_extended_tool(run.builder, "rustfmt");
run.alias("rustfmt").default_condition(default)
}

@@ -1454,7 +1454,7 @@ impl Step for RustDemangler {
let mut tarball = Tarball::new(builder, "rust-demangler", &target.triple);
tarball.set_overlay(OverlayKind::RustDemangler);
tarball.is_preview(true);
tarball.add_file(&rust_demangler, "bin", 0o755);
tarball.add_file(rust_demangler, "bin", 0o755);
tarball.add_legal_and_readme_to("share/doc/rust-demangler");
Some(tarball.generate())
}
@@ -1609,7 +1609,7 @@ impl Step for Extended {
let prepare = |name: &str| {
builder.create_dir(&pkg.join(name));
builder.cp_r(
&work.join(&format!("{}-{}", pkgname(builder, name), target.triple)),
&work.join(format!("{}-{}", pkgname(builder, name), target.triple)),
&pkg.join(name),
);
builder.install(&etc.join("pkg/postinstall"), &pkg.join(name), 0o755);
@@ -1673,7 +1673,7 @@ impl Step for Extended {
name.to_string()
};
builder.cp_r(
&work.join(&format!("{}-{}", pkgname(builder, name), target.triple)).join(dir),
&work.join(format!("{}-{}", pkgname(builder, name), target.triple)).join(dir),
&exe.join(name),
);
builder.remove(&exe.join(name).join("manifest.in"));
@@ -1707,7 +1707,7 @@ impl Step for Extended {
.current_dir(&exe)
.arg("dir")
.arg("rustc")
.args(&heat_flags)
.args(heat_flags)
.arg("-cg")
.arg("RustcGroup")
.arg("-dr")
@@ -1723,7 +1723,7 @@ impl Step for Extended {
.current_dir(&exe)
.arg("dir")
.arg("rust-docs")
.args(&heat_flags)
.args(heat_flags)
.arg("-cg")
.arg("DocsGroup")
.arg("-dr")
@@ -1741,7 +1741,7 @@ impl Step for Extended {
.current_dir(&exe)
.arg("dir")
.arg("cargo")
.args(&heat_flags)
.args(heat_flags)
.arg("-cg")
.arg("CargoGroup")
.arg("-dr")
@@ -1758,7 +1758,7 @@ impl Step for Extended {
.current_dir(&exe)
.arg("dir")
.arg("rust-std")
.args(&heat_flags)
.args(heat_flags)
.arg("-cg")
.arg("StdGroup")
.arg("-dr")
@@ -1774,7 +1774,7 @@ impl Step for Extended {
.current_dir(&exe)
.arg("dir")
.arg("rust-analyzer")
.args(&heat_flags)
.args(heat_flags)
.arg("-cg")
.arg("RustAnalyzerGroup")
.arg("-dr")
@@ -1793,7 +1793,7 @@ impl Step for Extended {
.current_dir(&exe)
.arg("dir")
.arg("clippy")
.args(&heat_flags)
.args(heat_flags)
.arg("-cg")
.arg("ClippyGroup")
.arg("-dr")
@@ -1812,7 +1812,7 @@ impl Step for Extended {
.current_dir(&exe)
.arg("dir")
.arg("rust-demangler")
.args(&heat_flags)
.args(heat_flags)
.arg("-cg")
.arg("RustDemanglerGroup")
.arg("-dr")
@@ -1831,7 +1831,7 @@ impl Step for Extended {
.current_dir(&exe)
.arg("dir")
.arg("miri")
.args(&heat_flags)
.args(heat_flags)
.arg("-cg")
.arg("MiriGroup")
.arg("-dr")
@@ -1849,7 +1849,7 @@ impl Step for Extended {
.current_dir(&exe)
.arg("dir")
.arg("rust-analysis")
.args(&heat_flags)
.args(heat_flags)
.arg("-cg")
.arg("AnalysisGroup")
.arg("-dr")
@@ -1867,7 +1867,7 @@ impl Step for Extended {
.current_dir(&exe)
.arg("dir")
.arg("rust-mingw")
.args(&heat_flags)
.args(heat_flags)
.arg("-cg")
.arg("GccGroup")
.arg("-dr")
@@ -1890,10 +1890,10 @@ impl Step for Extended {
.arg("-dStdDir=rust-std")
.arg("-dAnalysisDir=rust-analysis")
.arg("-arch")
.arg(&arch)
.arg(arch)
.arg("-out")
.arg(&output)
.arg(&input);
.arg(input);
add_env(builder, &mut cmd, target);

if built_tools.contains("clippy") {
@@ -2026,7 +2026,7 @@ fn install_llvm_file(builder: &Builder<'_>, source: &Path, destination: &Path) {
return;
}

builder.install(&source, destination, 0o644);
builder.install(source, destination, 0o644);
}

/// Maybe add LLVM object files to the given destination lib-dir. Allows either static or dynamic linking.
@@ -2123,7 +2123,7 @@ impl Step for LlvmTools {
const DEFAULT: bool = true;

fn should_run(run: ShouldRun<'_>) -> ShouldRun<'_> {
let default = should_build_extended_tool(&run.builder, "llvm-tools");
let default = should_build_extended_tool(run.builder, "llvm-tools");
// FIXME: allow using the names of the tools themselves?
run.alias("llvm-tools").default_condition(default)
}
@@ -2231,12 +2231,12 @@ impl Step for RustDev {
tarball.add_file(lld_path, "bin", 0o755);
}

tarball.add_file(&builder.llvm_filecheck(target), "bin", 0o755);
tarball.add_file(builder.llvm_filecheck(target), "bin", 0o755);

// Copy the include directory as well; needed mostly to build
// librustc_llvm properly (e.g., llvm-config.h is in here). But also
// just broadly useful to be able to link against the bundled LLVM.
tarball.add_dir(&builder.llvm_out(target).join("include"), "include");
tarball.add_dir(builder.llvm_out(target).join("include"), "include");

// Copy libLLVM.so to the target lib dir as well, so the RPATH like
// `$ORIGIN/../lib` can find it. It may also be used as a dependency
@@ -2312,7 +2312,7 @@ impl Step for BuildManifest {
let build_manifest = builder.tool_exe(Tool::BuildManifest);

let tarball = Tarball::new(builder, "build-manifest", &self.target.triple);
tarball.add_file(&build_manifest, "bin", 0o755);
tarball.add_file(build_manifest, "bin", 0o755);
tarball.generate()
}
}
20 changes: 10 additions & 10 deletions src/bootstrap/src/core/build_steps/doc.rs
@@ -151,7 +151,7 @@ impl<P: Step> Step for RustbookSrc<P> {
builder.info(&format!("Rustbook ({target}) - {name}"));
let _ = fs::remove_dir_all(&out);

builder.run(rustbook_cmd.arg("build").arg(&src).arg("-d").arg(out));
builder.run(rustbook_cmd.arg("build").arg(src).arg("-d").arg(out));
}

if self.parent.is_some() {
@@ -384,7 +384,7 @@ impl Step for Standalone {
// with no particular explicit doc requested (e.g. library/core).
if builder.paths.is_empty() || builder.was_invoked_explicitly::<Self>(Kind::Doc) {
let index = out.join("index.html");
builder.open_in_browser(&index);
builder.open_in_browser(index);
}
}
}
@@ -517,7 +517,7 @@ impl Step for SharedAssets {
.replace("VERSION", &builder.rust_release())
.replace("SHORT_HASH", builder.rust_info().sha_short().unwrap_or(""))
.replace("STAMP", builder.rust_info().sha().unwrap_or(""));
t!(fs::write(&version_info, &info));
t!(fs::write(&version_info, info));
}

builder.copy(&builder.src.join("src").join("doc").join("rust.css"), &out.join("rust.css"));
@@ -714,11 +714,11 @@ fn doc_std(
}

let description =
format!("library{} in {} format", crate_description(&requested_crates), format.as_str());
let _guard = builder.msg_doc(compiler, &description, target);
format!("library{} in {} format", crate_description(requested_crates), format.as_str());
let _guard = builder.msg_doc(compiler, description, target);

builder.run(&mut cargo.into());
builder.cp_r(&out_dir, &out);
builder.cp_r(&out_dir, out);
}

#[derive(Debug, Copy, Clone, Hash, PartialEq, Eq)]
@@ -781,7 +781,7 @@ impl Step for Rustc {
let _guard = builder.msg_sysroot_tool(
Kind::Doc,
stage,
&format!("compiler{}", crate_description(&self.crates)),
format!("compiler{}", crate_description(&self.crates)),
compiler.host,
target,
);
@@ -819,7 +819,7 @@ impl Step for Rustc {
// Create all crate output directories first to make sure rustdoc uses
// relative links.
// FIXME: Cargo should probably do this itself.
let dir_name = krate.replace("-", "_");
let dir_name = krate.replace('-', "_");
t!(fs::create_dir_all(out_dir.join(&*dir_name)));
cargo.arg("-p").arg(krate);
if to_open.is_none() {
@@ -844,7 +844,7 @@ impl Step for Rustc {
if !builder.config.dry_run() {
// Sanity check on linked compiler crates
for krate in &*self.crates {
let dir_name = krate.replace("-", "_");
let dir_name = krate.replace('-', "_");
// Making sure the directory exists and is not empty.
assert!(out.join(&*dir_name).read_dir().unwrap().next().is_some());
}
@@ -1160,7 +1160,7 @@ impl Step for RustcBook {
cmd.arg(&out_listing);
cmd.arg("--rustc");
cmd.arg(&rustc);
cmd.arg("--rustc-target").arg(&self.target.rustc_target_arg());
cmd.arg("--rustc-target").arg(self.target.rustc_target_arg());
if builder.is_verbose() {
cmd.arg("--verbose");
}
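The `replace("-", "_")` → `replace('-', "_")` edits in doc.rs match clippy's single_char_pattern lint: a `char` pattern expresses a one-character needle directly and can skip the substring-search path. A small illustrative sketch (hypothetical crate name):

    fn main() {
        let krate = "rustc-middle";
        // Equivalent results; the char pattern is the form clippy suggests.
        let with_str = krate.replace("-", "_");
        let with_char = krate.replace('-', "_");
        assert_eq!(with_str, with_char);
        println!("{with_char}");
    }
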
6 changes: 3 additions & 3 deletions src/bootstrap/src/core/build_steps/format.rs
@@ -11,7 +11,7 @@ use std::process::{Command, Stdio};
use std::sync::mpsc::SyncSender;

fn rustfmt(src: &Path, rustfmt: &Path, paths: &[PathBuf], check: bool) -> impl FnMut(bool) -> bool {
let mut cmd = Command::new(&rustfmt);
let mut cmd = Command::new(rustfmt);
// avoid the submodule config paths from coming into play,
// we only allow a single global config for the workspace for now
cmd.arg("--config-path").arg(&src.canonicalize().unwrap());
@@ -162,7 +162,7 @@ pub fn format(build: &Builder<'_>, check: bool, paths: &[PathBuf]) {
// against anything like `compiler/rustc_foo/src/foo.rs`,
// preventing the latter from being formatted.
untracked_count += 1;
fmt_override.add(&format!("!/{untracked_path}")).expect(&untracked_path);
fmt_override.add(&format!("!/{untracked_path}")).expect(untracked_path);
}
// Only check modified files locally to speed up runtime.
// We still check all files in CI to avoid bugs in `get_modified_rs_files` letting regressions slip through;
@@ -221,7 +221,7 @@ pub fn format(build: &Builder<'_>, check: bool, paths: &[PathBuf]) {
assert!(rustfmt_path.exists(), "{}", rustfmt_path.display());
let src = build.src.clone();
let (tx, rx): (SyncSender<PathBuf>, _) = std::sync::mpsc::sync_channel(128);
let walker = match paths.get(0) {
let walker = match paths.first() {
Some(first) => {
let find_shortcut_candidates = |p: &PathBuf| {
let mut candidates = Vec::new();
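The `paths.get(0)` → `paths.first()` change in format.rs is clippy's get_first suggestion; both return `Option<&T>`, the latter simply states the intent. Hypothetical sketch:

    fn main() {
        let paths = vec!["compiler", "library", "src"];
        // Same Option, clearer name than indexing with a literal 0.
        assert_eq!(paths.get(0), paths.first());
        match paths.first() {
            Some(first) => println!("shortcut candidate root: {first}"),
            None => println!("no paths given"),
        }
    }
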
4 changes: 2 additions & 2 deletions src/bootstrap/src/core/build_steps/install.rs
@@ -24,7 +24,7 @@ const SHELL: &str = "sh";
// We have to run a few shell scripts, which choke quite a bit on both `\`
// characters and on `C:\` paths, so normalize both of them away.
fn sanitize_sh(path: &Path) -> String {
let path = path.to_str().unwrap().replace("\\", "/");
let path = path.to_str().unwrap().replace('\\', "/");
return change_drive(unc_to_lfs(&path)).unwrap_or(path);

fn unc_to_lfs(s: &str) -> &str {
@@ -44,7 +44,7 @@ fn sanitize_sh(path: &Path) -> String {
}
}

fn is_dir_writable_for_user(dir: &PathBuf) -> bool {
fn is_dir_writable_for_user(dir: &Path) -> bool {
let tmp = dir.join(".tmp");
match fs::create_dir_all(&tmp) {
Ok(_) => {
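Switching `is_dir_writable_for_user` in install.rs from `&PathBuf` to `&Path` follows clippy's ptr_arg lint: `&Path` is the more general parameter type, and a `&PathBuf` still coerces to it at call sites. A hedged sketch with an invented helper:

    use std::path::{Path, PathBuf};

    // Taking `&Path` lets callers pass either an owned PathBuf or a borrowed Path.
    fn describe(dir: &Path) -> String {
        format!("checking {}", dir.display())
    }

    fn main() {
        let owned: PathBuf = PathBuf::from("/tmp/rustc-build");
        println!("{}", describe(&owned));            // &PathBuf coerces to &Path
        println!("{}", describe(Path::new("/usr"))); // a plain &Path works too
    }
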
28 changes: 14 additions & 14 deletions src/bootstrap/src/core/build_steps/llvm.rs
@@ -110,7 +110,7 @@ pub fn prebuilt_llvm_config(
let smart_stamp_hash = STAMP_HASH_MEMO.get_or_init(|| {
generate_smart_stamp_hash(
&builder.config.src.join("src/llvm-project"),
&builder.in_tree_llvm_info.sha().unwrap_or_default(),
builder.in_tree_llvm_info.sha().unwrap_or_default(),
)
});

@@ -289,7 +289,7 @@ impl Step for Llvm {

let _guard = builder.msg_unstaged(Kind::Build, "LLVM", target);
t!(stamp.remove());
let _time = helpers::timeit(&builder);
let _time = helpers::timeit(builder);
t!(fs::create_dir_all(&out_dir));

// https://llvm.org/docs/CMake.html
@@ -355,7 +355,7 @@ impl Step for Llvm {
cfg.define("LLVM_BUILD_RUNTIME", "No");
}
if let Some(path) = builder.config.llvm_profile_use.as_ref() {
cfg.define("LLVM_PROFDATA_FILE", &path);
cfg.define("LLVM_PROFDATA_FILE", path);
}

// Disable zstd to avoid a dependency on libzstd.so.
@@ -643,7 +643,7 @@ fn configure_cmake(

let sanitize_cc = |cc: &Path| {
if target.is_msvc() {
OsString::from(cc.to_str().unwrap().replace("\\", "/"))
OsString::from(cc.to_str().unwrap().replace('\\', "/"))
} else {
cc.as_os_str().to_owned()
}
@@ -808,10 +808,10 @@ fn configure_llvm(builder: &Builder<'_>, target: TargetSelection, cfg: &mut cmak
// Adapted from https://github.com/alexcrichton/cc-rs/blob/fba7feded71ee4f63cfe885673ead6d7b4f2f454/src/lib.rs#L2347-L2365
fn get_var(var_base: &str, host: &str, target: &str) -> Option<OsString> {
let kind = if host == target { "HOST" } else { "TARGET" };
let target_u = target.replace("-", "_");
env::var_os(&format!("{var_base}_{target}"))
.or_else(|| env::var_os(&format!("{}_{}", var_base, target_u)))
.or_else(|| env::var_os(&format!("{}_{}", kind, var_base)))
let target_u = target.replace('-', "_");
env::var_os(format!("{var_base}_{target}"))
.or_else(|| env::var_os(format!("{}_{}", var_base, target_u)))
.or_else(|| env::var_os(format!("{}_{}", kind, var_base)))
.or_else(|| env::var_os(var_base))
}

@@ -862,7 +862,7 @@ impl Step for Lld {
}

let _guard = builder.msg_unstaged(Kind::Build, "LLD", target);
let _time = helpers::timeit(&builder);
let _time = helpers::timeit(builder);
t!(fs::create_dir_all(&out_dir));

let mut cfg = cmake::Config::new(builder.src.join("src/llvm-project/lld"));
@@ -986,7 +986,7 @@ impl Step for Sanitizers {

let _guard = builder.msg_unstaged(Kind::Build, "sanitizers", self.target);
t!(stamp.remove());
let _time = helpers::timeit(&builder);
let _time = helpers::timeit(builder);

let mut cfg = cmake::Config::new(&compiler_rt_dir);
cfg.profile("Release");
@@ -1051,7 +1051,7 @@ fn supported_sanitizers(
.map(move |c| SanitizerRuntime {
cmake_target: format!("clang_rt.{}_{}_dynamic", c, os),
path: out_dir
.join(&format!("build/lib/darwin/libclang_rt.{}_{}_dynamic.dylib", c, os)),
.join(format!("build/lib/darwin/libclang_rt.{}_{}_dynamic.dylib", c, os)),
name: format!("librustc-{}_rt.{}.dylib", channel, c),
})
.collect()
@@ -1062,7 +1062,7 @@ fn supported_sanitizers(
.iter()
.map(move |c| SanitizerRuntime {
cmake_target: format!("clang_rt.{}-{}", c, arch),
path: out_dir.join(&format!("build/lib/{}/libclang_rt.{}-{}.a", os, c, arch)),
path: out_dir.join(format!("build/lib/{}/libclang_rt.{}-{}.a", os, c, arch)),
name: format!("librustc-{}_rt.{}.a", channel, c),
})
.collect()
@@ -1165,7 +1165,7 @@ impl Step for CrtBeginEnd {

/// Build crtbegin.o/crtend.o for musl target.
fn run(self, builder: &Builder<'_>) -> Self::Output {
builder.update_submodule(&Path::new("src/llvm-project"));
builder.update_submodule(Path::new("src/llvm-project"));

let out_dir = builder.native_dir(self.target).join("crt");

@@ -1233,7 +1233,7 @@ impl Step for Libunwind {

/// Build libunwind.a
fn run(self, builder: &Builder<'_>) -> Self::Output {
builder.update_submodule(&Path::new("src/llvm-project"));
builder.update_submodule(Path::new("src/llvm-project"));

if builder.config.dry_run() {
return PathBuf::new();
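In `get_var` in llvm.rs, the `&format!(...)` borrows disappear because `env::var_os` takes `impl AsRef<OsStr>` and an owned `String` already satisfies that bound, so the temporary borrow adds nothing. A small sketch with made-up variable values:

    use std::env;

    fn main() {
        let var_base = "CC";
        let target = "x86_64-unknown-linux-gnu";
        // Passing the String from format! directly; no `&` needed.
        let value = env::var_os(format!("{var_base}_{target}"))
            .or_else(|| env::var_os(var_base));
        println!("{value:?}");
    }
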
2 changes: 1 addition & 1 deletion src/bootstrap/src/core/build_steps/run.rs
@@ -23,7 +23,7 @@ impl Step for ExpandYamlAnchors {
fn run(self, builder: &Builder<'_>) {
builder.info("Expanding YAML anchors in the GitHub Actions configuration");
builder.run_delaying_failure(
&mut builder.tool_cmd(Tool::ExpandYamlAnchors).arg("generate").arg(&builder.src),
builder.tool_cmd(Tool::ExpandYamlAnchors).arg("generate").arg(&builder.src),
);
}

23 changes: 11 additions & 12 deletions src/bootstrap/src/core/build_steps/setup.rs
@@ -8,7 +8,7 @@ use std::env::consts::EXE_SUFFIX;
use std::fmt::Write as _;
use std::fs::File;
use std::io::Write;
use std::path::{Path, PathBuf, MAIN_SEPARATOR};
use std::path::{Path, PathBuf, MAIN_SEPARATOR_STR};
use std::process::Command;
use std::str::FromStr;
use std::{fmt, fs, io};
@@ -257,8 +257,7 @@ impl Step for Link {
return;
}
let stage_path =
["build", config.build.rustc_target_arg(), "stage1"].join(&MAIN_SEPARATOR.to_string());

["build", config.build.rustc_target_arg(), "stage1"].join(MAIN_SEPARATOR_STR);
if !rustup_installed() {
eprintln!("`rustup` is not installed; cannot link `stage1` toolchain");
} else if stage_dir_exists(&stage_path[..]) && !config.dry_run() {
@@ -276,7 +275,7 @@ fn rustup_installed() -> bool {
}

fn stage_dir_exists(stage_path: &str) -> bool {
match fs::create_dir(&stage_path) {
match fs::create_dir(stage_path) {
Ok(_) => true,
Err(_) => Path::new(&stage_path).exists(),
}
@@ -294,7 +293,7 @@ fn attempt_toolchain_link(stage_path: &str) {
return;
}

if try_link_toolchain(&stage_path) {
if try_link_toolchain(stage_path) {
println!(
"Added `stage1` rustup toolchain; try `cargo +stage1 build` on a separate rust project to run a newly-built toolchain"
);
@@ -310,7 +309,7 @@ fn attempt_toolchain_link(stage_path: &str) {

fn toolchain_is_linked() -> bool {
match Command::new("rustup")
.args(&["toolchain", "list"])
.args(["toolchain", "list"])
.stdout(std::process::Stdio::piped())
.output()
{
@@ -337,7 +336,7 @@ fn toolchain_is_linked() -> bool {
fn try_link_toolchain(stage_path: &str) -> bool {
Command::new("rustup")
.stdout(std::process::Stdio::null())
.args(&["toolchain", "link", "stage1", &stage_path])
.args(["toolchain", "link", "stage1", stage_path])
.output()
.map_or(false, |output| output.status.success())
}
@@ -366,7 +365,7 @@ fn ensure_stage1_toolchain_placeholder_exists(stage_path: &str) -> bool {
return false;
}

return true;
true
}

// Used to get the path for `Subcommand::Setup`
@@ -469,13 +468,13 @@ impl Step for Hook {
if config.dry_run() {
return;
}
t!(install_git_hook_maybe(&config));
t!(install_git_hook_maybe(config));
}
}

// install a git hook to automatically run tidy, if they want
fn install_git_hook_maybe(config: &Config) -> io::Result<()> {
let git = t!(config.git().args(&["rev-parse", "--git-common-dir"]).output().map(|output| {
let git = t!(config.git().args(["rev-parse", "--git-common-dir"]).output().map(|output| {
assert!(output.status.success(), "failed to run `git`");
PathBuf::from(t!(String::from_utf8(output.stdout)).trim())
}));
@@ -541,7 +540,7 @@ impl Step for Vscode {
if config.dry_run() {
return;
}
while !t!(create_vscode_settings_maybe(&config)) {}
while !t!(create_vscode_settings_maybe(config)) {}
}
}

@@ -608,7 +607,7 @@ fn create_vscode_settings_maybe(config: &Config) -> io::Result<bool> {
}
_ => "Created",
};
fs::write(&vscode_settings, &RUST_ANALYZER_SETTINGS)?;
fs::write(&vscode_settings, RUST_ANALYZER_SETTINGS)?;
println!("{verb} `.vscode/settings.json`");
} else {
println!("\n{RUST_ANALYZER_SETTINGS}");
2 changes: 1 addition & 1 deletion src/bootstrap/src/core/build_steps/suggest.rs
@@ -36,7 +36,7 @@ pub fn suggest(builder: &Builder<'_>, run: bool) {
// this code expects one suggestion per line in the following format:
// <x_subcommand> {some number of flags} [optional stage number]
let cmd = sections.next().unwrap();
let stage = sections.next_back().map(|s| str::parse(s).ok()).flatten();
let stage = sections.next_back().and_then(|s| str::parse(s).ok());
let paths: Vec<PathBuf> = sections.map(|p| PathBuf::from_str(p).unwrap()).collect();

(cmd, stage, paths)
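The suggest.rs change is clippy's map_flatten on an `Option`: `.map(|s| str::parse(s).ok()).flatten()` collapses into a single `.and_then(|s| str::parse(s).ok())` with identical behaviour. Sketch with an invented suggestion line:

    fn main() {
        let mut sections = "test tests/ui 1".split_ascii_whitespace();
        let _cmd = sections.next().unwrap();
        // One adapter instead of map + flatten.
        let stage: Option<u32> = sections.next_back().and_then(|s| s.parse().ok());
        assert_eq!(stage, Some(1));
        println!("stage = {stage:?}");
    }
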
2 changes: 1 addition & 1 deletion src/bootstrap/src/core/build_steps/synthetic_targets.rs
@@ -79,7 +79,7 @@ fn create_synthetic_target(

customize(spec_map);

std::fs::write(&path, &serde_json::to_vec_pretty(&spec).unwrap()).unwrap();
std::fs::write(&path, serde_json::to_vec_pretty(&spec).unwrap()).unwrap();
let target = TargetSelection::create_synthetic(&name, path.to_str().unwrap());
crate::utils::cc_detect::find_target(builder, target);

78 changes: 36 additions & 42 deletions src/bootstrap/src/core/build_steps/test.rs
@@ -156,7 +156,7 @@ You can skip linkcheck with --skip src/tools/linkchecker"
// Run the linkchecker.
let _guard =
builder.msg(Kind::Test, compiler.stage, "Linkcheck", bootstrap_host, bootstrap_host);
let _time = helpers::timeit(&builder);
let _time = helpers::timeit(builder);
builder.run_delaying_failure(linkchecker.arg(builder.out.join(host.triple).join("doc")));
}

@@ -253,15 +253,15 @@ impl Step for Cargotest {
let out_dir = builder.out.join("ct");
t!(fs::create_dir_all(&out_dir));

let _time = helpers::timeit(&builder);
let _time = helpers::timeit(builder);
let mut cmd = builder.tool_cmd(Tool::CargoTest);
let mut cmd = cmd
let cmd = cmd
.arg(&cargo)
.arg(&out_dir)
.args(builder.config.test_args())
.env("RUSTC", builder.rustc(compiler))
.env("RUSTDOC", builder.rustdoc(compiler));
add_rustdoc_cargo_linker_args(&mut cmd, builder, compiler.host, LldThreads::No);
add_rustdoc_cargo_linker_args(cmd, builder, compiler.host, LldThreads::No);
builder.run_delaying_failure(cmd);
}
}
@@ -322,7 +322,7 @@ impl Step for Cargo {
builder,
);

let _time = helpers::timeit(&builder);
let _time = helpers::timeit(builder);
add_flags_and_try_run_tests(builder, &mut cargo);
}
}
@@ -474,7 +474,7 @@ impl Step for RustDemangler {
);

let dir = testdir(builder, compiler.host);
t!(fs::create_dir_all(&dir));
t!(fs::create_dir_all(dir));

cargo.env("RUST_DEMANGLER_DRIVER_PATH", rust_demangler);
cargo.add_rustc_lib_path(builder);
@@ -525,7 +525,7 @@ impl Miri {
// Tell `cargo miri setup` where to find the sources.
cargo.env("MIRI_LIB_SRC", builder.src.join("library"));
// Tell it where to find Miri.
cargo.env("MIRI", &miri);
cargo.env("MIRI", miri);
// Tell it where to put the sysroot.
cargo.env("MIRI_SYSROOT", &miri_sysroot);
// Debug things.
@@ -637,7 +637,7 @@ impl Step for Miri {
// does not understand the flags added by `add_flags_and_try_run_test`.
let mut cargo = prepare_cargo_test(cargo, &[], &[], "miri", compiler, target, builder);
{
let _time = helpers::timeit(&builder);
let _time = helpers::timeit(builder);
builder.run(&mut cargo);
}

@@ -649,11 +649,11 @@ impl Step for Miri {
// `MIRI_SKIP_UI_CHECKS` and `RUSTC_BLESS` are incompatible
cargo.env_remove("RUSTC_BLESS");
// Optimizations can change error locations and remove UB so don't run `fail` tests.
cargo.args(&["tests/pass", "tests/panic"]);
cargo.args(["tests/pass", "tests/panic"]);

let mut cargo = prepare_cargo_test(cargo, &[], &[], "miri", compiler, target, builder);
{
let _time = helpers::timeit(&builder);
let _time = helpers::timeit(builder);
builder.run(&mut cargo);
}
}
@@ -693,7 +693,7 @@ impl Step for Miri {

let mut cargo = Command::from(cargo);
{
let _time = helpers::timeit(&builder);
let _time = helpers::timeit(builder);
builder.run(&mut cargo);
}
}
@@ -946,15 +946,15 @@ impl Step for RustdocJSNotStd {
}

fn get_browser_ui_test_version_inner(npm: &Path, global: bool) -> Option<String> {
let mut command = Command::new(&npm);
let mut command = Command::new(npm);
command.arg("list").arg("--parseable").arg("--long").arg("--depth=0");
if global {
command.arg("--global");
}
let lines = command
.output()
.map(|output| String::from_utf8_lossy(&output.stdout).into_owned())
.unwrap_or(String::new());
.unwrap_or_default();
lines
.lines()
.find_map(|l| l.split(':').nth(1)?.strip_prefix("browser-ui-test@"))
@@ -1048,7 +1048,7 @@ impl Step for RustdocGUI {
cmd.arg("--npm").arg(npm);
}

let _time = helpers::timeit(&builder);
let _time = helpers::timeit(builder);
let _guard = builder.msg_sysroot_tool(
Kind::Test,
self.compiler.stage,
@@ -1096,7 +1096,7 @@ impl Step for Tidy {
cmd.arg(format!("--extra-checks={s}"));
}
let mut args = std::env::args_os();
if let Some(_) = args.find(|arg| arg == OsStr::new("--")) {
if args.any(|arg| arg == OsStr::new("--")) {
cmd.arg("--");
cmd.args(args);
}
@@ -1116,7 +1116,7 @@ HELP: to skip test's attempt to check tidiness, pass `--skip src/tools/tidy` to
);
crate::exit!(1);
}
crate::core::build_steps::format::format(&builder, !builder.config.cmd.bless(), &[]);
crate::core::build_steps::format::format(builder, !builder.config.cmd.bless(), &[]);
}

builder.info("tidy check");
@@ -1171,7 +1171,7 @@ impl Step for ExpandYamlAnchors {
}
builder.info("Ensuring the YAML anchors in the GitHub Actions config were expanded");
builder.run_delaying_failure(
&mut builder.tool_cmd(Tool::ExpandYamlAnchors).arg("check").arg(&builder.src),
builder.tool_cmd(Tool::ExpandYamlAnchors).arg("check").arg(&builder.src),
);
}

@@ -1759,7 +1759,7 @@ NOTE: if you're sure you want to do this, please open an issue as to why. In the

for exclude in &builder.config.skip {
cmd.arg("--skip");
cmd.arg(&exclude);
cmd.arg(exclude);
}

// Get paths from cmd args
@@ -1780,7 +1780,7 @@ NOTE: if you're sure you want to do this, please open an issue as to why. In the
// so the correct filters are passed to libtest
if cfg!(windows) {
let test_args_win: Vec<String> =
test_args.iter().map(|s| s.replace("/", "\\")).collect();
test_args.iter().map(|s| s.replace('/', "\\")).collect();
cmd.args(&test_args_win);
} else {
cmd.args(&test_args);
@@ -1900,7 +1900,7 @@ NOTE: if you're sure you want to do this, please open an issue as to why. In the
// Note that if we encounter `PATH` we make sure to append to our own `PATH`
// rather than stomp over it.
if !builder.config.dry_run() && target.is_msvc() {
for &(ref k, ref v) in builder.cc.borrow()[&target].env() {
for (k, v) in builder.cc.borrow()[&target].env() {
if k != "PATH" {
cmd.env(k, v);
}
@@ -1996,7 +1996,7 @@ NOTE: if you're sure you want to do this, please open an issue as to why. In the
let _group = builder.msg(
Kind::Test,
compiler.stage,
&format!("compiletest suite={suite} mode={mode}"),
format!("compiletest suite={suite} mode={mode}"),
compiler.host,
target,
);
@@ -2022,7 +2022,7 @@ NOTE: if you're sure you want to do this, please open an issue as to why. In the
"Check compiletest suite={} mode={} compare_mode={} ({} -> {})",
suite, mode, compare_mode, &compiler.host, target
));
let _time = helpers::timeit(&builder);
let _time = helpers::timeit(builder);
try_run_tests(builder, &mut cmd, false);
}
}
@@ -2094,7 +2094,7 @@ impl BookTest {
compiler.host,
compiler.host,
);
let _time = helpers::timeit(&builder);
let _time = helpers::timeit(builder);
let toolstate = if builder.run_delaying_failure(&mut rustbook_cmd) {
ToolState::TestPass
} else {
@@ -2111,12 +2111,12 @@ impl BookTest {
builder.ensure(compile::Std::new(compiler, host));

let _guard =
builder.msg(Kind::Test, compiler.stage, &format!("book {}", self.name), host, host);
builder.msg(Kind::Test, compiler.stage, format!("book {}", self.name), host, host);

// Do a breadth-first traversal of the `src/doc` directory and just run
// tests for all files that end in `*.md`
let mut stack = vec![builder.src.join(self.path)];
let _time = helpers::timeit(&builder);
let _time = helpers::timeit(builder);
let mut files = Vec::new();
while let Some(p) = stack.pop() {
if p.is_dir() {
@@ -2227,7 +2227,7 @@ impl Step for ErrorIndex {

let guard =
builder.msg(Kind::Test, compiler.stage, "error-index", compiler.host, compiler.host);
let _time = helpers::timeit(&builder);
let _time = helpers::timeit(builder);
builder.run_quiet(&mut tool);
drop(guard);
// The tests themselves need to link to std, so make sure it is
@@ -2315,11 +2315,8 @@ impl Step for CrateLibrustc {
let builder = run.builder;
let host = run.build_triple();
let compiler = builder.compiler_for(builder.top_stage, host, host);
let crates = run
.paths
.iter()
.map(|p| builder.crate_paths[&p.assert_single_path().path].clone())
.collect();
let crates =
run.paths.iter().map(|p| builder.crate_paths[&p.assert_single_path().path]).collect();

builder.ensure(CrateLibrustc { compiler, target: run.target, crates });
}
@@ -2351,7 +2348,7 @@ fn run_cargo_test<'a>(
) -> bool {
let mut cargo =
prepare_cargo_test(cargo, libtest_args, crates, primary_crate, compiler, target, builder);
let _time = helpers::timeit(&builder);
let _time = helpers::timeit(builder);
let _group = description.into().and_then(|what| {
builder.msg_sysroot_tool(Kind::Test, compiler.stage, what, compiler.host, target)
});
@@ -2406,7 +2403,7 @@ fn prepare_cargo_test(
if krate.has_lib {
cargo.arg("--lib");
}
cargo.args(&["--bins", "--examples", "--tests", "--benches"]);
cargo.args(["--bins", "--examples", "--tests", "--benches"]);
}
DocTests::Yes => {}
}
@@ -2468,11 +2465,8 @@ impl Step for Crate {
let builder = run.builder;
let host = run.build_triple();
let compiler = builder.compiler_for(builder.top_stage, host, host);
let crates = run
.paths
.iter()
.map(|p| builder.crate_paths[&p.assert_single_path().path].clone())
.collect();
let crates =
run.paths.iter().map(|p| builder.crate_paths[&p.assert_single_path().path]).collect();

builder.ensure(Crate { compiler, target: run.target, mode: Mode::Std, crates });
}
@@ -2844,11 +2838,11 @@ impl Step for Bootstrap {
let compiler = builder.compiler(0, host);
let _guard = builder.msg(Kind::Test, 0, "bootstrap", host, host);

let mut check_bootstrap = Command::new(&builder.python());
let mut check_bootstrap = Command::new(builder.python());
check_bootstrap
.args(["-m", "unittest", "bootstrap_test.py"])
.env("BUILD_DIR", &builder.out)
.env("BUILD_PLATFORM", &builder.build.build.triple)
.env("BUILD_PLATFORM", builder.build.build.triple)
.current_dir(builder.src.join("src/bootstrap/"));
// NOTE: we intentionally don't pass test_args here because the args for unittest and cargo test are mutually incompatible.
// Use `python -m unittest` manually if you want to pass arguments.
@@ -3171,7 +3165,7 @@ impl Step for CodegenCranelift {
&compiler.host,
target
));
let _time = helpers::timeit(&builder);
let _time = helpers::timeit(builder);

// FIXME handle vendoring for source tarballs before removing the --skip-test below
let download_dir = builder.out.join("cg_clif_download");
@@ -3300,7 +3294,7 @@ impl Step for CodegenGCC {
&compiler.host,
target
));
let _time = helpers::timeit(&builder);
let _time = helpers::timeit(builder);

// FIXME: Uncomment the `prepare` command below once vendoring is implemented.
/*
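Among the smaller cleanups in test.rs above, `.unwrap_or(String::new())` becomes `.unwrap_or_default()` and `if let Some(_) = args.find(|arg| ...)` becomes `if args.any(|arg| ...)`; both say the same thing more directly, and `any` still advances the iterator past the match. A hypothetical sketch:

    use std::ffi::{OsStr, OsString};

    fn main() {
        let output: Option<String> = None;
        // Default value without spelling out String::new().
        let lines = output.unwrap_or_default();
        println!("captured {} bytes", lines.len());

        let argv = vec!["x.py", "test", "--", "--quiet"];
        let mut args = argv.into_iter().map(OsString::from);
        // `any` consumes the iterator up to and including the first match,
        // just like the old `if let Some(_) = args.find(...)` did.
        if args.any(|arg| arg == OsStr::new("--")) {
            println!("forwarding: {:?}", args.collect::<Vec<_>>());
        }
    }
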
2 changes: 1 addition & 1 deletion src/bootstrap/src/core/build_steps/tool.rs
@@ -819,7 +819,7 @@ impl<'a> Builder<'a> {
if compiler.host.is_msvc() {
let curpaths = env::var_os("PATH").unwrap_or_default();
let curpaths = env::split_paths(&curpaths).collect::<Vec<_>>();
for &(ref k, ref v) in self.cc.borrow()[&compiler.host].env() {
for (k, v) in self.cc.borrow()[&compiler.host].env() {
if k != "PATH" {
continue;
}
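Both test.rs and tool.rs rewrite `for &(ref k, ref v) in ...` as `for (k, v) in ...`: with current match ergonomics the explicit `&`/`ref` pattern is unnecessary and the bindings still come out as references. Small sketch with invented data:

    fn main() {
        let env: Vec<(String, String)> = vec![
            ("PATH".to_string(), "/usr/bin".to_string()),
            ("CC".to_string(), "clang".to_string()),
        ];
        // Iterating over `&env` yields `&(String, String)`; the plain `(k, v)`
        // pattern binds `k` and `v` as `&String` without any `ref` keywords.
        for (k, v) in &env {
            if k != "PATH" {
                println!("{k}={v}");
            }
        }
    }
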
4 changes: 2 additions & 2 deletions src/bootstrap/src/core/build_steps/toolstate.rs
@@ -346,7 +346,7 @@ fn prepare_toolstate_config(token: &str) {

let credential = format!("https://{token}:x-oauth-basic@github.com\n",);
let git_credential_path = PathBuf::from(t!(env::var("HOME"))).join(".git-credentials");
t!(fs::write(&git_credential_path, credential));
t!(fs::write(git_credential_path, credential));
}

/// Reads the latest toolstate from the toolstate repo.
@@ -389,7 +389,7 @@ fn commit_toolstate_change(current_toolstate: &ToolstateData) {
// Upload the test results (the new commit-to-toolstate mapping) to the toolstate repo.
// This does *not* change the "current toolstate"; that only happens post-landing
// via `src/ci/docker/publish_toolstate.sh`.
publish_test_results(&current_toolstate);
publish_test_results(current_toolstate);

// `git commit` failing means nothing to commit.
let status = t!(Command::new("git")