diff --git a/compiler/rustc_ast_passes/src/feature_gate.rs b/compiler/rustc_ast_passes/src/feature_gate.rs index eac7f03d8450c..34c73b545df87 100644 --- a/compiler/rustc_ast_passes/src/feature_gate.rs +++ b/compiler/rustc_ast_passes/src/feature_gate.rs @@ -338,15 +338,11 @@ impl<'a> Visitor<'a> for PostExpansionVisitor<'a> { fn visit_expr(&mut self, e: &'a ast::Expr) { match e.kind { ast::ExprKind::TryBlock(_, None) => { + // `try { ... }` is old and is only gated post-expansion here. gate!(&self, try_blocks, e.span, "`try` expression is experimental"); } ast::ExprKind::TryBlock(_, Some(_)) => { - gate!( - &self, - try_blocks_heterogeneous, - e.span, - "`try bikeshed` expression is experimental" - ); + // `try_blocks_heterogeneous` is new, and gated pre-expansion instead. } ast::ExprKind::Lit(token::Lit { kind: token::LitKind::Float | token::LitKind::Integer, @@ -522,6 +518,7 @@ pub fn check_crate(krate: &ast::Crate, sess: &Session, features: &Features) { half_open_range_patterns_in_slices, "half-open range patterns in slices are unstable" ); + gate_all!(try_blocks_heterogeneous, "`try bikeshed` expression is experimental"); gate_all!(yeet_expr, "`do yeet` expression is experimental"); gate_all!(const_closures, "const closures are experimental"); gate_all!(builtin_syntax, "`builtin #` syntax is unstable"); diff --git a/compiler/rustc_attr_parsing/src/attributes/rustc_internal.rs b/compiler/rustc_attr_parsing/src/attributes/rustc_internal.rs index 0d19dc25d402c..f1b31365013ec 100644 --- a/compiler/rustc_attr_parsing/src/attributes/rustc_internal.rs +++ b/compiler/rustc_attr_parsing/src/attributes/rustc_internal.rs @@ -1211,3 +1211,12 @@ impl SingleAttributeParser for RustcReservationImplParser { Some(AttributeKind::RustcReservationImpl(cx.attr_span, value_str)) } } + +pub(crate) struct PreludeImportParser; + +impl NoArgsAttributeParser for PreludeImportParser { + const PATH: &[Symbol] = &[sym::prelude_import]; + const ON_DUPLICATE: OnDuplicate = OnDuplicate::Warn; + const ALLOWED_TARGETS: AllowedTargets = AllowedTargets::AllowList(&[Allow(Target::Use)]); + const CREATE: fn(Span) -> AttributeKind = |_| AttributeKind::PreludeImport; +} diff --git a/compiler/rustc_attr_parsing/src/context.rs b/compiler/rustc_attr_parsing/src/context.rs index 87aa4150becd3..c6f0914bfbdaf 100644 --- a/compiler/rustc_attr_parsing/src/context.rs +++ b/compiler/rustc_attr_parsing/src/context.rs @@ -258,6 +258,7 @@ attribute_parsers!( Single>, Single>, Single>, + Single>, Single>, Single>, Single>, diff --git a/compiler/rustc_borrowck/src/diagnostics/explain_borrow.rs b/compiler/rustc_borrowck/src/diagnostics/explain_borrow.rs index bbd0a8ae07108..bc2e99b9ceb54 100644 --- a/compiler/rustc_borrowck/src/diagnostics/explain_borrow.rs +++ b/compiler/rustc_borrowck/src/diagnostics/explain_borrow.rs @@ -649,8 +649,8 @@ impl<'tcx> MirBorrowckCtxt<'_, '_, 'tcx> { // We want to focus on relevant live locals in diagnostics, so when polonius is enabled, we // ensure that we don't emit live boring locals as explanations. 
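// [Illustrative aside, not part of the patch] The `PreludeImportParser` added
// above is fully declarative: a no-args marker attribute is described through
// associated constants, and a generic driver turns that description into
// parsing behavior. A minimal standalone sketch of the pattern; the names and
// signatures below are assumptions for illustration, not rustc's actual API.
#[derive(Debug, PartialEq)]
enum OnDuplicate {
    Warn,
    Error,
}

trait NoArgsAttributeParser {
    const PATH: &'static [&'static str];
    const ON_DUPLICATE: OnDuplicate;
    // Stand-in for `const CREATE: fn(Span) -> AttributeKind`.
    fn create(span: usize) -> String;
}

struct PreludeImport;

impl NoArgsAttributeParser for PreludeImport {
    const PATH: &'static [&'static str] = &["prelude_import"];
    const ON_DUPLICATE: OnDuplicate = OnDuplicate::Warn;
    fn create(_span: usize) -> String {
        "PreludeImport".to_owned()
    }
}

// Generic driver: match an attribute's path against `P::PATH`, then build the kind.
fn parse_marker<P: NoArgsAttributeParser>(path: &[&str], span: usize) -> Option<String> {
    (path == P::PATH).then(|| P::create(span))
}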
let is_local_boring = |local| { - if let Some(polonius_diagnostics) = self.polonius_diagnostics { - polonius_diagnostics.boring_nll_locals.contains(&local) + if let Some(polonius_context) = self.polonius_context { + polonius_context.boring_nll_locals.contains(&local) } else { assert!(!tcx.sess.opts.unstable_opts.polonius.is_next_enabled()); diff --git a/compiler/rustc_borrowck/src/lib.rs b/compiler/rustc_borrowck/src/lib.rs index 1f0fde11b8d87..2e6c1dceef98a 100644 --- a/compiler/rustc_borrowck/src/lib.rs +++ b/compiler/rustc_borrowck/src/lib.rs @@ -63,10 +63,10 @@ use crate::diagnostics::{ use crate::path_utils::*; use crate::place_ext::PlaceExt; use crate::places_conflict::{PlaceConflictBias, places_conflict}; +use crate::polonius::PoloniusContext; use crate::polonius::legacy::{ PoloniusFacts, PoloniusFactsExt, PoloniusLocationTable, PoloniusOutput, }; -use crate::polonius::{PoloniusContext, PoloniusDiagnosticsContext}; use crate::prefixes::PrefixSet; use crate::region_infer::RegionInferenceContext; use crate::region_infer::opaque_types::DeferredOpaqueTypeError; @@ -424,7 +424,7 @@ fn borrowck_check_region_constraints<'tcx>( polonius_output, opt_closure_req, nll_errors, - polonius_diagnostics, + polonius_context, } = nll::compute_regions( root_cx, &infcx, @@ -448,7 +448,7 @@ fn borrowck_check_region_constraints<'tcx>( ®ioncx, &opt_closure_req, &borrow_set, - polonius_diagnostics.as_ref(), + polonius_context.as_ref(), ); // We also have a `#[rustc_regions]` annotation that causes us to dump @@ -490,7 +490,7 @@ fn borrowck_check_region_constraints<'tcx>( polonius_output: None, move_errors: Vec::new(), diags_buffer, - polonius_diagnostics: polonius_diagnostics.as_ref(), + polonius_context: polonius_context.as_ref(), }; struct MoveVisitor<'a, 'b, 'infcx, 'tcx> { ctxt: &'a mut MirBorrowckCtxt<'b, 'infcx, 'tcx>, @@ -529,7 +529,7 @@ fn borrowck_check_region_constraints<'tcx>( move_errors: Vec::new(), diags_buffer, polonius_output: polonius_output.as_deref(), - polonius_diagnostics: polonius_diagnostics.as_ref(), + polonius_context: polonius_context.as_ref(), }; // Compute and report region errors, if any. @@ -779,7 +779,7 @@ struct MirBorrowckCtxt<'a, 'infcx, 'tcx> { /// Results of Polonius analysis. polonius_output: Option<&'a PoloniusOutput>, /// When using `-Zpolonius=next`: the data used to compute errors and diagnostics. - polonius_diagnostics: Option<&'a PoloniusDiagnosticsContext>, + polonius_context: Option<&'a PoloniusContext>, } // Check that: diff --git a/compiler/rustc_borrowck/src/nll.rs b/compiler/rustc_borrowck/src/nll.rs index d5a0831cf302e..acd01be470709 100644 --- a/compiler/rustc_borrowck/src/nll.rs +++ b/compiler/rustc_borrowck/src/nll.rs @@ -23,10 +23,10 @@ use crate::borrow_set::BorrowSet; use crate::consumers::RustcFacts; use crate::diagnostics::RegionErrors; use crate::handle_placeholders::compute_sccs_applying_placeholder_outlives_constraints; +use crate::polonius::PoloniusContext; use crate::polonius::legacy::{ PoloniusFacts, PoloniusFactsExt, PoloniusLocationTable, PoloniusOutput, }; -use crate::polonius::{PoloniusContext, PoloniusDiagnosticsContext}; use crate::region_infer::RegionInferenceContext; use crate::type_check::MirTypeckRegionConstraints; use crate::type_check::free_region_relations::UniversalRegionRelations; @@ -47,7 +47,7 @@ pub(crate) struct NllOutput<'tcx> { /// When using `-Zpolonius=next`: the data used to compute errors and diagnostics, e.g. /// localized typeck and liveness constraints. 
- pub polonius_diagnostics: Option, + pub polonius_context: Option, } /// Rewrites the regions in the MIR to use NLL variables, also scraping out the set of universal @@ -122,7 +122,7 @@ pub(crate) fn compute_regions<'tcx>( universal_region_relations: Frozen>, constraints: MirTypeckRegionConstraints<'tcx>, mut polonius_facts: Option>, - polonius_context: Option, + mut polonius_context: Option, ) -> NllOutput<'tcx> { let polonius_output = root_cx.consumer.as_ref().map_or(false, |c| c.polonius_output()) || infcx.tcx.sess.opts.unstable_opts.polonius.is_legacy_enabled(); @@ -154,9 +154,9 @@ pub(crate) fn compute_regions<'tcx>( // If requested for `-Zpolonius=next`, convert NLL constraints to localized outlives constraints // and use them to compute loan liveness. - let polonius_diagnostics = polonius_context.map(|polonius_context| { - polonius_context.compute_loan_liveness(infcx.tcx, &mut regioncx, body, borrow_set) - }); + if let Some(polonius_context) = polonius_context.as_mut() { + polonius_context.compute_loan_liveness(&mut regioncx, body, borrow_set) + } // If requested: dump NLL facts, and run legacy polonius analysis. let polonius_output = polonius_facts.as_ref().and_then(|polonius_facts| { @@ -189,7 +189,7 @@ pub(crate) fn compute_regions<'tcx>( polonius_output, opt_closure_req: closure_region_requirements, nll_errors, - polonius_diagnostics, + polonius_context, } } diff --git a/compiler/rustc_borrowck/src/polonius/constraints.rs b/compiler/rustc_borrowck/src/polonius/constraints.rs index 5259575785955..559b1bdc38d83 100644 --- a/compiler/rustc_borrowck/src/polonius/constraints.rs +++ b/compiler/rustc_borrowck/src/polonius/constraints.rs @@ -1,6 +1,19 @@ +use std::collections::BTreeMap; + +use rustc_data_structures::fx::{FxHashMap, FxHashSet, FxIndexSet}; +use rustc_index::interval::SparseIntervalMatrix; +use rustc_middle::mir::{Body, Location}; use rustc_middle::ty::RegionVid; use rustc_mir_dataflow::points::PointIndex; +use crate::BorrowSet; +use crate::constraints::OutlivesConstraint; +use crate::dataflow::BorrowIndex; +use crate::polonius::ConstraintDirection; +use crate::region_infer::values::LivenessValues; +use crate::type_check::Locations; +use crate::universal_regions::UniversalRegions; + /// A localized outlives constraint reifies the CFG location where the outlives constraint holds, /// within the origins themselves as if they were different from point to point: from `a: b` /// outlives constraints to `a@p: b@p`, where `p` is the point in the CFG. @@ -12,32 +25,300 @@ use rustc_mir_dataflow::points::PointIndex; /// of `q`. These depend on the liveness of the regions at these points, as well as their /// variance. /// -/// The `source` origin at `from` flows into the `target` origin at `to`. -/// /// This dual of NLL's [crate::constraints::OutlivesConstraint] therefore encodes the /// position-dependent outlives constraints used by Polonius, to model the flow-sensitive loan /// propagation via reachability within a graph of localized constraints. -#[derive(Copy, Clone, PartialEq, Eq, Debug, Hash)] -pub(crate) struct LocalizedOutlivesConstraint { - pub source: RegionVid, - pub from: PointIndex, - pub target: RegionVid, - pub to: PointIndex, +/// +/// That `LocalizedConstraintGraph` can create these edges on-demand during traversal, and we +/// therefore model them as a pair of `LocalizedNode` vertices. 
+/// +#[derive(Copy, Clone, PartialEq, Eq, Hash)] +pub(super) struct LocalizedNode { + pub region: RegionVid, + pub point: PointIndex, +} + +/// The localized constraint graph indexes the physical and logical edges to lazily compute a given +/// node's successors during traversal. +pub(super) struct LocalizedConstraintGraph { + /// The actual, physical, edges we have recorded for a given node. We localize them on-demand + /// when traversing from the node to the successor region. + edges: FxHashMap>, + + /// The logical edges representing the outlives constraints that hold at all points in the CFG, + /// which we don't localize to avoid creating a lot of unnecessary edges in the graph. Some CFGs + /// can be big, and we don't need to create such a physical edge for every point in the CFG. + logical_edges: FxHashMap>, } -/// A container of [LocalizedOutlivesConstraint]s that can be turned into a traversable -/// `rustc_data_structures` graph. -#[derive(Clone, Default, Debug)] -pub(crate) struct LocalizedOutlivesConstraintSet { - pub outlives: Vec, +/// The visitor interface when traversing a `LocalizedConstraintGraph`. +pub(super) trait LocalizedConstraintGraphVisitor { + /// Callback called when traversing a given `loan` encounters a localized `node` it hasn't + /// visited before. + fn on_node_traversed(&mut self, _loan: BorrowIndex, _node: LocalizedNode) {} + + /// Callback called when discovering a new `successor` node for the `current_node`. + fn on_successor_discovered(&mut self, _current_node: LocalizedNode, _successor: LocalizedNode) { + } } -impl LocalizedOutlivesConstraintSet { - pub(crate) fn push(&mut self, constraint: LocalizedOutlivesConstraint) { - if constraint.source == constraint.target && constraint.from == constraint.to { - // 'a@p: 'a@p is pretty uninteresting - return; +impl LocalizedConstraintGraph { + /// Traverses the constraints and returns the indexed graph of edges per node. + pub(super) fn new<'tcx>( + liveness: &LivenessValues, + outlives_constraints: impl Iterator>, + ) -> Self { + let mut edges: FxHashMap<_, FxIndexSet<_>> = FxHashMap::default(); + let mut logical_edges: FxHashMap<_, FxIndexSet<_>> = FxHashMap::default(); + + for outlives_constraint in outlives_constraints { + match outlives_constraint.locations { + Locations::All(_) => { + logical_edges + .entry(outlives_constraint.sup) + .or_default() + .insert(outlives_constraint.sub); + } + + Locations::Single(location) => { + let node = LocalizedNode { + region: outlives_constraint.sup, + point: liveness.point_from_location(location), + }; + edges.entry(node).or_default().insert(outlives_constraint.sub); + } + } + } + + LocalizedConstraintGraph { edges, logical_edges } + } + + /// Traverses the localized constraint graph per-loan, and notifies the `visitor` of discovered + /// nodes and successors. + pub(super) fn traverse<'tcx>( + &self, + body: &Body<'tcx>, + liveness: &LivenessValues, + live_region_variances: &BTreeMap, + universal_regions: &UniversalRegions<'tcx>, + borrow_set: &BorrowSet<'tcx>, + visitor: &mut impl LocalizedConstraintGraphVisitor, + ) { + let live_regions = liveness.points(); + + let mut visited = FxHashSet::default(); + let mut stack = Vec::new(); + + // Compute reachability per loan by traversing each loan's subgraph starting from where it + // is introduced. 
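// [Illustrative aside, not part of the patch] The loop below is a standard
// per-source depth-first reachability pass: one traversal per loan, a `visited`
// set to cut cycles, and a stack-based worklist. The same shape in a minimal
// standalone form, over a generic successor function (names are illustrative):
use std::collections::HashSet;
use std::hash::Hash;

fn reach<N, I>(start: N, mut successors: impl FnMut(N) -> I, mut on_visit: impl FnMut(N))
where
    N: Copy + Eq + Hash,
    I: IntoIterator<Item = N>,
{
    let mut visited = HashSet::new();
    let mut stack = vec![start];
    while let Some(node) = stack.pop() {
        if !visited.insert(node) {
            continue; // already handled: this is what keeps cyclic graphs terminating
        }
        on_visit(node);
        for succ in successors(node) {
            if !visited.contains(&succ) {
                stack.push(succ);
            }
        }
    }
}
// E.g. `reach(0, |n| if n < 3 { vec![n + 1] } else { vec![] }, |n| println!("{n}"))`
// visits 0, 1, 2, 3 exactly once each.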
+ for (loan_idx, loan) in borrow_set.iter_enumerated() { + visited.clear(); + stack.clear(); + + let start_node = LocalizedNode { + region: loan.region, + point: liveness.point_from_location(loan.reserve_location), + }; + stack.push(start_node); + + while let Some(node) = stack.pop() { + if !visited.insert(node) { + continue; + } + + // We've reached a node we haven't visited before. + let location = liveness.location_from_point(node.point); + visitor.on_node_traversed(loan_idx, node); + + // When we find a _new_ successor, we'd like to + // - visit it eventually, + // - and let the generic visitor know about it. + let mut successor_found = |succ| { + if !visited.contains(&succ) { + stack.push(succ); + visitor.on_successor_discovered(node, succ); + } + }; + + // Then, we propagate the loan along the localized constraint graph. The outgoing + // edges are computed lazily, from: + // - the various physical edges present at this node, + // - the materialized logical edges that exist virtually at all points for this + // node's region, localized at this point. + + // Universal regions propagate loans along the CFG, i.e. forwards only. + let is_universal_region = universal_regions.is_universal_region(node.region); + + // The physical edges present at this node are: + // + // 1. the typeck edges that flow from region to region *at this point*. + for &succ in self.edges.get(&node).into_iter().flatten() { + let succ = LocalizedNode { region: succ, point: node.point }; + successor_found(succ); + } + + // 2a. the liveness edges that flow *forward*, from this node's point to its + // successors in the CFG. + if body[location.block].statements.get(location.statement_index).is_some() { + // Intra-block edges, straight line constraints from each point to its successor + // within the same block. + let next_point = node.point + 1; + if let Some(succ) = compute_forward_successor( + node.region, + next_point, + live_regions, + live_region_variances, + is_universal_region, + ) { + successor_found(succ); + } + } else { + // Inter-block edges, from the block's terminator to each successor block's + // entry point. + for successor_block in body[location.block].terminator().successors() { + let next_location = Location { block: successor_block, statement_index: 0 }; + let next_point = liveness.point_from_location(next_location); + if let Some(succ) = compute_forward_successor( + node.region, + next_point, + live_regions, + live_region_variances, + is_universal_region, + ) { + successor_found(succ); + } + } + } + + // 2b. the liveness edges that flow *backward*, from this node's point to its + // predecessors in the CFG. + if !is_universal_region { + if location.statement_index > 0 { + // Backward edges to the predecessor point in the same block. + let previous_point = PointIndex::from(node.point.as_usize() - 1); + if let Some(succ) = compute_backward_successor( + node.region, + node.point, + previous_point, + live_regions, + live_region_variances, + ) { + successor_found(succ); + } + } else { + // Backward edges from the block entry point to the terminator of the + // predecessor blocks. 
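// [Illustrative aside, not part of the patch] MIR addresses a block's
// terminator as `statement_index == statements.len()`, so the code below maps
// a block-entry node back to each predecessor's terminator point. A toy
// standalone version over a simplified CFG (all names are assumptions):
#[derive(Copy, Clone, Debug, PartialEq)]
struct Loc {
    block: usize,
    statement_index: usize,
}

/// `stmt_counts[b]` is the statement count of block `b`; `preds[b]` lists its predecessors.
fn predecessor_terminators(block: usize, preds: &[Vec<usize>], stmt_counts: &[usize]) -> Vec<Loc> {
    preds[block]
        .iter()
        .map(|&p| Loc { block: p, statement_index: stmt_counts[p] })
        .collect()
}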
+ let predecessors = body.basic_blocks.predecessors(); + for &pred_block in &predecessors[location.block] { + let previous_location = Location { + block: pred_block, + statement_index: body[pred_block].statements.len(), + }; + let previous_point = liveness.point_from_location(previous_location); + if let Some(succ) = compute_backward_successor( + node.region, + node.point, + previous_point, + live_regions, + live_region_variances, + ) { + successor_found(succ); + } + } + } + } + + // And finally, we have the logical edges, materialized at this point. + for &logical_succ in self.logical_edges.get(&node.region).into_iter().flatten() { + let succ = LocalizedNode { region: logical_succ, point: node.point }; + successor_found(succ); + } + } + } + } +} + +/// Returns the successor for the current region/point node when propagating a loan through forward +/// edges, if applicable, according to liveness and variance. +fn compute_forward_successor( + region: RegionVid, + next_point: PointIndex, + live_regions: &SparseIntervalMatrix, + live_region_variances: &BTreeMap, + is_universal_region: bool, +) -> Option { + // 1. Universal regions are semantically live at all points. + if is_universal_region { + let succ = LocalizedNode { region, point: next_point }; + return Some(succ); + } + + // 2. Otherwise, gather the edges due to explicit region liveness, when applicable. + if !live_regions.contains(region, next_point) { + return None; + } + + // Here, `region` could be live at the current point, and is live at the next point: add a + // constraint between them, according to variance. + + // Note: there currently are cases related to promoted and const generics, where we don't yet + // have variance information (possibly about temporary regions created when typeck sanitizes the + // promoteds). Until that is done, we conservatively fallback to maximizing reachability by + // adding a bidirectional edge here. This will not limit traversal whatsoever, and thus + // propagate liveness when needed. + // + // FIXME: add the missing variance information and remove this fallback bidirectional edge. + let direction = + live_region_variances.get(®ion).unwrap_or(&ConstraintDirection::Bidirectional); + + match direction { + ConstraintDirection::Backward => { + // Contravariant cases: loans flow in the inverse direction, but we're only interested + // in forward successors and there are none here. + None + } + ConstraintDirection::Forward | ConstraintDirection::Bidirectional => { + // 1. For covariant cases: loans flow in the regular direction, from the current point + // to the next point. + // 2. For invariant cases, loans can flow in both directions, but here as well, we only + // want the forward path of the bidirectional edge. + Some(LocalizedNode { region, point: next_point }) + } + } +} + +/// Returns the successor for the current region/point node when propagating a loan through backward +/// edges, if applicable, according to liveness and variance. +fn compute_backward_successor( + region: RegionVid, + current_point: PointIndex, + previous_point: PointIndex, + live_regions: &SparseIntervalMatrix, + live_region_variances: &BTreeMap, +) -> Option { + // Liveness flows into the regions live at the next point. So, in a backwards view, we'll link + // the region from the current point, if it's live there, to the previous point. + if !live_regions.contains(region, current_point) { + return None; + } + + // FIXME: add the missing variance information and remove this fallback bidirectional edge. 
See + // the same comment in `compute_forward_successor`. + let direction = + live_region_variances.get(®ion).unwrap_or(&ConstraintDirection::Bidirectional); + + match direction { + ConstraintDirection::Forward => { + // Covariant cases: loans flow in the regular direction, but we're only interested in + // backward successors and there are none here. + None + } + ConstraintDirection::Backward | ConstraintDirection::Bidirectional => { + // 1. For contravariant cases: loans flow in the inverse direction, from the current + // point to the previous point. + // 2. For invariant cases, loans can flow in both directions, but here as well, we only + // want the backward path of the bidirectional edge. + Some(LocalizedNode { region, point: previous_point }) } - self.outlives.push(constraint); } } diff --git a/compiler/rustc_borrowck/src/polonius/dump.rs b/compiler/rustc_borrowck/src/polonius/dump.rs index a8e7ab234f006..af50f68640f8b 100644 --- a/compiler/rustc_borrowck/src/polonius/dump.rs +++ b/compiler/rustc_borrowck/src/polonius/dump.rs @@ -10,9 +10,7 @@ use rustc_session::config::MirIncludeSpans; use crate::borrow_set::BorrowSet; use crate::constraints::OutlivesConstraint; -use crate::polonius::{ - LocalizedOutlivesConstraint, LocalizedOutlivesConstraintSet, PoloniusDiagnosticsContext, -}; +use crate::polonius::{LocalizedConstraintGraphVisitor, LocalizedNode, PoloniusContext}; use crate::region_infer::values::LivenessValues; use crate::type_check::Locations; use crate::{BorrowckInferCtxt, ClosureRegionRequirements, RegionInferenceContext}; @@ -24,7 +22,7 @@ pub(crate) fn dump_polonius_mir<'tcx>( regioncx: &RegionInferenceContext<'tcx>, closure_region_requirements: &Option>, borrow_set: &BorrowSet<'tcx>, - polonius_diagnostics: Option<&PoloniusDiagnosticsContext>, + polonius_context: Option<&PoloniusContext>, ) { let tcx = infcx.tcx; if !tcx.sess.opts.unstable_opts.polonius.is_next_enabled() { @@ -33,8 +31,22 @@ pub(crate) fn dump_polonius_mir<'tcx>( let Some(dumper) = MirDumper::new(tcx, "polonius", body) else { return }; - let polonius_diagnostics = - polonius_diagnostics.expect("missing diagnostics context with `-Zpolonius=next`"); + let polonius_context = + polonius_context.expect("missing polonius context with `-Zpolonius=next`"); + + // If we have a polonius graph to dump along the rest of the MIR and NLL info, we extract its + // constraints here. + let mut collector = LocalizedOutlivesConstraintCollector { constraints: Vec::new() }; + if let Some(graph) = &polonius_context.graph { + graph.traverse( + body, + regioncx.liveness_constraints(), + &polonius_context.live_region_variances, + regioncx.universal_regions(), + borrow_set, + &mut collector, + ); + } let extra_data = &|pass_where, out: &mut dyn io::Write| { emit_polonius_mir( @@ -42,7 +54,7 @@ pub(crate) fn dump_polonius_mir<'tcx>( regioncx, closure_region_requirements, borrow_set, - &polonius_diagnostics.localized_outlives_constraints, + &collector.constraints, pass_where, out, ) @@ -60,17 +72,34 @@ pub(crate) fn dump_polonius_mir<'tcx>( let _ = try { let mut file = dumper.create_dump_file("html", body)?; - emit_polonius_dump( - &dumper, - body, - regioncx, - borrow_set, - &polonius_diagnostics.localized_outlives_constraints, - &mut file, - )?; + emit_polonius_dump(&dumper, body, regioncx, borrow_set, &collector.constraints, &mut file)?; }; } +/// The constraints we'll dump as text or a mermaid graph. 
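// [Illustrative aside, not part of the patch] The struct and collector defined
// just below follow the classic "visitor with default no-op hooks" shape: the
// trait provides empty defaults, and each implementation overrides only the
// callbacks it cares about. A minimal standalone sketch (names assumed):
trait GraphVisitor {
    fn on_node(&mut self, _node: u32) {} // default: do nothing
    fn on_edge(&mut self, _from: u32, _to: u32) {} // default: do nothing
}

struct EdgeCollector {
    edges: Vec<(u32, u32)>,
}

impl GraphVisitor for EdgeCollector {
    // Override only the edge hook; `on_node` keeps its no-op default.
    fn on_edge(&mut self, from: u32, to: u32) {
        self.edges.push((from, to));
    }
}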
+struct LocalizedOutlivesConstraint { + source: RegionVid, + from: PointIndex, + target: RegionVid, + to: PointIndex, +} + +/// Visitor to record constraints encountered when traversing the localized constraint graph. +struct LocalizedOutlivesConstraintCollector { + constraints: Vec, +} + +impl LocalizedConstraintGraphVisitor for LocalizedOutlivesConstraintCollector { + fn on_successor_discovered(&mut self, current_node: LocalizedNode, successor: LocalizedNode) { + self.constraints.push(LocalizedOutlivesConstraint { + source: current_node.region, + from: current_node.point, + target: successor.region, + to: successor.point, + }); + } +} + /// The polonius dump consists of: /// - the NLL MIR /// - the list of polonius localized constraints @@ -82,7 +111,7 @@ fn emit_polonius_dump<'tcx>( body: &Body<'tcx>, regioncx: &RegionInferenceContext<'tcx>, borrow_set: &BorrowSet<'tcx>, - localized_outlives_constraints: &LocalizedOutlivesConstraintSet, + localized_outlives_constraints: &[LocalizedOutlivesConstraint], out: &mut dyn io::Write, ) -> io::Result<()> { // Prepare the HTML dump file prologue. @@ -193,7 +222,7 @@ fn emit_polonius_mir<'tcx>( regioncx: &RegionInferenceContext<'tcx>, closure_region_requirements: &Option>, borrow_set: &BorrowSet<'tcx>, - localized_outlives_constraints: &LocalizedOutlivesConstraintSet, + localized_outlives_constraints: &[LocalizedOutlivesConstraint], pass_where: PassWhere, out: &mut dyn io::Write, ) -> io::Result<()> { @@ -212,10 +241,10 @@ fn emit_polonius_mir<'tcx>( // Add localized outlives constraints match pass_where { PassWhere::BeforeCFG => { - if localized_outlives_constraints.outlives.len() > 0 { + if localized_outlives_constraints.len() > 0 { writeln!(out, "| Localized constraints")?; - for constraint in &localized_outlives_constraints.outlives { + for constraint in localized_outlives_constraints { let LocalizedOutlivesConstraint { source, from, target, to } = constraint; let from = liveness.location_from_point(*from); let to = liveness.location_from_point(*to); @@ -399,7 +428,7 @@ fn emit_mermaid_nll_sccs<'tcx>( fn emit_mermaid_constraint_graph<'tcx>( borrow_set: &BorrowSet<'tcx>, liveness: &LivenessValues, - localized_outlives_constraints: &LocalizedOutlivesConstraintSet, + localized_outlives_constraints: &[LocalizedOutlivesConstraint], out: &mut dyn io::Write, ) -> io::Result { let location_name = |location: Location| { @@ -438,7 +467,7 @@ fn emit_mermaid_constraint_graph<'tcx>( // The regions subgraphs containing the region/point nodes. let mut points_per_region: FxIndexMap> = FxIndexMap::default(); - for constraint in &localized_outlives_constraints.outlives { + for constraint in localized_outlives_constraints { points_per_region.entry(constraint.source).or_default().insert(constraint.from); points_per_region.entry(constraint.target).or_default().insert(constraint.to); } @@ -451,7 +480,7 @@ fn emit_mermaid_constraint_graph<'tcx>( } // The constraint graph edges. - for constraint in &localized_outlives_constraints.outlives { + for constraint in localized_outlives_constraints { // FIXME: add killed loans and constraint kind as edge labels. writeln!( out, @@ -463,6 +492,6 @@ fn emit_mermaid_constraint_graph<'tcx>( // Return the number of edges: this is the biggest graph in the dump and its edge count will be // mermaid's max edge count to support. 
- let edge_count = borrow_set.len() + localized_outlives_constraints.outlives.len(); + let edge_count = borrow_set.len() + localized_outlives_constraints.len(); Ok(edge_count) } diff --git a/compiler/rustc_borrowck/src/polonius/liveness_constraints.rs b/compiler/rustc_borrowck/src/polonius/liveness_constraints.rs index f1338b3bf1ee5..b6f8b4a79f39b 100644 --- a/compiler/rustc_borrowck/src/polonius/liveness_constraints.rs +++ b/compiler/rustc_borrowck/src/polonius/liveness_constraints.rs @@ -1,22 +1,15 @@ use std::collections::BTreeMap; use rustc_hir::def_id::DefId; -use rustc_index::bit_set::SparseBitMatrix; -use rustc_middle::mir::{Body, Location}; use rustc_middle::ty::relate::{ self, Relate, RelateResult, TypeRelation, relate_args_with_variances, }; use rustc_middle::ty::{self, RegionVid, Ty, TyCtxt, TypeVisitable}; -use rustc_mir_dataflow::points::PointIndex; -use super::{ - ConstraintDirection, LocalizedOutlivesConstraint, LocalizedOutlivesConstraintSet, - PoloniusLivenessContext, -}; -use crate::region_infer::values::LivenessValues; +use super::{ConstraintDirection, PoloniusContext}; use crate::universal_regions::UniversalRegions; -impl PoloniusLivenessContext { +impl PoloniusContext { /// Record the variance of each region contained within the given value. pub(crate) fn record_live_region_variance<'tcx>( &mut self, @@ -34,165 +27,6 @@ impl PoloniusLivenessContext { } } -/// Propagate loans throughout the CFG: for each statement in the MIR, create localized outlives -/// constraints for loans that are propagated to the next statements. -pub(super) fn create_liveness_constraints<'tcx>( - body: &Body<'tcx>, - liveness: &LivenessValues, - live_regions: &SparseBitMatrix, - live_region_variances: &BTreeMap, - universal_regions: &UniversalRegions<'tcx>, - localized_outlives_constraints: &mut LocalizedOutlivesConstraintSet, -) { - for (block, bb) in body.basic_blocks.iter_enumerated() { - let statement_count = bb.statements.len(); - for statement_index in 0..=statement_count { - let current_location = Location { block, statement_index }; - let current_point = liveness.point_from_location(current_location); - - if statement_index < statement_count { - // Intra-block edges, straight line constraints from each point to its successor - // within the same block. - let next_location = Location { block, statement_index: statement_index + 1 }; - let next_point = liveness.point_from_location(next_location); - propagate_loans_between_points( - current_point, - next_point, - live_regions, - live_region_variances, - universal_regions, - localized_outlives_constraints, - ); - } else { - // Inter-block edges, from the block's terminator to each successor block's entry - // point. - for successor_block in bb.terminator().successors() { - let next_location = Location { block: successor_block, statement_index: 0 }; - let next_point = liveness.point_from_location(next_location); - propagate_loans_between_points( - current_point, - next_point, - live_regions, - live_region_variances, - universal_regions, - localized_outlives_constraints, - ); - } - } - } - } -} - -/// Propagate loans within a region between two points in the CFG, if that region is live at both -/// the source and target points. 
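// [Illustrative aside, not part of the patch] The deleted helper below
// materialized one physical edge per live (region, point) pair up front; the
// new `compute_forward_successor`/`compute_backward_successor` answer the same
// question lazily, only when traversal reaches a node. A toy contrast of the
// two shapes (types and names are assumptions):
#[derive(Copy, Clone, PartialEq, Eq, Hash, Debug)]
struct Node {
    region: u32,
    point: u32,
}

// Eager: build every liveness edge ahead of time, paying memory proportional
// to live regions times CFG points.
fn eager_edges(live: &[(u32, u32)]) -> Vec<(Node, Node)> {
    live.iter()
        .map(|&(region, point)| (Node { region, point }, Node { region, point: point + 1 }))
        .collect()
}

// Lazy: compute a node's successor only when asked, storing nothing.
fn lazy_successor(node: Node, is_live_at: impl Fn(u32, u32) -> bool) -> Option<Node> {
    is_live_at(node.region, node.point + 1)
        .then_some(Node { region: node.region, point: node.point + 1 })
}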
-fn propagate_loans_between_points( - current_point: PointIndex, - next_point: PointIndex, - live_regions: &SparseBitMatrix, - live_region_variances: &BTreeMap, - universal_regions: &UniversalRegions<'_>, - localized_outlives_constraints: &mut LocalizedOutlivesConstraintSet, -) { - // Universal regions are semantically live at all points. - // Note: we always have universal regions but they're not always (or often) involved in the - // subset graph. For now, we emit all their edges unconditionally, but some of these subgraphs - // will be disconnected from the rest of the graph and thus, unnecessary. - // - // FIXME: only emit the edges of universal regions that existential regions can reach. - for region in universal_regions.universal_regions_iter() { - localized_outlives_constraints.push(LocalizedOutlivesConstraint { - source: region, - from: current_point, - target: region, - to: next_point, - }); - } - - let Some(next_live_regions) = live_regions.row(next_point) else { - // There are no constraints to add: there are no live regions at the next point. - return; - }; - - for region in next_live_regions.iter() { - // `region` could be live at the current point, and is live at the next point: add a - // constraint between them, according to variance. - if let Some(&direction) = live_region_variances.get(®ion) { - add_liveness_constraint( - region, - current_point, - next_point, - direction, - localized_outlives_constraints, - ); - } else { - // Note: there currently are cases related to promoted and const generics, where we - // don't yet have variance information (possibly about temporary regions created when - // typeck sanitizes the promoteds). Until that is done, we conservatively fallback to - // maximizing reachability by adding a bidirectional edge here. This will not limit - // traversal whatsoever, and thus propagate liveness when needed. - // - // FIXME: add the missing variance information and remove this fallback bidirectional - // edge. - let fallback = ConstraintDirection::Bidirectional; - add_liveness_constraint( - region, - current_point, - next_point, - fallback, - localized_outlives_constraints, - ); - } - } -} - -/// Adds `LocalizedOutlivesConstraint`s between two connected points, according to the given edge -/// direction. -fn add_liveness_constraint( - region: RegionVid, - current_point: PointIndex, - next_point: PointIndex, - direction: ConstraintDirection, - localized_outlives_constraints: &mut LocalizedOutlivesConstraintSet, -) { - match direction { - ConstraintDirection::Forward => { - // Covariant cases: loans flow in the regular direction, from the current point to the - // next point. - localized_outlives_constraints.push(LocalizedOutlivesConstraint { - source: region, - from: current_point, - target: region, - to: next_point, - }); - } - ConstraintDirection::Backward => { - // Contravariant cases: loans flow in the inverse direction, from the next point to the - // current point. - localized_outlives_constraints.push(LocalizedOutlivesConstraint { - source: region, - from: next_point, - target: region, - to: current_point, - }); - } - ConstraintDirection::Bidirectional => { - // For invariant cases, loans can flow in both directions: we add both edges. 
- localized_outlives_constraints.push(LocalizedOutlivesConstraint { - source: region, - from: current_point, - target: region, - to: next_point, - }); - localized_outlives_constraints.push(LocalizedOutlivesConstraint { - source: region, - from: next_point, - target: region, - to: current_point, - }); - } - } -} - /// Extracts variances for regions contained within types. Follows the same structure as /// `rustc_infer`'s `Generalizer`: we try to relate a type with itself to track and extract the /// variances of regions. diff --git a/compiler/rustc_borrowck/src/polonius/loan_liveness.rs b/compiler/rustc_borrowck/src/polonius/loan_liveness.rs deleted file mode 100644 index bdc3047e5ba01..0000000000000 --- a/compiler/rustc_borrowck/src/polonius/loan_liveness.rs +++ /dev/null @@ -1,160 +0,0 @@ -use rustc_data_structures::fx::{FxHashMap, FxHashSet, FxIndexSet}; -use rustc_middle::ty::RegionVid; -use rustc_mir_dataflow::points::PointIndex; - -use super::{LiveLoans, LocalizedOutlivesConstraintSet}; -use crate::BorrowSet; -use crate::constraints::OutlivesConstraint; -use crate::region_infer::values::LivenessValues; -use crate::type_check::Locations; - -/// Compute loan reachability to approximately trace loan liveness throughout the CFG, by -/// traversing the full graph of constraints that combines: -/// - the localized constraints (the physical edges), -/// - with the constraints that hold at all points (the logical edges). -pub(super) fn compute_loan_liveness<'tcx>( - liveness: &LivenessValues, - outlives_constraints: impl Iterator>, - borrow_set: &BorrowSet<'tcx>, - localized_outlives_constraints: &LocalizedOutlivesConstraintSet, -) -> LiveLoans { - let mut live_loans = LiveLoans::new(borrow_set.len()); - - // Create the full graph with the physical edges we've localized earlier, and the logical edges - // of constraints that hold at all points. - let logical_constraints = - outlives_constraints.filter(|c| matches!(c.locations, Locations::All(_))); - let graph = LocalizedConstraintGraph::new(&localized_outlives_constraints, logical_constraints); - let mut visited = FxHashSet::default(); - let mut stack = Vec::new(); - - // Compute reachability per loan by traversing each loan's subgraph starting from where it is - // introduced. - for (loan_idx, loan) in borrow_set.iter_enumerated() { - visited.clear(); - stack.clear(); - - let start_node = LocalizedNode { - region: loan.region, - point: liveness.point_from_location(loan.reserve_location), - }; - stack.push(start_node); - - while let Some(node) = stack.pop() { - if !visited.insert(node) { - continue; - } - - // Record the loan as being live on entry to this point if it reaches a live region - // there. - // - // This is an approximation of liveness (which is the thing we want), in that we're - // using a single notion of reachability to represent what used to be _two_ different - // transitive closures. It didn't seem impactful when coming up with the single-graph - // and reachability through space (regions) + time (CFG) concepts, but in practice the - // combination of time-traveling with kills is more impactful than initially - // anticipated. - // - // Kills should prevent a loan from reaching its successor points in the CFG, but not - // while time-traveling: we're not actually at that CFG point, but looking for - // predecessor regions that contain the loan. One of the two TCs we had pushed the - // transitive subset edges to each point instead of having backward edges, and the - // problem didn't exist before. 
In the abstract, naive reachability is not enough to - // model this, we'd need a slightly different solution. For example, maybe with a - // two-step traversal: - // - at each point we first traverse the subgraph (and possibly time-travel) looking for - // exit nodes while ignoring kills, - // - and then when we're back at the current point, we continue normally. - // - // Another (less annoying) subtlety is that kills and the loan use-map are - // flow-insensitive. Kills can actually appear in places before a loan is introduced, or - // at a location that is actually unreachable in the CFG from the introduction point, - // and these can also be encountered during time-traveling. - // - // The simplest change that made sense to "fix" the issues above is taking into - // account kills that are: - // - reachable from the introduction point - // - encountered during forward traversal. Note that this is not transitive like the - // two-step traversal described above: only kills encountered on exit via a backward - // edge are ignored. - // - // This version of the analysis, however, is enough in practice to pass the tests that - // we care about and NLLs reject, without regressions on crater, and is an actionable - // subset of the full analysis. It also naturally points to areas of improvement that we - // wish to explore later, namely handling kills appropriately during traversal, instead - // of continuing traversal to all the reachable nodes. - // - // FIXME: analyze potential unsoundness, possibly in concert with a borrowck - // implementation in a-mir-formality, fuzzing, or manually crafting counter-examples. - - if liveness.is_live_at(node.region, liveness.location_from_point(node.point)) { - live_loans.insert(node.point, loan_idx); - } - - for succ in graph.outgoing_edges(node) { - stack.push(succ); - } - } - } - - live_loans -} - -/// The localized constraint graph indexes the physical and logical edges to compute a given node's -/// successors during traversal. -struct LocalizedConstraintGraph { - /// The actual, physical, edges we have recorded for a given node. - edges: FxHashMap>, - - /// The logical edges representing the outlives constraints that hold at all points in the CFG, - /// which we don't localize to avoid creating a lot of unnecessary edges in the graph. Some CFGs - /// can be big, and we don't need to create such a physical edge for every point in the CFG. - logical_edges: FxHashMap>, -} - -/// A node in the graph to be traversed, one of the two vertices of a localized outlives constraint. -#[derive(Copy, Clone, PartialEq, Eq, Hash, Debug)] -struct LocalizedNode { - region: RegionVid, - point: PointIndex, -} - -impl LocalizedConstraintGraph { - /// Traverses the constraints and returns the indexed graph of edges per node. 
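// [Illustrative aside, not part of the patch] Both the deleted `new` below and
// its replacement earlier in this diff index edges as an adjacency map, using
// the `entry(..).or_default()` idiom. A standalone version with std types:
use std::collections::{HashMap, HashSet};

fn index_edges(edges: &[(u32, u32)]) -> HashMap<u32, HashSet<u32>> {
    let mut adjacency: HashMap<u32, HashSet<u32>> = HashMap::new();
    for &(source, target) in edges {
        // Create the entry the first time `source` is seen, then add the
        // target; the `HashSet` deduplicates parallel edges for free.
        adjacency.entry(source).or_default().insert(target);
    }
    adjacency
}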
-    fn new<'tcx>(
-        constraints: &LocalizedOutlivesConstraintSet,
-        logical_constraints: impl Iterator<Item = OutlivesConstraint<'tcx>>,
-    ) -> Self {
-        let mut edges: FxHashMap<_, FxIndexSet<_>> = FxHashMap::default();
-        for constraint in &constraints.outlives {
-            let source = LocalizedNode { region: constraint.source, point: constraint.from };
-            let target = LocalizedNode { region: constraint.target, point: constraint.to };
-            edges.entry(source).or_default().insert(target);
-        }
-
-        let mut logical_edges: FxHashMap<_, FxIndexSet<_>> = FxHashMap::default();
-        for constraint in logical_constraints {
-            logical_edges.entry(constraint.sup).or_default().insert(constraint.sub);
-        }
-
-        LocalizedConstraintGraph { edges, logical_edges }
-    }
-
-    /// Returns the outgoing edges of a given node, not its transitive closure.
-    fn outgoing_edges(&self, node: LocalizedNode) -> impl Iterator<Item = LocalizedNode> {
-        // The outgoing edges are:
-        // - the physical edges present at this node,
-        // - the materialized logical edges that exist virtually at all points for this node's
-        //   region, localized at this point.
-        let physical_edges =
-            self.edges.get(&node).into_iter().flat_map(|targets| targets.iter().copied());
-        let materialized_edges =
-            self.logical_edges.get(&node.region).into_iter().flat_map(move |targets| {
-                targets
-                    .iter()
-                    .copied()
-                    .map(move |target| LocalizedNode { point: node.point, region: target })
-            });
-        physical_edges.chain(materialized_edges)
-    }
-}
diff --git a/compiler/rustc_borrowck/src/polonius/mod.rs b/compiler/rustc_borrowck/src/polonius/mod.rs
index a9092b1981e1d..0924ce25c52ea 100644
--- a/compiler/rustc_borrowck/src/polonius/mod.rs
+++ b/compiler/rustc_borrowck/src/polonius/mod.rs
@@ -32,47 +32,37 @@
 //! -
 //! -
 //!
-//!
-//! Data flows like this:
-//! 1) during MIR typeck, record liveness data needed later: live region variances, as well as the
-//!    usual NLL liveness data (just computed on more locals). That's the [PoloniusLivenessContext].
-//! 2) once that is done, variance data is transferred, and the NLL region liveness is converted to
-//!    the polonius shape. That's the main [PoloniusContext].
-//! 3) during region inference, that data and the NLL outlives constraints are used to create the
-//!    localized outlives constraints, as described above. That's the [PoloniusDiagnosticsContext].
-//! 4) transfer this back to the main borrowck procedure: it handles computing errors and
-//!    diagnostics, debugging and MIR dumping concerns.
 
 mod constraints;
 mod dump;
 pub(crate) mod legacy;
 mod liveness_constraints;
-mod loan_liveness;
-mod typeck_constraints;
 
 use std::collections::BTreeMap;
 
 use rustc_data_structures::fx::FxHashSet;
 use rustc_index::bit_set::SparseBitMatrix;
-use rustc_index::interval::SparseIntervalMatrix;
 use rustc_middle::mir::{Body, Local};
-use rustc_middle::ty::{RegionVid, TyCtxt};
+use rustc_middle::ty::RegionVid;
 use rustc_mir_dataflow::points::PointIndex;
 
-pub(crate) use self::constraints::*;
+pub(self) use self::constraints::*;
 pub(crate) use self::dump::dump_polonius_mir;
-use self::liveness_constraints::create_liveness_constraints;
-use self::loan_liveness::compute_loan_liveness;
-use self::typeck_constraints::convert_typeck_constraints;
 use crate::dataflow::BorrowIndex;
+use crate::region_infer::values::LivenessValues;
 use crate::{BorrowSet, RegionInferenceContext};
 
 pub(crate) type LiveLoans = SparseBitMatrix<PointIndex, BorrowIndex>;
 
-/// This struct holds the liveness data created during MIR typeck, and which will be used later in
-/// the process, to compute the polonius localized constraints.
+/// This struct holds the necessary +/// - liveness data, created during MIR typeck, and which will be used to lazily compute the +/// polonius localized constraints, during NLL region inference as well as MIR dumping, +/// - data needed by the borrowck error computation and diagnostics. #[derive(Default)] -pub(crate) struct PoloniusLivenessContext { +pub(crate) struct PoloniusContext { + /// The graph from which we extract the localized outlives constraints. + graph: Option, + /// The expected edge direction per live region: the kind of directed edge we'll create as /// liveness constraints depends on the variance of types with respect to each contained region. live_region_variances: BTreeMap, @@ -84,27 +74,6 @@ pub(crate) struct PoloniusLivenessContext { pub(crate) boring_nll_locals: FxHashSet, } -/// This struct holds the data needed to create the Polonius localized constraints. Its data is -/// transferred and converted from the [PoloniusLivenessContext] at the end of MIR typeck. -pub(crate) struct PoloniusContext { - /// The liveness data we recorded during MIR typeck. - liveness_context: PoloniusLivenessContext, - - /// The set of regions that are live at a given point in the CFG, used to create localized - /// outlives constraints between regions that are live at connected points in the CFG. - live_regions: SparseBitMatrix, -} - -/// This struct holds the data needed by the borrowck error computation and diagnostics. Its data is -/// computed from the [PoloniusContext] when computing NLL regions. -pub(crate) struct PoloniusDiagnosticsContext { - /// The localized outlives constraints that were computed in the main analysis. - localized_outlives_constraints: LocalizedOutlivesConstraintSet, - - /// The liveness data computed during MIR typeck: [PoloniusLivenessContext::boring_nll_locals]. - pub(crate) boring_nll_locals: FxHashSet, -} - /// The direction a constraint can flow into. Used to create liveness constraints according to /// variance. #[derive(Copy, Clone, PartialEq, Eq, Debug)] @@ -120,26 +89,6 @@ enum ConstraintDirection { } impl PoloniusContext { - /// Unlike NLLs, in polonius we traverse the cfg to look for regions live across an edge, so we - /// need to transpose the "points where each region is live" matrix to a "live regions per point" - /// matrix. - // FIXME: avoid this conversion by always storing liveness data in this shape in the rest of - // borrowck. - pub(crate) fn create_from_liveness( - liveness_context: PoloniusLivenessContext, - num_regions: usize, - points_per_live_region: &SparseIntervalMatrix, - ) -> PoloniusContext { - let mut live_regions_per_point = SparseBitMatrix::new(num_regions); - for region in points_per_live_region.rows() { - for point in points_per_live_region.row(region).unwrap().iter() { - live_regions_per_point.insert(point, region); - } - } - - PoloniusContext { live_regions: live_regions_per_point, liveness_context } - } - /// Computes live loans using the set of loans model for `-Zpolonius=next`. /// /// First, creates a constraint graph combining regions and CFG points, by: @@ -151,44 +100,91 @@ impl PoloniusContext { /// /// The constraint data will be used to compute errors and diagnostics. 
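// [Illustrative aside, not part of the patch] `compute_loan_liveness` below now
// guards on an empty borrow set and caches the graph it builds so the later MIR
// dump pass can traverse it again. The "compute once, keep for a later pass"
// shape in miniature (all names are assumptions):
struct Analysis {
    graph: Option<Vec<(u32, u32)>>,
}

impl Analysis {
    fn run(&mut self, loans: &[u32], edges: Vec<(u32, u32)>) {
        if loans.is_empty() {
            return; // nothing to trace: skip building the graph entirely
        }
        // ... traverse `edges` here to compute per-loan liveness ...
        self.graph = Some(edges); // keep the graph around for the dump pass
    }
}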
pub(crate) fn compute_loan_liveness<'tcx>( - self, - tcx: TyCtxt<'tcx>, + &mut self, regioncx: &mut RegionInferenceContext<'tcx>, body: &Body<'tcx>, borrow_set: &BorrowSet<'tcx>, - ) -> PoloniusDiagnosticsContext { - let PoloniusLivenessContext { live_region_variances, boring_nll_locals } = - self.liveness_context; - - let mut localized_outlives_constraints = LocalizedOutlivesConstraintSet::default(); - convert_typeck_constraints( - tcx, - body, - regioncx.liveness_constraints(), - regioncx.outlives_constraints(), - regioncx.universal_regions(), - &mut localized_outlives_constraints, - ); - - create_liveness_constraints( - body, - regioncx.liveness_constraints(), - &self.live_regions, - &live_region_variances, - regioncx.universal_regions(), - &mut localized_outlives_constraints, - ); - - // Now that we have a complete graph, we can compute reachability to trace the liveness of - // loans for the next step in the chain, the NLL loan scope and active loans computations. - let live_loans = compute_loan_liveness( - regioncx.liveness_constraints(), - regioncx.outlives_constraints(), - borrow_set, - &localized_outlives_constraints, - ); - regioncx.record_live_loans(live_loans); - - PoloniusDiagnosticsContext { localized_outlives_constraints, boring_nll_locals } + ) { + let liveness = regioncx.liveness_constraints(); + + // We don't need to prepare the graph (index NLL constraints, etc.) if we have no loans to + // trace throughout localized constraints. + if borrow_set.len() > 0 { + // From the outlives constraints, liveness, and variances, we can compute reachability + // on the lazy localized constraint graph to trace the liveness of loans, for the next + // step in the chain (the NLL loan scope and active loans computations). + let graph = LocalizedConstraintGraph::new(liveness, regioncx.outlives_constraints()); + + let mut live_loans = LiveLoans::new(borrow_set.len()); + let mut visitor = LoanLivenessVisitor { liveness, live_loans: &mut live_loans }; + graph.traverse( + body, + liveness, + &self.live_region_variances, + regioncx.universal_regions(), + borrow_set, + &mut visitor, + ); + regioncx.record_live_loans(live_loans); + + // The graph can be traversed again during MIR dumping, so we store it here. + self.graph = Some(graph); + } + } +} + +/// Visitor to record loan liveness when traversing the localized constraint graph. +struct LoanLivenessVisitor<'a> { + liveness: &'a LivenessValues, + live_loans: &'a mut LiveLoans, +} + +impl LocalizedConstraintGraphVisitor for LoanLivenessVisitor<'_> { + fn on_node_traversed(&mut self, loan: BorrowIndex, node: LocalizedNode) { + // Record the loan as being live on entry to this point if it reaches a live region + // there. + // + // This is an approximation of liveness (which is the thing we want), in that we're + // using a single notion of reachability to represent what used to be _two_ different + // transitive closures. It didn't seem impactful when coming up with the single-graph + // and reachability through space (regions) + time (CFG) concepts, but in practice the + // combination of time-traveling with kills is more impactful than initially + // anticipated. + // + // Kills should prevent a loan from reaching its successor points in the CFG, but not + // while time-traveling: we're not actually at that CFG point, but looking for + // predecessor regions that contain the loan. One of the two TCs we had pushed the + // transitive subset edges to each point instead of having backward edges, and the + // problem didn't exist before. 
In the abstract, naive reachability is not enough to + // model this, we'd need a slightly different solution. For example, maybe with a + // two-step traversal: + // - at each point we first traverse the subgraph (and possibly time-travel) looking for + // exit nodes while ignoring kills, + // - and then when we're back at the current point, we continue normally. + // + // Another (less annoying) subtlety is that kills and the loan use-map are + // flow-insensitive. Kills can actually appear in places before a loan is introduced, or + // at a location that is actually unreachable in the CFG from the introduction point, + // and these can also be encountered during time-traveling. + // + // The simplest change that made sense to "fix" the issues above is taking into account + // kills that are: + // - reachable from the introduction point + // - encountered during forward traversal. Note that this is not transitive like the + // two-step traversal described above: only kills encountered on exit via a backward + // edge are ignored. + // + // This version of the analysis, however, is enough in practice to pass the tests that + // we care about and NLLs reject, without regressions on crater, and is an actionable + // subset of the full analysis. It also naturally points to areas of improvement that we + // wish to explore later, namely handling kills appropriately during traversal, instead + // of continuing traversal to all the reachable nodes. + // + // FIXME: analyze potential unsoundness, possibly in concert with a borrowck + // implementation in a-mir-formality, fuzzing, or manually crafting counter-examples. + let location = self.liveness.location_from_point(node.point); + if self.liveness.is_live_at(node.region, location) { + self.live_loans.insert(node.point, loan); + } } } diff --git a/compiler/rustc_borrowck/src/polonius/typeck_constraints.rs b/compiler/rustc_borrowck/src/polonius/typeck_constraints.rs deleted file mode 100644 index cfe9376fb5029..0000000000000 --- a/compiler/rustc_borrowck/src/polonius/typeck_constraints.rs +++ /dev/null @@ -1,208 +0,0 @@ -use rustc_data_structures::fx::FxHashSet; -use rustc_middle::mir::{Body, Location, Statement, StatementKind, Terminator, TerminatorKind}; -use rustc_middle::ty::{TyCtxt, TypeVisitable}; -use rustc_mir_dataflow::points::PointIndex; - -use super::{LocalizedOutlivesConstraint, LocalizedOutlivesConstraintSet}; -use crate::constraints::OutlivesConstraint; -use crate::region_infer::values::LivenessValues; -use crate::type_check::Locations; -use crate::universal_regions::UniversalRegions; - -/// Propagate loans throughout the subset graph at a given point (with some subtleties around the -/// location where effects start to be visible). -pub(super) fn convert_typeck_constraints<'tcx>( - tcx: TyCtxt<'tcx>, - body: &Body<'tcx>, - liveness: &LivenessValues, - outlives_constraints: impl Iterator>, - universal_regions: &UniversalRegions<'tcx>, - localized_outlives_constraints: &mut LocalizedOutlivesConstraintSet, -) { - for outlives_constraint in outlives_constraints { - match outlives_constraint.locations { - Locations::All(_) => { - // We don't turn constraints holding at all points into physical edges at every - // point in the graph. They are encoded into *traversal* instead: a given node's - // successors will combine these logical edges with the regular, physical, localized - // edges. 
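// [Illustrative aside, not part of the patch] "Logical" all-points edges are
// only materialized when traversal visits a node, by pairing the region-level
// successor with the node's own point. A standalone sketch of combining the
// two edge kinds (std types; names assumed):
use std::collections::HashMap;

#[derive(Copy, Clone, PartialEq, Eq, Hash)]
struct Node {
    region: u32,
    point: u32,
}

fn successors(
    node: Node,
    physical: &HashMap<Node, Vec<Node>>, // localized: (region, point) -> (region, point)
    logical: &HashMap<u32, Vec<u32>>,    // region -> region, valid at *every* point
) -> Vec<Node> {
    let mut out = physical.get(&node).cloned().unwrap_or_default();
    // Materialize the logical edges at this node's point only.
    for &region in logical.get(&node.region).into_iter().flatten() {
        out.push(Node { region, point: node.point });
    }
    out
}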
- continue; - } - - Locations::Single(location) => { - // This constraint is marked as holding at one location, we localize it to that - // location or its successor, depending on the corresponding MIR - // statement/terminator. Unfortunately, they all show up from typeck as coming "on - // entry", so for now we modify them to take effects that should apply "on exit" - // into account. - // - // FIXME: this approach is subtle, complicated, and hard to test, so we should track - // this information better in MIR typeck instead, for example with a new `Locations` - // variant that contains which node is crossing over between entry and exit. - let point = liveness.point_from_location(location); - let localized_constraint = if let Some(stmt) = - body[location.block].statements.get(location.statement_index) - { - localize_statement_constraint( - tcx, - stmt, - &outlives_constraint, - point, - universal_regions, - ) - } else { - assert_eq!(location.statement_index, body[location.block].statements.len()); - let terminator = body[location.block].terminator(); - localize_terminator_constraint( - tcx, - body, - terminator, - liveness, - &outlives_constraint, - point, - universal_regions, - ) - }; - localized_outlives_constraints.push(localized_constraint); - } - } - } -} - -/// For a given outlives constraint arising from a MIR statement, localize the constraint with the -/// needed CFG `from`-`to` intra-block nodes. -fn localize_statement_constraint<'tcx>( - tcx: TyCtxt<'tcx>, - stmt: &Statement<'tcx>, - outlives_constraint: &OutlivesConstraint<'tcx>, - current_point: PointIndex, - universal_regions: &UniversalRegions<'tcx>, -) -> LocalizedOutlivesConstraint { - match &stmt.kind { - StatementKind::Assign(box (lhs, rhs)) => { - // To create localized outlives constraints without midpoints, we rely on the property - // that no input regions from the RHS of the assignment will flow into themselves: they - // should not appear in the output regions in the LHS. We believe this to be true by - // construction of the MIR, via temporaries, and assert it here. - // - // We think we don't need midpoints because: - // - every LHS Place has a unique set of regions that don't appear elsewhere - // - this implies that for them to be part of the RHS, the same Place must be read and - // written - // - and that should be impossible in MIR - // - // When we have a more complete implementation in the future, tested with crater, etc, - // we can remove this assertion. It's a debug assert because it can be expensive. - debug_assert!( - { - let mut lhs_regions = FxHashSet::default(); - tcx.for_each_free_region(lhs, |region| { - let region = universal_regions.to_region_vid(region); - lhs_regions.insert(region); - }); - - let mut rhs_regions = FxHashSet::default(); - tcx.for_each_free_region(rhs, |region| { - let region = universal_regions.to_region_vid(region); - rhs_regions.insert(region); - }); - - // The intersection between LHS and RHS regions should be empty. - lhs_regions.is_disjoint(&rhs_regions) - }, - "there should be no common regions between the LHS and RHS of an assignment" - ); - } - _ => { - // Assignments should be the only statement that can both generate constraints that - // apply on entry (specific to the RHS place) *and* others that only apply on exit (the - // subset of RHS regions that actually flow into the LHS): i.e., where midpoints would - // be used to ensure the former happen before the latter, within the same MIR Location. 
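// [Illustrative aside, not part of the patch] The `debug_assert!` above boils
// down to a set-disjointness check between the regions of the two sides of an
// assignment. The same check, standalone over plain region ids:
use std::collections::HashSet;

fn assert_no_common_regions(lhs_regions: &[u32], rhs_regions: &[u32]) {
    let lhs: HashSet<u32> = lhs_regions.iter().copied().collect();
    let rhs: HashSet<u32> = rhs_regions.iter().copied().collect();
    // `is_disjoint` is the direct way to say "the intersection is empty".
    debug_assert!(
        lhs.is_disjoint(&rhs),
        "there should be no common regions between the LHS and RHS of an assignment"
    );
}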
- } - } - - // We generally localize an outlives constraint to where it arises. - LocalizedOutlivesConstraint { - source: outlives_constraint.sup, - from: current_point, - target: outlives_constraint.sub, - to: current_point, - } -} - -/// For a given outlives constraint arising from a MIR terminator, localize the constraint with the -/// needed CFG `from`-`to` inter-block nodes. -fn localize_terminator_constraint<'tcx>( - tcx: TyCtxt<'tcx>, - body: &Body<'tcx>, - terminator: &Terminator<'tcx>, - liveness: &LivenessValues, - outlives_constraint: &OutlivesConstraint<'tcx>, - current_point: PointIndex, - universal_regions: &UniversalRegions<'tcx>, -) -> LocalizedOutlivesConstraint { - // FIXME: check if other terminators need the same handling as `Call`s, in particular - // Assert/Yield/Drop. - match &terminator.kind { - // FIXME: also handle diverging calls. - TerminatorKind::Call { destination, target: Some(target), .. } => { - // If there is a target for the call we also relate what flows into the destination here - // to entry to that successor. - let destination_ty = destination.ty(&body.local_decls, tcx); - let successor_location = Location { block: *target, statement_index: 0 }; - let successor_point = liveness.point_from_location(successor_location); - compute_constraint_direction( - tcx, - outlives_constraint, - &destination_ty, - current_point, - successor_point, - universal_regions, - ) - } - _ => { - // Typeck constraints guide loans between regions at the current point, so we do that in - // the general case, and liveness will take care of making them flow to the terminator's - // successors. - LocalizedOutlivesConstraint { - source: outlives_constraint.sup, - from: current_point, - target: outlives_constraint.sub, - to: current_point, - } - } - } -} - -/// For a given outlives constraint and CFG edge, returns the localized constraint with the -/// appropriate `from`-`to` direction. This is computed according to whether the constraint flows to -/// or from a free region in the given `value`, some kind of result for an effectful operation, like -/// the LHS of an assignment. -fn compute_constraint_direction<'tcx>( - tcx: TyCtxt<'tcx>, - outlives_constraint: &OutlivesConstraint<'tcx>, - value: &impl TypeVisitable>, - current_point: PointIndex, - successor_point: PointIndex, - universal_regions: &UniversalRegions<'tcx>, -) -> LocalizedOutlivesConstraint { - let mut to = current_point; - let mut from = current_point; - tcx.for_each_free_region(value, |region| { - let region = universal_regions.to_region_vid(region); - if region == outlives_constraint.sub { - // This constraint flows into the result, its effects start becoming visible on exit. - to = successor_point; - } else if region == outlives_constraint.sup { - // This constraint flows from the result, its effects start becoming visible on exit. 
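// [Illustrative aside, not part of the patch] The deleted closure around this
// point shifts one endpoint of the localized edge to the successor point,
// depending on whether the constraint's `sup` or `sub` region flows into the
// operation's result. The same decision in a standalone form (names assumed):
fn localize_edge(
    sup: u32,
    sub: u32,
    result_regions: &[u32], // free regions of the result, e.g. a call destination
    current: u32,
    successor: u32,
) -> ((u32, u32), (u32, u32)) {
    let (mut from, mut to) = (current, current);
    for &region in result_regions {
        if region == sub {
            to = successor; // flows *into* the result: effects visible on exit
        } else if region == sup {
            from = successor; // flows *from* the result: effects start on exit
        }
    }
    // Edge from `sup` at `from` to `sub` at `to`.
    ((sup, from), (sub, to))
}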
- from = successor_point; - } - }); - - LocalizedOutlivesConstraint { - source: outlives_constraint.sup, - from, - target: outlives_constraint.sub, - to, - } -} diff --git a/compiler/rustc_borrowck/src/type_check/liveness/mod.rs b/compiler/rustc_borrowck/src/type_check/liveness/mod.rs index ca1b850f7665d..442c37e26ec18 100644 --- a/compiler/rustc_borrowck/src/type_check/liveness/mod.rs +++ b/compiler/rustc_borrowck/src/type_check/liveness/mod.rs @@ -11,7 +11,7 @@ use tracing::debug; use super::TypeChecker; use crate::constraints::OutlivesConstraintSet; -use crate::polonius::PoloniusLivenessContext; +use crate::polonius::PoloniusContext; use crate::region_infer::values::LivenessValues; use crate::universal_regions::UniversalRegions; @@ -48,7 +48,7 @@ pub(super) fn generate<'tcx>( if typeck.tcx().sess.opts.unstable_opts.polonius.is_next_enabled() { let (_, boring_locals) = compute_relevant_live_locals(typeck.tcx(), &free_regions, typeck.body); - typeck.polonius_liveness.as_mut().unwrap().boring_nll_locals = + typeck.polonius_context.as_mut().unwrap().boring_nll_locals = boring_locals.into_iter().collect(); free_regions = typeck.universal_regions.universal_regions_iter().collect(); } @@ -63,7 +63,7 @@ pub(super) fn generate<'tcx>( typeck.tcx(), &mut typeck.constraints.liveness_constraints, &typeck.universal_regions, - &mut typeck.polonius_liveness, + &mut typeck.polonius_context, typeck.body, ); } @@ -140,11 +140,11 @@ fn record_regular_live_regions<'tcx>( tcx: TyCtxt<'tcx>, liveness_constraints: &mut LivenessValues, universal_regions: &UniversalRegions<'tcx>, - polonius_liveness: &mut Option, + polonius_context: &mut Option, body: &Body<'tcx>, ) { let mut visitor = - LiveVariablesVisitor { tcx, liveness_constraints, universal_regions, polonius_liveness }; + LiveVariablesVisitor { tcx, liveness_constraints, universal_regions, polonius_context }; for (bb, data) in body.basic_blocks.iter_enumerated() { visitor.visit_basic_block_data(bb, data); } @@ -155,7 +155,7 @@ struct LiveVariablesVisitor<'a, 'tcx> { tcx: TyCtxt<'tcx>, liveness_constraints: &'a mut LivenessValues, universal_regions: &'a UniversalRegions<'tcx>, - polonius_liveness: &'a mut Option, + polonius_context: &'a mut Option, } impl<'a, 'tcx> Visitor<'tcx> for LiveVariablesVisitor<'a, 'tcx> { @@ -207,8 +207,8 @@ impl<'a, 'tcx> LiveVariablesVisitor<'a, 'tcx> { }); // When using `-Zpolonius=next`, we record the variance of each live region. - if let Some(polonius_liveness) = self.polonius_liveness { - polonius_liveness.record_live_region_variance(self.tcx, self.universal_regions, value); + if let Some(polonius_context) = self.polonius_context { + polonius_context.record_live_region_variance(self.tcx, self.universal_regions, value); } } } diff --git a/compiler/rustc_borrowck/src/type_check/liveness/trace.rs b/compiler/rustc_borrowck/src/type_check/liveness/trace.rs index 7ac94020de03b..840210496eb44 100644 --- a/compiler/rustc_borrowck/src/type_check/liveness/trace.rs +++ b/compiler/rustc_borrowck/src/type_check/liveness/trace.rs @@ -622,8 +622,8 @@ impl<'tcx> LivenessContext<'_, '_, 'tcx> { }); // When using `-Zpolonius=next`, we record the variance of each live region. 
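
The `from`/`to` selection performed by `compute_constraint_direction` above can be modeled in isolation. In this sketch, plain integers replace the region and point index types, and a slice of regions replaces the `tcx.for_each_free_region` walk over the result value; all names are illustrative:

type RegionVid = u32;
type PointIndex = u32;

struct LocalizedConstraint {
    source: RegionVid,
    from: PointIndex,
    target: RegionVid,
    to: PointIndex,
}

fn direction(
    sup: RegionVid,
    sub: RegionVid,
    result_regions: &[RegionVid],
    current: PointIndex,
    successor: PointIndex,
) -> LocalizedConstraint {
    // Default: the constraint holds entirely at the current point.
    let (mut from, mut to) = (current, current);
    for &region in result_regions {
        if region == sub {
            // Flows into the result: effects become visible on exit.
            to = successor;
        } else if region == sup {
            // Flows from the result: effects become visible on exit.
            from = successor;
        }
    }
    LocalizedConstraint { source: sup, from, target: sub, to }
}

fn main() {
    let c = direction(7, 3, &[3], 10, 11);
    assert_eq!((c.source, c.from, c.target, c.to), (7, 10, 3, 11));
}
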
- if let Some(polonius_liveness) = typeck.polonius_liveness.as_mut() { - polonius_liveness.record_live_region_variance( + if let Some(polonius_context) = typeck.polonius_context.as_mut() { + polonius_context.record_live_region_variance( typeck.infcx.tcx, typeck.universal_regions, value, diff --git a/compiler/rustc_borrowck/src/type_check/mod.rs b/compiler/rustc_borrowck/src/type_check/mod.rs index 98b4e4d81b92b..7d34d7c88e629 100644 --- a/compiler/rustc_borrowck/src/type_check/mod.rs +++ b/compiler/rustc_borrowck/src/type_check/mod.rs @@ -42,8 +42,8 @@ use tracing::{debug, instrument, trace}; use crate::borrow_set::BorrowSet; use crate::constraints::{OutlivesConstraint, OutlivesConstraintSet}; use crate::diagnostics::UniverseInfo; +use crate::polonius::PoloniusContext; use crate::polonius::legacy::{PoloniusFacts, PoloniusLocationTable}; -use crate::polonius::{PoloniusContext, PoloniusLivenessContext}; use crate::region_infer::TypeTest; use crate::region_infer::values::{LivenessValues, PlaceholderIndex, PlaceholderIndices}; use crate::session_diagnostics::{MoveUnsized, SimdIntrinsicArgConst}; @@ -139,8 +139,8 @@ pub(crate) fn type_check<'tcx>( debug!(?normalized_inputs_and_output); - let polonius_liveness = if infcx.tcx.sess.opts.unstable_opts.polonius.is_next_enabled() { - Some(PoloniusLivenessContext::default()) + let polonius_context = if infcx.tcx.sess.opts.unstable_opts.polonius.is_next_enabled() { + Some(PoloniusContext::default()) } else { None }; @@ -162,7 +162,7 @@ pub(crate) fn type_check<'tcx>( borrow_set, constraints: &mut constraints, deferred_closure_requirements: &mut deferred_closure_requirements, - polonius_liveness, + polonius_context, }; typeck.check_user_type_annotations(); @@ -172,14 +172,7 @@ pub(crate) fn type_check<'tcx>( liveness::generate(&mut typeck, &location_map, move_data); - // We're done with typeck, we can finalize the polonius liveness context for region inference. - let polonius_context = typeck.polonius_liveness.take().map(|liveness_context| { - PoloniusContext::create_from_liveness( - liveness_context, - infcx.num_region_vars(), - typeck.constraints.liveness_constraints.points(), - ) - }); + let polonius_context = typeck.polonius_context; // In case type check encountered an error region, we suppress unhelpful extra // errors in by clearing out all outlives bounds that we may end up checking. @@ -238,7 +231,7 @@ struct TypeChecker<'a, 'tcx> { constraints: &'a mut MirTypeckRegionConstraints<'tcx>, deferred_closure_requirements: &'a mut DeferredClosureRequirements<'tcx>, /// When using `-Zpolonius=next`, the liveness helper data used to create polonius constraints. 
- polonius_liveness: Option, + polonius_context: Option, } /// Holder struct for passing results from MIR typeck to the rest of the non-lexical regions diff --git a/compiler/rustc_codegen_gcc/src/back/lto.rs b/compiler/rustc_codegen_gcc/src/back/lto.rs index dda777a540027..9a9040708ef89 100644 --- a/compiler/rustc_codegen_gcc/src/back/lto.rs +++ b/compiler/rustc_codegen_gcc/src/back/lto.rs @@ -30,6 +30,7 @@ use rustc_codegen_ssa::back::write::{CodegenContext, FatLtoInput, SharedEmitter} use rustc_codegen_ssa::traits::*; use rustc_codegen_ssa::{ModuleCodegen, ModuleKind, looks_like_rust_object_file}; use rustc_data_structures::memmap::Mmap; +use rustc_data_structures::profiling::SelfProfilerRef; use rustc_errors::{DiagCtxt, DiagCtxtHandle}; use rustc_log::tracing::info; use rustc_middle::bug; @@ -112,6 +113,7 @@ fn save_as_file(obj: &[u8], path: &Path) -> Result<(), LtoBitcodeFromRlib> { /// for further optimization. pub(crate) fn run_fat( cgcx: &CodegenContext, + prof: &SelfProfilerRef, shared_emitter: &SharedEmitter, each_linked_rlib_for_lto: &[PathBuf], modules: Vec>, @@ -123,6 +125,7 @@ pub(crate) fn run_fat( lto_data.symbols_below_threshold.iter().map(|c| c.as_ptr()).collect::>();*/ fat_lto( cgcx, + prof, dcx, modules, lto_data.upstream_modules, @@ -133,13 +136,14 @@ pub(crate) fn run_fat( fn fat_lto( cgcx: &CodegenContext, + prof: &SelfProfilerRef, _dcx: DiagCtxtHandle<'_>, modules: Vec>, mut serialized_modules: Vec<(SerializedModule, CString)>, tmp_path: TempDir, //symbols_below_threshold: &[String], ) -> ModuleCodegen { - let _timer = cgcx.prof.generic_activity("GCC_fat_lto_build_monolithic_module"); + let _timer = prof.generic_activity("GCC_fat_lto_build_monolithic_module"); info!("going for a fat lto"); // Sort out all our lists of incoming modules into two lists. @@ -223,8 +227,7 @@ fn fat_lto( // We add the object files and save in should_combine_object_files that we should combine // them into a single object file when compiling later. for (bc_decoded, name) in serialized_modules { - let _timer = cgcx - .prof + let _timer = prof .generic_activity_with_arg_recorder("GCC_fat_lto_link_module", |recorder| { recorder.record_arg(format!("{:?}", name)) }); @@ -284,6 +287,7 @@ impl ModuleBufferMethods for ModuleBuffer { /// can simply be copied over from the incr. comp. cache. pub(crate) fn run_thin( cgcx: &CodegenContext, + prof: &SelfProfilerRef, dcx: DiagCtxtHandle<'_>, each_linked_rlib_for_lto: &[PathBuf], modules: Vec<(String, ThinBuffer)>, @@ -298,6 +302,7 @@ pub(crate) fn run_thin( } thin_lto( cgcx, + prof, dcx, modules, lto_data.upstream_modules, @@ -345,7 +350,8 @@ pub(crate) fn prepare_thin(module: ModuleCodegen) -> (String, ThinBu /// all of the `LtoModuleCodegen` units returned below and destroyed once /// they all go out of scope. 
fn thin_lto( - cgcx: &CodegenContext, + _cgcx: &CodegenContext, + prof: &SelfProfilerRef, _dcx: DiagCtxtHandle<'_>, modules: Vec<(String, ThinBuffer)>, serialized_modules: Vec<(SerializedModule, CString)>, @@ -353,7 +359,7 @@ fn thin_lto( cached_modules: Vec<(SerializedModule, WorkProduct)>, //_symbols_below_threshold: &[String], ) -> (Vec>, Vec) { - let _timer = cgcx.prof.generic_activity("LLVM_thin_lto_global_analysis"); + let _timer = prof.generic_activity("LLVM_thin_lto_global_analysis"); info!("going for that thin, thin LTO"); /*let green_modules: FxHashMap<_, _> = diff --git a/compiler/rustc_codegen_gcc/src/back/write.rs b/compiler/rustc_codegen_gcc/src/back/write.rs index 5e96447234758..ddf13558027bd 100644 --- a/compiler/rustc_codegen_gcc/src/back/write.rs +++ b/compiler/rustc_codegen_gcc/src/back/write.rs @@ -6,6 +6,7 @@ use rustc_codegen_ssa::back::write::{ BitcodeSection, CodegenContext, EmitObj, ModuleConfig, SharedEmitter, }; use rustc_codegen_ssa::{CompiledModule, ModuleCodegen}; +use rustc_data_structures::profiling::SelfProfilerRef; use rustc_errors::DiagCtxt; use rustc_fs_util::link_or_copy; use rustc_log::tracing::debug; @@ -18,6 +19,7 @@ use crate::{GccContext, LtoMode}; pub(crate) fn codegen( cgcx: &CodegenContext, + prof: &SelfProfilerRef, shared_emitter: &SharedEmitter, module: ModuleCodegen, config: &ModuleConfig, @@ -25,7 +27,7 @@ pub(crate) fn codegen( let dcx = DiagCtxt::new(Box::new(shared_emitter.clone())); let dcx = dcx.handle(); - let _timer = cgcx.prof.generic_activity_with_arg("GCC_module_codegen", &*module.name); + let _timer = prof.generic_activity_with_arg("GCC_module_codegen", &*module.name); { let context = &module.module_llvm.context; @@ -44,9 +46,8 @@ pub(crate) fn codegen( ); if config.bitcode_needed() { - let _timer = cgcx - .prof - .generic_activity_with_arg("GCC_module_codegen_make_bitcode", &*module.name); + let _timer = + prof.generic_activity_with_arg("GCC_module_codegen_make_bitcode", &*module.name); // TODO(antoyo) /*if let Some(bitcode_filename) = bc_out.file_name() { @@ -58,8 +59,7 @@ pub(crate) fn codegen( }*/ if config.emit_bc || config.emit_obj == EmitObj::Bitcode { - let _timer = cgcx - .prof + let _timer = prof .generic_activity_with_arg("GCC_module_codegen_emit_bitcode", &*module.name); if lto_supported { context.add_command_line_option("-flto=auto"); @@ -70,8 +70,7 @@ pub(crate) fn codegen( } if config.emit_obj == EmitObj::ObjectCode(BitcodeSection::Full) { - let _timer = cgcx - .prof + let _timer = prof .generic_activity_with_arg("GCC_module_codegen_embed_bitcode", &*module.name); if lto_supported { // TODO(antoyo): maybe we should call embed_bitcode to have the proper iOS fixes? 
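
All of the `prof.generic_activity*` call sites in this diff rely on the same RAII pattern: the returned guard is bound to `_timer` so the activity is recorded when the guard drops at the end of the scope. A hypothetical stand-in (not the real `SelfProfilerRef` API) that behaves the same way:

use std::time::Instant;

// Guard that measures a scope and reports the elapsed time on drop.
struct ActivityTimer {
    label: &'static str,
    start: Instant,
}

impl Drop for ActivityTimer {
    fn drop(&mut self) {
        eprintln!("{} took {:?}", self.label, self.start.elapsed());
    }
}

fn generic_activity(label: &'static str) -> ActivityTimer {
    ActivityTimer { label, start: Instant::now() }
}

fn main() {
    let _timer = generic_activity("GCC_module_codegen");
    // ... the measured work happens here; `_timer` drops at scope end ...
}

Binding to `_timer` rather than `_` matters: `_` would drop the guard immediately and record an empty interval.
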
@@ -98,7 +97,7 @@ pub(crate) fn codegen( if config.emit_asm { let _timer = - cgcx.prof.generic_activity_with_arg("GCC_module_codegen_emit_asm", &*module.name); + prof.generic_activity_with_arg("GCC_module_codegen_emit_asm", &*module.name); let path = cgcx.output_filenames.temp_path_for_cgu( OutputType::Assembly, &module.name, @@ -109,9 +108,8 @@ pub(crate) fn codegen( match config.emit_obj { EmitObj::ObjectCode(_) => { - let _timer = cgcx - .prof - .generic_activity_with_arg("GCC_module_codegen_emit_obj", &*module.name); + let _timer = + prof.generic_activity_with_arg("GCC_module_codegen_emit_obj", &*module.name); if env::var("CG_GCCJIT_DUMP_MODULE_NAMES").as_deref() == Ok("1") { println!("Module {}", module.name); } diff --git a/compiler/rustc_codegen_gcc/src/lib.rs b/compiler/rustc_codegen_gcc/src/lib.rs index d490650c37f76..24a065d69ecac 100644 --- a/compiler/rustc_codegen_gcc/src/lib.rs +++ b/compiler/rustc_codegen_gcc/src/lib.rs @@ -90,6 +90,7 @@ use rustc_codegen_ssa::target_features::cfg_target_feature; use rustc_codegen_ssa::traits::{CodegenBackend, ExtraBackendMethods, WriteBackendMethods}; use rustc_codegen_ssa::{CodegenResults, CompiledModule, ModuleCodegen, TargetConfig}; use rustc_data_structures::fx::FxIndexMap; +use rustc_data_structures::profiling::SelfProfilerRef; use rustc_data_structures::sync::IntoDynSyncSend; use rustc_errors::DiagCtxtHandle; use rustc_middle::dep_graph::{WorkProduct, WorkProductId}; @@ -427,6 +428,7 @@ impl WriteBackendMethods for GccCodegenBackend { fn run_and_optimize_fat_lto( cgcx: &CodegenContext, + prof: &SelfProfilerRef, shared_emitter: &SharedEmitter, _tm_factory: TargetMachineFactoryFn, // FIXME(bjorn3): Limit LTO exports to these symbols @@ -434,11 +436,12 @@ impl WriteBackendMethods for GccCodegenBackend { each_linked_rlib_for_lto: &[PathBuf], modules: Vec>, ) -> ModuleCodegen { - back::lto::run_fat(cgcx, shared_emitter, each_linked_rlib_for_lto, modules) + back::lto::run_fat(cgcx, prof, shared_emitter, each_linked_rlib_for_lto, modules) } fn run_thin_lto( cgcx: &CodegenContext, + prof: &SelfProfilerRef, dcx: DiagCtxtHandle<'_>, // FIXME(bjorn3): Limit LTO exports to these symbols _exported_symbols_for_lto: &[String], @@ -446,7 +449,7 @@ impl WriteBackendMethods for GccCodegenBackend { modules: Vec<(String, Self::ThinBuffer)>, cached_modules: Vec<(SerializedModule, WorkProduct)>, ) -> (Vec>, Vec) { - back::lto::run_thin(cgcx, dcx, each_linked_rlib_for_lto, modules, cached_modules) + back::lto::run_thin(cgcx, prof, dcx, each_linked_rlib_for_lto, modules, cached_modules) } fn print_pass_timings(&self) { @@ -459,6 +462,7 @@ impl WriteBackendMethods for GccCodegenBackend { fn optimize( _cgcx: &CodegenContext, + _prof: &SelfProfilerRef, _shared_emitter: &SharedEmitter, module: &mut ModuleCodegen, config: &ModuleConfig, @@ -468,6 +472,7 @@ impl WriteBackendMethods for GccCodegenBackend { fn optimize_thin( cgcx: &CodegenContext, + _prof: &SelfProfilerRef, _shared_emitter: &SharedEmitter, _tm_factory: TargetMachineFactoryFn, thin: ThinModule, @@ -477,11 +482,12 @@ impl WriteBackendMethods for GccCodegenBackend { fn codegen( cgcx: &CodegenContext, + prof: &SelfProfilerRef, shared_emitter: &SharedEmitter, module: ModuleCodegen, config: &ModuleConfig, ) -> CompiledModule { - back::write::codegen(cgcx, shared_emitter, module, config) + back::write::codegen(cgcx, prof, shared_emitter, module, config) } fn prepare_thin(module: ModuleCodegen) -> (String, Self::ThinBuffer) { diff --git a/compiler/rustc_codegen_llvm/src/attributes.rs 
b/compiler/rustc_codegen_llvm/src/attributes.rs index f5eb9c10db996..b5ab26aea4922 100644 --- a/compiler/rustc_codegen_llvm/src/attributes.rs +++ b/compiler/rustc_codegen_llvm/src/attributes.rs @@ -301,17 +301,6 @@ fn stackprotector_attr<'ll>(cx: &SimpleCx<'ll>, sess: &Session) -> Option<&'ll A Some(sspattr.create_attr(cx.llcx)) } -fn backchain_attr<'ll>(cx: &SimpleCx<'ll>, sess: &Session) -> Option<&'ll Attribute> { - if sess.target.arch != Arch::S390x { - return None; - } - - let requested_features = sess.opts.cg.target_feature.split(','); - let found_positive = requested_features.clone().any(|r| r == "+backchain"); - - if found_positive { Some(llvm::CreateAttrString(cx.llcx, "backchain")) } else { None } -} - pub(crate) fn target_cpu_attr<'ll>(cx: &SimpleCx<'ll>, sess: &Session) -> &'ll Attribute { let target_cpu = llvm_util::target_cpu(sess); llvm::CreateAttrStringValue(cx.llcx, "target-cpu", target_cpu) @@ -530,9 +519,6 @@ pub(crate) fn llfn_attrs_from_instance<'ll, 'tcx>( if let Some(align) = codegen_fn_attrs.alignment { llvm::set_alignment(llfn, align); } - if let Some(backchain) = backchain_attr(cx, sess) { - to_add.push(backchain); - } to_add.extend(patchable_function_entry_attrs( cx, sess, diff --git a/compiler/rustc_codegen_llvm/src/back/lto.rs b/compiler/rustc_codegen_llvm/src/back/lto.rs index 5bd856a3ac435..5d272d10930b1 100644 --- a/compiler/rustc_codegen_llvm/src/back/lto.rs +++ b/compiler/rustc_codegen_llvm/src/back/lto.rs @@ -16,6 +16,7 @@ use rustc_codegen_ssa::traits::*; use rustc_codegen_ssa::{ModuleCodegen, ModuleKind, looks_like_rust_object_file}; use rustc_data_structures::fx::FxHashMap; use rustc_data_structures::memmap::Mmap; +use rustc_data_structures::profiling::SelfProfilerRef; use rustc_errors::{DiagCtxt, DiagCtxtHandle}; use rustc_hir::attrs::SanitizerSet; use rustc_middle::bug; @@ -152,6 +153,7 @@ fn get_bitcode_slice_from_object_data<'a>( /// for further optimization. pub(crate) fn run_fat( cgcx: &CodegenContext, + prof: &SelfProfilerRef, shared_emitter: &SharedEmitter, tm_factory: TargetMachineFactoryFn, exported_symbols_for_lto: &[String], @@ -166,6 +168,7 @@ pub(crate) fn run_fat( symbols_below_threshold.iter().map(|c| c.as_ptr()).collect::>(); fat_lto( cgcx, + prof, dcx, shared_emitter, tm_factory, @@ -180,6 +183,7 @@ pub(crate) fn run_fat( /// can simply be copied over from the incr. comp. cache. pub(crate) fn run_thin( cgcx: &CodegenContext, + prof: &SelfProfilerRef, dcx: DiagCtxtHandle<'_>, exported_symbols_for_lto: &[String], each_linked_rlib_for_lto: &[PathBuf], @@ -196,7 +200,7 @@ pub(crate) fn run_thin( is deferred to the linker" ); } - thin_lto(cgcx, dcx, modules, upstream_modules, cached_modules, &symbols_below_threshold) + thin_lto(cgcx, prof, dcx, modules, upstream_modules, cached_modules, &symbols_below_threshold) } pub(crate) fn prepare_thin(module: ModuleCodegen) -> (String, ThinBuffer) { @@ -207,6 +211,7 @@ pub(crate) fn prepare_thin(module: ModuleCodegen) -> (String, ThinBu fn fat_lto( cgcx: &CodegenContext, + prof: &SelfProfilerRef, dcx: DiagCtxtHandle<'_>, shared_emitter: &SharedEmitter, tm_factory: TargetMachineFactoryFn, @@ -214,7 +219,7 @@ fn fat_lto( mut serialized_modules: Vec<(SerializedModule, CString)>, symbols_below_threshold: &[*const libc::c_char], ) -> ModuleCodegen { - let _timer = cgcx.prof.generic_activity("LLVM_fat_lto_build_monolithic_module"); + let _timer = prof.generic_activity("LLVM_fat_lto_build_monolithic_module"); info!("going for a fat lto"); // Sort out all our lists of incoming modules into two lists. 
@@ -303,8 +308,7 @@ fn fat_lto( // above, this is all mostly handled in C++. let mut linker = Linker::new(llmod); for (bc_decoded, name) in serialized_modules { - let _timer = cgcx - .prof + let _timer = prof .generic_activity_with_arg_recorder("LLVM_fat_lto_link_module", |recorder| { recorder.record_arg(format!("{name:?}")) }); @@ -394,13 +398,14 @@ impl Drop for Linker<'_> { /// they all go out of scope. fn thin_lto( cgcx: &CodegenContext, + prof: &SelfProfilerRef, dcx: DiagCtxtHandle<'_>, modules: Vec<(String, ThinBuffer)>, serialized_modules: Vec<(SerializedModule, CString)>, cached_modules: Vec<(SerializedModule, WorkProduct)>, symbols_below_threshold: &[*const libc::c_char], ) -> (Vec>, Vec) { - let _timer = cgcx.prof.generic_activity("LLVM_thin_lto_global_analysis"); + let _timer = prof.generic_activity("LLVM_thin_lto_global_analysis"); unsafe { info!("going for that thin, thin LTO"); @@ -598,11 +603,12 @@ pub(crate) fn enable_autodiff_settings(ad: &[config::AutoDiff]) { pub(crate) fn run_pass_manager( cgcx: &CodegenContext, + prof: &SelfProfilerRef, dcx: DiagCtxtHandle<'_>, module: &mut ModuleCodegen, thin: bool, ) { - let _timer = cgcx.prof.generic_activity_with_arg("LLVM_lto_optimize", &*module.name); + let _timer = prof.generic_activity_with_arg("LLVM_lto_optimize", &*module.name); let config = &cgcx.module_config; // Now we have one massive module inside of llmod. Time to run the @@ -628,7 +634,7 @@ pub(crate) fn run_pass_manager( }; unsafe { - write::llvm_optimize(cgcx, dcx, module, None, config, opt_level, opt_stage, stage); + write::llvm_optimize(cgcx, prof, dcx, module, None, config, opt_level, opt_stage, stage); } if cfg!(feature = "llvm_enzyme") && enable_ad && !thin { @@ -636,7 +642,9 @@ pub(crate) fn run_pass_manager( let stage = write::AutodiffStage::PostAD; if !config.autodiff.contains(&config::AutoDiff::NoPostopt) { unsafe { - write::llvm_optimize(cgcx, dcx, module, None, config, opt_level, opt_stage, stage); + write::llvm_optimize( + cgcx, prof, dcx, module, None, config, opt_level, opt_stage, stage, + ); } } @@ -739,6 +747,7 @@ impl Drop for ThinBuffer { pub(crate) fn optimize_thin_module( cgcx: &CodegenContext, + prof: &SelfProfilerRef, shared_emitter: &SharedEmitter, tm_factory: TargetMachineFactoryFn, thin_module: ThinModule, @@ -773,8 +782,7 @@ pub(crate) fn optimize_thin_module( // You can find some more comments about these functions in the LLVM // bindings we've got (currently `PassWrapper.cpp`) { - let _timer = - cgcx.prof.generic_activity_with_arg("LLVM_thin_lto_rename", thin_module.name()); + let _timer = prof.generic_activity_with_arg("LLVM_thin_lto_rename", thin_module.name()); unsafe { llvm::LLVMRustPrepareThinLTORename(thin_module.shared.data.0, llmod, target.raw()) }; @@ -782,9 +790,8 @@ pub(crate) fn optimize_thin_module( } { - let _timer = cgcx - .prof - .generic_activity_with_arg("LLVM_thin_lto_resolve_weak", thin_module.name()); + let _timer = + prof.generic_activity_with_arg("LLVM_thin_lto_resolve_weak", thin_module.name()); if unsafe { !llvm::LLVMRustPrepareThinLTOResolveWeak(thin_module.shared.data.0, llmod) } { write::llvm_err(dcx, LlvmError::PrepareThinLtoModule); @@ -793,9 +800,8 @@ pub(crate) fn optimize_thin_module( } { - let _timer = cgcx - .prof - .generic_activity_with_arg("LLVM_thin_lto_internalize", thin_module.name()); + let _timer = + prof.generic_activity_with_arg("LLVM_thin_lto_internalize", thin_module.name()); if unsafe { !llvm::LLVMRustPrepareThinLTOInternalize(thin_module.shared.data.0, llmod) } { write::llvm_err(dcx, 
LlvmError::PrepareThinLtoModule); @@ -804,8 +810,7 @@ pub(crate) fn optimize_thin_module( } { - let _timer = - cgcx.prof.generic_activity_with_arg("LLVM_thin_lto_import", thin_module.name()); + let _timer = prof.generic_activity_with_arg("LLVM_thin_lto_import", thin_module.name()); if unsafe { !llvm::LLVMRustPrepareThinLTOImport(thin_module.shared.data.0, llmod, target.raw()) } { @@ -821,7 +826,7 @@ pub(crate) fn optimize_thin_module( // little differently. { info!("running thin lto passes over {}", module.name); - run_pass_manager(cgcx, dcx, &mut module, true); + run_pass_manager(cgcx, prof, dcx, &mut module, true); save_temp_bitcode(cgcx, &module, "thin-lto-after-pm"); } } diff --git a/compiler/rustc_codegen_llvm/src/back/write.rs b/compiler/rustc_codegen_llvm/src/back/write.rs index 2bb5b5db5e485..3e3ccd39e674c 100644 --- a/compiler/rustc_codegen_llvm/src/back/write.rs +++ b/compiler/rustc_codegen_llvm/src/back/write.rs @@ -560,6 +560,7 @@ pub(crate) enum AutodiffStage { pub(crate) unsafe fn llvm_optimize( cgcx: &CodegenContext, + prof: &SelfProfilerRef, dcx: DiagCtxtHandle<'_>, module: &ModuleCodegen, thin_lto_buffer: Option<&mut *mut llvm::ThinLTOBuffer>, @@ -756,10 +757,9 @@ pub(crate) unsafe fn llvm_optimize( } } - let mut llvm_profiler = cgcx - .prof + let mut llvm_profiler = prof .llvm_recording_enabled() - .then(|| LlvmSelfProfiler::new(cgcx.prof.get_self_profiler().unwrap())); + .then(|| LlvmSelfProfiler::new(prof.get_self_profiler().unwrap())); let llvm_selfprofiler = llvm_profiler.as_mut().map(|s| s as *mut _ as *mut c_void).unwrap_or(std::ptr::null_mut()); @@ -878,7 +878,7 @@ pub(crate) unsafe fn llvm_optimize( &out_obj, None, llvm::FileType::ObjectFile, - &cgcx.prof, + prof, true, ); // We ignore cgcx.save_temps here and unconditionally always keep our `host.out` artifact. @@ -892,11 +892,12 @@ pub(crate) unsafe fn llvm_optimize( // Unsafe due to LLVM calls. 
pub(crate) fn optimize( cgcx: &CodegenContext, + prof: &SelfProfilerRef, shared_emitter: &SharedEmitter, module: &mut ModuleCodegen, config: &ModuleConfig, ) { - let _timer = cgcx.prof.generic_activity_with_arg("LLVM_module_optimize", &*module.name); + let _timer = prof.generic_activity_with_arg("LLVM_module_optimize", &*module.name); let dcx = DiagCtxt::new(Box::new(shared_emitter.clone())); let dcx = dcx.handle(); @@ -943,6 +944,7 @@ pub(crate) fn optimize( unsafe { llvm_optimize( cgcx, + prof, dcx, module, thin_lto_buffer.as_mut(), @@ -964,12 +966,12 @@ pub(crate) fn optimize( && let Some(thin_link_bitcode_filename) = bc_summary_out.file_name() { let summary_data = thin_lto_buffer.thin_link_data(); - cgcx.prof.artifact_size( + prof.artifact_size( "llvm_bitcode_summary", thin_link_bitcode_filename.to_string_lossy(), summary_data.len() as u64, ); - let _timer = cgcx.prof.generic_activity_with_arg( + let _timer = prof.generic_activity_with_arg( "LLVM_module_codegen_emit_bitcode_summary", &*module.name, ); @@ -983,11 +985,12 @@ pub(crate) fn optimize( pub(crate) fn codegen( cgcx: &CodegenContext, + prof: &SelfProfilerRef, shared_emitter: &SharedEmitter, module: ModuleCodegen, config: &ModuleConfig, ) -> CompiledModule { - let _timer = cgcx.prof.generic_activity_with_arg("LLVM_module_codegen", &*module.name); + let _timer = prof.generic_activity_with_arg("LLVM_module_codegen", &*module.name); let dcx = DiagCtxt::new(Box::new(shared_emitter.clone())); let dcx = dcx.handle(); @@ -1026,18 +1029,17 @@ pub(crate) fn codegen( if config.bitcode_needed() { if config.emit_bc || config.emit_obj == EmitObj::Bitcode { let thin = { - let _timer = cgcx.prof.generic_activity_with_arg( + let _timer = prof.generic_activity_with_arg( "LLVM_module_codegen_make_bitcode", &*module.name, ); ThinBuffer::new(llmod, config.emit_thin_lto) }; let data = thin.data(); - let _timer = cgcx - .prof + let _timer = prof .generic_activity_with_arg("LLVM_module_codegen_emit_bitcode", &*module.name); if let Some(bitcode_filename) = bc_out.file_name() { - cgcx.prof.artifact_size( + prof.artifact_size( "llvm_bitcode", bitcode_filename.to_string_lossy(), data.len() as u64, @@ -1049,8 +1051,7 @@ pub(crate) fn codegen( } if config.embed_bitcode() && module.kind == ModuleKind::Regular { - let _timer = cgcx - .prof + let _timer = prof .generic_activity_with_arg("LLVM_module_codegen_embed_bitcode", &*module.name); let thin_bc = module.thin_lto_buffer.as_deref().expect("cannot find embedded bitcode"); @@ -1060,7 +1061,7 @@ pub(crate) fn codegen( if config.emit_ir { let _timer = - cgcx.prof.generic_activity_with_arg("LLVM_module_codegen_emit_ir", &*module.name); + prof.generic_activity_with_arg("LLVM_module_codegen_emit_ir", &*module.name); let out = cgcx.output_filenames.temp_path_for_cgu( OutputType::LlvmAssembly, &module.name, @@ -1098,7 +1099,7 @@ pub(crate) fn codegen( unsafe { llvm::LLVMRustPrintModule(llmod, out_c.as_ptr(), demangle_callback) }; if result == llvm::LLVMRustResult::Success { - record_artifact_size(&cgcx.prof, "llvm_ir", &out); + record_artifact_size(prof, "llvm_ir", &out); } result @@ -1108,7 +1109,7 @@ pub(crate) fn codegen( if config.emit_asm { let _timer = - cgcx.prof.generic_activity_with_arg("LLVM_module_codegen_emit_asm", &*module.name); + prof.generic_activity_with_arg("LLVM_module_codegen_emit_asm", &*module.name); let path = cgcx.output_filenames.temp_path_for_cgu( OutputType::Assembly, &module.name, @@ -1132,16 +1133,15 @@ pub(crate) fn codegen( &path, None, llvm::FileType::AssemblyFile, - &cgcx.prof, + 
prof, config.verify_llvm_ir, ); } match config.emit_obj { EmitObj::ObjectCode(_) => { - let _timer = cgcx - .prof - .generic_activity_with_arg("LLVM_module_codegen_emit_obj", &*module.name); + let _timer = + prof.generic_activity_with_arg("LLVM_module_codegen_emit_obj", &*module.name); let dwo_out = cgcx .output_filenames @@ -1168,7 +1168,7 @@ pub(crate) fn codegen( &obj_out, dwo_out, llvm::FileType::ObjectFile, - &cgcx.prof, + prof, config.verify_llvm_ir, ); } @@ -1188,7 +1188,7 @@ pub(crate) fn codegen( EmitObj::None => {} } - record_llvm_cgu_instructions_stats(&cgcx.prof, &module.name, llmod); + record_llvm_cgu_instructions_stats(prof, &module.name, llmod); } // `.dwo` files are only emitted if: diff --git a/compiler/rustc_codegen_llvm/src/lib.rs b/compiler/rustc_codegen_llvm/src/lib.rs index 577c8a98b18c6..c92de64a3349f 100644 --- a/compiler/rustc_codegen_llvm/src/lib.rs +++ b/compiler/rustc_codegen_llvm/src/lib.rs @@ -35,6 +35,7 @@ use rustc_codegen_ssa::back::write::{ use rustc_codegen_ssa::traits::*; use rustc_codegen_ssa::{CodegenResults, CompiledModule, ModuleCodegen, TargetConfig}; use rustc_data_structures::fx::FxIndexMap; +use rustc_data_structures::profiling::SelfProfilerRef; use rustc_errors::{DiagCtxt, DiagCtxtHandle}; use rustc_metadata::EncodedMetadata; use rustc_middle::dep_graph::{WorkProduct, WorkProductId}; @@ -163,6 +164,7 @@ impl WriteBackendMethods for LlvmCodegenBackend { } fn run_and_optimize_fat_lto( cgcx: &CodegenContext, + prof: &SelfProfilerRef, shared_emitter: &SharedEmitter, tm_factory: TargetMachineFactoryFn, exported_symbols_for_lto: &[String], @@ -171,6 +173,7 @@ impl WriteBackendMethods for LlvmCodegenBackend { ) -> ModuleCodegen { let mut module = back::lto::run_fat( cgcx, + prof, shared_emitter, tm_factory, exported_symbols_for_lto, @@ -180,12 +183,13 @@ impl WriteBackendMethods for LlvmCodegenBackend { let dcx = DiagCtxt::new(Box::new(shared_emitter.clone())); let dcx = dcx.handle(); - back::lto::run_pass_manager(cgcx, dcx, &mut module, false); + back::lto::run_pass_manager(cgcx, prof, dcx, &mut module, false); module } fn run_thin_lto( cgcx: &CodegenContext, + prof: &SelfProfilerRef, dcx: DiagCtxtHandle<'_>, exported_symbols_for_lto: &[String], each_linked_rlib_for_lto: &[PathBuf], @@ -194,6 +198,7 @@ impl WriteBackendMethods for LlvmCodegenBackend { ) -> (Vec>, Vec) { back::lto::run_thin( cgcx, + prof, dcx, exported_symbols_for_lto, each_linked_rlib_for_lto, @@ -203,27 +208,30 @@ impl WriteBackendMethods for LlvmCodegenBackend { } fn optimize( cgcx: &CodegenContext, + prof: &SelfProfilerRef, shared_emitter: &SharedEmitter, module: &mut ModuleCodegen, config: &ModuleConfig, ) { - back::write::optimize(cgcx, shared_emitter, module, config) + back::write::optimize(cgcx, prof, shared_emitter, module, config) } fn optimize_thin( cgcx: &CodegenContext, + prof: &SelfProfilerRef, shared_emitter: &SharedEmitter, tm_factory: TargetMachineFactoryFn, thin: ThinModule, ) -> ModuleCodegen { - back::lto::optimize_thin_module(cgcx, shared_emitter, tm_factory, thin) + back::lto::optimize_thin_module(cgcx, prof, shared_emitter, tm_factory, thin) } fn codegen( cgcx: &CodegenContext, + prof: &SelfProfilerRef, shared_emitter: &SharedEmitter, module: ModuleCodegen, config: &ModuleConfig, ) -> CompiledModule { - back::write::codegen(cgcx, shared_emitter, module, config) + back::write::codegen(cgcx, prof, shared_emitter, module, config) } fn prepare_thin(module: ModuleCodegen) -> (String, Self::ThinBuffer) { back::lto::prepare_thin(module) diff --git 
a/compiler/rustc_codegen_ssa/src/back/write.rs b/compiler/rustc_codegen_ssa/src/back/write.rs index c814f8db521cf..87a043fbdf245 100644 --- a/compiler/rustc_codegen_ssa/src/back/write.rs +++ b/compiler/rustc_codegen_ssa/src/back/write.rs @@ -23,6 +23,7 @@ use rustc_hir::find_attr; use rustc_incremental::{ copy_cgu_workproduct_to_incr_comp_cache_dir, in_incr_comp_dir, in_incr_comp_dir_sess, }; +use rustc_macros::{Decodable, Encodable}; use rustc_metadata::fs::copy_to_stdout; use rustc_middle::bug; use rustc_middle::dep_graph::{WorkProduct, WorkProductId}; @@ -50,7 +51,7 @@ use crate::{ const PRE_LTO_BC_EXT: &str = "pre-lto.bc"; /// What kind of object file to emit. -#[derive(Clone, Copy, PartialEq)] +#[derive(Clone, Copy, PartialEq, Encodable, Decodable)] pub enum EmitObj { // No object file. None, @@ -64,7 +65,7 @@ pub enum EmitObj { } /// What kind of llvm bitcode section to embed in an object file. -#[derive(Clone, Copy, PartialEq)] +#[derive(Clone, Copy, PartialEq, Encodable, Decodable)] pub enum BitcodeSection { // No bitcode section. None, @@ -74,6 +75,7 @@ pub enum BitcodeSection { } /// Module-specific configuration for `optimize_and_codegen`. +#[derive(Encodable, Decodable)] pub struct ModuleConfig { /// Names of additional optimization passes to run. pub passes: Vec, @@ -319,10 +321,9 @@ pub type TargetMachineFactoryFn = Arc< >; /// Additional resources used by optimize_and_codegen (not module specific) -#[derive(Clone)] +#[derive(Clone, Encodable, Decodable)] pub struct CodegenContext { // Resources needed when running LTO - pub prof: SelfProfilerRef, pub lto: Lto, pub use_linker_plugin_lto: bool, pub dylib_lto: bool, @@ -363,16 +364,18 @@ pub struct CodegenContext { fn generate_thin_lto_work( cgcx: &CodegenContext, + prof: &SelfProfilerRef, dcx: DiagCtxtHandle<'_>, exported_symbols_for_lto: &[String], each_linked_rlib_for_lto: &[PathBuf], needs_thin_lto: Vec<(String, B::ThinBuffer)>, import_only_modules: Vec<(SerializedModule, WorkProduct)>, ) -> Vec<(ThinLtoWorkItem, u64)> { - let _prof_timer = cgcx.prof.generic_activity("codegen_thin_generate_lto_work"); + let _prof_timer = prof.generic_activity("codegen_thin_generate_lto_work"); let (lto_modules, copy_jobs) = B::run_thin_lto( cgcx, + prof, dcx, exported_symbols_for_lto, each_linked_rlib_for_lto, @@ -841,12 +844,13 @@ pub(crate) fn compute_per_cgu_lto_type( fn execute_optimize_work_item( cgcx: &CodegenContext, + prof: &SelfProfilerRef, shared_emitter: SharedEmitter, mut module: ModuleCodegen, ) -> WorkItemResult { - let _timer = cgcx.prof.generic_activity_with_arg("codegen_module_optimize", &*module.name); + let _timer = prof.generic_activity_with_arg("codegen_module_optimize", &*module.name); - B::optimize(cgcx, &shared_emitter, &mut module, &cgcx.module_config); + B::optimize(cgcx, prof, &shared_emitter, &mut module, &cgcx.module_config); // After we've done the initial round of optimizations we need to // decide whether to synchronously codegen this module or ship it @@ -867,7 +871,7 @@ fn execute_optimize_work_item( match lto_type { ComputedLtoType::No => { - let module = B::codegen(cgcx, &shared_emitter, module, &cgcx.module_config); + let module = B::codegen(cgcx, &prof, &shared_emitter, module, &cgcx.module_config); WorkItemResult::Finished(module) } ComputedLtoType::Thin => { @@ -897,12 +901,12 @@ fn execute_optimize_work_item( fn execute_copy_from_cache_work_item( cgcx: &CodegenContext, + prof: &SelfProfilerRef, shared_emitter: SharedEmitter, module: CachedModuleCodegen, ) -> CompiledModule { - let _timer = cgcx - 
.prof - .generic_activity_with_arg("codegen_copy_artifacts_from_incr_cache", &*module.name); + let _timer = + prof.generic_activity_with_arg("codegen_copy_artifacts_from_incr_cache", &*module.name); let dcx = DiagCtxt::new(Box::new(shared_emitter)); let dcx = dcx.handle(); @@ -985,6 +989,7 @@ fn execute_copy_from_cache_work_item( fn do_fat_lto( cgcx: &CodegenContext, + prof: &SelfProfilerRef, shared_emitter: SharedEmitter, tm_factory: TargetMachineFactoryFn, exported_symbols_for_lto: &[String], @@ -992,7 +997,7 @@ fn do_fat_lto( mut needs_fat_lto: Vec>, import_only_modules: Vec<(SerializedModule, WorkProduct)>, ) -> CompiledModule { - let _timer = cgcx.prof.verbose_generic_activity("LLVM_fatlto"); + let _timer = prof.verbose_generic_activity("LLVM_fatlto"); let dcx = DiagCtxt::new(Box::new(shared_emitter.clone())); let dcx = dcx.handle(); @@ -1005,17 +1010,19 @@ fn do_fat_lto( let module = B::run_and_optimize_fat_lto( cgcx, + prof, &shared_emitter, tm_factory, exported_symbols_for_lto, each_linked_rlib_for_lto, needs_fat_lto, ); - B::codegen(cgcx, &shared_emitter, module, &cgcx.module_config) + B::codegen(cgcx, prof, &shared_emitter, module, &cgcx.module_config) } -fn do_thin_lto<'a, B: ExtraBackendMethods>( - cgcx: &'a CodegenContext, +fn do_thin_lto( + cgcx: &CodegenContext, + prof: &SelfProfilerRef, shared_emitter: SharedEmitter, tm_factory: TargetMachineFactoryFn, exported_symbols_for_lto: Arc>, @@ -1026,7 +1033,7 @@ fn do_thin_lto<'a, B: ExtraBackendMethods>( WorkProduct, )>, ) -> Vec { - let _timer = cgcx.prof.verbose_generic_activity("LLVM_thinlto"); + let _timer = prof.verbose_generic_activity("LLVM_thinlto"); let dcx = DiagCtxt::new(Box::new(shared_emitter.clone())); let dcx = dcx.handle(); @@ -1056,6 +1063,7 @@ fn do_thin_lto<'a, B: ExtraBackendMethods>( // we don't worry about tokens. for (work, cost) in generate_thin_lto_work::( cgcx, + prof, dcx, &exported_symbols_for_lto, &each_linked_rlib_for_lto, @@ -1100,6 +1108,7 @@ fn do_thin_lto<'a, B: ExtraBackendMethods>( { spawn_thin_lto_work( &cgcx, + prof, shared_emitter.clone(), Arc::clone(&tm_factory), coordinator_send.clone(), @@ -1166,14 +1175,15 @@ fn do_thin_lto<'a, B: ExtraBackendMethods>( fn execute_thin_lto_work_item( cgcx: &CodegenContext, + prof: &SelfProfilerRef, shared_emitter: SharedEmitter, tm_factory: TargetMachineFactoryFn, module: lto::ThinModule, ) -> CompiledModule { - let _timer = cgcx.prof.generic_activity_with_arg("codegen_module_perform_lto", module.name()); + let _timer = prof.generic_activity_with_arg("codegen_module_perform_lto", module.name()); - let module = B::optimize_thin(cgcx, &shared_emitter, tm_factory, module); - B::codegen(cgcx, &shared_emitter, module, &cgcx.module_config) + let module = B::optimize_thin(cgcx, prof, &shared_emitter, tm_factory, module); + B::codegen(cgcx, prof, &shared_emitter, module, &cgcx.module_config) } /// Messages sent to the coordinator. 
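
The shape of this refactor: `CodegenContext` now derives `Encodable`/`Decodable` (see the `derive` changes earlier in this file), and the profiler handle presumably cannot be serialized along with it, so `prof` travels as a separate argument and is cloned into each worker. A minimal sketch of the pattern, with hypothetical names in place of rustc's types:

use std::thread;

#[derive(Clone)]
struct Profiler; // stand-in for the SelfProfilerRef handle

#[derive(Clone)] // the real struct additionally derives Encodable, Decodable
struct Context {
    save_temps: bool,
}

fn spawn_work(cgcx: &Context, prof: &Profiler) -> thread::JoinHandle<()> {
    // Clone both handles so the worker thread owns its own copies.
    let (cgcx, prof) = (cgcx.clone(), prof.clone());
    thread::spawn(move || {
        let _ = (cgcx.save_temps, &prof);
    })
}

fn main() {
    spawn_work(&Context { save_temps: false }, &Profiler).join().unwrap();
}
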
@@ -1273,6 +1283,7 @@ fn start_executing_work( coordinator_send: Sender>, ) -> thread::JoinHandle, ()>> { let sess = tcx.sess; + let prof = sess.prof.clone(); let mut each_linked_rlib_for_lto = Vec::new(); let mut each_linked_rlib_file_for_lto = Vec::new(); @@ -1323,7 +1334,6 @@ fn start_executing_work( fewer_names: sess.fewer_names(), save_temps: sess.opts.cg.save_temps, time_trace: sess.opts.unstable_opts.llvm_time_trace, - prof: sess.prof.clone(), remark: sess.opts.cg.remark.clone(), remark_dir, incr_comp_session_dir: sess.incr_comp_session_dir_opt().map(|r| r.clone()), @@ -1524,7 +1534,7 @@ fn start_executing_work( let mut llvm_start_time: Option> = None; if let Some(allocator_module) = &mut allocator_module { - B::optimize(&cgcx, &shared_emitter, allocator_module, &allocator_config); + B::optimize(&cgcx, &prof, &shared_emitter, allocator_module, &allocator_config); } // Run the message loop while there's still anything that needs message @@ -1564,6 +1574,7 @@ fn start_executing_work( main_thread_state = MainThreadState::Lending; spawn_work( &cgcx, + &prof, shared_emitter.clone(), coordinator_send.clone(), &mut llvm_start_time, @@ -1588,6 +1599,7 @@ fn start_executing_work( main_thread_state = MainThreadState::Lending; spawn_work( &cgcx, + &prof, shared_emitter.clone(), coordinator_send.clone(), &mut llvm_start_time, @@ -1630,6 +1642,7 @@ fn start_executing_work( { spawn_work( &cgcx, + &prof, shared_emitter.clone(), coordinator_send.clone(), &mut llvm_start_time, @@ -1788,6 +1801,7 @@ fn start_executing_work( if cgcx.lto == Lto::ThinLocal { compiled_modules.extend(do_thin_lto::( &cgcx, + &prof, shared_emitter.clone(), tm_factory, exported_symbols_for_lto, @@ -1814,7 +1828,7 @@ fn start_executing_work( Ok(MaybeLtoModules::NoLto { modules: compiled_modules, allocator_module: allocator_module.map(|allocator_module| { - B::codegen(&cgcx, &shared_emitter, allocator_module, &allocator_config) + B::codegen(&cgcx, &prof, &shared_emitter, allocator_module, &allocator_config) }), }) }) @@ -1883,23 +1897,25 @@ fn start_executing_work( pub(crate) struct WorkerFatalError; fn spawn_work<'a, B: ExtraBackendMethods>( - cgcx: &'a CodegenContext, + cgcx: &CodegenContext, + prof: &'a SelfProfilerRef, shared_emitter: SharedEmitter, coordinator_send: Sender>, llvm_start_time: &mut Option>, work: WorkItem, ) { if llvm_start_time.is_none() { - *llvm_start_time = Some(cgcx.prof.verbose_generic_activity("LLVM_passes")); + *llvm_start_time = Some(prof.verbose_generic_activity("LLVM_passes")); } let cgcx = cgcx.clone(); + let prof = prof.clone(); B::spawn_named_thread(cgcx.time_trace, work.short_description(), move || { let result = std::panic::catch_unwind(AssertUnwindSafe(|| match work { - WorkItem::Optimize(m) => execute_optimize_work_item(&cgcx, shared_emitter, m), + WorkItem::Optimize(m) => execute_optimize_work_item(&cgcx, &prof, shared_emitter, m), WorkItem::CopyPostLtoArtifacts(m) => WorkItemResult::Finished( - execute_copy_from_cache_work_item(&cgcx, shared_emitter, m), + execute_copy_from_cache_work_item(&cgcx, &prof, shared_emitter, m), ), })); @@ -1920,22 +1936,24 @@ fn spawn_work<'a, B: ExtraBackendMethods>( .expect("failed to spawn work thread"); } -fn spawn_thin_lto_work<'a, B: ExtraBackendMethods>( - cgcx: &'a CodegenContext, +fn spawn_thin_lto_work( + cgcx: &CodegenContext, + prof: &SelfProfilerRef, shared_emitter: SharedEmitter, tm_factory: TargetMachineFactoryFn, coordinator_send: Sender, work: ThinLtoWorkItem, ) { let cgcx = cgcx.clone(); + let prof = prof.clone(); 
B::spawn_named_thread(cgcx.time_trace, work.short_description(), move || { let result = std::panic::catch_unwind(AssertUnwindSafe(|| match work { ThinLtoWorkItem::CopyPostLtoArtifacts(m) => { - execute_copy_from_cache_work_item(&cgcx, shared_emitter, m) + execute_copy_from_cache_work_item(&cgcx, &prof, shared_emitter, m) } ThinLtoWorkItem::ThinLto(m) => { - execute_thin_lto_work_item(&cgcx, shared_emitter, tm_factory, m) + execute_thin_lto_work_item(&cgcx, &prof, shared_emitter, tm_factory, m) } })); @@ -2183,6 +2201,7 @@ impl OngoingCodegen { CompiledModules { modules: vec![do_fat_lto( &cgcx, + &sess.prof, shared_emitter, tm_factory, &exported_symbols_for_lto, @@ -2209,6 +2228,7 @@ impl OngoingCodegen { CompiledModules { modules: do_thin_lto::( &cgcx, + &sess.prof, shared_emitter, tm_factory, exported_symbols_for_lto, diff --git a/compiler/rustc_codegen_ssa/src/traits/write.rs b/compiler/rustc_codegen_ssa/src/traits/write.rs index 0232ba39ac1f3..5f5d0ac5d9fc4 100644 --- a/compiler/rustc_codegen_ssa/src/traits/write.rs +++ b/compiler/rustc_codegen_ssa/src/traits/write.rs @@ -1,5 +1,6 @@ use std::path::PathBuf; +use rustc_data_structures::profiling::SelfProfilerRef; use rustc_errors::DiagCtxtHandle; use rustc_middle::dep_graph::WorkProduct; @@ -20,6 +21,7 @@ pub trait WriteBackendMethods: Clone + 'static { /// if necessary and running any further optimizations fn run_and_optimize_fat_lto( cgcx: &CodegenContext, + prof: &SelfProfilerRef, shared_emitter: &SharedEmitter, tm_factory: TargetMachineFactoryFn, exported_symbols_for_lto: &[String], @@ -31,6 +33,7 @@ pub trait WriteBackendMethods: Clone + 'static { /// can simply be copied over from the incr. comp. cache. fn run_thin_lto( cgcx: &CodegenContext, + prof: &SelfProfilerRef, dcx: DiagCtxtHandle<'_>, exported_symbols_for_lto: &[String], each_linked_rlib_for_lto: &[PathBuf], @@ -41,18 +44,21 @@ pub trait WriteBackendMethods: Clone + 'static { fn print_statistics(&self); fn optimize( cgcx: &CodegenContext, + prof: &SelfProfilerRef, shared_emitter: &SharedEmitter, module: &mut ModuleCodegen, config: &ModuleConfig, ); fn optimize_thin( cgcx: &CodegenContext, + prof: &SelfProfilerRef, shared_emitter: &SharedEmitter, tm_factory: TargetMachineFactoryFn, thin: ThinModule, ) -> ModuleCodegen; fn codegen( cgcx: &CodegenContext, + prof: &SelfProfilerRef, shared_emitter: &SharedEmitter, module: ModuleCodegen, config: &ModuleConfig, diff --git a/compiler/rustc_data_structures/src/sync.rs b/compiler/rustc_data_structures/src/sync.rs index 3881f3c2aa841..31768fe189aef 100644 --- a/compiler/rustc_data_structures/src/sync.rs +++ b/compiler/rustc_data_structures/src/sync.rs @@ -41,7 +41,8 @@ pub use self::freeze::{FreezeLock, FreezeReadGuard, FreezeWriteGuard}; pub use self::lock::{Lock, LockGuard, Mode}; pub use self::mode::{is_dyn_thread_safe, set_dyn_thread_safe_mode}; pub use self::parallel::{ - broadcast, join, par_for_each_in, par_map, parallel_guard, scope, spawn, try_par_for_each_in, + broadcast, par_fns, par_for_each_in, par_join, par_map, parallel_guard, spawn, + try_par_for_each_in, }; pub use self::vec::{AppendOnlyIndexVec, AppendOnlyVec}; pub use self::worker_local::{Registry, WorkerLocal}; diff --git a/compiler/rustc_data_structures/src/sync/parallel.rs b/compiler/rustc_data_structures/src/sync/parallel.rs index b515c0bee8a6e..044767321c273 100644 --- a/compiler/rustc_data_structures/src/sync/parallel.rs +++ b/compiler/rustc_data_structures/src/sync/parallel.rs @@ -56,41 +56,6 @@ where (a.unwrap(), b.unwrap()) } -/// Runs a list of blocks in 
parallel. The first block is executed immediately on -/// the current thread. Use that for the longest running block. -#[macro_export] -macro_rules! parallel { - (impl $fblock:block [$($c:expr,)*] [$block:expr $(, $rest:expr)*]) => { - parallel!(impl $fblock [$block, $($c,)*] [$($rest),*]) - }; - (impl $fblock:block [$($blocks:expr,)*] []) => { - $crate::sync::parallel_guard(|guard| { - $crate::sync::scope(|s| { - $( - let block = $crate::sync::FromDyn::from(|| $blocks); - s.spawn(move |_| { - guard.run(move || block.into_inner()()); - }); - )* - guard.run(|| $fblock); - }); - }); - }; - ($fblock:block, $($blocks:block),*) => { - if $crate::sync::is_dyn_thread_safe() { - // Reverse the order of the later blocks since Rayon executes them in reverse order - // when using a single thread. This ensures the execution order matches that - // of a single threaded rustc. - parallel!(impl $fblock [] [$($blocks),*]); - } else { - $crate::sync::parallel_guard(|guard| { - guard.run(|| $fblock); - $(guard.run(|| $blocks);)* - }); - } - }; - } - pub fn spawn(func: impl FnOnce() + DynSend + 'static) { if mode::is_dyn_thread_safe() { let func = FromDyn::from(func); @@ -102,18 +67,43 @@ pub fn spawn(func: impl FnOnce() + DynSend + 'static) { } } -// This function only works when `mode::is_dyn_thread_safe()`. -pub fn scope<'scope, OP, R>(op: OP) -> R -where - OP: FnOnce(&rustc_thread_pool::Scope<'scope>) -> R + DynSend, - R: DynSend, -{ - let op = FromDyn::from(op); - rustc_thread_pool::scope(|s| FromDyn::from(op.into_inner()(s))).into_inner() +/// Runs the functions in parallel. +/// +/// The first function is executed immediately on the current thread. +/// Use that for the longest running function for better scheduling. +pub fn par_fns(funcs: &mut [&mut (dyn FnMut() + DynSend)]) { + parallel_guard(|guard: &ParallelGuard| { + if mode::is_dyn_thread_safe() { + let funcs = FromDyn::from(funcs); + rustc_thread_pool::scope(|s| { + let Some((first, rest)) = funcs.into_inner().split_at_mut_checked(1) else { + return; + }; + + // Reverse the order of the later functions since Rayon executes them in reverse + // order when using a single thread. This ensures the execution order matches + // that of a single threaded rustc. + for f in rest.iter_mut().rev() { + let f = FromDyn::from(f); + s.spawn(|_| { + guard.run(|| (f.into_inner())()); + }); + } + + // Run the first function without spawning to + // ensure it executes immediately on this thread. + guard.run(|| first[0]()); + }); + } else { + for f in funcs { + guard.run(|| f()); + } + } + }); } #[inline] -pub fn join(oper_a: A, oper_b: B) -> (RA, RB) +pub fn par_join(oper_a: A, oper_b: B) -> (RA, RB) where A: FnOnce() -> RA + DynSend, B: FnOnce() -> RB + DynSend, diff --git a/compiler/rustc_hir/src/attrs/data_structures.rs b/compiler/rustc_hir/src/attrs/data_structures.rs index 8e68a3d002334..e28ecd06b89bc 100644 --- a/compiler/rustc_hir/src/attrs/data_structures.rs +++ b/compiler/rustc_hir/src/attrs/data_structures.rs @@ -1053,6 +1053,9 @@ pub enum AttributeKind { /// Represents `#[pointee]` Pointee(Span), + /// Represents `#[prelude_import]` + PreludeImport, + /// Represents `#[proc_macro]` ProcMacro(Span), diff --git a/compiler/rustc_hir/src/attrs/encode_cross_crate.rs b/compiler/rustc_hir/src/attrs/encode_cross_crate.rs index e68ab1c42bafd..b4cf244bfb8aa 100644 --- a/compiler/rustc_hir/src/attrs/encode_cross_crate.rs +++ b/compiler/rustc_hir/src/attrs/encode_cross_crate.rs @@ -82,6 +82,7 @@ impl AttributeKind { PatternComplexityLimit { .. } => No, PinV2(..) 
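
The `par_fns` function introduced above replaces the `parallel!` macro with a plain slice of closures. A simplified model of its scheduling, using `std::thread::scope` and `Send` where the real code uses `rustc_thread_pool` and `DynSend`, and omitting `parallel_guard` and the reverse-order spawning:

use std::thread;

fn par_fns_sketch(funcs: &mut [&mut (dyn FnMut() + Send)]) {
    thread::scope(move |s| {
        let Some((first, rest)) = funcs.split_first_mut() else {
            return;
        };
        // Spawn everything but the first function...
        for f in rest.iter_mut() {
            s.spawn(move || f());
        }
        // ...and run the first one immediately on the current thread.
        first();
    });
}

fn main() {
    par_fns_sketch(&mut [
        &mut || println!("longest-running work, on the calling thread"),
        &mut || println!("other work, on a spawned thread"),
    ]);
}
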
=> Yes, Pointee(..) => No, + PreludeImport => No, ProcMacro(..) => No, ProcMacroAttribute(..) => No, ProcMacroDerive { .. } => No, diff --git a/compiler/rustc_hir_typeck/src/coercion.rs b/compiler/rustc_hir_typeck/src/coercion.rs index 36a07b361d9de..88d2e80f1521e 100644 --- a/compiler/rustc_hir_typeck/src/coercion.rs +++ b/compiler/rustc_hir_typeck/src/coercion.rs @@ -644,7 +644,7 @@ impl<'f, 'tcx> Coerce<'f, 'tcx> { .infcx .visit_proof_tree( Goal::new(self.tcx, self.param_env, pred), - &mut CoerceVisitor { fcx: self.fcx, span: self.cause.span }, + &mut CoerceVisitor { fcx: self.fcx, span: self.cause.span, errored: false }, ) .is_break() { @@ -1961,6 +1961,10 @@ impl<'tcx> CoerceMany<'tcx> { struct CoerceVisitor<'a, 'tcx> { fcx: &'a FnCtxt<'a, 'tcx>, span: Span, + /// Whether the coercion is impossible. If so we sometimes still try to + /// coerce in these cases to emit better errors. This changes the behavior + /// when hitting the recursion limit. + errored: bool, } impl<'tcx> ProofTreeVisitor<'tcx> for CoerceVisitor<'_, 'tcx> { @@ -1987,6 +1991,7 @@ impl<'tcx> ProofTreeVisitor<'tcx> for CoerceVisitor<'_, 'tcx> { // If we prove the `Unsize` or `CoerceUnsized` goal, continue recursing. Ok(Certainty::Yes) => ControlFlow::Continue(()), Err(NoSolution) => { + self.errored = true; // Even if we find no solution, continue recursing if we find a single candidate // for which we're shallowly certain it holds to get the right error source. if let [only_candidate] = &goal.candidates()[..] @@ -2019,4 +2024,15 @@ impl<'tcx> ProofTreeVisitor<'tcx> for CoerceVisitor<'_, 'tcx> { } } } + + fn on_recursion_limit(&mut self) -> Self::Result { + if self.errored { + // This prevents accidentally committing unfulfilled unsized coercions while trying to + // find the error source for diagnostics. + // See https://github.com/rust-lang/trait-system-refactor-initiative/issues/266. 
+ ControlFlow::Break(()) + } else { + ControlFlow::Continue(()) + } + } } diff --git a/compiler/rustc_incremental/src/persist/save.rs b/compiler/rustc_incremental/src/persist/save.rs index be16b543e824d..996ae162607d3 100644 --- a/compiler/rustc_incremental/src/persist/save.rs +++ b/compiler/rustc_incremental/src/persist/save.rs @@ -2,7 +2,7 @@ use std::fs; use std::sync::Arc; use rustc_data_structures::fx::FxIndexMap; -use rustc_data_structures::sync::join; +use rustc_data_structures::sync::par_join; use rustc_middle::dep_graph::{ DepGraph, SerializedDepGraph, WorkProduct, WorkProductId, WorkProductMap, }; @@ -44,7 +44,7 @@ pub(crate) fn save_dep_graph(tcx: TyCtxt<'_>) { sess.time("assert_dep_graph", || assert_dep_graph(tcx)); sess.time("check_clean", || clean::check_clean_annotations(tcx)); - join( + par_join( move || { sess.time("incr_comp_persist_dep_graph", || { if let Err(err) = fs::rename(&staging_dep_graph_path, &dep_graph_path) { diff --git a/compiler/rustc_interface/src/passes.rs b/compiler/rustc_interface/src/passes.rs index e474f106433df..a2c11c608330a 100644 --- a/compiler/rustc_interface/src/passes.rs +++ b/compiler/rustc_interface/src/passes.rs @@ -12,8 +12,8 @@ use rustc_codegen_ssa::{CodegenResults, CrateInfo}; use rustc_data_structures::indexmap::IndexMap; use rustc_data_structures::jobserver::Proxy; use rustc_data_structures::steal::Steal; -use rustc_data_structures::sync::{AppendOnlyIndexVec, FreezeLock, WorkerLocal}; -use rustc_data_structures::{parallel, thousands}; +use rustc_data_structures::sync::{AppendOnlyIndexVec, FreezeLock, WorkerLocal, par_fns}; +use rustc_data_structures::thousands; use rustc_errors::timings::TimingSection; use rustc_expand::base::{ExtCtxt, LintStoreExpand}; use rustc_feature::Features; @@ -1052,8 +1052,8 @@ fn run_required_analyses(tcx: TyCtxt<'_>) { let sess = tcx.sess; sess.time("misc_checking_1", || { - parallel!( - { + par_fns(&mut [ + &mut || { sess.time("looking_for_entry_point", || tcx.ensure_ok().entry_fn(())); sess.time("check_externally_implementable_items", || { tcx.ensure_ok().check_externally_implementable_items(()) @@ -1065,7 +1065,7 @@ fn run_required_analyses(tcx: TyCtxt<'_>) { CStore::from_tcx(tcx).report_unused_deps(tcx); }, - { + &mut || { tcx.ensure_ok().exportable_items(LOCAL_CRATE); tcx.ensure_ok().stable_order_of_exportable_impls(LOCAL_CRATE); tcx.par_hir_for_each_module(|module| { @@ -1073,14 +1073,14 @@ fn run_required_analyses(tcx: TyCtxt<'_>) { tcx.ensure_ok().check_mod_unstable_api_usage(module); }); }, - { + &mut || { // We force these queries to run, // since they might not otherwise get called. // This marks the corresponding crate-level attributes // as used, and ensures that their values are valid. 
tcx.ensure_ok().limits(()); - } - ); + }, + ]); }); rustc_hir_analysis::check_crate(tcx); @@ -1152,39 +1152,39 @@ fn analysis(tcx: TyCtxt<'_>, (): ()) { } sess.time("misc_checking_3", || { - parallel!( - { + par_fns(&mut [ + &mut || { tcx.ensure_ok().effective_visibilities(()); - parallel!( - { + par_fns(&mut [ + &mut || { tcx.par_hir_for_each_module(|module| { tcx.ensure_ok().check_private_in_public(module) }) }, - { + &mut || { tcx.par_hir_for_each_module(|module| { tcx.ensure_ok().check_mod_deathness(module) }); }, - { + &mut || { sess.time("lint_checking", || { rustc_lint::check_crate(tcx); }); }, - { + &mut || { tcx.ensure_ok().clashing_extern_declarations(()); - } - ); + }, + ]); }, - { + &mut || { sess.time("privacy_checking_modules", || { tcx.par_hir_for_each_module(|module| { tcx.ensure_ok().check_mod_privacy(module); }); }); - } - ); + }, + ]); // This check has to be run after all lints are done processing. We don't // define a lint filter, as all lint checks should have finished at this point. diff --git a/compiler/rustc_interface/src/util.rs b/compiler/rustc_interface/src/util.rs index 59f0b67d67299..613b3c64efed2 100644 --- a/compiler/rustc_interface/src/util.rs +++ b/compiler/rustc_interface/src/util.rs @@ -248,7 +248,7 @@ internal compiler error: query cycle handler thread panicked, aborting process"; tls::with(|tcx| { // Accessing session globals is sound as they outlive `GlobalCtxt`. // They are needed to hash query keys containing spans or symbols. - let query_map = rustc_span::set_session_globals_then( + let job_map = rustc_span::set_session_globals_then( unsafe { &*(session_globals as *const SessionGlobals) }, || { // Ensure there were no errors collecting all active jobs. @@ -258,7 +258,7 @@ internal compiler error: query cycle handler thread panicked, aborting process"; ) }, ); - break_query_cycles(query_map, ®istry); + break_query_cycles(job_map, ®istry); }) }) }); diff --git a/compiler/rustc_lint/src/late.rs b/compiler/rustc_lint/src/late.rs index ccfba715a1be3..3cc0d46d8541f 100644 --- a/compiler/rustc_lint/src/late.rs +++ b/compiler/rustc_lint/src/late.rs @@ -7,7 +7,7 @@ use std::any::Any; use std::cell::Cell; use rustc_data_structures::stack::ensure_sufficient_stack; -use rustc_data_structures::sync::join; +use rustc_data_structures::sync::par_join; use rustc_hir::def_id::{LocalDefId, LocalModDefId}; use rustc_hir::{self as hir, AmbigArg, HirId, intravisit as hir_visit}; use rustc_middle::hir::nested_filter; @@ -461,7 +461,7 @@ fn late_lint_crate_inner<'tcx, T: LateLintPass<'tcx>>( /// Performs lint checking on a crate. pub fn check_crate<'tcx>(tcx: TyCtxt<'tcx>) { - join( + par_join( || { tcx.sess.time("crate_lints", || { // Run whole crate non-incremental lints diff --git a/compiler/rustc_llvm/llvm-wrapper/PassWrapper.cpp b/compiler/rustc_llvm/llvm-wrapper/PassWrapper.cpp index 97f95ac01e861..7aa4ddea78e1d 100644 --- a/compiler/rustc_llvm/llvm-wrapper/PassWrapper.cpp +++ b/compiler/rustc_llvm/llvm-wrapper/PassWrapper.cpp @@ -812,12 +812,12 @@ extern "C" LLVMRustResult LLVMRustOptimize( auto ThinLTOBuffer = std::make_unique(); raw_string_ostream ThinLTODataOS(ThinLTOBuffer->data); raw_string_ostream ThinLinkDataOS(ThinLTOBuffer->thin_link_data); + bool IsLTO = OptStage == LLVMRustOptStage::ThinLTO || + OptStage == LLVMRustOptStage::FatLTO; if (!NoPrepopulatePasses) { // The pre-link pipelines don't support O0 and require using // buildO0DefaultPipeline() instead. At the same time, the LTO pipelines do // support O0 and using them is required. 
- bool IsLTO = OptStage == LLVMRustOptStage::ThinLTO || - OptStage == LLVMRustOptStage::FatLTO; if (OptLevel == OptimizationLevel::O0 && !IsLTO) { for (const auto &C : PipelineStartEPCallbacks) PB.registerPipelineStartEPCallback(C); @@ -908,7 +908,10 @@ extern "C" LLVMRustResult LLVMRustOptimize( // now load "-enzyme" pass: // With dlopen, ENZYME macro may not be defined, so check EnzymePtr directly - if (EnzymePtr) { + // In the case of debug builds with multiple codegen units, we might not + // have all function definitions available during the early compiler + // invocations. We therefore wait for the final lto step to run Enzyme. + if (EnzymePtr && IsLTO) { if (PrintBeforeEnzyme) { // Handle the Rust flag `-Zautodiff=PrintModBefore`. @@ -929,6 +932,7 @@ extern "C" LLVMRustResult LLVMRustOptimize( MPM.addPass(PrintModulePass(outs(), Banner, true, false)); } } + if (PrintPasses) { // Print all passes from the PM: std::string Pipeline; diff --git a/compiler/rustc_macros/src/query.rs b/compiler/rustc_macros/src/query.rs index 6b5c427c8bc1b..5b869dc3409a7 100644 --- a/compiler/rustc_macros/src/query.rs +++ b/compiler/rustc_macros/src/query.rs @@ -1,4 +1,5 @@ use proc_macro::TokenStream; +use proc_macro2::Span; use quote::{quote, quote_spanned}; use syn::parse::{Parse, ParseStream, Result}; use syn::punctuated::Punctuated; @@ -62,7 +63,7 @@ impl Parse for Query { // If there are no doc-comments, give at least some idea of what // it does by showing the query description. if doc_comments.is_empty() { - doc_comments.push(doc_comment_from_desc(&modifiers.desc.1)?); + doc_comments.push(doc_comment_from_desc(&modifiers.desc.expr_list)?); } Ok(Query { doc_comments, modifiers, name, key, arg, result }) @@ -82,15 +83,27 @@ impl Parse for List { } } +struct Desc { + modifier: Ident, + tcx_binding: Option, + expr_list: Punctuated, +} + +struct CacheOnDiskIf { + modifier: Ident, + tcx_binding: Option, + block: Block, +} + struct QueryModifiers { /// The description of the query. - desc: (Option, Punctuated), + desc: Desc, /// Use this type for the in-memory cache. arena_cache: Option, /// Cache the query to disk if the `Block` returns true. - cache_on_disk_if: Option<(Option, Block)>, + cache_on_disk_if: Option, /// A cycle error for this query aborting the compilation with a fatal error. 
cycle_fatal: Option, @@ -164,7 +177,7 @@ fn parse_query_modifiers(input: ParseStream<'_>) -> Result { // `desc { |tcx| "foo {}", tcx.item_path(key) }` let attr_content; braced!(attr_content in input); - let tcx = if attr_content.peek(Token![|]) { + let tcx_binding = if attr_content.peek(Token![|]) { attr_content.parse::()?; let tcx = attr_content.parse()?; attr_content.parse::()?; @@ -172,15 +185,15 @@ fn parse_query_modifiers(input: ParseStream<'_>) -> Result { } else { None }; - let list = attr_content.parse_terminated(Expr::parse, Token![,])?; - try_insert!(desc = (tcx, list)); + let expr_list = attr_content.parse_terminated(Expr::parse, Token![,])?; + try_insert!(desc = Desc { modifier, tcx_binding, expr_list }); } else if modifier == "cache_on_disk_if" { // Parse a cache-on-disk modifier like: // // `cache_on_disk_if { true }` // `cache_on_disk_if { key.is_local() }` // `cache_on_disk_if(tcx) { tcx.is_typeck_child(key.to_def_id()) }` - let args = if input.peek(token::Paren) { + let tcx_binding = if input.peek(token::Paren) { let args; parenthesized!(args in input); let tcx = Pat::parse_single(&args)?; @@ -189,7 +202,7 @@ fn parse_query_modifiers(input: ParseStream<'_>) -> Result { None }; let block = input.parse()?; - try_insert!(cache_on_disk_if = (args, block)); + try_insert!(cache_on_disk_if = CacheOnDiskIf { modifier, tcx_binding, block }); } else if modifier == "arena_cache" { try_insert!(arena_cache = modifier); } else if modifier == "cycle_fatal" { @@ -275,44 +288,32 @@ struct HelperTokenStreams { } fn make_helpers_for_query(query: &Query, streams: &mut HelperTokenStreams) { - let Query { name, key, modifiers, .. } = &query; + let Query { name, key, modifiers, arg, .. } = &query; - // This dead code exists to instruct rust-analyzer about the link between the `rustc_queries` - // query names and the corresponding produced provider. The issue is that by nature of this - // macro producing a higher order macro that has all its token in the macro declaration we lose - // any meaningful spans, resulting in rust-analyzer being unable to make the connection between - // the query name and the corresponding providers field. The trick to fix this is to have - // `rustc_queries` emit a field access with the given name's span which allows it to successfully - // show references / go to definition to the corresponding provider assignment which is usually - // the more interesting place. - let ra_hint = quote! { - let crate::query::Providers { #name: _, .. }; - }; + // Replace span for `name` to make rust-analyzer ignore it. + let mut erased_name = name.clone(); + erased_name.set_span(Span::call_site()); // Generate a function to check whether we should cache the query to disk, for some key. - if let Some((args, expr)) = modifiers.cache_on_disk_if.as_ref() { - let tcx = args.as_ref().map(|t| quote! { #t }).unwrap_or_else(|| quote! { _ }); - // expr is a `Block`, meaning that `{ #expr }` gets expanded - // to `{ { stmts... } }`, which triggers the `unused_braces` lint. + if let Some(CacheOnDiskIf { tcx_binding, block, .. }) = modifiers.cache_on_disk_if.as_ref() { + let tcx = tcx_binding.as_ref().map(|t| quote! { #t }).unwrap_or_else(|| quote! { _ }); // we're taking `key` by reference, but some rustc types usually prefer being passed by value streams.cache_on_disk_if_fns_stream.extend(quote! 
{ - #[allow(unused_variables, unused_braces, rustc::pass_by_value)] + #[allow(unused_variables, rustc::pass_by_value)] #[inline] - pub fn #name<'tcx>(#tcx: TyCtxt<'tcx>, #key: &crate::queries::#name::Key<'tcx>) -> bool { - #ra_hint - #expr - } + pub fn #erased_name<'tcx>(#tcx: TyCtxt<'tcx>, #key: &crate::queries::#name::Key<'tcx>) -> bool + #block }); } - let (tcx, desc) = &modifiers.desc; - let tcx = tcx.as_ref().map_or_else(|| quote! { _ }, |t| quote! { #t }); + let Desc { tcx_binding, expr_list, .. } = &modifiers.desc; + let tcx = tcx_binding.as_ref().map_or_else(|| quote! { _ }, |t| quote! { #t }); let desc = quote! { #[allow(unused_variables)] - pub fn #name<'tcx>(tcx: TyCtxt<'tcx>, key: crate::queries::#name::Key<'tcx>) -> String { + pub fn #erased_name<'tcx>(tcx: TyCtxt<'tcx>, key: #arg) -> String { let (#tcx, #key) = (tcx, key); - format!(#desc) + format!(#expr_list) } }; @@ -321,12 +322,88 @@ fn make_helpers_for_query(query: &Query, streams: &mut HelperTokenStreams) { }); } +/// Add hints for rust-analyzer +fn add_to_analyzer_stream(query: &Query, analyzer_stream: &mut proc_macro2::TokenStream) { + // Add links to relevant modifiers + + let modifiers = &query.modifiers; + + let mut modifiers_stream = quote! {}; + + let name = &modifiers.desc.modifier; + modifiers_stream.extend(quote! { + crate::query::modifiers::#name; + }); + + if let Some(CacheOnDiskIf { modifier, .. }) = &modifiers.cache_on_disk_if { + modifiers_stream.extend(quote! { + crate::query::modifiers::#modifier; + }); + } + + macro_rules! doc_link { + ( $( $modifier:ident ),+ $(,)? ) => { + $( + if let Some(name) = &modifiers.$modifier { + modifiers_stream.extend(quote! { + crate::query::modifiers::#name; + }); + } + )+ + } + } + + doc_link!( + arena_cache, + cycle_fatal, + cycle_delay_bug, + cycle_stash, + no_hash, + anon, + eval_always, + depth_limit, + separate_provide_extern, + feedable, + return_result_from_ensure_ok, + ); + + let name = &query.name; + + // Replace span for `name` to make rust-analyzer ignore it. + let mut erased_name = name.clone(); + erased_name.set_span(Span::call_site()); + + let result = &query.result; + + // This dead code exists to instruct rust-analyzer about the link between the `rustc_queries` + // query names and the corresponding produced provider. The issue is that by nature of this + // macro producing a higher-order macro that has all its tokens in the macro declaration we lose + // any meaningful spans, resulting in rust-analyzer being unable to make the connection between + // the query name and the corresponding providers field. The trick to fix this is to have + // `rustc_queries` emit a field access with the given name's span which allows it to successfully + // show references / go to definition to the corresponding provider assignment which is usually + // the more interesting place. + let ra_hint = quote! { + let crate::query::Providers { #name: _, .. }; + }; + + analyzer_stream.extend(quote! { + #[inline(always)] + fn #erased_name<'tcx>() #result { + #ra_hint + #modifiers_stream + loop {} + } + }); +} + pub(super) fn rustc_queries(input: TokenStream) -> TokenStream { let queries = parse_macro_input!(input as List); let mut query_stream = quote! {}; let mut helpers = HelperTokenStreams::default(); let mut feedable_queries = quote! {}; + let mut analyzer_stream = quote! {}; let mut errors = quote! {}; macro_rules!
assert { @@ -409,6 +486,7 @@ pub(super) fn rustc_queries(input: TokenStream) -> TokenStream { }); } + add_to_analyzer_stream(&query, &mut analyzer_stream); make_helpers_for_query(&query, &mut helpers); } @@ -442,6 +520,12 @@ pub(super) fn rustc_queries(input: TokenStream) -> TokenStream { } } + // Add hints for rust-analyzer + mod _analyzer_hints { + use super::*; + #analyzer_stream + } + /// Functions that format a human-readable description of each query /// and its key, as specified by the `desc` query modifier. /// diff --git a/compiler/rustc_metadata/src/rmeta/encoder.rs b/compiler/rustc_metadata/src/rmeta/encoder.rs index 4988cafdd3637..3a85b0a050526 100644 --- a/compiler/rustc_metadata/src/rmeta/encoder.rs +++ b/compiler/rustc_metadata/src/rmeta/encoder.rs @@ -7,7 +7,7 @@ use std::sync::Arc; use rustc_data_structures::fx::{FxIndexMap, FxIndexSet}; use rustc_data_structures::memmap::{Mmap, MmapMut}; -use rustc_data_structures::sync::{join, par_for_each_in}; +use rustc_data_structures::sync::{par_for_each_in, par_join}; use rustc_data_structures::temp_dir::MaybeTempDir; use rustc_data_structures::thousands::usize_with_underscores; use rustc_feature::Features; @@ -2461,7 +2461,7 @@ pub fn encode_metadata(tcx: TyCtxt<'_>, path: &Path, ref_path: Option<&Path>) { // Prefetch some queries used by metadata encoding. // This is not necessary for correctness, but is only done for performance reasons. // It can be removed if it turns out to cause trouble or be detrimental to performance. - join( + par_join( || prefetch_mir(tcx), || { let _ = tcx.exported_non_generic_symbols(LOCAL_CRATE); diff --git a/compiler/rustc_middle/src/query/mod.rs b/compiler/rustc_middle/src/query/mod.rs index 24a38e70ff6f2..62d5c1f9dd208 100644 --- a/compiler/rustc_middle/src/query/mod.rs +++ b/compiler/rustc_middle/src/query/mod.rs @@ -13,6 +13,7 @@ mod keys; pub mod on_disk_cache; #[macro_use] pub mod plumbing; +pub(crate) mod modifiers; pub fn describe_as_module(def_id: impl Into, tcx: TyCtxt<'_>) -> String { let def_id = def_id.into(); diff --git a/compiler/rustc_middle/src/query/modifiers.rs b/compiler/rustc_middle/src/query/modifiers.rs new file mode 100644 index 0000000000000..81b9f0da6446c --- /dev/null +++ b/compiler/rustc_middle/src/query/modifiers.rs @@ -0,0 +1,77 @@ +//! This contains documentation which is linked from query modifiers used in the `rustc_queries!` proc macro. +#![allow(unused, non_camel_case_types)] +// FIXME: Update and clarify documentation for these modifiers. + +/// # `desc` query modifier +/// +/// The description of the query. This modifier is required on every query. +pub struct desc; + +/// # `arena_cache` query modifier +/// +/// Use this type for the in-memory cache. +pub struct arena_cache; + +/// # `cache_on_disk_if` query modifier +/// +/// Cache the query to disk if the `Block` returns true. +pub struct cache_on_disk_if; + +/// # `cycle_fatal` query modifier +/// +/// A cycle error for this query aborting the compilation with a fatal error. 
+pub struct cycle_fatal; + +/// # `cycle_delay_bug` query modifier +/// +/// A cycle error results in a delay_bug call +pub struct cycle_delay_bug; + +/// # `cycle_stash` query modifier +/// +/// A cycle error results in a stashed cycle error that can be unstashed and canceled later +pub struct cycle_stash; + +/// # `no_hash` query modifier +/// +/// Don't hash the result, instead just mark a query red if it runs +pub struct no_hash; + +/// # `anon` query modifier +/// +/// Generate a dep node based on the dependencies of the query +pub struct anon; + +/// # `eval_always` query modifier +/// +/// Always evaluate the query, ignoring its dependencies +pub struct eval_always; + +/// # `depth_limit` query modifier +/// +/// Whether the query has a call depth limit +pub struct depth_limit; + +/// # `separate_provide_extern` query modifier +/// +/// Use a separate query provider for local and extern crates +pub struct separate_provide_extern; + +/// # `feedable` query modifier +/// +/// Generate a `feed` method to set the query's value from another query. +pub struct feedable; + +/// # `return_result_from_ensure_ok` query modifier +/// +/// When this query is called via `tcx.ensure_ok()`, it returns +/// `Result<(), ErrorGuaranteed>` instead of `()`. If the query needs to +/// be executed, and that execution returns an error, the error result is +/// returned to the caller. +/// +/// If execution is skipped, a synthetic `Ok(())` is returned, on the +/// assumption that a query with all-green inputs must have succeeded. +/// +/// Can only be applied to queries with a return value of +/// `Result<_, ErrorGuaranteed>`. +pub struct return_result_from_ensure_ok; diff --git a/compiler/rustc_middle/src/query/plumbing.rs b/compiler/rustc_middle/src/query/plumbing.rs index 3c844eac1fca0..9a709d2c43c80 100644 --- a/compiler/rustc_middle/src/query/plumbing.rs +++ b/compiler/rustc_middle/src/query/plumbing.rs @@ -462,10 +462,14 @@ macro_rules! define_callbacks { } pub struct Providers { - $(pub $name: for<'tcx> fn( - TyCtxt<'tcx>, - $name::LocalKey<'tcx>, - ) -> $name::ProvidedValue<'tcx>,)* + $( + /// This is the provider for the query. Use `Find references` on this to + /// navigate between the provider assignment and the query definition. 
+ pub $name: for<'tcx> fn( + TyCtxt<'tcx>, + $name::LocalKey<'tcx>, + ) -> $name::ProvidedValue<'tcx>, + )* } pub struct ExternProviders { diff --git a/compiler/rustc_middle/src/traits/cache.rs b/compiler/rustc_middle/src/traits/cache.rs index ed41a69f97148..9391764bf1ce2 100644 --- a/compiler/rustc_middle/src/traits/cache.rs +++ b/compiler/rustc_middle/src/traits/cache.rs @@ -5,7 +5,8 @@ use std::hash::Hash; use rustc_data_structures::fx::FxHashMap; use rustc_data_structures::sync::Lock; -use crate::dep_graph::{DepContext, DepNodeIndex}; +use crate::dep_graph::DepNodeIndex; +use crate::ty::TyCtxt; pub struct WithDepNodeCache { hashmap: Lock>>, @@ -24,7 +25,7 @@ impl Default for WithDepNodeCache { } impl WithDepNodeCache { - pub fn get(&self, key: &Key, tcx: Tcx) -> Option { + pub fn get<'tcx>(&self, key: &Key, tcx: TyCtxt<'tcx>) -> Option { Some(self.hashmap.borrow().get(key)?.get(tcx)) } @@ -40,12 +41,12 @@ pub struct WithDepNode { } impl WithDepNode { - pub fn new(dep_node: DepNodeIndex, cached_value: T) -> Self { + pub(crate) fn new(dep_node: DepNodeIndex, cached_value: T) -> Self { WithDepNode { dep_node, cached_value } } - pub fn get(&self, tcx: Tcx) -> T { - tcx.dep_graph().read_index(self.dep_node); + pub(crate) fn get<'tcx>(&self, tcx: TyCtxt<'tcx>) -> T { + tcx.dep_graph.read_index(self.dep_node); self.cached_value.clone() } } diff --git a/compiler/rustc_monomorphize/src/partitioning.rs b/compiler/rustc_monomorphize/src/partitioning.rs index a86230e9ab22c..d8f4e01945075 100644 --- a/compiler/rustc_monomorphize/src/partitioning.rs +++ b/compiler/rustc_monomorphize/src/partitioning.rs @@ -99,7 +99,7 @@ use std::io::Write; use std::path::{Path, PathBuf}; use rustc_data_structures::fx::{FxIndexMap, FxIndexSet}; -use rustc_data_structures::sync; +use rustc_data_structures::sync::par_join; use rustc_data_structures::unord::{UnordMap, UnordSet}; use rustc_hir::LangItem; use rustc_hir::attrs::{InlineAttr, Linkage}; @@ -1145,7 +1145,7 @@ fn collect_and_partition_mono_items(tcx: TyCtxt<'_>, (): ()) -> MonoItemPartitio tcx.dcx().abort_if_errors(); let (codegen_units, _) = tcx.sess.time("partition_and_assert_distinct_symbols", || { - sync::join( + par_join( || { let mut codegen_units = partition(tcx, items.iter().copied(), &usage_map); codegen_units[0].make_primary(); diff --git a/compiler/rustc_parse/src/parser/expr.rs b/compiler/rustc_parse/src/parser/expr.rs index 05216be06ff56..5c44bfb0cf3f3 100644 --- a/compiler/rustc_parse/src/parser/expr.rs +++ b/compiler/rustc_parse/src/parser/expr.rs @@ -1524,7 +1524,7 @@ impl<'a> Parser<'a> { }, ) } else if this.check_inline_const(0) { - this.parse_const_block(lo) + this.parse_const_block(lo, false) } else if this.may_recover() && this.is_do_catch_block() { this.recover_do_catch() } else if this.is_try_block() { diff --git a/compiler/rustc_parse/src/parser/item.rs b/compiler/rustc_parse/src/parser/item.rs index 5b81acb0f91f0..b7ba92bac5249 100644 --- a/compiler/rustc_parse/src/parser/item.rs +++ b/compiler/rustc_parse/src/parser/item.rs @@ -534,7 +534,7 @@ impl<'a> Parser<'a> { match self.parse_delim_args() { // `( .. )` or `[ .. ]` (followed by `;`), or `{ .. }`. 
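// The `WithDepNode` hunk above narrows `get` from a generic `Tcx` to the
// concrete `TyCtxt`, but the caching discipline is unchanged: a value is
// stored together with the dep-node index under which it was computed, and
// every cache hit replays a read of that node so incremental dependency
// tracking stays sound. A toy model of the pattern, with a stand-in dep
// graph in place of the real `tcx.dep_graph`:
use std::cell::RefCell;

struct DepGraph {
    // Dep-node indices read by the current task.
    reads: RefCell<Vec<u32>>,
}

impl DepGraph {
    fn read_index(&self, index: u32) {
        self.reads.borrow_mut().push(index);
    }
}

struct WithDepNode<T> {
    dep_node: u32,
    cached_value: T,
}

impl<T: Clone> WithDepNode<T> {
    fn get(&self, graph: &DepGraph) -> T {
        // Record the dependency even though recomputation is skipped.
        graph.read_index(self.dep_node);
        self.cached_value.clone()
    }
}

fn main() {
    let graph = DepGraph { reads: RefCell::new(Vec::new()) };
    let entry = WithDepNode { dep_node: 42, cached_value: "selection result" };
    assert_eq!(entry.get(&graph), "selection result");
    assert_eq!(*graph.reads.borrow(), vec![42]);
}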
Ok(args) => { - self.eat_semi_for_macro_if_needed(&args); + self.eat_semi_for_macro_if_needed(&args, Some(&path)); self.complain_if_pub_macro(vis, false); Ok(MacCall { path, args }) } @@ -2392,7 +2392,7 @@ impl<'a> Parser<'a> { } let body = self.parse_delim_args()?; - self.eat_semi_for_macro_if_needed(&body); + self.eat_semi_for_macro_if_needed(&body, None); self.complain_if_pub_macro(vis, true); Ok(ItemKind::MacroDef( @@ -2417,13 +2417,13 @@ impl<'a> Parser<'a> { } } - fn eat_semi_for_macro_if_needed(&mut self, args: &DelimArgs) { + fn eat_semi_for_macro_if_needed(&mut self, args: &DelimArgs, path: Option<&Path>) { if args.need_semicolon() && !self.eat(exp!(Semi)) { - self.report_invalid_macro_expansion_item(args); + self.report_invalid_macro_expansion_item(args, path); } } - fn report_invalid_macro_expansion_item(&self, args: &DelimArgs) { + fn report_invalid_macro_expansion_item(&self, args: &DelimArgs, path: Option<&Path>) { let span = args.dspan.entire(); let mut err = self.dcx().struct_span_err( span, @@ -2433,17 +2433,32 @@ impl<'a> Parser<'a> { // macros within the same crate (that we can fix), which is sad. if !span.from_expansion() { let DelimSpan { open, close } = args.dspan; - err.multipart_suggestion( - "change the delimiters to curly braces", - vec![(open, "{".to_string()), (close, '}'.to_string())], - Applicability::MaybeIncorrect, - ); - err.span_suggestion( - span.with_neighbor(self.token.span).shrink_to_hi(), - "add a semicolon", - ';', - Applicability::MaybeIncorrect, - ); + // Check if this looks like `macro_rules!(name) { ... }` + // a common mistake when trying to define a macro. + if let Some(path) = path + && path.segments.first().is_some_and(|seg| seg.ident.name == sym::macro_rules) + && args.delim == Delimiter::Parenthesis + { + let replace = + if path.span.hi() + rustc_span::BytePos(1) < open.lo() { "" } else { " " }; + err.multipart_suggestion( + "to define a macro, remove the parentheses around the macro name", + vec![(open, replace.to_string()), (close, String::new())], + Applicability::MachineApplicable, + ); + } else { + err.multipart_suggestion( + "change the delimiters to curly braces", + vec![(open, "{".to_string()), (close, '}'.to_string())], + Applicability::MaybeIncorrect, + ); + err.span_suggestion( + span.with_neighbor(self.token.span).shrink_to_hi(), + "add a semicolon", + ';', + Applicability::MaybeIncorrect, + ); + } } err.emit(); } diff --git a/compiler/rustc_parse/src/parser/mod.rs b/compiler/rustc_parse/src/parser/mod.rs index a6b956b09bc16..f95fe61b0abdc 100644 --- a/compiler/rustc_parse/src/parser/mod.rs +++ b/compiler/rustc_parse/src/parser/mod.rs @@ -1270,7 +1270,7 @@ impl<'a> Parser<'a> { } /// Parses inline const expressions. - fn parse_const_block(&mut self, span: Span) -> PResult<'a, Box> { + fn parse_const_block(&mut self, span: Span, pat: bool) -> PResult<'a, Box> { self.expect_keyword(exp!(Const))?; let (attrs, blk) = self.parse_inner_attrs_and_block(None)?; let anon_const = AnonConst { @@ -1279,7 +1279,18 @@ impl<'a> Parser<'a> { mgca_disambiguation: MgcaDisambiguation::AnonConst, }; let blk_span = anon_const.value.span; - let kind = ExprKind::ConstBlock(anon_const); + let kind = if pat { + let guar = self + .dcx() + .struct_span_err(blk_span, "const blocks cannot be used as patterns") + .with_help( + "use a named `const`-item or an `if`-guard (`x if x == const { ... 
}`) instead", + ) + .emit(); + ExprKind::Err(guar) + } else { + ExprKind::ConstBlock(anon_const) + }; Ok(self.mk_expr_with_attrs(span.to(blk_span), kind, attrs)) } diff --git a/compiler/rustc_parse/src/parser/pat.rs b/compiler/rustc_parse/src/parser/pat.rs index c1e864985e190..bc73c3a2007a0 100644 --- a/compiler/rustc_parse/src/parser/pat.rs +++ b/compiler/rustc_parse/src/parser/pat.rs @@ -785,10 +785,8 @@ impl<'a> Parser<'a> { } else if self.eat_keyword(exp!(Box)) { self.parse_pat_box()? } else if self.check_inline_const(0) { - // Parse `const pat`. - // NOTE: This will always error later during AST lowering because - // inline const cannot be used as patterns. - let const_expr = self.parse_const_block(lo.to(self.token.span))?; + // Parse `const pat` + let const_expr = self.parse_const_block(lo.to(self.token.span), true)?; if let Some(re) = self.parse_range_end() { self.parse_pat_range_begin_with(const_expr, re)? @@ -1283,7 +1281,7 @@ impl<'a> Parser<'a> { .then_some(self.prev_token.span); let bound = if self.check_inline_const(0) { - self.parse_const_block(self.token.span) + self.parse_const_block(self.token.span, true) } else if self.check_path() { let lo = self.token.span; let (qself, path) = if self.eat_lt() { diff --git a/compiler/rustc_passes/src/check_attr.rs b/compiler/rustc_passes/src/check_attr.rs index b6f0e9fedd6d8..08f2597e874d8 100644 --- a/compiler/rustc_passes/src/check_attr.rs +++ b/compiler/rustc_passes/src/check_attr.rs @@ -282,6 +282,7 @@ impl<'tcx> CheckAttrVisitor<'tcx> { | AttributeKind::PatternComplexityLimit { .. } | AttributeKind::PinV2(..) | AttributeKind::Pointee(..) + | AttributeKind::PreludeImport | AttributeKind::ProfilerRuntime | AttributeKind::RecursionLimit { .. } | AttributeKind::ReexportTestHarnessMain(..) @@ -394,7 +395,6 @@ impl<'tcx> CheckAttrVisitor<'tcx> { // need to be fixed | sym::deprecated_safe // FIXME(deprecated_safe) // internal - | sym::prelude_import | sym::panic_handler | sym::lang | sym::default_lib_allocator diff --git a/compiler/rustc_query_impl/src/execution.rs b/compiler/rustc_query_impl/src/execution.rs index b246f077e747e..50fb4f29ed717 100644 --- a/compiler/rustc_query_impl/src/execution.rs +++ b/compiler/rustc_query_impl/src/execution.rs @@ -15,7 +15,7 @@ use rustc_query_system::query::{ use rustc_span::{DUMMY_SP, Span}; use crate::dep_graph::{DepContext, DepNode, DepNodeIndex}; -use crate::job::{QueryJobInfo, QueryMap, find_cycle_in_stack, report_cycle}; +use crate::job::{QueryJobInfo, QueryJobMap, find_cycle_in_stack, report_cycle}; use crate::{QueryCtxt, QueryFlags, SemiDynamicQueryDispatcher}; #[inline] @@ -45,8 +45,8 @@ pub(crate) fn gather_active_jobs_inner<'tcx, K: Copy>( state: &QueryState<'tcx, K>, tcx: TyCtxt<'tcx>, make_frame: fn(TyCtxt<'tcx>, K) -> QueryStackFrame>, - jobs: &mut QueryMap<'tcx>, require_complete: bool, + job_map_out: &mut QueryJobMap<'tcx>, // Out-param; job info is gathered into this map ) -> Option<()> { let mut active = Vec::new(); @@ -77,7 +77,7 @@ pub(crate) fn gather_active_jobs_inner<'tcx, K: Copy>( // queries leading to a deadlock. for (key, job) in active { let frame = make_frame(tcx, key); - jobs.insert(job.id, QueryJobInfo { frame, job }); + job_map_out.insert(job.id, QueryJobInfo { frame, job }); } Some(()) @@ -213,12 +213,12 @@ fn cycle_error<'tcx, C: QueryCache, const FLAGS: QueryFlags>( ) -> (C::Value, Option) { // Ensure there was no errors collecting all active jobs. // We need the complete map to ensure we find a cycle to break. 
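// `gather_active_jobs_inner` above now takes the job map as a trailing
// out-parameter and returns `Option<()>`, where `None` means a lock could not
// be taken and the map was left only partially filled. A simplified model of
// how a caller can still aggregate such partial results; the names here are
// illustrative, not the real compiler API:
use std::collections::HashMap;

type JobMap = HashMap<u32, &'static str>;

fn gather_one(out: &mut JobMap, contended: bool) -> Option<()> {
    if contended {
        return None; // the caller learns the collection is incomplete
    }
    out.insert(1, "typeck");
    Some(())
}

fn collect_all(contended: bool) -> Result<JobMap, JobMap> {
    let mut map = JobMap::new();
    let mut complete = true;
    if gather_one(&mut map, contended).is_none() {
        complete = false;
    }
    // `Ok` carries a complete map; `Err` a best-effort partial one that is
    // still usable, e.g. for printing a query stack during a panic.
    if complete { Ok(map) } else { Err(map) }
}

fn main() {
    assert!(collect_all(false).is_ok());
    assert!(collect_all(true).is_err());
}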
- let query_map = qcx + let job_map = qcx .collect_active_jobs_from_all_queries(false) .ok() .expect("failed to collect active queries"); - let error = find_cycle_in_stack(try_execute, query_map, &qcx.current_query_job(), span); + let error = find_cycle_in_stack(try_execute, job_map, &qcx.current_query_job(), span); (mk_cycle(query, qcx, error.lift()), None) } diff --git a/compiler/rustc_query_impl/src/job.rs b/compiler/rustc_query_impl/src/job.rs index f1eba0f76d171..19b8245b97e7a 100644 --- a/compiler/rustc_query_impl/src/job.rs +++ b/compiler/rustc_query_impl/src/job.rs @@ -17,39 +17,45 @@ use crate::dep_graph::DepContext; /// Map from query job IDs to job information collected by /// `collect_active_jobs_from_all_queries`. -pub type QueryMap<'tcx> = FxHashMap>; - -fn query_job_id_frame<'a, 'tcx>( - id: QueryJobId, - map: &'a QueryMap<'tcx>, -) -> QueryStackFrame> { - map.get(&id).unwrap().frame.clone() +#[derive(Debug, Default)] +pub struct QueryJobMap<'tcx> { + map: FxHashMap>, } -fn query_job_id_span<'a, 'tcx>(id: QueryJobId, map: &'a QueryMap<'tcx>) -> Span { - map.get(&id).unwrap().job.span -} +impl<'tcx> QueryJobMap<'tcx> { + /// Adds information about a job ID to the job map. + /// + /// Should only be called by `gather_active_jobs_inner`. + pub(crate) fn insert(&mut self, id: QueryJobId, info: QueryJobInfo<'tcx>) { + self.map.insert(id, info); + } -fn query_job_id_parent<'a, 'tcx>(id: QueryJobId, map: &'a QueryMap<'tcx>) -> Option { - map.get(&id).unwrap().job.parent -} + fn frame_of(&self, id: QueryJobId) -> &QueryStackFrame> { + &self.map[&id].frame + } -fn query_job_id_latch<'a, 'tcx>( - id: QueryJobId, - map: &'a QueryMap<'tcx>, -) -> Option<&'a QueryLatch<'tcx>> { - map.get(&id).unwrap().job.latch.as_ref() + fn span_of(&self, id: QueryJobId) -> Span { + self.map[&id].job.span + } + + fn parent_of(&self, id: QueryJobId) -> Option { + self.map[&id].job.parent + } + + fn latch_of(&self, id: QueryJobId) -> Option<&QueryLatch<'tcx>> { + self.map[&id].job.latch.as_ref() + } } #[derive(Clone, Debug)] -pub struct QueryJobInfo<'tcx> { - pub frame: QueryStackFrame>, - pub job: QueryJob<'tcx>, +pub(crate) struct QueryJobInfo<'tcx> { + pub(crate) frame: QueryStackFrame>, + pub(crate) job: QueryJob<'tcx>, } pub(crate) fn find_cycle_in_stack<'tcx>( id: QueryJobId, - query_map: QueryMap<'tcx>, + job_map: QueryJobMap<'tcx>, current_job: &Option, span: Span, ) -> CycleError> { @@ -58,7 +64,7 @@ pub(crate) fn find_cycle_in_stack<'tcx>( let mut current_job = Option::clone(current_job); while let Some(job) = current_job { - let info = query_map.get(&job).unwrap(); + let info = &job_map.map[&job]; cycle.push(QueryInfo { span: info.job.span, frame: info.frame.clone() }); if job == id { @@ -70,11 +76,10 @@ pub(crate) fn find_cycle_in_stack<'tcx>( // Replace it with the span which caused the cycle to form cycle[0].span = span; // Find out why the cycle itself was used - let usage = info - .job - .parent - .as_ref() - .map(|parent| (info.job.span, query_job_id_frame(*parent, &query_map))); + let usage = try { + let parent = info.job.parent?; + (info.job.span, job_map.frame_of(parent).clone()) + }; return CycleError { usage, cycle }; } @@ -88,16 +93,16 @@ pub(crate) fn find_cycle_in_stack<'tcx>( #[inline(never)] pub(crate) fn find_dep_kind_root<'tcx>( id: QueryJobId, - query_map: QueryMap<'tcx>, + job_map: QueryJobMap<'tcx>, ) -> (QueryJobInfo<'tcx>, usize) { let mut depth = 1; - let info = query_map.get(&id).unwrap(); + let info = &job_map.map[&id]; let dep_kind = info.frame.dep_kind; let mut 
current_id = info.job.parent; let mut last_layout = (info.clone(), depth); while let Some(id) = current_id { - let info = query_map.get(&id).unwrap(); + let info = &job_map.map[&id]; if info.frame.dep_kind == dep_kind { depth += 1; last_layout = (info.clone(), depth); @@ -120,7 +125,7 @@ type Waiter = (QueryJobId, usize); /// required information to resume the waiter. /// If all `visit` calls returns None, this function also returns None. fn visit_waiters<'tcx, F>( - query_map: &QueryMap<'tcx>, + job_map: &QueryJobMap<'tcx>, query: QueryJobId, mut visit: F, ) -> Option> @@ -128,14 +133,14 @@ where F: FnMut(Span, QueryJobId) -> Option>, { // Visit the parent query which is a non-resumable waiter since it's on the same stack - if let Some(parent) = query_job_id_parent(query, query_map) - && let Some(cycle) = visit(query_job_id_span(query, query_map), parent) + if let Some(parent) = job_map.parent_of(query) + && let Some(cycle) = visit(job_map.span_of(query), parent) { return Some(cycle); } // Visit the explicit waiters which use condvars and are resumable - if let Some(latch) = query_job_id_latch(query, query_map) { + if let Some(latch) = job_map.latch_of(query) { for (i, waiter) in latch.info.lock().waiters.iter().enumerate() { if let Some(waiter_query) = waiter.query { if visit(waiter.span, waiter_query).is_some() { @@ -154,7 +159,7 @@ where /// If a cycle is detected, this initial value is replaced with the span causing /// the cycle. fn cycle_check<'tcx>( - query_map: &QueryMap<'tcx>, + job_map: &QueryJobMap<'tcx>, query: QueryJobId, span: Span, stack: &mut Vec<(Span, QueryJobId)>, @@ -178,8 +183,8 @@ fn cycle_check<'tcx>( stack.push((span, query)); // Visit all the waiters - let r = visit_waiters(query_map, query, |span, successor| { - cycle_check(query_map, successor, span, stack, visited) + let r = visit_waiters(job_map, query, |span, successor| { + cycle_check(job_map, successor, span, stack, visited) }); // Remove the entry in our stack if we didn't find a cycle @@ -194,7 +199,7 @@ fn cycle_check<'tcx>( /// from `query` without going through any of the queries in `visited`. /// This is achieved with a depth first search. fn connected_to_root<'tcx>( - query_map: &QueryMap<'tcx>, + job_map: &QueryJobMap<'tcx>, query: QueryJobId, visited: &mut FxHashSet, ) -> bool { @@ -204,18 +209,18 @@ fn connected_to_root<'tcx>( } // This query is connected to the root (it has no query parent), return true - if query_job_id_parent(query, query_map).is_none() { + if job_map.parent_of(query).is_none() { return true; } - visit_waiters(query_map, query, |_, successor| { - connected_to_root(query_map, successor, visited).then_some(None) + visit_waiters(job_map, query, |_, successor| { + connected_to_root(job_map, successor, visited).then_some(None) }) .is_some() } // Deterministically pick an query from a list -fn pick_query<'a, 'tcx, T, F>(query_map: &QueryMap<'tcx>, queries: &'a [T], f: F) -> &'a T +fn pick_query<'a, 'tcx, T, F>(job_map: &QueryJobMap<'tcx>, queries: &'a [T], f: F) -> &'a T where F: Fn(&T) -> (Span, QueryJobId), { @@ -225,7 +230,7 @@ where .iter() .min_by_key(|v| { let (span, query) = f(v); - let hash = query_job_id_frame(query, query_map).hash; + let hash = job_map.frame_of(query).hash; // Prefer entry points which have valid spans for nicer error messages // We add an integer to the tuple ensuring that entry points // with valid spans are picked first @@ -241,7 +246,7 @@ where /// If a cycle was not found, the starting query is removed from `jobs` and /// the function returns false. 
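// The `usage` computation in `find_cycle_in_stack` above uses a `try` block
// (this crate enables `#![feature(try_blocks)]` in a later hunk): inside the
// block, `?` short-circuits to the block's own value rather than returning
// from the enclosing function, which is what lets it replace the old nested
// `.as_ref().map(...)` chain. A nightly-only sketch of the same shape:
#![feature(try_blocks)]

fn main() {
    let parent: Option<u32> = Some(7);
    let usage: Option<(u32, String)> = try {
        let p = parent?; // `None` here makes the whole block `None`
        (p, format!("frame-{p}"))
    };
    assert_eq!(usage, Some((7, "frame-7".to_string())));
}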
fn remove_cycle<'tcx>( - query_map: &QueryMap<'tcx>, + job_map: &QueryJobMap<'tcx>, jobs: &mut Vec, wakelist: &mut Vec>>, ) -> bool { @@ -249,7 +254,7 @@ fn remove_cycle<'tcx>( let mut stack = Vec::new(); // Look for a cycle starting with the last query in `jobs` if let Some(waiter) = - cycle_check(query_map, jobs.pop().unwrap(), DUMMY_SP, &mut stack, &mut visited) + cycle_check(job_map, jobs.pop().unwrap(), DUMMY_SP, &mut stack, &mut visited) { // The stack is a vector of pairs of spans and queries; reverse it so that // the earlier entries require later entries @@ -273,17 +278,17 @@ fn remove_cycle<'tcx>( let entry_points = stack .iter() .filter_map(|&(span, query)| { - if query_job_id_parent(query, query_map).is_none() { + if job_map.parent_of(query).is_none() { // This query is connected to the root (it has no query parent) Some((span, query, None)) } else { let mut waiters = Vec::new(); // Find all the direct waiters who lead to the root - visit_waiters(query_map, query, |span, waiter| { + visit_waiters(job_map, query, |span, waiter| { // Mark all the other queries in the cycle as already visited let mut visited = FxHashSet::from_iter(stack.iter().map(|q| q.1)); - if connected_to_root(query_map, waiter, &mut visited) { + if connected_to_root(job_map, waiter, &mut visited) { waiters.push((span, waiter)); } @@ -293,7 +298,7 @@ fn remove_cycle<'tcx>( None } else { // Deterministically pick one of the waiters to show to the user - let waiter = *pick_query(query_map, &waiters, |s| *s); + let waiter = *pick_query(job_map, &waiters, |s| *s); Some((span, query, Some(waiter))) } } @@ -301,7 +306,7 @@ fn remove_cycle<'tcx>( .collect::)>>(); // Deterministically pick an entry point - let (_, entry_point, usage) = pick_query(query_map, &entry_points, |e| (e.0, e.1)); + let (_, entry_point, usage) = pick_query(job_map, &entry_points, |e| (e.0, e.1)); // Shift the stack so that our entry point is first let entry_point_pos = stack.iter().position(|(_, query)| query == entry_point); @@ -309,15 +314,14 @@ fn remove_cycle<'tcx>( stack.rotate_left(pos); } - let usage = - usage.as_ref().map(|(span, query)| (*span, query_job_id_frame(*query, query_map))); + let usage = usage.map(|(span, job)| (span, job_map.frame_of(job).clone())); // Create the cycle error let error = CycleError { usage, cycle: stack .iter() - .map(|&(s, ref q)| QueryInfo { span: s, frame: query_job_id_frame(*q, query_map) }) + .map(|&(span, job)| QueryInfo { span, frame: job_map.frame_of(job).clone() }) .collect(), }; @@ -326,8 +330,7 @@ fn remove_cycle<'tcx>( let (waitee_query, waiter_idx) = waiter.unwrap(); // Extract the waiter we want to resume - let waiter = - query_job_id_latch(waitee_query, query_map).unwrap().extract_waiter(waiter_idx); + let waiter = job_map.latch_of(waitee_query).unwrap().extract_waiter(waiter_idx); // Set the cycle error so it will be picked up when resumed *waiter.cycle.lock() = Some(error); @@ -346,18 +349,21 @@ fn remove_cycle<'tcx>( /// uses a query latch and then resuming that waiter. /// There may be multiple cycles involved in a deadlock, so this searches /// all active queries for cycles before finally resuming all the waiters at once. 
-pub fn break_query_cycles<'tcx>(query_map: QueryMap<'tcx>, registry: &rustc_thread_pool::Registry) { +pub fn break_query_cycles<'tcx>( + job_map: QueryJobMap<'tcx>, + registry: &rustc_thread_pool::Registry, +) { let mut wakelist = Vec::new(); // It is OK per the comments: // - https://github.com/rust-lang/rust/pull/131200#issuecomment-2798854932 // - https://github.com/rust-lang/rust/pull/131200#issuecomment-2798866392 #[allow(rustc::potential_query_instability)] - let mut jobs: Vec = query_map.keys().cloned().collect(); + let mut jobs: Vec = job_map.map.keys().copied().collect(); let mut found_cycle = false; while jobs.len() > 0 { - if remove_cycle(&query_map, &mut jobs, &mut wakelist) { + if remove_cycle(&job_map, &mut jobs, &mut wakelist) { found_cycle = true; } } @@ -372,8 +378,7 @@ pub fn break_query_cycles<'tcx>(query_map: QueryMap<'tcx>, registry: &rustc_thre if !found_cycle { panic!( "deadlock detected as we're unable to find a query cycle to break\n\ - current query map:\n{:#?}", - query_map + current query map:\n{job_map:#?}", ); } @@ -402,17 +407,16 @@ pub fn print_query_stack<'tcx>( let mut count_printed = 0; let mut count_total = 0; - // Make use of a partial query map if we fail to take locks collecting active queries. - let query_map = match qcx.collect_active_jobs_from_all_queries(false) { - Ok(query_map) => query_map, - Err(query_map) => query_map, - }; + // Make use of a partial query job map if we fail to take locks collecting active queries. + let job_map: QueryJobMap<'_> = qcx + .collect_active_jobs_from_all_queries(false) + .unwrap_or_else(|partial_job_map| partial_job_map); if let Some(ref mut file) = file { let _ = writeln!(file, "\n\nquery stack during panic:"); } while let Some(query) = current_query { - let Some(query_info) = query_map.get(&query) else { + let Some(query_info) = job_map.map.get(&query) else { break; }; let query_extra = query_info.frame.info.extract(); diff --git a/compiler/rustc_query_impl/src/lib.rs b/compiler/rustc_query_impl/src/lib.rs index e1c22c187b230..9b2078275aae5 100644 --- a/compiler/rustc_query_impl/src/lib.rs +++ b/compiler/rustc_query_impl/src/lib.rs @@ -6,6 +6,7 @@ #![feature(core_intrinsics)] #![feature(min_specialization)] #![feature(rustc_attrs)] +#![feature(try_blocks)] // tidy-alphabetical-end use std::marker::ConstParamTy; @@ -26,7 +27,7 @@ use rustc_query_system::query::{ }; use rustc_span::{ErrorGuaranteed, Span}; -pub use crate::job::{QueryMap, break_query_cycles, print_query_stack}; +pub use crate::job::{QueryJobMap, break_query_cycles, print_query_stack}; pub use crate::plumbing::{QueryCtxt, query_key_hash_verify_all}; use crate::plumbing::{encode_all_query_results, try_mark_green}; use crate::profiling_support::QueryKeyStringCache; diff --git a/compiler/rustc_query_impl/src/plumbing.rs b/compiler/rustc_query_impl/src/plumbing.rs index 9a07df361800c..9804e6b217567 100644 --- a/compiler/rustc_query_impl/src/plumbing.rs +++ b/compiler/rustc_query_impl/src/plumbing.rs @@ -36,7 +36,7 @@ use rustc_span::def_id::LOCAL_CRATE; use crate::error::{QueryOverflow, QueryOverflowNote}; use crate::execution::{all_inactive, force_query}; -use crate::job::{QueryMap, find_dep_kind_root}; +use crate::job::{QueryJobMap, find_dep_kind_root}; use crate::{QueryDispatcherUnerased, QueryFlags, SemiDynamicQueryDispatcher}; /// Implements [`QueryContext`] for use by [`rustc_query_system`], since that @@ -53,10 +53,10 @@ impl<'tcx> QueryCtxt<'tcx> { } fn depth_limit_error(self, job: QueryJobId) { - let query_map = self + let job_map = self 
.collect_active_jobs_from_all_queries(true) .expect("failed to collect active queries"); - let (info, depth) = find_dep_kind_root(job, query_map); + let (info, depth) = find_dep_kind_root(job, job_map); let suggested_limit = match self.tcx.recursion_limit() { Limit(0) => Limit(2), @@ -131,17 +131,17 @@ impl<'tcx> QueryCtxt<'tcx> { pub fn collect_active_jobs_from_all_queries( self, require_complete: bool, - ) -> Result, QueryMap<'tcx>> { - let mut jobs = QueryMap::default(); + ) -> Result, QueryJobMap<'tcx>> { + let mut job_map_out = QueryJobMap::default(); let mut complete = true; for gather_fn in crate::PER_QUERY_GATHER_ACTIVE_JOBS_FNS.iter() { - if gather_fn(self.tcx, &mut jobs, require_complete).is_none() { + if gather_fn(self.tcx, require_complete, &mut job_map_out).is_none() { complete = false; } } - if complete { Ok(jobs) } else { Err(jobs) } + if complete { Ok(job_map_out) } else { Err(job_map_out) } } } @@ -753,8 +753,8 @@ macro_rules! define_queries { /// Should only be called through `PER_QUERY_GATHER_ACTIVE_JOBS_FNS`. pub(crate) fn gather_active_jobs<'tcx>( tcx: TyCtxt<'tcx>, - qmap: &mut QueryMap<'tcx>, require_complete: bool, + job_map_out: &mut QueryJobMap<'tcx>, ) -> Option<()> { let make_frame = |tcx: TyCtxt<'tcx>, key| { let vtable = &tcx.query_system.query_vtables.$name; @@ -765,8 +765,8 @@ macro_rules! define_queries { let res = crate::execution::gather_active_jobs_inner(&tcx.query_system.states.$name, tcx, make_frame, - qmap, require_complete, + job_map_out, ); // this can be called during unwinding, and the function has a `try_`-prefix, so @@ -849,9 +849,13 @@ macro_rules! define_queries { /// each individual query, so that we have distinct function names to /// grep for.) const PER_QUERY_GATHER_ACTIVE_JOBS_FNS: &[ - for<'tcx> fn(TyCtxt<'tcx>, &mut QueryMap<'tcx>, require_complete: bool) -> Option<()> + for<'tcx> fn( + tcx: TyCtxt<'tcx>, + require_complete: bool, + job_map_out: &mut QueryJobMap<'tcx>, + ) -> Option<()> ] = &[ - $(query_impl::$name::gather_active_jobs),* + $( $crate::query_impl::$name::gather_active_jobs ),* ]; const ALLOC_SELF_PROFILE_QUERY_STRINGS: &[ diff --git a/compiler/rustc_session/src/config.rs b/compiler/rustc_session/src/config.rs index 5e2671ef4ef6b..b278a6179fe7f 100644 --- a/compiler/rustc_session/src/config.rs +++ b/compiler/rustc_session/src/config.rs @@ -88,7 +88,7 @@ pub enum CFProtection { Full, } -#[derive(Clone, Copy, Debug, PartialEq, Hash, HashStable_Generic)] +#[derive(Clone, Copy, Debug, PartialEq, Hash, HashStable_Generic, Encodable, Decodable)] pub enum OptLevel { /// `-Copt-level=0` No, @@ -108,7 +108,7 @@ pub enum OptLevel { /// and taking other command line options into account. /// /// Note that linker plugin-based LTO is a different mechanism entirely. -#[derive(Clone, PartialEq)] +#[derive(Clone, PartialEq, Encodable, Decodable)] pub enum Lto { /// Don't do any LTO whatsoever. No, @@ -190,7 +190,7 @@ pub enum CoverageLevel { } // The different settings that the `-Z offload` flag can have. -#[derive(Clone, PartialEq, Hash, Debug)] +#[derive(Clone, PartialEq, Hash, Debug, Encodable, Decodable)] pub enum Offload { /// Entry point for `std::offload`, enables kernel compilation for a gpu device Device, @@ -201,7 +201,7 @@ pub enum Offload { } /// The different settings that the `-Z autodiff` flag can have. 
-#[derive(Clone, PartialEq, Hash, Debug)] +#[derive(Clone, PartialEq, Hash, Debug, Encodable, Decodable)] pub enum AutoDiff { /// Enable the autodiff opt pipeline Enable, @@ -528,7 +528,7 @@ impl FmtDebug { } } -#[derive(Clone, PartialEq, Hash, Debug)] +#[derive(Clone, PartialEq, Hash, Debug, Encodable, Decodable)] pub enum SwitchWithOptPath { Enabled(Option), Disabled, @@ -583,7 +583,7 @@ pub enum MirStripDebugInfo { /// DWARF provides a mechanism which allows the linker to skip the sections which don't require /// link-time relocation - either by putting those sections in DWARF object files, or by keeping /// them in the object file in such a way that the linker will skip them. -#[derive(Clone, Copy, Debug, PartialEq, Hash)] +#[derive(Clone, Copy, Debug, PartialEq, Hash, Encodable, Decodable)] pub enum SplitDwarfKind { /// Sections which do not require relocation are written into object file but ignored by the /// linker. @@ -1539,7 +1539,7 @@ pub enum EntryFnType { pub use rustc_hir::attrs::CrateType; -#[derive(Clone, Hash, Debug, PartialEq, Eq)] +#[derive(Clone, Hash, Debug, PartialEq, Eq, Encodable, Decodable)] pub enum Passes { Some(Vec), All, diff --git a/compiler/rustc_target/src/spec/mod.rs b/compiler/rustc_target/src/spec/mod.rs index 537185f536ab1..2d2f15651c431 100644 --- a/compiler/rustc_target/src/spec/mod.rs +++ b/compiler/rustc_target/src/spec/mod.rs @@ -1082,7 +1082,7 @@ crate::target_spec_enum! { } crate::target_spec_enum! { - #[derive(Default)] + #[derive(Default, Encodable, Decodable)] pub enum SplitDebuginfo { /// Split debug-information is disabled, meaning that on supported platforms /// you can find all debug information in the executable itself. This is diff --git a/compiler/rustc_target/src/target_features.rs b/compiler/rustc_target/src/target_features.rs index 36bf89ed4f35b..f179274846488 100644 --- a/compiler/rustc_target/src/target_features.rs +++ b/compiler/rustc_target/src/target_features.rs @@ -984,8 +984,11 @@ const RISCV_FEATURES_FOR_CORRECT_FIXED_LENGTH_VECTOR_ABI: &'static [(u64, &'stat const SPARC_FEATURES_FOR_CORRECT_FIXED_LENGTH_VECTOR_ABI: &'static [(u64, &'static str)] = &[/*(64, "vis")*/]; -const HEXAGON_FEATURES_FOR_CORRECT_FIXED_LENGTH_VECTOR_ABI: &'static [(u64, &'static str)] = - &[(512, "hvx-length64b"), (1024, "hvx-length128b")]; +const HEXAGON_FEATURES_FOR_CORRECT_FIXED_LENGTH_VECTOR_ABI: &'static [(u64, &'static str)] = &[ + (512, "hvx-length64b"), // HvxVector in 64-byte mode + (1024, "hvx-length128b"), // HvxVector in 128-byte mode, or HvxVectorPair in 64-byte mode + (2048, "hvx-length128b"), // HvxVectorPair in 128-byte mode +]; const MIPS_FEATURES_FOR_CORRECT_FIXED_LENGTH_VECTOR_ABI: &'static [(u64, &'static str)] = &[(128, "msa")]; const CSKY_FEATURES_FOR_CORRECT_FIXED_LENGTH_VECTOR_ABI: &'static [(u64, &'static str)] = diff --git a/compiler/rustc_trait_selection/src/solve/inspect/analyse.rs b/compiler/rustc_trait_selection/src/solve/inspect/analyse.rs index ea8360c10b6fe..b8da64b9729a2 100644 --- a/compiler/rustc_trait_selection/src/solve/inspect/analyse.rs +++ b/compiler/rustc_trait_selection/src/solve/inspect/analyse.rs @@ -443,9 +443,10 @@ impl<'a, 'tcx> InspectGoal<'a, 'tcx> { pub(crate) fn visit_with>(&self, visitor: &mut V) -> V::Result { if self.depth < visitor.config().max_depth { try_visit!(visitor.visit_goal(self)); + V::Result::output() + } else { + visitor.on_recursion_limit() } - - V::Result::output() } } @@ -460,6 +461,10 @@ pub trait ProofTreeVisitor<'tcx> { } fn visit_goal(&mut self, goal: &InspectGoal<'_, 'tcx>) -> 
Self::Result; + + fn on_recursion_limit(&mut self) -> Self::Result { + Self::Result::output() + } } #[extension(pub trait InferCtxtProofTreeExt<'tcx>)] diff --git a/library/Cargo.lock b/library/Cargo.lock index 4801f92c63e5a..f35c6d9f32ae9 100644 --- a/library/Cargo.lock +++ b/library/Cargo.lock @@ -146,9 +146,9 @@ dependencies = [ [[package]] name = "libc" -version = "0.2.178" +version = "0.2.181" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "37c93d8daa9d8a012fd8ab92f088405fb202ea0b6ab73ee2482ae66af4f42091" +checksum = "459427e2af2b9c839b132acb702a1c654d95e10f8c326bfc2ad11310e458b1c5" dependencies = [ "rustc-std-workspace-core", ] diff --git a/library/std/Cargo.toml b/library/std/Cargo.toml index f4cc8edc0c1da..1438e3bf68c63 100644 --- a/library/std/Cargo.toml +++ b/library/std/Cargo.toml @@ -33,7 +33,7 @@ miniz_oxide = { version = "0.8.0", optional = true, default-features = false } addr2line = { version = "0.25.0", optional = true, default-features = false } [target.'cfg(not(all(windows, target_env = "msvc")))'.dependencies] -libc = { version = "0.2.178", default-features = false, features = [ +libc = { version = "0.2.181", default-features = false, features = [ 'rustc-dep-of-std', ], public = true } diff --git a/library/std/src/fs.rs b/library/std/src/fs.rs index a24baad615012..cf6f9594c0027 100644 --- a/library/std/src/fs.rs +++ b/library/std/src/fs.rs @@ -277,6 +277,7 @@ pub struct OpenOptions(fs_imp::OpenOptions); /// Representation of the various timestamps on a file. #[derive(Copy, Clone, Debug, Default)] #[stable(feature = "file_set_times", since = "1.75.0")] +#[must_use = "must be applied to a file via `File::set_times` to have any effect"] pub struct FileTimes(fs_imp::FileTimes); /// Representation of the various permissions on a file. 
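// The new `#[must_use]` on `FileTimes` above exists because its builder
// methods take and return the value: constructing one and never handing it to
// `File::set_times` has no effect, exactly as the lint message says. Typical
// stable-Rust usage; a bare `FileTimes::new().set_accessed(now);` statement
// would now trip the `unused_must_use` lint.
use std::fs::{File, FileTimes};
use std::time::SystemTime;

fn main() -> std::io::Result<()> {
    let file = File::create("example.txt")?;
    let now = SystemTime::now();
    // The value only takes effect once it reaches `set_times`.
    file.set_times(FileTimes::new().set_accessed(now).set_modified(now))
}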
diff --git a/tests/codegen-llvm/backchain.rs b/tests/codegen-llvm/backchain.rs new file mode 100644 index 0000000000000..fae494dcad1b4 --- /dev/null +++ b/tests/codegen-llvm/backchain.rs @@ -0,0 +1,15 @@ +//@ add-minicore +//@ compile-flags: -Copt-level=3 --crate-type=lib --target=s390x-unknown-linux-gnu -Ctarget-feature=+backchain +//@ needs-llvm-components: systemz +#![crate_type = "lib"] +#![feature(no_core, lang_items)] +#![no_core] + +extern crate minicore; +use minicore::*; + +#[no_mangle] +pub fn test_backchain() { + // CHECK: @test_backchain() unnamed_addr #0 +} +// CHECK: attributes #0 = { {{.*}}"target-features"="{{[^"]*}}+backchain{{.*}} } diff --git a/tests/pretty/delegation-inherit-attributes.pp b/tests/pretty/delegation-inherit-attributes.pp index 9d51a80da7b98..26ca5e99b885d 100644 --- a/tests/pretty/delegation-inherit-attributes.pp +++ b/tests/pretty/delegation-inherit-attributes.pp @@ -7,7 +7,7 @@ #![allow(incomplete_features)] #![feature(fn_delegation)] extern crate std; -#[prelude_import] +#[attr = PreludeImport] use std::prelude::rust_2021::*; extern crate to_reuse_functions; diff --git a/tests/pretty/delegation-inline-attribute.pp b/tests/pretty/delegation-inline-attribute.pp index f83ae47b81f6c..9f362fa863f82 100644 --- a/tests/pretty/delegation-inline-attribute.pp +++ b/tests/pretty/delegation-inline-attribute.pp @@ -5,7 +5,7 @@ #![allow(incomplete_features)] #![feature(fn_delegation)] extern crate std; -#[prelude_import] +#[attr = PreludeImport] use ::std::prelude::rust_2015::*; mod to_reuse { diff --git a/tests/pretty/hir-delegation.pp b/tests/pretty/hir-delegation.pp index 44a1deb750dc0..59491b6ebd7c1 100644 --- a/tests/pretty/hir-delegation.pp +++ b/tests/pretty/hir-delegation.pp @@ -5,7 +5,7 @@ #![allow(incomplete_features)] #![feature(fn_delegation)] extern crate std; -#[prelude_import] +#[attr = PreludeImport] use ::std::prelude::rust_2015::*; fn b(e: C) { } diff --git a/tests/pretty/hir-fn-params.pp b/tests/pretty/hir-fn-params.pp index 52310d5024cd6..15373bba24d3f 100644 --- a/tests/pretty/hir-fn-params.pp +++ b/tests/pretty/hir-fn-params.pp @@ -1,5 +1,5 @@ extern crate std; -#[prelude_import] +#[attr = PreludeImport] use ::std::prelude::rust_2015::*; //@ pretty-compare-only //@ pretty-mode:hir diff --git a/tests/pretty/hir-fn-variadic.pp b/tests/pretty/hir-fn-variadic.pp index 6356eec80e0e8..3837b260cc5d6 100644 --- a/tests/pretty/hir-fn-variadic.pp +++ b/tests/pretty/hir-fn-variadic.pp @@ -4,7 +4,7 @@ #![feature(c_variadic)] extern crate std; -#[prelude_import] +#[attr = PreludeImport] use ::std::prelude::rust_2015::*; extern "C" { diff --git a/tests/pretty/hir-if-else.pp b/tests/pretty/hir-if-else.pp index d3721e1758157..c2050e1f6e472 100644 --- a/tests/pretty/hir-if-else.pp +++ b/tests/pretty/hir-if-else.pp @@ -1,5 +1,5 @@ extern crate std; -#[prelude_import] +#[attr = PreludeImport] use ::std::prelude::rust_2015::*; //@ pretty-compare-only //@ pretty-mode:hir diff --git a/tests/pretty/hir-lifetimes.pp b/tests/pretty/hir-lifetimes.pp index ceb0f6e3b7c24..c35a40eed0c50 100644 --- a/tests/pretty/hir-lifetimes.pp +++ b/tests/pretty/hir-lifetimes.pp @@ -6,7 +6,7 @@ #![allow(unused)] extern crate std; -#[prelude_import] +#[attr = PreludeImport] use ::std::prelude::rust_2015::*; struct Foo<'a> { diff --git a/tests/pretty/hir-pretty-attr.pp b/tests/pretty/hir-pretty-attr.pp index a9d8b5e7e5770..be23294e8f7eb 100644 --- a/tests/pretty/hir-pretty-attr.pp +++ b/tests/pretty/hir-pretty-attr.pp @@ -1,5 +1,5 @@ extern crate std; -#[prelude_import] +#[attr = 
PreludeImport] use ::std::prelude::rust_2015::*; //@ pretty-compare-only //@ pretty-mode:hir diff --git a/tests/pretty/hir-pretty-loop.pp b/tests/pretty/hir-pretty-loop.pp index e6614ce318cce..f9bc7416e4bcf 100644 --- a/tests/pretty/hir-pretty-loop.pp +++ b/tests/pretty/hir-pretty-loop.pp @@ -1,5 +1,5 @@ extern crate std; -#[prelude_import] +#[attr = PreludeImport] use ::std::prelude::rust_2015::*; //@ pretty-compare-only //@ pretty-mode:hir diff --git a/tests/pretty/hir-struct-expr.pp b/tests/pretty/hir-struct-expr.pp index 198d7ad6a9b6b..2557aa42378af 100644 --- a/tests/pretty/hir-struct-expr.pp +++ b/tests/pretty/hir-struct-expr.pp @@ -1,5 +1,5 @@ extern crate std; -#[prelude_import] +#[attr = PreludeImport] use ::std::prelude::rust_2015::*; //@ pretty-compare-only //@ pretty-mode:hir diff --git a/tests/pretty/issue-4264.pp b/tests/pretty/issue-4264.pp index 568269644bb87..d73ad35d62228 100644 --- a/tests/pretty/issue-4264.pp +++ b/tests/pretty/issue-4264.pp @@ -1,5 +1,5 @@ extern crate std; -#[prelude_import] +#[attr = PreludeImport] use ::std::prelude::rust_2015::*; //@ pretty-compare-only //@ pretty-mode:hir,typed diff --git a/tests/pretty/issue-85089.pp b/tests/pretty/issue-85089.pp index 919573220fddd..ab386666da172 100644 --- a/tests/pretty/issue-85089.pp +++ b/tests/pretty/issue-85089.pp @@ -1,5 +1,5 @@ extern crate std; -#[prelude_import] +#[attr = PreludeImport] use ::std::prelude::rust_2015::*; // Test to print lifetimes on HIR pretty-printing. diff --git a/tests/pretty/pin-ergonomics-hir.pp b/tests/pretty/pin-ergonomics-hir.pp index cf9b6707ed2ff..e422edf54e0c5 100644 --- a/tests/pretty/pin-ergonomics-hir.pp +++ b/tests/pretty/pin-ergonomics-hir.pp @@ -5,7 +5,7 @@ #![feature(pin_ergonomics)] #![allow(dead_code, incomplete_features)] extern crate std; -#[prelude_import] +#[attr = PreludeImport] use ::std::prelude::rust_2015::*; use std::pin::Pin; diff --git a/tests/ui/abi/compatibility.rs b/tests/ui/abi/compatibility.rs index 84294ab343111..6071ad9bb435b 100644 --- a/tests/ui/abi/compatibility.rs +++ b/tests/ui/abi/compatibility.rs @@ -78,7 +78,6 @@ // FIXME: some targets are broken in various ways. // Hence there are `cfg` throughout this test to disable parts of it on those targets. -// sparc64: https://github.com/rust-lang/rust/issues/115336 // mips64: https://github.com/rust-lang/rust/issues/115404 extern crate minicore; @@ -246,7 +245,7 @@ test_transparent!(zst, Zst); test_transparent!(unit, ()); test_transparent!(enum_, Option); test_transparent!(enum_niched, Option<&'static i32>); -#[cfg(not(any(target_arch = "mips64", target_arch = "sparc64")))] +#[cfg(not(any(target_arch = "mips64")))] mod tuples { use super::*; // mixing in some floats since they often get special treatment @@ -260,7 +259,6 @@ mod tuples { test_transparent!(tuple, (i32, f32, i64, f64)); } // Some targets have special rules for arrays. 
-#[cfg(not(any(target_arch = "mips64", target_arch = "sparc64")))] mod arrays { use super::*; test_transparent!(empty_array, [u32; 0]); diff --git a/tests/ui/autodiff/incremental.rs b/tests/ui/autodiff/incremental.rs index a79059deaa778..00dd632712aba 100644 --- a/tests/ui/autodiff/incremental.rs +++ b/tests/ui/autodiff/incremental.rs @@ -1,6 +1,6 @@ //@ revisions: DEBUG RELEASE //@[RELEASE] compile-flags: -Zautodiff=Enable,NoTT -C opt-level=3 -Clto=fat -//@[DEBUG] compile-flags: -Zautodiff=Enable,NoTT -C opt-level=0 -Clto=fat -C debuginfo=2 +//@[DEBUG] compile-flags: -Zautodiff=Enable,NoTT -Copt-level=0 -Clto=fat -Cdebuginfo=2 -Ccodegen-units=8 //@ needs-enzyme //@ incremental //@ no-prefer-dynamic @@ -13,6 +13,10 @@ // dropped. We now use globals instead and add this test to verify that incremental // keeps working. Also testing debug mode while at it. +// We extended this test to use 8 codegen-units in debug mode and call an intrinsic like powi, +// rather than just simple arithmetic. This caused a compilation failure, since the definition of +// the intrinsic was not available in the same cgu as the function being differentiated. + use std::autodiff::autodiff_reverse; #[autodiff_reverse(bar, Duplicated, Duplicated)] @@ -20,7 +24,7 @@ pub fn foo(r: &[f64; 10], res: &mut f64) { let mut output = [0.0; 10]; output[0] = r[0]; output[1] = r[1] * r[2]; - output[2] = r[4] * r[5]; + output[2] = r[4] * r[5].powi(2); output[3] = r[2] * r[6]; output[4] = r[1] * r[7]; output[5] = r[2] * r[8]; diff --git a/tests/ui/issues/issue-3026.rs b/tests/ui/borrowck/borrow-box-in-map-3026.rs similarity index 73% rename from tests/ui/issues/issue-3026.rs rename to tests/ui/borrowck/borrow-box-in-map-3026.rs index 05dc46c3cc096..dd63075eecba2 100644 --- a/tests/ui/issues/issue-3026.rs +++ b/tests/ui/borrowck/borrow-box-in-map-3026.rs @@ -1,3 +1,5 @@ +//! Regression test for https://github.com/rust-lang/rust/issues/3026 + +//@ run-pass use std::collections::HashMap; diff --git a/tests/ui/consts/const-block-items/hir.stdout b/tests/ui/consts/const-block-items/hir.stdout index e2df04e98d4ea..2b7f0818556ff 100644 --- a/tests/ui/consts/const-block-items/hir.stdout +++ b/tests/ui/consts/const-block-items/hir.stdout @@ -3,7 +3,7 @@ #![feature(const_block_items)] extern crate std; -#[prelude_import] +#[attr = PreludeImport] use ::std::prelude::rust_2015::*; const _: () = diff --git a/tests/ui/feature-gates/feature-gate-try_blocks_heterogeneous.rs b/tests/ui/feature-gates/feature-gate-try_blocks_heterogeneous.rs index 9f0073eac9adc..5a20370e2eeb8 100644 --- a/tests/ui/feature-gates/feature-gate-try_blocks_heterogeneous.rs +++ b/tests/ui/feature-gates/feature-gate-try_blocks_heterogeneous.rs @@ -6,4 +6,11 @@ pub fn main() { x }; assert_eq!(try_result, Some(5)); + + // The heterogeneous form is new, so is gated even under a `cfg(false)`.
+    // See <https://github.com/rust-lang/rust/issues/149488>.
+
+    #[cfg(false)]
+    try bikeshed () {}
+    //~^ error `try bikeshed` expression is experimental
 }
diff --git a/tests/ui/feature-gates/feature-gate-try_blocks_heterogeneous.stderr b/tests/ui/feature-gates/feature-gate-try_blocks_heterogeneous.stderr
index 0d31dc507fdde..e448945b2ba81 100644
--- a/tests/ui/feature-gates/feature-gate-try_blocks_heterogeneous.stderr
+++ b/tests/ui/feature-gates/feature-gate-try_blocks_heterogeneous.stderr
@@ -12,6 +12,16 @@ LL | | };
    = help: add `#![feature(try_blocks_heterogeneous)]` to the crate attributes to enable
    = note: this compiler was built on YYYY-MM-DD; consider upgrading it if it is out of date
 
-error: aborting due to 1 previous error
+error[E0658]: `try bikeshed` expression is experimental
+  --> $DIR/feature-gate-try_blocks_heterogeneous.rs:14:5
+   |
+LL |     try bikeshed () {}
+   |     ^^^^^^^^^^^^^^^^^^
+   |
+   = note: see issue #149488 <https://github.com/rust-lang/rust/issues/149488> for more information
+   = help: add `#![feature(try_blocks_heterogeneous)]` to the crate attributes to enable
+   = note: this compiler was built on YYYY-MM-DD; consider upgrading it if it is out of date
+
+error: aborting due to 2 previous errors
 
 For more information about this error, try `rustc --explain E0658`.
diff --git a/tests/ui/inline-const/in-pat-recovery.rs b/tests/ui/inline-const/in-pat-recovery.rs
index d519217fad3b5..037c58d3bf98c 100644
--- a/tests/ui/inline-const/in-pat-recovery.rs
+++ b/tests/ui/inline-const/in-pat-recovery.rs
@@ -4,63 +4,63 @@ fn main() {
     match 1 {
         const { 1 + 7 } => {}
-        //~^ ERROR arbitrary expressions aren't allowed in patterns
+        //~^ ERROR const blocks cannot be used as patterns
         2 => {}
         _ => {}
     }
 
     match 5 {
         const { 1 } ..= 10 => {}
-        //~^ ERROR arbitrary expressions aren't allowed in patterns
+        //~^ ERROR const blocks cannot be used as patterns
         _ => {}
     }
 
     match 5 {
         1 ..= const { 10 } => {}
-        //~^ ERROR arbitrary expressions aren't allowed in patterns
+        //~^ ERROR const blocks cannot be used as patterns
         _ => {}
     }
 
     match 5 {
         const { 1 } ..= const { 10 } => {}
-        //~^ ERROR arbitrary expressions aren't allowed in patterns
-        //~| ERROR arbitrary expressions aren't allowed in patterns
+        //~^ ERROR const blocks cannot be used as patterns
+        //~| ERROR const blocks cannot be used as patterns
         _ => {}
     }
 
     match 5 {
         const { 1 } .. 10 => {}
-        //~^ ERROR arbitrary expressions aren't allowed in patterns
+        //~^ ERROR const blocks cannot be used as patterns
         _ => {}
     }
 
     match 5 {
         1 .. const { 10 } => {}
-        //~^ ERROR arbitrary expressions aren't allowed in patterns
+        //~^ ERROR const blocks cannot be used as patterns
         _ => {}
     }
 
     match 5 {
         const { 1 + 2 } ..= 10 => {}
-        //~^ ERROR arbitrary expressions aren't allowed in patterns
+        //~^ ERROR const blocks cannot be used as patterns
         _ => {}
     }
 
     match 5 {
         1 ..= const { 5 + 5 } => {}
-        //~^ ERROR arbitrary expressions aren't allowed in patterns
+        //~^ ERROR const blocks cannot be used as patterns
         _ => {}
     }
 
     match 5 {
         const { 3 } .. => {}
-        //~^ ERROR arbitrary expressions aren't allowed in patterns
+        //~^ ERROR const blocks cannot be used as patterns
         _ => {}
     }
 
     match 5 {
         ..= const { 7 } => {}
-        //~^ ERROR arbitrary expressions aren't allowed in patterns
+        //~^ ERROR const blocks cannot be used as patterns
         _ => {}
     }
 }
diff --git a/tests/ui/inline-const/in-pat-recovery.stderr b/tests/ui/inline-const/in-pat-recovery.stderr
index 376c43aaecca6..55adb5c49a6d1 100644
--- a/tests/ui/inline-const/in-pat-recovery.stderr
+++ b/tests/ui/inline-const/in-pat-recovery.stderr
@@ -1,88 +1,88 @@
-error: arbitrary expressions aren't allowed in patterns
-  --> $DIR/in-pat-recovery.rs:6:9
+error: const blocks cannot be used as patterns
+  --> $DIR/in-pat-recovery.rs:6:15
    |
 LL |         const { 1 + 7 } => {}
-   |         ^^^^^^^^^^^^^^^
+   |               ^^^^^^^^^
    |
    = help: use a named `const`-item or an `if`-guard (`x if x == const { ... }`) instead
 
-error: arbitrary expressions aren't allowed in patterns
-  --> $DIR/in-pat-recovery.rs:13:9
+error: const blocks cannot be used as patterns
+  --> $DIR/in-pat-recovery.rs:13:15
    |
 LL |         const { 1 } ..= 10 => {}
-   |         ^^^^^^^^^^^
+   |               ^^^^^
    |
    = help: use a named `const`-item or an `if`-guard (`x if x == const { ... }`) instead
 
-error: arbitrary expressions aren't allowed in patterns
-  --> $DIR/in-pat-recovery.rs:19:15
+error: const blocks cannot be used as patterns
+  --> $DIR/in-pat-recovery.rs:19:21
    |
 LL |         1 ..= const { 10 } => {}
-   |               ^^^^^^^^^^^^
+   |                     ^^^^^^
    |
    = help: use a named `const`-item or an `if`-guard (`x if x == const { ... }`) instead
 
-error: arbitrary expressions aren't allowed in patterns
-  --> $DIR/in-pat-recovery.rs:25:9
+error: const blocks cannot be used as patterns
+  --> $DIR/in-pat-recovery.rs:25:15
    |
 LL |         const { 1 } ..= const { 10 } => {}
-   |         ^^^^^^^^^^^
+   |               ^^^^^
    |
    = help: use a named `const`-item or an `if`-guard (`x if x == const { ... }`) instead
 
-error: arbitrary expressions aren't allowed in patterns
-  --> $DIR/in-pat-recovery.rs:25:25
+error: const blocks cannot be used as patterns
+  --> $DIR/in-pat-recovery.rs:25:31
    |
 LL |         const { 1 } ..= const { 10 } => {}
-   |                         ^^^^^^^^^^^^
+   |                               ^^^^^^
    |
    = help: use a named `const`-item or an `if`-guard (`x if x == const { ... }`) instead
 
-error: arbitrary expressions aren't allowed in patterns
-  --> $DIR/in-pat-recovery.rs:32:9
+error: const blocks cannot be used as patterns
+  --> $DIR/in-pat-recovery.rs:32:15
    |
 LL |         const { 1 } .. 10 => {}
-   |         ^^^^^^^^^^^
+   |               ^^^^^
    |
    = help: use a named `const`-item or an `if`-guard (`x if x == const { ... }`) instead
 
-error: arbitrary expressions aren't allowed in patterns
-  --> $DIR/in-pat-recovery.rs:38:14
+error: const blocks cannot be used as patterns
+  --> $DIR/in-pat-recovery.rs:38:20
    |
 LL |         1 .. const { 10 } => {}
-   |              ^^^^^^^^^^^^
+   |                    ^^^^^^
    |
    = help: use a named `const`-item or an `if`-guard (`x if x == const { ... }`) instead
 
-error: arbitrary expressions aren't allowed in patterns
-  --> $DIR/in-pat-recovery.rs:44:9
+error: const blocks cannot be used as patterns
+  --> $DIR/in-pat-recovery.rs:44:15
    |
 LL |         const { 1 + 2 } ..= 10 => {}
-   |         ^^^^^^^^^^^^^^^
+   |               ^^^^^^^^^
    |
    = help: use a named `const`-item or an `if`-guard (`x if x == const { ... }`) instead
 
-error: arbitrary expressions aren't allowed in patterns
-  --> $DIR/in-pat-recovery.rs:50:15
+error: const blocks cannot be used as patterns
+  --> $DIR/in-pat-recovery.rs:50:21
    |
 LL |         1 ..= const { 5 + 5 } => {}
-   |               ^^^^^^^^^^^^^^^
+   |                     ^^^^^^^^^
    |
    = help: use a named `const`-item or an `if`-guard (`x if x == const { ... }`) instead
 
-error: arbitrary expressions aren't allowed in patterns
-  --> $DIR/in-pat-recovery.rs:56:9
+error: const blocks cannot be used as patterns
+  --> $DIR/in-pat-recovery.rs:56:15
    |
 LL |         const { 3 } .. => {}
-   |         ^^^^^^^^^^^
+   |               ^^^^^
    |
    = help: use a named `const`-item or an `if`-guard (`x if x == const { ... }`) instead
 
-error: arbitrary expressions aren't allowed in patterns
-  --> $DIR/in-pat-recovery.rs:62:13
+error: const blocks cannot be used as patterns
+  --> $DIR/in-pat-recovery.rs:62:19
    |
 LL |         ..= const { 7 } => {}
-   |             ^^^^^^^^^^^
+   |                   ^^^^^
    |
    = help: use a named `const`-item or an `if`-guard (`x if x == const { ... }`) instead
diff --git a/tests/ui/inline-const/reject-const-block-pat-pre-expansion.rs b/tests/ui/inline-const/reject-const-block-pat-pre-expansion.rs
new file mode 100644
index 0000000000000..9af4925c0432a
--- /dev/null
+++ b/tests/ui/inline-const/reject-const-block-pat-pre-expansion.rs
@@ -0,0 +1,12 @@
+//! Regression test for : reject inline const
+//! patterns pre-expansion when possible.
+
+macro_rules! analyze { ($p:pat) => {}; }
+analyze!(const { 0 });
+//~^ ERROR: const blocks cannot be used as patterns
+
+#[cfg(false)]
+fn scope() { let const { 0 }; }
+//~^ ERROR: const blocks cannot be used as patterns
+
+fn main() {}
diff --git a/tests/ui/inline-const/reject-const-block-pat-pre-expansion.stderr b/tests/ui/inline-const/reject-const-block-pat-pre-expansion.stderr
new file mode 100644
index 0000000000000..034b97699396e
--- /dev/null
+++ b/tests/ui/inline-const/reject-const-block-pat-pre-expansion.stderr
@@ -0,0 +1,18 @@
+error: const blocks cannot be used as patterns
+  --> $DIR/reject-const-block-pat-pre-expansion.rs:9:24
+   |
+LL | fn scope() { let const { 0 }; }
+   |                        ^^^^^
+   |
+   = help: use a named `const`-item or an `if`-guard (`x if x == const { ... }`) instead
+
+error: const blocks cannot be used as patterns
+  --> $DIR/reject-const-block-pat-pre-expansion.rs:5:16
+   |
+LL | analyze!(const { 0 });
+   |                ^^^^^
+   |
+   = help: use a named `const`-item or an `if`-guard (`x if x == const { ... }`) instead
+
+error: aborting due to 2 previous errors
+
diff --git a/tests/ui/issues/issue-2904.rs b/tests/ui/issues/issue-2904.rs
deleted file mode 100644
index 1ae3a8ad656ea..0000000000000
--- a/tests/ui/issues/issue-2904.rs
+++ /dev/null
@@ -1,79 +0,0 @@
-//@ build-pass
-#![allow(unused_must_use)]
-#![allow(dead_code)]
-#![allow(unused_mut)]
-#![allow(non_camel_case_types)]
-
-// Map representation
-
-use std::fmt;
-use std::io::prelude::*;
-use square::{bot, wall, rock, lambda, closed_lift, open_lift, earth, empty};
-
-enum square {
-    bot,
-    wall,
-    rock,
-    lambda,
-    closed_lift,
-    open_lift,
-    earth,
-    empty
-}
-
-impl fmt::Debug for square {
-    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
-        write!(f, "{}", match *self {
-            bot => { "R".to_string() }
-            wall => { "#".to_string() }
-            rock => { "*".to_string() }
-            lambda => { "\\".to_string() }
-            closed_lift => { "L".to_string() }
-            open_lift => { "O".to_string() }
-            earth => { ".".to_string() }
-            empty => { " ".to_string() }
-        })
-    }
-}
-
-fn square_from_char(c: char) -> square {
-    match c {
-        'R' => { bot }
-        '#' => { wall }
-        '*' => { rock }
-        '\\' => { lambda }
-        'L' => { closed_lift }
-        'O' => { open_lift }
-        '.' => { earth }
-        ' ' => { empty }
-        _ => {
-            println!("invalid square: {}", c);
-            panic!()
-        }
-    }
-}
-
-fn read_board_grid<rdr: Read>(mut input: rdr) -> Vec<Vec<square>> {
-    let mut input: &mut dyn Read = &mut input;
-    let mut grid = Vec::new();
-    let mut line = [0; 10];
-    input.read(&mut line);
-    let mut row = Vec::new();
-    for c in &line {
-        row.push(square_from_char(*c as char))
-    }
-    grid.push(row);
-    let width = grid[0].len();
-    for row in &grid { assert_eq!(row.len(), width) }
-    grid
-}
-
-mod test {
-    #[test]
-    pub fn trivial_to_string() {
-        assert_eq!(lambda.to_string(), "\\")
-    }
-}
-
-pub fn main() {}
diff --git a/tests/ui/issues/issue-3121.rs b/tests/ui/issues/issue-3121.rs
deleted file mode 100644
index aa150f11cf400..0000000000000
--- a/tests/ui/issues/issue-3121.rs
+++ /dev/null
@@ -1,24 +0,0 @@
-//@ run-pass
-#![allow(dead_code)]
-#![allow(non_camel_case_types)]
-
-#[derive(Copy, Clone)]
-enum side { mayo, catsup, vinegar }
-#[derive(Copy, Clone)]
-enum order { hamburger, fries(side), shake }
-#[derive(Copy, Clone)]
-enum meal { to_go(order), for_here(order) }
-
-fn foo(m: Box<meal>, cond: bool) {
-    match *m {
-        meal::to_go(_) => { }
-        meal::for_here(_) if cond => {}
-        meal::for_here(order::hamburger) => {}
-        meal::for_here(order::fries(_s)) => {}
-        meal::for_here(order::shake) => {}
-    }
-}
-
-pub fn main() {
-    foo(Box::new(meal::for_here(order::hamburger)), true)
-}
diff --git a/tests/ui/macros/genercs-in-path-with-prettry-hir.stdout b/tests/ui/macros/genercs-in-path-with-prettry-hir.stdout
index 6e41432ad7df1..f01807ec06175 100644
--- a/tests/ui/macros/genercs-in-path-with-prettry-hir.stdout
+++ b/tests/ui/macros/genercs-in-path-with-prettry-hir.stdout
@@ -1,5 +1,5 @@
 extern crate std;
-#[prelude_import]
+#[attr = PreludeImport]
 use ::std::prelude::rust_2015::*;
 //@ compile-flags: -Zunpretty=hir
 //@ edition: 2015
diff --git a/tests/ui/macros/issue-118786.fixed b/tests/ui/macros/issue-118786.fixed
new file mode 100644
index 0000000000000..5d4006acd6c81
--- /dev/null
+++ b/tests/ui/macros/issue-118786.fixed
@@ -0,0 +1,20 @@
+#![allow(unused_macros)]
+//@ compile-flags: --crate-type lib
+//@ dont-require-annotations: NOTE
+//@ run-rustfix
+
+// Regression test for issue 118786
+
+macro_rules! make_macro {
+    ($macro_name:tt) => {
+        macro_rules! $macro_name {
+        //~^ ERROR macro expansion ignores `{` and any tokens following
+        //~| ERROR cannot find macro `macro_rules` in this scope
+        //~| NOTE put a macro name here
+            () => {}
+        }
+    }
+}
+
+make_macro!(meow);
+//~^ ERROR macros that expand to items must be delimited with braces or followed by a semicolon
diff --git a/tests/ui/macros/issue-118786.rs b/tests/ui/macros/issue-118786.rs
index 78fd6ab6edddf..b79a2c7eedd74 100644
--- a/tests/ui/macros/issue-118786.rs
+++ b/tests/ui/macros/issue-118786.rs
@@ -1,5 +1,7 @@
-//@ compile-flags: --crate-type lib -O -C debug-assertions=yes
+#![allow(unused_macros)]
+//@ compile-flags: --crate-type lib
 //@ dont-require-annotations: NOTE
+//@ run-rustfix
 
 // Regression test for issue 118786
diff --git a/tests/ui/macros/issue-118786.stderr b/tests/ui/macros/issue-118786.stderr
index ddec281b82325..02b26e5a1f31b 100644
--- a/tests/ui/macros/issue-118786.stderr
+++ b/tests/ui/macros/issue-118786.stderr
@@ -1,21 +1,17 @@
 error: macros that expand to items must be delimited with braces or followed by a semicolon
-  --> $DIR/issue-118786.rs:17:13
+  --> $DIR/issue-118786.rs:19:13
    |
 LL | make_macro!((meow));
    |             ^^^^^^
    |
-help: change the delimiters to curly braces
+help: to define a macro, remove the parentheses around the macro name
    |
 LL - make_macro!((meow));
-LL + make_macro!({meow});
+LL + make_macro!(meow);
    |
-help: add a semicolon
-   |
-LL |         macro_rules! $macro_name; {
-   |                                  +
 
 error: macro expansion ignores `{` and any tokens following
-  --> $DIR/issue-118786.rs:8:34
+  --> $DIR/issue-118786.rs:10:34
    |
 LL |         macro_rules! $macro_name {
    |                                  ^
@@ -26,7 +22,7 @@ LL | make_macro!((meow));
    = note: the usage of `make_macro!` is likely invalid in item context
 
 error: cannot find macro `macro_rules` in this scope
-  --> $DIR/issue-118786.rs:8:9
+  --> $DIR/issue-118786.rs:10:9
    |
 LL |         macro_rules! $macro_name {
    |         ^^^^^^^^^^^
@@ -35,7 +31,7 @@ LL | make_macro!((meow));
    |                    ------------------- in this macro invocation
    |
 note: maybe you have forgotten to define a name for this `macro_rules!`
-  --> $DIR/issue-118786.rs:8:20
+  --> $DIR/issue-118786.rs:10:20
    |
 LL |         macro_rules! $macro_name {
    |                    ^ put a macro name here
diff --git a/tests/ui/match/issue-82392.stdout b/tests/ui/match/issue-82392.stdout
index d44ffbe216716..297a6f4827909 100644
--- a/tests/ui/match/issue-82392.stdout
+++ b/tests/ui/match/issue-82392.stdout
@@ -1,5 +1,5 @@
 extern crate std;
-#[prelude_import]
+#[attr = PreludeImport]
 use ::std::prelude::rust_2015::*;
 // https://github.com/rust-lang/rust/issues/82329
 //@ compile-flags: -Zunpretty=hir,typed
diff --git a/tests/ui/match/match-nested-enum-box-3121.rs b/tests/ui/match/match-nested-enum-box-3121.rs
new file mode 100644
index 0000000000000..f2ab4bf080750
--- /dev/null
+++ b/tests/ui/match/match-nested-enum-box-3121.rs
@@ -0,0 +1,37 @@
+//! Regression test for https://github.com/rust-lang/rust/issues/3121
+
+//@ run-pass
+#![allow(dead_code)]
+#![allow(non_camel_case_types)]
+
+#[derive(Copy, Clone)]
+enum side {
+    mayo,
+    catsup,
+    vinegar,
+}
+#[derive(Copy, Clone)]
+enum order {
+    hamburger,
+    fries(side),
+    shake,
+}
+#[derive(Copy, Clone)]
+enum meal {
+    to_go(order),
+    for_here(order),
+}
+
+fn foo(m: Box<meal>, cond: bool) {
+    match *m {
+        meal::to_go(_) => {}
+        meal::for_here(_) if cond => {}
+        meal::for_here(order::hamburger) => {}
+        meal::for_here(order::fries(_s)) => {}
+        meal::for_here(order::shake) => {}
+    }
+}
+
+pub fn main() {
+    foo(Box::new(meal::for_here(order::hamburger)), true)
+}
diff --git a/tests/ui/issues/issue-3029.rs b/tests/ui/panics/vec-extend-after-panic-3029.rs
similarity index 74%
rename from tests/ui/issues/issue-3029.rs
rename to tests/ui/panics/vec-extend-after-panic-3029.rs
index 22d0906ccf701..3ae708d91e197 100644
--- a/tests/ui/issues/issue-3029.rs
+++ b/tests/ui/panics/vec-extend-after-panic-3029.rs
@@ -1,3 +1,5 @@
+//! Regression test for https://github.com/rust-lang/rust/issues/3029
+
 //@ run-fail
 //@ error-pattern:so long
 //@ needs-subprocess
diff --git a/tests/ui/parser/macro-rules-paren-name-issue-150899.rs b/tests/ui/parser/macro-rules-paren-name-issue-150899.rs
new file mode 100644
index 0000000000000..174a6e7e7de8f
--- /dev/null
+++ b/tests/ui/parser/macro-rules-paren-name-issue-150899.rs
@@ -0,0 +1,7 @@
+macro_rules!(i_think_the_name_should_go_here) {
+//~^ ERROR macros that expand to items must be delimited with braces or followed by a semicolon
+//~| ERROR expected item, found `{`
+    () => {}
+}
+
+fn main() {}
diff --git a/tests/ui/parser/macro-rules-paren-name-issue-150899.stderr b/tests/ui/parser/macro-rules-paren-name-issue-150899.stderr
new file mode 100644
index 0000000000000..f5b6ff40f27ea
--- /dev/null
+++ b/tests/ui/parser/macro-rules-paren-name-issue-150899.stderr
@@ -0,0 +1,22 @@
+error: macros that expand to items must be delimited with braces or followed by a semicolon
+  --> $DIR/macro-rules-paren-name-issue-150899.rs:1:13
+   |
+LL | macro_rules!(i_think_the_name_should_go_here) {
+   |             ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+   |
+help: to define a macro, remove the parentheses around the macro name
+   |
+LL - macro_rules!(i_think_the_name_should_go_here) {
+LL + macro_rules! i_think_the_name_should_go_here {
+   |
+
+error: expected item, found `{`
+  --> $DIR/macro-rules-paren-name-issue-150899.rs:1:47
+   |
+LL | macro_rules!(i_think_the_name_should_go_here) {
+   |                                               ^ expected item
+   |
+   = note: for a full list of items that can appear in modules, see <https://doc.rust-lang.org/reference/items.html>
+
+error: aborting due to 2 previous errors
+
diff --git a/tests/ui/resolve/enum-variant-import-2904.rs b/tests/ui/resolve/enum-variant-import-2904.rs
new file mode 100644
index 0000000000000..3272ee5fb5005
--- /dev/null
+++ b/tests/ui/resolve/enum-variant-import-2904.rs
@@ -0,0 +1,101 @@
+//! Regression test for https://github.com/rust-lang/rust/issues/2904
+
+//@ build-pass
+#![allow(unused_must_use)]
+#![allow(dead_code)]
+#![allow(unused_mut)]
+
+// Map representation
+
+use Square::{Bot, ClosedLift, Earth, Empty, Lambda, OpenLift, Rock, Wall};
+use std::fmt;
+use std::io::prelude::*;
+
+enum Square {
+    Bot,
+    Wall,
+    Rock,
+    Lambda,
+    ClosedLift,
+    OpenLift,
+    Earth,
+    Empty,
+}
+
+impl fmt::Debug for Square {
+    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+        write!(
+            f,
+            "{}",
+            match *self {
+                Bot => {
+                    "R".to_string()
+                }
+                Wall => {
+                    "#".to_string()
+                }
+                Rock => {
+                    "*".to_string()
+                }
+                Lambda => {
+                    "\\".to_string()
+                }
+                ClosedLift => {
+                    "L".to_string()
+                }
+                OpenLift => {
+                    "O".to_string()
+                }
+                Earth => {
+                    ".".to_string()
+                }
+                Empty => {
+                    " ".to_string()
+                }
+            }
+        )
+    }
+}
+
+fn square_from_char(c: char) -> Square {
+    match c {
+        'R' => Bot,
+        '#' => Wall,
+        '*' => Rock,
+        '\\' => Lambda,
+        'L' => ClosedLift,
+        'O' => OpenLift,
+        '.' => Earth,
+        ' ' => Empty,
+        _ => {
+            println!("invalid Square: {}", c);
+            panic!()
+        }
+    }
+}
+
+fn read_board_grid<Rdr: Read>(mut input: Rdr) -> Vec<Vec<Square>> {
+    let mut input: &mut dyn Read = &mut input;
+    let mut grid = Vec::new();
+    let mut line = [0; 10];
+    input.read(&mut line);
+    let mut row = Vec::new();
+    for c in &line {
+        row.push(square_from_char(*c as char))
+    }
+    grid.push(row);
+    let width = grid[0].len();
+    for row in &grid {
+        assert_eq!(row.len(), width)
+    }
+    grid
+}
+
+mod test {
+    #[test]
+    pub fn trivial_to_string() {
+        assert_eq!(Lambda.to_string(), "\\")
+    }
+}
+
+pub fn main() {}
diff --git a/tests/ui/issues/issue-2708.rs b/tests/ui/resolve/struct-function-same-name-2708.rs
similarity index 69%
rename from tests/ui/issues/issue-2708.rs
rename to tests/ui/resolve/struct-function-same-name-2708.rs
index 09d19f87aa647..729a5819ae4e5 100644
--- a/tests/ui/issues/issue-2708.rs
+++ b/tests/ui/resolve/struct-function-same-name-2708.rs
@@ -1,15 +1,13 @@
+//! Regression test for https://github.com/rust-lang/rust/issues/2708
+
 //@ run-pass
 #![allow(dead_code)]
 #![allow(non_snake_case)]
-
-
-
 struct Font {
     fontbuf: usize,
     cairo_font: usize,
     font_dtor: usize,
-
 }
 
 impl Drop for Font {
@@ -17,11 +15,7 @@ impl Drop for Font {
 }
 
 fn Font() -> Font {
-    Font {
-        fontbuf: 0,
-        cairo_font: 0,
-        font_dtor: 0
-    }
+    Font { fontbuf: 0, cairo_font: 0, font_dtor: 0 }
 }
 
 pub fn main() {
diff --git a/tests/ui/issues/issue-2895.rs b/tests/ui/structs/struct-size-with-drop-2895.rs
similarity index 84%
rename from tests/ui/issues/issue-2895.rs
rename to tests/ui/structs/struct-size-with-drop-2895.rs
index 6301a86375344..9540d340ff68b 100644
--- a/tests/ui/issues/issue-2895.rs
+++ b/tests/ui/structs/struct-size-with-drop-2895.rs
@@ -1,10 +1,12 @@
+//! Regression test for https://github.com/rust-lang/rust/issues/2895
+
 //@ run-pass
 #![allow(dead_code)]
 
 use std::mem;
 
 struct Cat {
-    x: isize
+    x: isize,
 }
 
 struct Kitty {
diff --git a/tests/ui/traits/next-solver/coercion/unfulfilled-unsize-coercion-recursion-limit.rs b/tests/ui/traits/next-solver/coercion/unfulfilled-unsize-coercion-recursion-limit.rs
new file mode 100644
index 0000000000000..24b32db3060d3
--- /dev/null
+++ b/tests/ui/traits/next-solver/coercion/unfulfilled-unsize-coercion-recursion-limit.rs
@@ -0,0 +1,35 @@
+//@ check-pass
+//@ compile-flags: -Znext-solver
+
+// A regression test for https://github.com/rust-lang/trait-system-refactor-initiative/issues/266.
+
+// Ensure that we do not accidentally try unfulfilled unsize coercions due to hitting recursion
+// limits while trying to find the right fulfillment error source.
+
+fn argument_coercion<U>(_: &U) {}
+
+pub fn test() {
+    argument_coercion(&{
+        Nested(0.0, 0.0)
+            .add(0.0)
+            .add(0.0)
+            .add(0.0)
+            .add(0.0)
+            .add(0.0)
+            .add(0.0)
+            .add(0.0)
+            .add(0.0)
+            .add(0.0)
+            .add(0.0)
+            .add(0.0)
+    });
+}
+
+struct Nested<T, R>(T, R);
+
+impl<T, R> Nested<T, R> {
+    fn add<U>(self, value: U) -> Nested<U, Nested<T, R>> {
+        Nested(value, self)
+    }
+}
+
+fn main() {}
diff --git a/tests/ui/traits/next-solver/coercion/unsize-coercion-recursion-limit.rs b/tests/ui/traits/next-solver/coercion/unsize-coercion-recursion-limit.rs
new file mode 100644
index 0000000000000..42802e85cda10
--- /dev/null
+++ b/tests/ui/traits/next-solver/coercion/unsize-coercion-recursion-limit.rs
@@ -0,0 +1,25 @@
+//@ check-pass
+//@ compile-flags: -Znext-solver
+
+// A test to ensure that unsized coercion is not aborted when visiting a nested goal that
+// exceeds the recursion limit and evaluates to `Certainty::Maybe`.
+// See https://github.com/rust-lang/rust/pull/152444.
+
+#![allow(warnings)]
+
+struct W<T>(T);
+type Four<T> = W<W<W<W<T>>>>;
+type Sixteen<T> = Four<Four<Four<Four<T>>>>;
+
+fn ret<T>(x: T) -> Sixteen<T> {
+    todo!();
+}
+
+fn please_coerce() {
+    let mut y = Default::default();
+    let x = ret(y);
+    let _: &Sixteen<u32> = &x;
+    y = 1u32;
+}
+
+fn main() {}
diff --git a/tests/ui/issues/issue-2935.rs b/tests/ui/traits/trait-object-method-call-2935.rs
similarity index 60%
rename from tests/ui/issues/issue-2935.rs
rename to tests/ui/traits/trait-object-method-call-2935.rs
index bcc25f6187b5d..ea24aae89462c 100644
--- a/tests/ui/issues/issue-2935.rs
+++ b/tests/ui/traits/trait-object-method-call-2935.rs
@@ -1,3 +1,5 @@
+//! Regression test for https://github.com/rust-lang/rust/issues/2935
+
 //@ run-pass
 #![allow(dead_code)]
 #![allow(non_camel_case_types)]
@@ -11,14 +13,14 @@ trait it {
 }
 
 impl it for t {
-    fn f(&self) { }
+    fn f(&self) {}
 }
 
 pub fn main() {
-    //    let x = ({a: 4} as it);
-    //    let y = box ({a: 4});
-    //    let z = box ({a: 4} as it);
-    //    let z = box ({a: true} as it);
+    // let x = ({a: 4} as it);
+    // let y = box ({a: 4});
+    // let z = box ({a: 4} as it);
+    // let z = box ({a: true} as it);
     let z: Box<_> = Box::new(Box::new(true) as Box<dyn it>);
     // x.f();
     // y.f();
diff --git a/tests/ui/issues/issue-3052.rs b/tests/ui/traits/trait-object-type-alias-3052.rs
similarity index 69%
rename from tests/ui/issues/issue-3052.rs
rename to tests/ui/traits/trait-object-type-alias-3052.rs
index ab3519fe7147b..e601c76713dcf 100644
--- a/tests/ui/issues/issue-3052.rs
+++ b/tests/ui/traits/trait-object-type-alias-3052.rs
@@ -1,3 +1,5 @@
+//! Regression test for https://github.com/rust-lang/rust/issues/3052
+
 //@ run-pass
 #![allow(dead_code)]
 
@@ -8,5 +10,4 @@ fn f() -> Option {
     Some(mock_connection)
 }
 
-pub fn main() {
-}
+pub fn main() {}
diff --git a/tests/ui/try-block/try-block-homogeneous-pre-expansion.rs b/tests/ui/try-block/try-block-homogeneous-pre-expansion.rs
new file mode 100644
index 0000000000000..980f97ca0672e
--- /dev/null
+++ b/tests/ui/try-block/try-block-homogeneous-pre-expansion.rs
@@ -0,0 +1,12 @@
+//@ check-pass
+//@ edition: 2018
+
+// For historical reasons this is only a warning, not an error.
+// See
+
+fn main() {
+    #[cfg(false)]
+    try {}
+    //~^ warn `try` blocks are unstable
+    //~| warn unstable syntax can change at any point
+}
diff --git a/tests/ui/try-block/try-block-homogeneous-pre-expansion.stderr b/tests/ui/try-block/try-block-homogeneous-pre-expansion.stderr
new file mode 100644
index 0000000000000..dc92d7e64aff3
--- /dev/null
+++ b/tests/ui/try-block/try-block-homogeneous-pre-expansion.stderr
@@ -0,0 +1,14 @@
+warning: `try` blocks are unstable
+  --> $DIR/try-block-homogeneous-pre-expansion.rs:9:5
+   |
+LL |     try {}
+   |     ^^^^^^
+   |
+   = note: see issue #31436 <https://github.com/rust-lang/rust/issues/31436> for more information
+   = help: add `#![feature(try_blocks)]` to the crate attributes to enable
+   = note: this compiler was built on YYYY-MM-DD; consider upgrading it if it is out of date
+   = warning: unstable syntax can change at any point in the future, causing a hard error!
+   = note: for more information, see issue #65860 <https://github.com/rust-lang/rust/issues/65860>
+
+warning: 1 warning emitted
+
diff --git a/tests/ui/type-alias-impl-trait/issue-60662.stdout b/tests/ui/type-alias-impl-trait/issue-60662.stdout
index d1f337819f8b0..dff748b43119d 100644
--- a/tests/ui/type-alias-impl-trait/issue-60662.stdout
+++ b/tests/ui/type-alias-impl-trait/issue-60662.stdout
@@ -4,7 +4,7 @@
 #![feature(type_alias_impl_trait)]
 
 extern crate std;
-#[prelude_import]
+#[attr = PreludeImport]
 use ::std::prelude::rust_2015::*;
 
 trait Animal { }
diff --git a/tests/ui/unpretty/bad-literal.stdout b/tests/ui/unpretty/bad-literal.stdout
index 267d59a868e41..711f3a9bdf875 100644
--- a/tests/ui/unpretty/bad-literal.stdout
+++ b/tests/ui/unpretty/bad-literal.stdout
@@ -1,5 +1,5 @@
 extern crate std;
-#[prelude_import]
+#[attr = PreludeImport]
 use ::std::prelude::rust_2015::*;
 //@ compile-flags: -Zunpretty=hir
 //@ check-fail
diff --git a/tests/ui/unpretty/debug-fmt-hir.stdout b/tests/ui/unpretty/debug-fmt-hir.stdout
index 342dc144909ce..1f0a6e2e334fd 100644
--- a/tests/ui/unpretty/debug-fmt-hir.stdout
+++ b/tests/ui/unpretty/debug-fmt-hir.stdout
@@ -1,5 +1,5 @@
 extern crate std;
-#[prelude_import]
+#[attr = PreludeImport]
 use ::std::prelude::rust_2015::*;
 //@ compile-flags: -Zunpretty=hir
 //@ check-pass
diff --git a/tests/ui/unpretty/deprecated-attr.stdout b/tests/ui/unpretty/deprecated-attr.stdout
index 32aac13586d5a..32d5cf06a3d67 100644
--- a/tests/ui/unpretty/deprecated-attr.stdout
+++ b/tests/ui/unpretty/deprecated-attr.stdout
@@ -1,5 +1,5 @@
 extern crate std;
-#[prelude_import]
+#[attr = PreludeImport]
 use ::std::prelude::rust_2015::*;
 //@ compile-flags: -Zunpretty=hir
 //@ check-pass
diff --git a/tests/ui/unpretty/diagnostic-attr.stdout b/tests/ui/unpretty/diagnostic-attr.stdout
index 25349681b02a3..0b4b5f9193435 100644
--- a/tests/ui/unpretty/diagnostic-attr.stdout
+++ b/tests/ui/unpretty/diagnostic-attr.stdout
@@ -1,5 +1,5 @@
 extern crate std;
-#[prelude_import]
+#[attr = PreludeImport]
 use ::std::prelude::rust_2015::*;
 //@ compile-flags: -Zunpretty=hir
 //@ check-pass
diff --git a/tests/ui/unpretty/exhaustive-asm.hir.stdout b/tests/ui/unpretty/exhaustive-asm.hir.stdout
index ed98191e1dd56..c44db08653967 100644
--- a/tests/ui/unpretty/exhaustive-asm.hir.stdout
+++ b/tests/ui/unpretty/exhaustive-asm.hir.stdout
@@ -1,5 +1,5 @@
 extern crate std;
-#[prelude_import]
+#[attr = PreludeImport]
 use std::prelude::rust_2024::*;
 //@ revisions: expanded hir
 //@[expanded]compile-flags: -Zunpretty=expanded
diff --git a/tests/ui/unpretty/exhaustive.hir.stdout b/tests/ui/unpretty/exhaustive.hir.stdout
index f309aa0b5fb67..7ee848491d6e5 100644
--- a/tests/ui/unpretty/exhaustive.hir.stdout
+++ b/tests/ui/unpretty/exhaustive.hir.stdout
@@ -31,7 +31,7 @@
 #![feature(yeet_expr)]
 #![allow(incomplete_features)]
 extern crate std;
-#[prelude_import]
+#[attr = PreludeImport]
 use std::prelude::rust_2024::*;
 
 mod prelude {
@@ -46,7 +46,7 @@ mod prelude {
     }
 }
 
-#[prelude_import]
+#[attr = PreludeImport]
 use self::prelude::*;
 
 /// inner single-line doc comment
diff --git a/tests/ui/unpretty/flattened-format-args.stdout b/tests/ui/unpretty/flattened-format-args.stdout
index 156dcd68a674e..008d69c9b9ef7 100644
--- a/tests/ui/unpretty/flattened-format-args.stdout
+++ b/tests/ui/unpretty/flattened-format-args.stdout
@@ -1,5 +1,5 @@
 extern crate std;
-#[prelude_import]
+#[attr = PreludeImport]
 use ::std::prelude::rust_2015::*;
 //@ compile-flags: -Zunpretty=hir -Zflatten-format-args=yes
 //@ check-pass
diff --git a/tests/ui/unpretty/let-else-hir.stdout b/tests/ui/unpretty/let-else-hir.stdout
index cc19f392c3a43..73d627ef997a1 100644
--- a/tests/ui/unpretty/let-else-hir.stdout
+++ b/tests/ui/unpretty/let-else-hir.stdout
@@ -1,5 +1,5 @@
 extern crate std;
-#[prelude_import]
+#[attr = PreludeImport]
 use ::std::prelude::rust_2015::*;
 //@ compile-flags: -Zunpretty=hir
 //@ check-pass
diff --git a/tests/ui/unpretty/self-hir.stdout b/tests/ui/unpretty/self-hir.stdout
index c973e143275cd..b14c583f4f387 100644
--- a/tests/ui/unpretty/self-hir.stdout
+++ b/tests/ui/unpretty/self-hir.stdout
@@ -1,5 +1,5 @@
 extern crate std;
-#[prelude_import]
+#[attr = PreludeImport]
 use ::std::prelude::rust_2015::*;
 //@ compile-flags: -Zunpretty=hir
 //@ check-pass
diff --git a/tests/ui/unpretty/struct-exprs-tuple-call-pretty-printing.stdout b/tests/ui/unpretty/struct-exprs-tuple-call-pretty-printing.stdout
index 8b6ca4f672dc4..c990837d2138a 100644
--- a/tests/ui/unpretty/struct-exprs-tuple-call-pretty-printing.stdout
+++ b/tests/ui/unpretty/struct-exprs-tuple-call-pretty-printing.stdout
@@ -5,7 +5,7 @@
 #![expect(incomplete_features)]
 #![allow(dead_code)]
 
 extern crate std;
-#[prelude_import]
+#[attr = PreludeImport]
 use ::std::prelude::rust_2015::*;
 use std::marker::ConstParamTy;
diff --git a/tests/ui/unpretty/unpretty-expr-fn-arg.stdout b/tests/ui/unpretty/unpretty-expr-fn-arg.stdout
index 41d62d11aaa61..19bfe92e3b27f 100644
--- a/tests/ui/unpretty/unpretty-expr-fn-arg.stdout
+++ b/tests/ui/unpretty/unpretty-expr-fn-arg.stdout
@@ -9,7 +9,7 @@
 //@ edition: 2015
 #![allow(dead_code)]
 
 extern crate std;
-#[prelude_import]
+#[attr = PreludeImport]
 use ::std::prelude::rust_2015::*;
 fn main() ({ } as ())
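A note on the `const blocks cannot be used as patterns` diagnostic exercised above: its help line proposes two rewrites. A minimal sketch of both, not part of this diff; the `EIGHT` and `classify` names are illustrative, and the inline `const` block in the guard is an ordinary (stable) const expression:

```rust
// Both rewrites suggested by the new help message, side by side.
const EIGHT: i32 = 1 + 7;

fn classify(x: i32) -> &'static str {
    match x {
        // A named `const`-item is a valid pattern.
        EIGHT => "named const",
        // A const block is fine as an *expression* inside an `if`-guard.
        n if n == const { 1 + 3 } => "if-guard",
        _ => "other",
    }
}

fn main() {
    assert_eq!(classify(8), "named const");
    assert_eq!(classify(4), "if-guard");
}
```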
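Similarly, the new `macro_rules!` recovery ("remove the parentheses around the macro name", seen in both macro test suites above) suggests a form that parses cleanly. A small sketch using the `meow` name from the test:

```rust
// With the parentheses around the name removed, this is an ordinary
// macro_rules definition; the invocation below expands to nothing.
macro_rules! meow {
    () => {};
}

fn main() {
    meow!();
}
```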