From 3c9b07643a11fe4049826a45382b08a6eaf722fd Mon Sep 17 00:00:00 2001 From: Pietro Albini Date: Thu, 25 May 2023 09:39:19 +0200 Subject: [PATCH 01/14] include test suite metadata in build metrics --- src/bootstrap/metrics.rs | 77 ++++++++++++++++++++++++++++------------ src/bootstrap/test.rs | 49 +++++++++++++++++++++++++ 2 files changed, 104 insertions(+), 22 deletions(-) diff --git a/src/bootstrap/metrics.rs b/src/bootstrap/metrics.rs index e19d56ccd6adc..b119cf2bc953a 100644 --- a/src/bootstrap/metrics.rs +++ b/src/bootstrap/metrics.rs @@ -57,7 +57,7 @@ impl BuildMetrics { duration_excluding_children_sec: Duration::ZERO, children: Vec::new(), - tests: Vec::new(), + test_suites: Vec::new(), }); } @@ -84,6 +84,17 @@ impl BuildMetrics { } } + pub(crate) fn begin_test_suite(&self, metadata: TestSuiteMetadata, builder: &Builder<'_>) { + // Do not record dry runs, as they'd be duplicates of the actual steps. + if builder.config.dry_run() { + return; + } + + let mut state = self.state.borrow_mut(); + let step = state.running_steps.last_mut().unwrap(); + step.test_suites.push(TestSuite { metadata, tests: Vec::new() }); + } + pub(crate) fn record_test(&self, name: &str, outcome: TestOutcome, builder: &Builder<'_>) { // Do not record dry runs, as they'd be duplicates of the actual steps. if builder.config.dry_run() { @@ -91,12 +102,15 @@ impl BuildMetrics { } let mut state = self.state.borrow_mut(); - state - .running_steps - .last_mut() - .unwrap() - .tests - .push(Test { name: name.to_string(), outcome }); + let step = state.running_steps.last_mut().unwrap(); + + if let Some(test_suite) = step.test_suites.last_mut() { + test_suite.tests.push(Test { name: name.to_string(), outcome }); + } else { + panic!( + "metrics.record_test() called without calling metrics.record_test_suite() first" + ); + } } fn collect_stats(&self, state: &mut MetricsState) { @@ -159,11 +173,7 @@ impl BuildMetrics { fn prepare_json_step(&self, step: StepMetrics) -> JsonNode { let mut children = Vec::new(); children.extend(step.children.into_iter().map(|child| self.prepare_json_step(child))); - children.extend( - step.tests - .into_iter() - .map(|test| JsonNode::Test { name: test.name, outcome: test.outcome }), - ); + children.extend(step.test_suites.into_iter().map(|suite| JsonNode::TestSuite(suite))); JsonNode::RustbuildStep { type_: step.type_, @@ -198,12 +208,7 @@ struct StepMetrics { duration_excluding_children_sec: Duration, children: Vec, - tests: Vec, -} - -struct Test { - name: String, - outcome: TestOutcome, + test_suites: Vec, } #[derive(Serialize, Deserialize)] @@ -237,13 +242,41 @@ enum JsonNode { children: Vec, }, - Test { - name: String, - #[serde(flatten)] - outcome: TestOutcome, + TestSuite(TestSuite), +} + +#[derive(Serialize, Deserialize)] +struct TestSuite { + metadata: TestSuiteMetadata, + tests: Vec, +} + +#[derive(Serialize, Deserialize)] +#[serde(tag = "kind", rename_all = "snake_case")] +pub(crate) enum TestSuiteMetadata { + Crate { + crates: Vec, + target: String, + host: String, + stage: u32, + }, + Compiletest { + suite: String, + mode: String, + compare_mode: Option, + target: String, + host: String, + stage: u32, }, } +#[derive(Serialize, Deserialize)] +pub(crate) struct Test { + name: String, + #[serde(flatten)] + outcome: TestOutcome, +} + #[derive(Serialize, Deserialize)] #[serde(tag = "outcome", rename_all = "snake_case")] pub(crate) enum TestOutcome { diff --git a/src/bootstrap/test.rs b/src/bootstrap/test.rs index 2b72d6c48eb75..f64b5f965237e 100644 --- a/src/bootstrap/test.rs +++ 
b/src/bootstrap/test.rs @@ -317,6 +317,17 @@ impl Step for Cargo { cargo.env("CARGO_TEST_DISABLE_NIGHTLY", "1"); cargo.env("PATH", &path_for_cargo(builder, compiler)); + #[cfg(feature = "build-metrics")] + builder.metrics.begin_test_suite( + crate::metrics::TestSuiteMetadata::Crate { + crates: vec!["cargo".into()], + target: self.host.triple.to_string(), + host: self.host.triple.to_string(), + stage: self.stage, + }, + builder, + ); + let _time = util::timeit(&builder); add_flags_and_try_run_tests(builder, &mut cargo); } @@ -1759,6 +1770,19 @@ note: if you're sure you want to do this, please open an issue as to why. In the builder.ci_env.force_coloring_in_ci(&mut cmd); + #[cfg(feature = "build-metrics")] + builder.metrics.begin_test_suite( + crate::metrics::TestSuiteMetadata::Compiletest { + suite: suite.into(), + mode: mode.into(), + compare_mode: None, + target: self.target.triple.to_string(), + host: self.compiler.host.triple.to_string(), + stage: self.compiler.stage, + }, + builder, + ); + builder.info(&format!( "Check compiletest suite={} mode={} ({} -> {})", suite, mode, &compiler.host, target @@ -1768,6 +1792,20 @@ note: if you're sure you want to do this, please open an issue as to why. In the if let Some(compare_mode) = compare_mode { cmd.arg("--compare-mode").arg(compare_mode); + + #[cfg(feature = "build-metrics")] + builder.metrics.begin_test_suite( + crate::metrics::TestSuiteMetadata::Compiletest { + suite: suite.into(), + mode: mode.into(), + compare_mode: Some(compare_mode.into()), + target: self.target.triple.to_string(), + host: self.compiler.host.triple.to_string(), + stage: self.compiler.stage, + }, + builder, + ); + builder.info(&format!( "Check compiletest suite={} mode={} compare_mode={} ({} -> {})", suite, mode, compare_mode, &compiler.host, target @@ -2094,6 +2132,17 @@ fn run_cargo_test( let mut cargo = prepare_cargo_test(cargo, libtest_args, crates, primary_crate, compiler, target, builder); let _time = util::timeit(&builder); + + #[cfg(feature = "build-metrics")] + builder.metrics.begin_test_suite( + crate::metrics::TestSuiteMetadata::Crate { + crates: crates.iter().map(|c| c.to_string()).collect(), + target: target.triple.to_string(), + host: compiler.host.triple.to_string(), + stage: compiler.stage, + }, + builder, + ); add_flags_and_try_run_tests(builder, &mut cargo) } From 0553f71b1b27bd75102284711c84aede3001943b Mon Sep 17 00:00:00 2001 From: Pietro Albini Date: Thu, 25 May 2023 09:49:01 +0200 Subject: [PATCH 02/14] introduce build metrics version numbers to handle breaking changes --- src/bootstrap/metrics.rs | 33 +++++++++++++++++++++++++++++++-- 1 file changed, 31 insertions(+), 2 deletions(-) diff --git a/src/bootstrap/metrics.rs b/src/bootstrap/metrics.rs index b119cf2bc953a..9f68eea9a5ebe 100644 --- a/src/bootstrap/metrics.rs +++ b/src/bootstrap/metrics.rs @@ -14,6 +14,13 @@ use std::io::BufWriter; use std::time::{Duration, Instant, SystemTime}; use sysinfo::{CpuExt, System, SystemExt}; +// Update this number whenever a breaking change is made to the build metrics. +// +// Versions: +// 0: initial version +// 1: replaced JsonNode::Test with JsonNode::TestSuite +const CURRENT_METADATA_VERSION: usize = 1; + pub(crate) struct BuildMetrics { state: RefCell, } @@ -145,7 +152,20 @@ impl BuildMetrics { // Some of our CI builds consist of multiple independent CI invocations. Ensure all the // previous invocations are still present in the resulting file. 
let mut invocations = match std::fs::read(&dest) { - Ok(contents) => t!(serde_json::from_slice::(&contents)).invocations, + Ok(contents) => { + // We first parse just the metadata_version field to have the check succeed even if + // the rest of the contents are not valid anymore. + let version: OnlyMetadataVersion = t!(serde_json::from_slice(&contents)); + if version.metadata_version == CURRENT_METADATA_VERSION { + t!(serde_json::from_slice::(&contents)).invocations + } else { + println!( + "warning: overriding existing build/metrics.json, as it's not \ + compatible with build metrics format version {CURRENT_METADATA_VERSION}." + ); + Vec::new() + } + } Err(err) => { if err.kind() != std::io::ErrorKind::NotFound { panic!("failed to open existing metrics file at {}: {err}", dest.display()); @@ -163,7 +183,8 @@ impl BuildMetrics { children: steps.into_iter().map(|step| self.prepare_json_step(step)).collect(), }); - let json = JsonRoot { system_stats, invocations }; + let json = + JsonRoot { metadata_version: CURRENT_METADATA_VERSION, system_stats, invocations }; t!(std::fs::create_dir_all(dest.parent().unwrap())); let mut file = BufWriter::new(t!(File::create(&dest))); @@ -214,6 +235,8 @@ struct StepMetrics { #[derive(Serialize, Deserialize)] #[serde(rename_all = "snake_case")] struct JsonRoot { + #[serde(default)] // For version 0 the field was not present. + metadata_version: usize, system_stats: JsonInvocationSystemStats, invocations: Vec, } @@ -299,3 +322,9 @@ struct JsonInvocationSystemStats { struct JsonStepSystemStats { cpu_utilization_percent: f64, } + +#[derive(Deserialize)] +struct OnlyMetadataVersion { + #[serde(default)] // For version 0 the field was not present. + metadata_version: usize, +} From 844c1cc5fec38f691a2ffb53ef3366f25cf7b02b Mon Sep 17 00:00:00 2001 From: Camille GILLOT Date: Thu, 25 May 2023 17:30:23 +0000 Subject: [PATCH 03/14] Remove DesugaringKind::Replace. 
--- .../src/diagnostics/conflict_errors.rs | 28 ------------------- .../src/diagnostics/mutability_errors.rs | 7 +---- compiler/rustc_borrowck/src/invalidation.rs | 6 ++-- compiler/rustc_borrowck/src/lib.rs | 19 +++++++++++-- compiler/rustc_codegen_cranelift/src/base.rs | 2 +- compiler/rustc_codegen_ssa/src/mir/block.rs | 2 +- .../src/interpret/terminator.rs | 2 +- compiler/rustc_middle/src/mir/syntax.rs | 6 +++- compiler/rustc_middle/src/mir/visit.rs | 1 + .../src/build/custom/parse/instruction.rs | 1 + .../src/build/expr/as_rvalue.rs | 1 + compiler/rustc_mir_build/src/build/scope.rs | 8 +++--- .../rustc_mir_dataflow/src/elaborate_drops.rs | 10 +++++-- .../src/framework/direction.rs | 2 +- .../src/add_moves_for_packed_drops.rs | 9 ++++-- .../src/elaborate_drops.rs | 9 ++---- compiler/rustc_mir_transform/src/generator.rs | 6 +++- compiler/rustc_mir_transform/src/inline.rs | 6 ++-- compiler/rustc_mir_transform/src/shim.rs | 3 ++ compiler/rustc_smir/src/rustc_smir/mod.rs | 2 +- compiler/rustc_span/src/hygiene.rs | 2 -- .../borrowck/borrowck-vec-pattern-nesting.rs | 2 -- .../borrowck-vec-pattern-nesting.stderr | 18 ++++++------ tests/ui/borrowck/issue-45199.rs | 3 -- tests/ui/borrowck/issue-45199.stderr | 4 +-- .../liveness-assign-imm-local-with-drop.rs | 1 - 26 files changed, 78 insertions(+), 82 deletions(-) diff --git a/compiler/rustc_borrowck/src/diagnostics/conflict_errors.rs b/compiler/rustc_borrowck/src/diagnostics/conflict_errors.rs index 04b8174079acc..15d73ed732f50 100644 --- a/compiler/rustc_borrowck/src/diagnostics/conflict_errors.rs +++ b/compiler/rustc_borrowck/src/diagnostics/conflict_errors.rs @@ -1635,34 +1635,6 @@ impl<'cx, 'tcx> MirBorrowckCtxt<'cx, 'tcx> { }) } - /// Reports StorageDeadOrDrop of `place` conflicts with `borrow`. - /// - /// Depending on the origin of the StorageDeadOrDrop, this may be - /// reported as either a drop or an illegal mutation of a borrowed value. - /// The latter is preferred when the this is a drop triggered by a - /// reassignment, as it's more user friendly to report a problem with the - /// explicit assignment than the implicit drop. - #[instrument(level = "debug", skip(self))] - pub(crate) fn report_storage_dead_or_drop_of_borrowed( - &mut self, - location: Location, - place_span: (Place<'tcx>, Span), - borrow: &BorrowData<'tcx>, - ) { - // It's sufficient to check the last desugaring as Replace is the last - // one to be applied. - if let Some(DesugaringKind::Replace) = place_span.1.desugaring_kind() { - self.report_illegal_mutation_of_borrowed(location, place_span, borrow) - } else { - self.report_borrowed_value_does_not_live_long_enough( - location, - borrow, - place_span, - Some(WriteKind::StorageDeadOrDrop), - ) - } - } - /// This means that some data referenced by `borrow` needs to live /// past the point where the StorageDeadOrDrop of `place` occurs. 
/// This is usually interpreted as meaning that `place` has too diff --git a/compiler/rustc_borrowck/src/diagnostics/mutability_errors.rs b/compiler/rustc_borrowck/src/diagnostics/mutability_errors.rs index 4bde372c847dd..d0e17bf5a0848 100644 --- a/compiler/rustc_borrowck/src/diagnostics/mutability_errors.rs +++ b/compiler/rustc_borrowck/src/diagnostics/mutability_errors.rs @@ -641,13 +641,8 @@ impl<'a, 'tcx> MirBorrowckCtxt<'a, 'tcx> { let Some(hir::Node::Item(item)) = node else { return; }; let hir::ItemKind::Fn(.., body_id) = item.kind else { return; }; let body = self.infcx.tcx.hir().body(body_id); - let mut assign_span = span; - // Drop desugaring is done at MIR build so it's not in the HIR - if let Some(DesugaringKind::Replace) = span.desugaring_kind() { - assign_span.remove_mark(); - } - let mut v = V { assign_span, err, ty, suggested: false }; + let mut v = V { assign_span: span, err, ty, suggested: false }; v.visit_body(body); if !v.suggested { err.help(format!( diff --git a/compiler/rustc_borrowck/src/invalidation.rs b/compiler/rustc_borrowck/src/invalidation.rs index 036391d074da8..b2ff25ecb96f4 100644 --- a/compiler/rustc_borrowck/src/invalidation.rs +++ b/compiler/rustc_borrowck/src/invalidation.rs @@ -112,11 +112,13 @@ impl<'cx, 'tcx> Visitor<'tcx> for InvalidationGenerator<'cx, 'tcx> { TerminatorKind::SwitchInt { discr, targets: _ } => { self.consume_operand(location, discr); } - TerminatorKind::Drop { place: drop_place, target: _, unwind: _ } => { + TerminatorKind::Drop { place: drop_place, target: _, unwind: _, replace } => { + let write_kind = + if *replace { WriteKind::Replace } else { WriteKind::StorageDeadOrDrop }; self.access_place( location, *drop_place, - (AccessDepth::Drop, Write(WriteKind::StorageDeadOrDrop)), + (AccessDepth::Drop, Write(write_kind)), LocalMutationIsAllowed::Yes, ); } diff --git a/compiler/rustc_borrowck/src/lib.rs b/compiler/rustc_borrowck/src/lib.rs index 9277a262f9789..a53ea100c2242 100644 --- a/compiler/rustc_borrowck/src/lib.rs +++ b/compiler/rustc_borrowck/src/lib.rs @@ -685,17 +685,19 @@ impl<'cx, 'tcx> rustc_mir_dataflow::ResultsVisitor<'cx, 'tcx> for MirBorrowckCtx TerminatorKind::SwitchInt { discr, targets: _ } => { self.consume_operand(loc, (discr, span), flow_state); } - TerminatorKind::Drop { place, target: _, unwind: _ } => { + TerminatorKind::Drop { place, target: _, unwind: _, replace } => { debug!( "visit_terminator_drop \ loc: {:?} term: {:?} place: {:?} span: {:?}", loc, term, place, span ); + let write_kind = + if *replace { WriteKind::Replace } else { WriteKind::StorageDeadOrDrop }; self.access_place( loc, (*place, span), - (AccessDepth::Drop, Write(WriteKind::StorageDeadOrDrop)), + (AccessDepth::Drop, Write(write_kind)), LocalMutationIsAllowed::Yes, flow_state, ); @@ -885,6 +887,7 @@ enum ReadKind { #[derive(Copy, Clone, PartialEq, Eq, Debug)] enum WriteKind { StorageDeadOrDrop, + Replace, MutableBorrow(BorrowKind), Mutate, Move, @@ -1132,13 +1135,21 @@ impl<'cx, 'tcx> MirBorrowckCtxt<'cx, 'tcx> { this.buffer_error(err); } WriteKind::StorageDeadOrDrop => this - .report_storage_dead_or_drop_of_borrowed(location, place_span, borrow), + .report_borrowed_value_does_not_live_long_enough( + location, + borrow, + place_span, + Some(WriteKind::StorageDeadOrDrop), + ), WriteKind::Mutate => { this.report_illegal_mutation_of_borrowed(location, place_span, borrow) } WriteKind::Move => { this.report_move_out_while_borrowed(location, place_span, borrow) } + WriteKind::Replace => { + this.report_illegal_mutation_of_borrowed(location, 
place_span, borrow) + } } Control::Break } @@ -1982,12 +1993,14 @@ impl<'cx, 'tcx> MirBorrowckCtxt<'cx, 'tcx> { Reservation( WriteKind::Move + | WriteKind::Replace | WriteKind::StorageDeadOrDrop | WriteKind::MutableBorrow(BorrowKind::Shared) | WriteKind::MutableBorrow(BorrowKind::Shallow), ) | Write( WriteKind::Move + | WriteKind::Replace | WriteKind::StorageDeadOrDrop | WriteKind::MutableBorrow(BorrowKind::Shared) | WriteKind::MutableBorrow(BorrowKind::Shallow), diff --git a/compiler/rustc_codegen_cranelift/src/base.rs b/compiler/rustc_codegen_cranelift/src/base.rs index 9c6a0fae327cf..fcfa0b862d4b5 100644 --- a/compiler/rustc_codegen_cranelift/src/base.rs +++ b/compiler/rustc_codegen_cranelift/src/base.rs @@ -473,7 +473,7 @@ fn codegen_fn_body(fx: &mut FunctionCx<'_, '_, '_>, start_block: Block) { | TerminatorKind::GeneratorDrop => { bug!("shouldn't exist at codegen {:?}", bb_data.terminator()); } - TerminatorKind::Drop { place, target, unwind: _ } => { + TerminatorKind::Drop { place, target, unwind: _, replace: _ } => { let drop_place = codegen_place(fx, *place); crate::abi::codegen_drop(fx, source_info, drop_place); diff --git a/compiler/rustc_codegen_ssa/src/mir/block.rs b/compiler/rustc_codegen_ssa/src/mir/block.rs index 1f5f5b69d4dc8..4084342370152 100644 --- a/compiler/rustc_codegen_ssa/src/mir/block.rs +++ b/compiler/rustc_codegen_ssa/src/mir/block.rs @@ -1256,7 +1256,7 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> { MergingSucc::False } - mir::TerminatorKind::Drop { place, target, unwind } => { + mir::TerminatorKind::Drop { place, target, unwind, replace: _ } => { self.codegen_drop_terminator(helper, bx, place, target, unwind, mergeable_succ()) } diff --git a/compiler/rustc_const_eval/src/interpret/terminator.rs b/compiler/rustc_const_eval/src/interpret/terminator.rs index df3879200101b..586e8f063eeef 100644 --- a/compiler/rustc_const_eval/src/interpret/terminator.rs +++ b/compiler/rustc_const_eval/src/interpret/terminator.rs @@ -114,7 +114,7 @@ impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> { } } - Drop { place, target, unwind } => { + Drop { place, target, unwind, replace: _ } => { let frame = self.frame(); let ty = place.ty(&frame.body.local_decls, *self.tcx).ty; let ty = self.subst_from_frame_and_normalize_erasing_regions(frame, ty)?; diff --git a/compiler/rustc_middle/src/mir/syntax.rs b/compiler/rustc_middle/src/mir/syntax.rs index 21faf1958e911..6d6d71bc87b14 100644 --- a/compiler/rustc_middle/src/mir/syntax.rs +++ b/compiler/rustc_middle/src/mir/syntax.rs @@ -603,7 +603,11 @@ pub enum TerminatorKind<'tcx> { /// > The drop glue is executed if, among all statements executed within this `Body`, an assignment to /// > the place or one of its "parents" occurred more recently than a move out of it. This does not /// > consider indirect assignments. - Drop { place: Place<'tcx>, target: BasicBlock, unwind: UnwindAction }, + /// + /// The `replace` flag indicates whether this terminator was created as part of an assignment. + /// This should only be used for diagnostic purposes, and does not have any operational + /// meaning. + Drop { place: Place<'tcx>, target: BasicBlock, unwind: UnwindAction, replace: bool }, /// Roughly speaking, evaluates the `func` operand and the arguments, and starts execution of /// the referred to function. The operand types must match the argument types of the function. 
diff --git a/compiler/rustc_middle/src/mir/visit.rs b/compiler/rustc_middle/src/mir/visit.rs index 596dd80bf4874..942654b30749c 100644 --- a/compiler/rustc_middle/src/mir/visit.rs +++ b/compiler/rustc_middle/src/mir/visit.rs @@ -504,6 +504,7 @@ macro_rules! make_mir_visitor { place, target: _, unwind: _, + replace: _, } => { self.visit_place( place, diff --git a/compiler/rustc_mir_build/src/build/custom/parse/instruction.rs b/compiler/rustc_mir_build/src/build/custom/parse/instruction.rs index b74422708ce5c..ebf830cb9c1f6 100644 --- a/compiler/rustc_mir_build/src/build/custom/parse/instruction.rs +++ b/compiler/rustc_mir_build/src/build/custom/parse/instruction.rs @@ -57,6 +57,7 @@ impl<'tcx, 'body> ParseCtxt<'tcx, 'body> { place: self.parse_place(args[0])?, target: self.parse_block(args[1])?, unwind: UnwindAction::Continue, + replace: false, }) }, @call("mir_call", args) => { diff --git a/compiler/rustc_mir_build/src/build/expr/as_rvalue.rs b/compiler/rustc_mir_build/src/build/expr/as_rvalue.rs index bcab4c0d24b5f..3742d640e3b58 100644 --- a/compiler/rustc_mir_build/src/build/expr/as_rvalue.rs +++ b/compiler/rustc_mir_build/src/build/expr/as_rvalue.rs @@ -725,6 +725,7 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { place: to_drop, target: success, unwind: UnwindAction::Continue, + replace: false, }, ); this.diverge_from(block); diff --git a/compiler/rustc_mir_build/src/build/scope.rs b/compiler/rustc_mir_build/src/build/scope.rs index 7331f8ecaa965..7c0fbc6f81c94 100644 --- a/compiler/rustc_mir_build/src/build/scope.rs +++ b/compiler/rustc_mir_build/src/build/scope.rs @@ -91,7 +91,7 @@ use rustc_middle::middle::region; use rustc_middle::mir::*; use rustc_middle::thir::{Expr, LintLevel}; -use rustc_span::{DesugaringKind, Span, DUMMY_SP}; +use rustc_span::{Span, DUMMY_SP}; #[derive(Debug)] pub struct Scopes<'tcx> { @@ -371,6 +371,7 @@ impl DropTree { // The caller will handle this if needed. 
unwind: UnwindAction::Terminate, place: drop_data.0.local.into(), + replace: false, }; cfg.terminate(block, drop_data.0.source_info, terminator); } @@ -1128,9 +1129,6 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { place: Place<'tcx>, value: Rvalue<'tcx>, ) -> BlockAnd<()> { - let span = self.tcx.with_stable_hashing_context(|hcx| { - span.mark_with_reason(None, DesugaringKind::Replace, self.tcx.sess.edition(), hcx) - }); let source_info = self.source_info(span); // create the new block for the assignment @@ -1148,6 +1146,7 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { place, target: assign, unwind: UnwindAction::Cleanup(assign_unwind), + replace: true, }, ); self.diverge_from(block); @@ -1261,6 +1260,7 @@ fn build_scope_drops<'tcx>( place: local.into(), target: next, unwind: UnwindAction::Continue, + replace: false, }, ); block = next; diff --git a/compiler/rustc_mir_dataflow/src/elaborate_drops.rs b/compiler/rustc_mir_dataflow/src/elaborate_drops.rs index 18895072c3b96..d615c83d62191 100644 --- a/compiler/rustc_mir_dataflow/src/elaborate_drops.rs +++ b/compiler/rustc_mir_dataflow/src/elaborate_drops.rs @@ -237,6 +237,7 @@ where place: self.place, target: self.succ, unwind: self.unwind.into_action(), + replace: false, }, ); } @@ -719,6 +720,7 @@ where place: tcx.mk_place_deref(ptr), target: loop_block, unwind: unwind.into_action(), + replace: false, }, ); @@ -963,8 +965,12 @@ where } fn drop_block(&mut self, target: BasicBlock, unwind: Unwind) -> BasicBlock { - let block = - TerminatorKind::Drop { place: self.place, target, unwind: unwind.into_action() }; + let block = TerminatorKind::Drop { + place: self.place, + target, + unwind: unwind.into_action(), + replace: false, + }; self.new_block(unwind, block) } diff --git a/compiler/rustc_mir_dataflow/src/framework/direction.rs b/compiler/rustc_mir_dataflow/src/framework/direction.rs index c8fe1af6674c8..ba328e78040a5 100644 --- a/compiler/rustc_mir_dataflow/src/framework/direction.rs +++ b/compiler/rustc_mir_dataflow/src/framework/direction.rs @@ -479,7 +479,7 @@ impl Direction for Forward { Goto { target } => propagate(target, exit_state), Assert { target, unwind, expected: _, msg: _, cond: _ } - | Drop { target, unwind, place: _ } + | Drop { target, unwind, place: _, replace: _ } | FalseUnwind { real_target: target, unwind } => { if let UnwindAction::Cleanup(unwind) = unwind { propagate(unwind, exit_state); diff --git a/compiler/rustc_mir_transform/src/add_moves_for_packed_drops.rs b/compiler/rustc_mir_transform/src/add_moves_for_packed_drops.rs index b29ffcc70f93f..ef2a0c790e945 100644 --- a/compiler/rustc_mir_transform/src/add_moves_for_packed_drops.rs +++ b/compiler/rustc_mir_transform/src/add_moves_for_packed_drops.rs @@ -80,7 +80,7 @@ fn add_move_for_packed_drop<'tcx>( is_cleanup: bool, ) { debug!("add_move_for_packed_drop({:?} @ {:?})", terminator, loc); - let TerminatorKind::Drop { ref place, target, unwind } = terminator.kind else { + let TerminatorKind::Drop { ref place, target, unwind, replace } = terminator.kind else { unreachable!(); }; @@ -98,6 +98,11 @@ fn add_move_for_packed_drop<'tcx>( patch.add_assign(loc, Place::from(temp), Rvalue::Use(Operand::Move(*place))); patch.patch_terminator( loc.block, - TerminatorKind::Drop { place: Place::from(temp), target: storage_dead_block, unwind }, + TerminatorKind::Drop { + place: Place::from(temp), + target: storage_dead_block, + unwind, + replace, + }, ); } diff --git a/compiler/rustc_mir_transform/src/elaborate_drops.rs b/compiler/rustc_mir_transform/src/elaborate_drops.rs index 
98e7a519c2013..fda0e1023f7c5 100644 --- a/compiler/rustc_mir_transform/src/elaborate_drops.rs +++ b/compiler/rustc_mir_transform/src/elaborate_drops.rs @@ -14,7 +14,7 @@ use rustc_mir_dataflow::un_derefer::UnDerefer; use rustc_mir_dataflow::MoveDataParamEnv; use rustc_mir_dataflow::{on_all_children_bits, on_all_drop_children_bits}; use rustc_mir_dataflow::{Analysis, ResultsCursor}; -use rustc_span::{DesugaringKind, Span}; +use rustc_span::Span; use rustc_target::abi::{FieldIdx, VariantIdx}; use std::fmt; @@ -401,7 +401,7 @@ impl<'b, 'tcx> ElaborateDropsCtxt<'b, 'tcx> { let terminator = data.terminator(); match terminator.kind { - TerminatorKind::Drop { mut place, target, unwind } => { + TerminatorKind::Drop { mut place, target, unwind, replace } => { if let Some(new_place) = self.un_derefer.derefer(place.as_ref(), self.body) { place = new_place; } @@ -434,10 +434,7 @@ impl<'b, 'tcx> ElaborateDropsCtxt<'b, 'tcx> { ) } LookupResult::Parent(..) => { - if !matches!( - terminator.source_info.span.desugaring_kind(), - Some(DesugaringKind::Replace), - ) { + if !replace { self.tcx.sess.delay_span_bug( terminator.source_info.span, format!("drop of untracked value {:?}", bb), diff --git a/compiler/rustc_mir_transform/src/generator.rs b/compiler/rustc_mir_transform/src/generator.rs index 891e446942e01..89567ed0ab882 100644 --- a/compiler/rustc_mir_transform/src/generator.rs +++ b/compiler/rustc_mir_transform/src/generator.rs @@ -1045,7 +1045,10 @@ fn elaborate_generator_drops<'tcx>(tcx: TyCtxt<'tcx>, body: &mut Body<'tcx>) { for (block, block_data) in body.basic_blocks.iter_enumerated() { let (target, unwind, source_info) = match block_data.terminator() { - Terminator { source_info, kind: TerminatorKind::Drop { place, target, unwind } } => { + Terminator { + source_info, + kind: TerminatorKind::Drop { place, target, unwind, replace: _ }, + } => { if let Some(local) = place.as_local() { if local == SELF_ARG { (target, unwind, source_info) @@ -1304,6 +1307,7 @@ fn insert_clean_drop(body: &mut Body<'_>) -> BasicBlock { place: Place::from(SELF_ARG), target: return_block, unwind: UnwindAction::Continue, + replace: false, }; let source_info = SourceInfo::outermost(body.span); diff --git a/compiler/rustc_mir_transform/src/inline.rs b/compiler/rustc_mir_transform/src/inline.rs index 6c2e22a70b943..6c5e182ed06f9 100644 --- a/compiler/rustc_mir_transform/src/inline.rs +++ b/compiler/rustc_mir_transform/src/inline.rs @@ -450,7 +450,7 @@ impl<'tcx> Inliner<'tcx> { checker.visit_basic_block_data(bb, blk); let term = blk.terminator(); - if let TerminatorKind::Drop { ref place, target, unwind } = term.kind { + if let TerminatorKind::Drop { ref place, target, unwind, replace: _ } = term.kind { work_list.push(target); // If the place doesn't actually need dropping, treat it like a regular goto. @@ -458,8 +458,8 @@ impl<'tcx> Inliner<'tcx> { .callee .subst_mir(self.tcx, ty::EarlyBinder(&place.ty(callee_body, tcx).ty)); if ty.needs_drop(tcx, self.param_env) && let UnwindAction::Cleanup(unwind) = unwind { - work_list.push(unwind); - } + work_list.push(unwind); + } } else if callee_attrs.instruction_set != self.codegen_fn_attrs.instruction_set && matches!(term.kind, TerminatorKind::InlineAsm { .. 
}) { diff --git a/compiler/rustc_mir_transform/src/shim.rs b/compiler/rustc_mir_transform/src/shim.rs index 7c47d8814db83..0eb27c23105f5 100644 --- a/compiler/rustc_mir_transform/src/shim.rs +++ b/compiler/rustc_mir_transform/src/shim.rs @@ -544,6 +544,7 @@ impl<'tcx> CloneShimBuilder<'tcx> { place: dest_field, target: unwind, unwind: UnwindAction::Terminate, + replace: false, }, true, ); @@ -800,6 +801,7 @@ fn build_call_shim<'tcx>( place: rcvr_place(), target: BasicBlock::new(2), unwind: UnwindAction::Continue, + replace: false, }, false, ); @@ -815,6 +817,7 @@ fn build_call_shim<'tcx>( place: rcvr_place(), target: BasicBlock::new(4), unwind: UnwindAction::Terminate, + replace: false, }, true, ); diff --git a/compiler/rustc_smir/src/rustc_smir/mod.rs b/compiler/rustc_smir/src/rustc_smir/mod.rs index 6af43f5d3f358..5572108f49567 100644 --- a/compiler/rustc_smir/src/rustc_smir/mod.rs +++ b/compiler/rustc_smir/src/rustc_smir/mod.rs @@ -309,7 +309,7 @@ fn rustc_terminator_to_terminator( Terminate => Terminator::Abort, Return => Terminator::Return, Unreachable => Terminator::Unreachable, - Drop { place, target, unwind } => Terminator::Drop { + Drop { place, target, unwind, replace: _ } => Terminator::Drop { place: rustc_place_to_place(place), target: target.as_usize(), unwind: rustc_unwind_to_unwind(unwind), diff --git a/compiler/rustc_span/src/hygiene.rs b/compiler/rustc_span/src/hygiene.rs index c669b64dd2c81..70ddac086850d 100644 --- a/compiler/rustc_span/src/hygiene.rs +++ b/compiler/rustc_span/src/hygiene.rs @@ -1151,7 +1151,6 @@ pub enum DesugaringKind { Await, ForLoop, WhileLoop, - Replace, } impl DesugaringKind { @@ -1167,7 +1166,6 @@ impl DesugaringKind { DesugaringKind::OpaqueTy => "`impl Trait`", DesugaringKind::ForLoop => "`for` loop", DesugaringKind::WhileLoop => "`while` loop", - DesugaringKind::Replace => "drop and replace", } } } diff --git a/tests/ui/borrowck/borrowck-vec-pattern-nesting.rs b/tests/ui/borrowck/borrowck-vec-pattern-nesting.rs index 127a3f5b2dc97..1bda7a4971375 100644 --- a/tests/ui/borrowck/borrowck-vec-pattern-nesting.rs +++ b/tests/ui/borrowck/borrowck-vec-pattern-nesting.rs @@ -8,7 +8,6 @@ fn a() { //~^ NOTE `vec[_]` is borrowed here vec[0] = Box::new(4); //~ ERROR cannot assign //~^ NOTE `vec[_]` is assigned to here - //~| NOTE in this expansion of desugaring of drop and replace _a.use_ref(); //~^ NOTE borrow later used here } @@ -23,7 +22,6 @@ fn b() { //~^ `vec[_]` is borrowed here vec[0] = Box::new(4); //~ ERROR cannot assign //~^ NOTE `vec[_]` is assigned to here - //~| NOTE in this expansion of desugaring of drop and replace _b.use_ref(); //~^ NOTE borrow later used here } diff --git a/tests/ui/borrowck/borrowck-vec-pattern-nesting.stderr b/tests/ui/borrowck/borrowck-vec-pattern-nesting.stderr index 5e1251b059093..70b9e4f4433b3 100644 --- a/tests/ui/borrowck/borrowck-vec-pattern-nesting.stderr +++ b/tests/ui/borrowck/borrowck-vec-pattern-nesting.stderr @@ -6,24 +6,24 @@ LL | [box ref _a, _, _] => { LL | LL | vec[0] = Box::new(4); | ^^^^^^ `vec[_]` is assigned to here but it was already borrowed -... +LL | LL | _a.use_ref(); | ------------ borrow later used here error[E0506]: cannot assign to `vec[_]` because it is borrowed - --> $DIR/borrowck-vec-pattern-nesting.rs:24:13 + --> $DIR/borrowck-vec-pattern-nesting.rs:23:13 | LL | &mut [ref _b @ ..] => { | ------ `vec[_]` is borrowed here LL | LL | vec[0] = Box::new(4); | ^^^^^^ `vec[_]` is assigned to here but it was already borrowed -... 
+LL | LL | _b.use_ref(); | ------------ borrow later used here error[E0508]: cannot move out of type `[Box]`, a non-copy slice - --> $DIR/borrowck-vec-pattern-nesting.rs:36:11 + --> $DIR/borrowck-vec-pattern-nesting.rs:34:11 | LL | match vec { | ^^^ cannot move out of here @@ -41,7 +41,7 @@ LL + [_a, | error[E0508]: cannot move out of type `[Box]`, a non-copy slice - --> $DIR/borrowck-vec-pattern-nesting.rs:48:13 + --> $DIR/borrowck-vec-pattern-nesting.rs:46:13 | LL | let a = vec[0]; | ^^^^^^ @@ -55,7 +55,7 @@ LL | let a = &vec[0]; | + error[E0508]: cannot move out of type `[Box]`, a non-copy slice - --> $DIR/borrowck-vec-pattern-nesting.rs:57:11 + --> $DIR/borrowck-vec-pattern-nesting.rs:55:11 | LL | match vec { | ^^^ cannot move out of here @@ -73,7 +73,7 @@ LL + [ | error[E0508]: cannot move out of type `[Box]`, a non-copy slice - --> $DIR/borrowck-vec-pattern-nesting.rs:67:13 + --> $DIR/borrowck-vec-pattern-nesting.rs:65:13 | LL | let a = vec[0]; | ^^^^^^ @@ -87,7 +87,7 @@ LL | let a = &vec[0]; | + error[E0508]: cannot move out of type `[Box]`, a non-copy slice - --> $DIR/borrowck-vec-pattern-nesting.rs:76:11 + --> $DIR/borrowck-vec-pattern-nesting.rs:74:11 | LL | match vec { | ^^^ cannot move out of here @@ -106,7 +106,7 @@ LL + [_a, _b, _c] => {} | error[E0508]: cannot move out of type `[Box]`, a non-copy slice - --> $DIR/borrowck-vec-pattern-nesting.rs:87:13 + --> $DIR/borrowck-vec-pattern-nesting.rs:85:13 | LL | let a = vec[0]; | ^^^^^^ diff --git a/tests/ui/borrowck/issue-45199.rs b/tests/ui/borrowck/issue-45199.rs index 6a6b25541f396..ded46e56e3451 100644 --- a/tests/ui/borrowck/issue-45199.rs +++ b/tests/ui/borrowck/issue-45199.rs @@ -5,7 +5,6 @@ fn test_drop_replace() { b = Box::new(1); //~ NOTE first assignment b = Box::new(2); //~ ERROR cannot assign twice to immutable variable `b` //~| NOTE cannot assign twice to immutable - //~| NOTE in this expansion of desugaring of drop and replace } fn test_call() { @@ -14,14 +13,12 @@ fn test_call() { //~| SUGGESTION mut b b = Box::new(2); //~ ERROR cannot assign twice to immutable variable `b` //~| NOTE cannot assign twice to immutable - //~| NOTE in this expansion of desugaring of drop and replace } fn test_args(b: Box) { //~ HELP consider making this binding mutable //~| SUGGESTION mut b b = Box::new(2); //~ ERROR cannot assign to immutable argument `b` //~| NOTE cannot assign to immutable argument - //~| NOTE in this expansion of desugaring of drop and replace } fn main() {} diff --git a/tests/ui/borrowck/issue-45199.stderr b/tests/ui/borrowck/issue-45199.stderr index 163f2370ba010..47aa30908270d 100644 --- a/tests/ui/borrowck/issue-45199.stderr +++ b/tests/ui/borrowck/issue-45199.stderr @@ -10,7 +10,7 @@ LL | b = Box::new(2); | ^ cannot assign twice to immutable variable error[E0384]: cannot assign twice to immutable variable `b` - --> $DIR/issue-45199.rs:15:5 + --> $DIR/issue-45199.rs:14:5 | LL | let b = Box::new(1); | - @@ -22,7 +22,7 @@ LL | b = Box::new(2); | ^ cannot assign twice to immutable variable error[E0384]: cannot assign to immutable argument `b` - --> $DIR/issue-45199.rs:22:5 + --> $DIR/issue-45199.rs:20:5 | LL | fn test_args(b: Box) { | - help: consider making this binding mutable: `mut b` diff --git a/tests/ui/liveness/liveness-assign/liveness-assign-imm-local-with-drop.rs b/tests/ui/liveness/liveness-assign/liveness-assign-imm-local-with-drop.rs index 293fdca1cc91b..c9b16e43910e8 100644 --- a/tests/ui/liveness/liveness-assign/liveness-assign-imm-local-with-drop.rs +++ 
b/tests/ui/liveness/liveness-assign/liveness-assign-imm-local-with-drop.rs @@ -5,7 +5,6 @@ fn test() { drop(b); b = Box::new(2); //~ ERROR cannot assign twice to immutable variable `b` //~| NOTE cannot assign twice to immutable - //~| NOTE in this expansion of desugaring of drop and replace drop(b); } From e1b8fad66430abd03e611bb18425e085945c7c40 Mon Sep 17 00:00:00 2001 From: Ben Kimock Date: Thu, 25 May 2023 18:24:27 -0400 Subject: [PATCH 04/14] Add #[inline] to array TryFrom impls --- library/core/src/array/mod.rs | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/library/core/src/array/mod.rs b/library/core/src/array/mod.rs index bdb4c975909e0..e3885d485b77a 100644 --- a/library/core/src/array/mod.rs +++ b/library/core/src/array/mod.rs @@ -204,6 +204,7 @@ where { type Error = TryFromSliceError; + #[inline] fn try_from(slice: &[T]) -> Result<[T; N], TryFromSliceError> { <&Self>::try_from(slice).map(|r| *r) } @@ -228,6 +229,7 @@ where { type Error = TryFromSliceError; + #[inline] fn try_from(slice: &mut [T]) -> Result<[T; N], TryFromSliceError> { ::try_from(&*slice) } @@ -249,6 +251,7 @@ where impl<'a, T, const N: usize> TryFrom<&'a [T]> for &'a [T; N] { type Error = TryFromSliceError; + #[inline] fn try_from(slice: &'a [T]) -> Result<&'a [T; N], TryFromSliceError> { if slice.len() == N { let ptr = slice.as_ptr() as *const [T; N]; @@ -276,6 +279,7 @@ impl<'a, T, const N: usize> TryFrom<&'a [T]> for &'a [T; N] { impl<'a, T, const N: usize> TryFrom<&'a mut [T]> for &'a mut [T; N] { type Error = TryFromSliceError; + #[inline] fn try_from(slice: &'a mut [T]) -> Result<&'a mut [T; N], TryFromSliceError> { if slice.len() == N { let ptr = slice.as_mut_ptr() as *mut [T; N]; From cb68c051511fe0583ecf3f39147b8537a26c1e1f Mon Sep 17 00:00:00 2001 From: Pietro Albini Date: Fri, 26 May 2023 09:47:21 +0200 Subject: [PATCH 05/14] address review feedback --- src/bootstrap/metrics.rs | 8 +++----- src/bootstrap/test.rs | 4 ++-- 2 files changed, 5 insertions(+), 7 deletions(-) diff --git a/src/bootstrap/metrics.rs b/src/bootstrap/metrics.rs index 9f68eea9a5ebe..8aa821a3afc07 100644 --- a/src/bootstrap/metrics.rs +++ b/src/bootstrap/metrics.rs @@ -114,9 +114,7 @@ impl BuildMetrics { if let Some(test_suite) = step.test_suites.last_mut() { test_suite.tests.push(Test { name: name.to_string(), outcome }); } else { - panic!( - "metrics.record_test() called without calling metrics.record_test_suite() first" - ); + panic!("metrics.record_test() called without calling metrics.begin_test_suite() first"); } } @@ -194,7 +192,7 @@ impl BuildMetrics { fn prepare_json_step(&self, step: StepMetrics) -> JsonNode { let mut children = Vec::new(); children.extend(step.children.into_iter().map(|child| self.prepare_json_step(child))); - children.extend(step.test_suites.into_iter().map(|suite| JsonNode::TestSuite(suite))); + children.extend(step.test_suites.into_iter().map(JsonNode::TestSuite)); JsonNode::RustbuildStep { type_: step.type_, @@ -277,7 +275,7 @@ struct TestSuite { #[derive(Serialize, Deserialize)] #[serde(tag = "kind", rename_all = "snake_case")] pub(crate) enum TestSuiteMetadata { - Crate { + CargoPackage { crates: Vec, target: String, host: String, diff --git a/src/bootstrap/test.rs b/src/bootstrap/test.rs index f64b5f965237e..29edbe5ae417c 100644 --- a/src/bootstrap/test.rs +++ b/src/bootstrap/test.rs @@ -319,7 +319,7 @@ impl Step for Cargo { #[cfg(feature = "build-metrics")] builder.metrics.begin_test_suite( - crate::metrics::TestSuiteMetadata::Crate { + crate::metrics::TestSuiteMetadata::CargoPackage { 
crates: vec!["cargo".into()], target: self.host.triple.to_string(), host: self.host.triple.to_string(), @@ -2135,7 +2135,7 @@ fn run_cargo_test( #[cfg(feature = "build-metrics")] builder.metrics.begin_test_suite( - crate::metrics::TestSuiteMetadata::Crate { + crate::metrics::TestSuiteMetadata::CargoPackage { crates: crates.iter().map(|c| c.to_string()).collect(), target: target.triple.to_string(), host: compiler.host.triple.to_string(), From 7040d4102f0f3915944369e8d25ebf91f6a99640 Mon Sep 17 00:00:00 2001 From: Pietro Albini Date: Fri, 26 May 2023 15:21:21 +0200 Subject: [PATCH 06/14] rename metadata_version to format_version The new name is more accurate. --- src/bootstrap/metrics.rs | 19 +++++++++---------- 1 file changed, 9 insertions(+), 10 deletions(-) diff --git a/src/bootstrap/metrics.rs b/src/bootstrap/metrics.rs index 8aa821a3afc07..405a88d756878 100644 --- a/src/bootstrap/metrics.rs +++ b/src/bootstrap/metrics.rs @@ -19,7 +19,7 @@ use sysinfo::{CpuExt, System, SystemExt}; // Versions: // 0: initial version // 1: replaced JsonNode::Test with JsonNode::TestSuite -const CURRENT_METADATA_VERSION: usize = 1; +const CURRENT_FORMAT_VERSION: usize = 1; pub(crate) struct BuildMetrics { state: RefCell, @@ -151,15 +151,15 @@ impl BuildMetrics { // previous invocations are still present in the resulting file. let mut invocations = match std::fs::read(&dest) { Ok(contents) => { - // We first parse just the metadata_version field to have the check succeed even if + // We first parse just the format_version field to have the check succeed even if // the rest of the contents are not valid anymore. - let version: OnlyMetadataVersion = t!(serde_json::from_slice(&contents)); - if version.metadata_version == CURRENT_METADATA_VERSION { + let version: OnlyFormatVersion = t!(serde_json::from_slice(&contents)); + if version.format_version == CURRENT_FORMAT_VERSION { t!(serde_json::from_slice::(&contents)).invocations } else { println!( "warning: overriding existing build/metrics.json, as it's not \ - compatible with build metrics format version {CURRENT_METADATA_VERSION}." + compatible with build metrics format version {CURRENT_FORMAT_VERSION}." ); Vec::new() } @@ -181,8 +181,7 @@ impl BuildMetrics { children: steps.into_iter().map(|step| self.prepare_json_step(step)).collect(), }); - let json = - JsonRoot { metadata_version: CURRENT_METADATA_VERSION, system_stats, invocations }; + let json = JsonRoot { format_version: CURRENT_FORMAT_VERSION, system_stats, invocations }; t!(std::fs::create_dir_all(dest.parent().unwrap())); let mut file = BufWriter::new(t!(File::create(&dest))); @@ -234,7 +233,7 @@ struct StepMetrics { #[serde(rename_all = "snake_case")] struct JsonRoot { #[serde(default)] // For version 0 the field was not present. - metadata_version: usize, + format_version: usize, system_stats: JsonInvocationSystemStats, invocations: Vec, } @@ -322,7 +321,7 @@ struct JsonStepSystemStats { } #[derive(Deserialize)] -struct OnlyMetadataVersion { +struct OnlyFormatVersion { #[serde(default)] // For version 0 the field was not present. 
- metadata_version: usize, + format_version: usize, } From c5139b9136c25c71f4c5f71335da9aedb8088cbc Mon Sep 17 00:00:00 2001 From: Pietro Albini Date: Fri, 26 May 2023 15:25:21 +0200 Subject: [PATCH 07/14] add reasoning for introducing a metrics format version --- src/bootstrap/metrics.rs | 18 +++++++++++++++--- 1 file changed, 15 insertions(+), 3 deletions(-) diff --git a/src/bootstrap/metrics.rs b/src/bootstrap/metrics.rs index 405a88d756878..5990f33b9bc6c 100644 --- a/src/bootstrap/metrics.rs +++ b/src/bootstrap/metrics.rs @@ -16,9 +16,21 @@ use sysinfo::{CpuExt, System, SystemExt}; // Update this number whenever a breaking change is made to the build metrics. // -// Versions: -// 0: initial version -// 1: replaced JsonNode::Test with JsonNode::TestSuite +// The output format is versioned for two reasons: +// +// - The metadata is intended to be consumed by external tooling, and exposing a format version +// helps the tools determine whether they're compatible with a metrics file. +// +// - If a developer enables build metrics in their local checkout, making a breaking change to the +// metrics format would result in a hard-to-diagnose error message when an existing metrics file +// is not compatible with the new changes. With a format version number, bootstrap can discard +// incompatible metrics files instead of appending metrics to them. +// +// Version changelog: +// +// - v0: initial version +// - v1: replaced JsonNode::Test with JsonNode::TestSuite +// const CURRENT_FORMAT_VERSION: usize = 1; pub(crate) struct BuildMetrics { From 3802ba0f6a88355396a4b27b922f6f6b494dbd32 Mon Sep 17 00:00:00 2001 From: Guillaume Gomez Date: Fri, 26 May 2023 17:31:11 +0200 Subject: [PATCH 08/14] Fix re-export of doc hidden macro not showing up --- src/librustdoc/clean/mod.rs | 3 ++- src/librustdoc/visit_ast.rs | 10 ++++++++++ 2 files changed, 12 insertions(+), 1 deletion(-) diff --git a/src/librustdoc/clean/mod.rs b/src/librustdoc/clean/mod.rs index 59a3e63172406..03adc19e359c1 100644 --- a/src/librustdoc/clean/mod.rs +++ b/src/librustdoc/clean/mod.rs @@ -2592,7 +2592,8 @@ fn clean_use_statement_inner<'tcx>( } else { if inline_attr.is_none() && let Res::Def(DefKind::Mod, did) = path.res - && !did.is_local() && did.is_crate_root() + && !did.is_local() + && did.is_crate_root() { // if we're `pub use`ing an extern crate root, don't inline it unless we // were specifically asked for it diff --git a/src/librustdoc/visit_ast.rs b/src/librustdoc/visit_ast.rs index 8f8dc6b709053..891574eb46647 100644 --- a/src/librustdoc/visit_ast.rs +++ b/src/librustdoc/visit_ast.rs @@ -310,6 +310,16 @@ impl<'a, 'tcx> RustdocVisitor<'a, 'tcx> { } let ret = match tcx.hir().get_by_def_id(res_did) { + // Bang macros are handled a bit on their because of how they are handled by the + // compiler. If they have `#[doc(hidden)]` and the re-export doesn't have + // `#[doc(inline)]`, then we don't inline it. + Node::Item(&hir::Item { kind: hir::ItemKind::Macro(_, MacroKind::Bang), .. }) + if !please_inline + && renamed.is_some() + && self.cx.tcx.is_doc_hidden(ori_res_did) => + { + return false; + } Node::Item(&hir::Item { kind: hir::ItemKind::Mod(ref m), .. 
}) if glob => { let prev = mem::replace(&mut self.inlining, true); for &i in m.item_ids { From c908d1e4decdb0a6db8280a48baa64f8344acab7 Mon Sep 17 00:00:00 2001 From: Guillaume Gomez Date: Fri, 26 May 2023 17:31:54 +0200 Subject: [PATCH 09/14] Update tests for re-exports of doc hidden macros --- tests/rustdoc/reexport-doc-hidden.rs | 3 +-- tests/rustdoc/reexport-hidden-macro.rs | 2 +- 2 files changed, 2 insertions(+), 3 deletions(-) diff --git a/tests/rustdoc/reexport-doc-hidden.rs b/tests/rustdoc/reexport-doc-hidden.rs index 3ea5fde72f711..d9ed954868e50 100644 --- a/tests/rustdoc/reexport-doc-hidden.rs +++ b/tests/rustdoc/reexport-doc-hidden.rs @@ -21,6 +21,5 @@ macro_rules! foo { () => {}; } -// This is a bug: https://github.com/rust-lang/rust/issues/59368 -// @!has - '//*[@id="reexport.Macro"]/code' 'pub use crate::foo as Macro;' +// @has - '//*[@id="reexport.Macro"]/code' 'pub use crate::foo as Macro;' pub use crate::foo as Macro; diff --git a/tests/rustdoc/reexport-hidden-macro.rs b/tests/rustdoc/reexport-hidden-macro.rs index afcfa979616ec..e498acbab0b52 100644 --- a/tests/rustdoc/reexport-hidden-macro.rs +++ b/tests/rustdoc/reexport-hidden-macro.rs @@ -15,7 +15,7 @@ macro_rules! foo { () => {}; } -/// not displayed +// @has - '//*[@id="reexport.Macro"]/code' 'pub use crate::foo as Macro;' pub use crate::foo as Macro; /// Displayed #[doc(inline)] From 898dfc680f22b0b07eb569384c5e947d2922d0d5 Mon Sep 17 00:00:00 2001 From: Guillaume Gomez Date: Fri, 26 May 2023 23:53:14 +0200 Subject: [PATCH 10/14] Correctly handle multiple re-exports of bang macros at the same level --- src/librustdoc/visit_ast.rs | 12 +++++-- tests/rustdoc/reexport-hidden-macro.rs | 2 +- tests/rustdoc/reexport-of-doc-hidden.rs | 42 +++++++++++++++++++++++++ 3 files changed, 52 insertions(+), 4 deletions(-) create mode 100644 tests/rustdoc/reexport-of-doc-hidden.rs diff --git a/src/librustdoc/visit_ast.rs b/src/librustdoc/visit_ast.rs index 891574eb46647..eb813af779ef6 100644 --- a/src/librustdoc/visit_ast.rs +++ b/src/librustdoc/visit_ast.rs @@ -305,7 +305,12 @@ impl<'a, 'tcx> RustdocVisitor<'a, 'tcx> { return false; } - if !self.view_item_stack.insert(res_did) { + let is_bang_macro = matches!( + tcx.hir().get_by_def_id(res_did), + Node::Item(&hir::Item { kind: hir::ItemKind::Macro(_, MacroKind::Bang), .. }) + ); + + if !self.view_item_stack.insert(res_did) && !is_bang_macro { return false; } @@ -313,8 +318,9 @@ impl<'a, 'tcx> RustdocVisitor<'a, 'tcx> { // Bang macros are handled a bit on their because of how they are handled by the // compiler. If they have `#[doc(hidden)]` and the re-export doesn't have // `#[doc(inline)]`, then we don't inline it. - Node::Item(&hir::Item { kind: hir::ItemKind::Macro(_, MacroKind::Bang), .. }) - if !please_inline + Node::Item(_) + if is_bang_macro + && !please_inline && renamed.is_some() && self.cx.tcx.is_doc_hidden(ori_res_did) => { diff --git a/tests/rustdoc/reexport-hidden-macro.rs b/tests/rustdoc/reexport-hidden-macro.rs index e498acbab0b52..47a21e3946225 100644 --- a/tests/rustdoc/reexport-hidden-macro.rs +++ b/tests/rustdoc/reexport-hidden-macro.rs @@ -5,6 +5,7 @@ // @has 'foo/index.html' // @has - '//*[@id="main-content"]//a[@href="macro.Macro2.html"]' 'Macro2' +// @has - '//*[@id="reexport.Macro"]/code' 'pub use crate::foo as Macro;' // @has 'foo/macro.Macro2.html' // @has - '//*[@class="docblock"]' 'Displayed' @@ -15,7 +16,6 @@ macro_rules! 
foo { () => {}; } -// @has - '//*[@id="reexport.Macro"]/code' 'pub use crate::foo as Macro;' pub use crate::foo as Macro; /// Displayed #[doc(inline)] diff --git a/tests/rustdoc/reexport-of-doc-hidden.rs b/tests/rustdoc/reexport-of-doc-hidden.rs new file mode 100644 index 0000000000000..b733716c22a3b --- /dev/null +++ b/tests/rustdoc/reexport-of-doc-hidden.rs @@ -0,0 +1,42 @@ +// This test ensures that all re-exports of doc hidden elements are displayed. + +#![crate_name = "foo"] + +#[doc(hidden)] +pub struct Bar; + +#[macro_export] +#[doc(hidden)] +macro_rules! foo { + () => {}; +} + +// @has 'foo/index.html' +// @has - '//*[@id="reexport.Macro"]/code' 'pub use crate::foo as Macro;' +pub use crate::foo as Macro; +// @has - '//*[@id="reexport.Macro2"]/code' 'pub use crate::foo as Macro2;' +pub use crate::foo as Macro2; +// @has - '//*[@id="reexport.Boo"]/code' 'pub use crate::Bar as Boo;' +pub use crate::Bar as Boo; +// @has - '//*[@id="reexport.Boo2"]/code' 'pub use crate::Bar as Boo2;' +pub use crate::Bar as Boo2; + +pub fn fofo() {} + +// @has - '//*[@id="reexport.f1"]/code' 'pub use crate::fofo as f1;' +pub use crate::fofo as f1; +// @has - '//*[@id="reexport.f2"]/code' 'pub use crate::fofo as f2;' +pub use crate::fofo as f2; + +pub mod sub { + // @has 'foo/sub/index.html' + // @has - '//*[@id="reexport.Macro"]/code' 'pub use crate::foo as Macro;' + pub use crate::foo as Macro; + // @has - '//*[@id="reexport.Macro2"]/code' 'pub use crate::foo as Macro2;' + pub use crate::foo as Macro2; + + // @has - '//*[@id="reexport.f1"]/code' 'pub use crate::fofo as f1;' + pub use crate::fofo as f1; + // @has - '//*[@id="reexport.f2"]/code' 'pub use crate::fofo as f2;' + pub use crate::fofo as f2; +} From 1a77d9a54d1fcf8b2a7aa339fb861d642ad4ff5a Mon Sep 17 00:00:00 2001 From: Michael Howell Date: Fri, 26 May 2023 17:29:08 -0700 Subject: [PATCH 11/14] rustdoc: get unnormalized link destination for suggestions Fixes #110111 This bug, and the workaround in this commit, is closely linked to [raphlinus/pulldown-cmark#441], getting offsets of link components. In particular, pulldown-cmark doesn't provide the offsets of the contents of a link. To work around this, rustdoc parser parts of a link definition itself. [raphlinus/pulldown-cmark#441]: https://github.com/raphlinus/pulldown-cmark/issues/441 --- src/librustdoc/html/markdown.rs | 120 +++++++- .../passes/collect_intra_doc_links.rs | 174 +++++++---- .../issue-110495-suffix-with-space.stderr | 7 +- tests/rustdoc-ui/intra-doc/weird-syntax.rs | 140 +++++++++ .../rustdoc-ui/intra-doc/weird-syntax.stderr | 272 ++++++++++++++++++ 5 files changed, 639 insertions(+), 74 deletions(-) create mode 100644 tests/rustdoc-ui/intra-doc/weird-syntax.rs create mode 100644 tests/rustdoc-ui/intra-doc/weird-syntax.stderr diff --git a/src/librustdoc/html/markdown.rs b/src/librustdoc/html/markdown.rs index 09e7ed293d473..9bb20022cfd4c 100644 --- a/src/librustdoc/html/markdown.rs +++ b/src/librustdoc/html/markdown.rs @@ -1237,7 +1237,27 @@ pub(crate) fn plain_text_summary(md: &str, link_names: &[RenderedLink]) -> Strin pub(crate) struct MarkdownLink { pub kind: LinkType, pub link: String, - pub range: Range, + pub range: MarkdownLinkRange, +} + +#[derive(Clone, Debug)] +pub(crate) enum MarkdownLinkRange { + /// Normally, markdown link warnings point only at the destination. + Destination(Range), + /// In some cases, it's not possible to point at the destination. + /// Usually, this happens because backslashes `\\` are used. 
+ /// When that happens, point at the whole link, and don't provide structured suggestions. + WholeLink(Range), +} + +impl MarkdownLinkRange { + /// Extracts the inner range. + pub fn inner_range(&self) -> &Range { + match self { + MarkdownLinkRange::Destination(range) => range, + MarkdownLinkRange::WholeLink(range) => range, + } + } } pub(crate) fn markdown_links( @@ -1257,9 +1277,9 @@ pub(crate) fn markdown_links( if md_start <= s_start && s_end <= md_end { let start = s_start.offset_from(md_start) as usize; let end = s_end.offset_from(md_start) as usize; - start..end + MarkdownLinkRange::Destination(start..end) } else { - fallback + MarkdownLinkRange::WholeLink(fallback) } }; @@ -1267,6 +1287,7 @@ pub(crate) fn markdown_links( // For diagnostics, we want to underline the link's definition but `span` will point at // where the link is used. This is a problem for reference-style links, where the definition // is separate from the usage. + match link { // `Borrowed` variant means the string (the link's destination) may come directly from // the markdown text and we can locate the original link destination. @@ -1275,8 +1296,80 @@ pub(crate) fn markdown_links( CowStr::Borrowed(s) => locate(s, span), // For anything else, we can only use the provided range. - CowStr::Boxed(_) | CowStr::Inlined(_) => span, + CowStr::Boxed(_) | CowStr::Inlined(_) => MarkdownLinkRange::WholeLink(span), + } + }; + + let span_for_offset_backward = |span: Range, open: u8, close: u8| { + let mut open_brace = !0; + let mut close_brace = !0; + for (i, b) in md.as_bytes()[span.clone()].iter().copied().enumerate().rev() { + let i = i + span.start; + if b == close { + close_brace = i; + break; + } + } + if close_brace < span.start || close_brace >= span.end { + return MarkdownLinkRange::WholeLink(span); + } + let mut nesting = 1; + for (i, b) in md.as_bytes()[span.start..close_brace].iter().copied().enumerate().rev() { + let i = i + span.start; + if b == close { + nesting += 1; + } + if b == open { + nesting -= 1; + } + if nesting == 0 { + open_brace = i; + break; + } + } + assert!(open_brace != close_brace); + if open_brace < span.start || open_brace >= span.end { + return MarkdownLinkRange::WholeLink(span); + } + // do not actually include braces in the span + let range = (open_brace + 1)..close_brace; + MarkdownLinkRange::Destination(range.clone()) + }; + + let span_for_offset_forward = |span: Range, open: u8, close: u8| { + let mut open_brace = !0; + let mut close_brace = !0; + for (i, b) in md.as_bytes()[span.clone()].iter().copied().enumerate() { + let i = i + span.start; + if b == open { + open_brace = i; + break; + } + } + if open_brace < span.start || open_brace >= span.end { + return MarkdownLinkRange::WholeLink(span); } + let mut nesting = 0; + for (i, b) in md.as_bytes()[open_brace..span.end].iter().copied().enumerate() { + let i = i + open_brace; + if b == close { + nesting -= 1; + } + if b == open { + nesting += 1; + } + if nesting == 0 { + close_brace = i; + break; + } + } + assert!(open_brace != close_brace); + if open_brace < span.start || open_brace >= span.end { + return MarkdownLinkRange::WholeLink(span); + } + // do not actually include braces in the span + let range = (open_brace + 1)..close_brace; + MarkdownLinkRange::Destination(range.clone()) }; Parser::new_with_broken_link_callback( @@ -1287,11 +1380,20 @@ pub(crate) fn markdown_links( .into_offset_iter() .filter_map(|(event, span)| match event { Event::Start(Tag::Link(link_type, dest, _)) if may_be_doc_link(link_type) => { - 
preprocess_link(MarkdownLink { - kind: link_type, - range: span_for_link(&dest, span), - link: dest.into_string(), - }) + let range = match link_type { + // Link is pulled from the link itself. + LinkType::ReferenceUnknown | LinkType::ShortcutUnknown => { + span_for_offset_backward(span, b'[', b']') + } + LinkType::CollapsedUnknown => span_for_offset_forward(span, b'[', b']'), + LinkType::Inline => span_for_offset_backward(span, b'(', b')'), + // Link is pulled from elsewhere in the document. + LinkType::Reference | LinkType::Collapsed | LinkType::Shortcut => { + span_for_link(&dest, span) + } + LinkType::Autolink | LinkType::Email => unreachable!(), + }; + preprocess_link(MarkdownLink { kind: link_type, range, link: dest.into_string() }) } _ => None, }) diff --git a/src/librustdoc/passes/collect_intra_doc_links.rs b/src/librustdoc/passes/collect_intra_doc_links.rs index 9e6894a77dfa0..417bdd58ad45a 100644 --- a/src/librustdoc/passes/collect_intra_doc_links.rs +++ b/src/librustdoc/passes/collect_intra_doc_links.rs @@ -31,7 +31,7 @@ use std::ops::Range; use crate::clean::{self, utils::find_nearest_parent_module}; use crate::clean::{Crate, Item, ItemLink, PrimitiveType}; use crate::core::DocContext; -use crate::html::markdown::{markdown_links, MarkdownLink}; +use crate::html::markdown::{markdown_links, MarkdownLink, MarkdownLinkRange}; use crate::lint::{BROKEN_INTRA_DOC_LINKS, PRIVATE_INTRA_DOC_LINKS}; use crate::passes::Pass; use crate::visit::DocVisitor; @@ -248,7 +248,7 @@ struct DiagnosticInfo<'a> { item: &'a Item, dox: &'a str, ori_link: &'a str, - link_range: Range, + link_range: MarkdownLinkRange, } struct LinkCollector<'a, 'tcx> { @@ -833,7 +833,7 @@ impl<'a, 'tcx> DocVisitor for LinkCollector<'a, 'tcx> { enum PreprocessingError { /// User error: `[std#x#y]` is not valid MultipleAnchors, - Disambiguator(Range, String), + Disambiguator(MarkdownLinkRange, String), MalformedGenerics(MalformedGenerics, String), } @@ -873,6 +873,7 @@ pub(crate) struct PreprocessedMarkdownLink( /// `link_buffer` is needed for lifetime reasons; it will always be overwritten and the contents ignored. fn preprocess_link( ori_link: &MarkdownLink, + dox: &str, ) -> Option> { // [] is mostly likely not supposed to be a link if ori_link.link.is_empty() { @@ -906,9 +907,15 @@ fn preprocess_link( Err((err_msg, relative_range)) => { // Only report error if we would not have ignored this link. See issue #83859. if !should_ignore_link_with_disambiguators(link) { - let no_backticks_range = range_between_backticks(ori_link); - let disambiguator_range = (no_backticks_range.start + relative_range.start) - ..(no_backticks_range.start + relative_range.end); + let disambiguator_range = match range_between_backticks(&ori_link.range, dox) { + MarkdownLinkRange::Destination(no_backticks_range) => { + MarkdownLinkRange::Destination( + (no_backticks_range.start + relative_range.start) + ..(no_backticks_range.start + relative_range.end), + ) + } + mdlr @ MarkdownLinkRange::WholeLink(_) => mdlr, + }; return Some(Err(PreprocessingError::Disambiguator(disambiguator_range, err_msg))); } else { return None; @@ -947,7 +954,7 @@ fn preprocess_link( fn preprocessed_markdown_links(s: &str) -> Vec { markdown_links(s, |link| { - preprocess_link(&link).map(|pp_link| PreprocessedMarkdownLink(pp_link, link)) + preprocess_link(&link, s).map(|pp_link| PreprocessedMarkdownLink(pp_link, link)) }) } @@ -1060,22 +1067,12 @@ impl LinkCollector<'_, '_> { // valid omission. 
See https://github.com/rust-lang/rust/pull/80660#discussion_r551585677 // for discussion on the matter. let kind = self.cx.tcx.def_kind(id); - self.verify_disambiguator( - path_str, - ori_link, - kind, - id, - disambiguator, - item, - &diag_info, - )?; + self.verify_disambiguator(path_str, kind, id, disambiguator, item, &diag_info)?; } else { match disambiguator { Some(Disambiguator::Primitive | Disambiguator::Namespace(_)) | None => {} Some(other) => { - self.report_disambiguator_mismatch( - path_str, ori_link, other, res, &diag_info, - ); + self.report_disambiguator_mismatch(path_str, other, res, &diag_info); return None; } } @@ -1096,7 +1093,6 @@ impl LinkCollector<'_, '_> { }; self.verify_disambiguator( path_str, - ori_link, kind_for_dis, id_for_dis, disambiguator, @@ -1118,7 +1114,6 @@ impl LinkCollector<'_, '_> { fn verify_disambiguator( &self, path_str: &str, - ori_link: &MarkdownLink, kind: DefKind, id: DefId, disambiguator: Option, @@ -1142,7 +1137,7 @@ impl LinkCollector<'_, '_> { => {} (actual, Some(Disambiguator::Kind(expected))) if actual == expected => {} (_, Some(specified @ Disambiguator::Kind(_) | specified @ Disambiguator::Primitive)) => { - self.report_disambiguator_mismatch(path_str,ori_link,specified, Res::Def(kind, id),diag_info); + self.report_disambiguator_mismatch(path_str, specified, Res::Def(kind, id), diag_info); return None; } } @@ -1164,14 +1159,13 @@ impl LinkCollector<'_, '_> { fn report_disambiguator_mismatch( &self, path_str: &str, - ori_link: &MarkdownLink, specified: Disambiguator, resolved: Res, diag_info: &DiagnosticInfo<'_>, ) { // The resolved item did not match the disambiguator; give a better error than 'not found' let msg = format!("incompatible link kind for `{}`", path_str); - let callback = |diag: &mut Diagnostic, sp: Option| { + let callback = |diag: &mut Diagnostic, sp: Option, link_range| { let note = format!( "this link resolved to {} {}, which is not {} {}", resolved.article(), @@ -1184,14 +1178,24 @@ impl LinkCollector<'_, '_> { } else { diag.note(note); } - suggest_disambiguator(resolved, diag, path_str, &ori_link.link, sp); + suggest_disambiguator(resolved, diag, path_str, link_range, sp, diag_info); }; report_diagnostic(self.cx.tcx, BROKEN_INTRA_DOC_LINKS, msg, diag_info, callback); } - fn report_rawptr_assoc_feature_gate(&self, dox: &str, ori_link: &Range, item: &Item) { - let span = super::source_span_for_markdown_range(self.cx.tcx, dox, ori_link, &item.attrs) - .unwrap_or_else(|| item.attr_span(self.cx.tcx)); + fn report_rawptr_assoc_feature_gate( + &self, + dox: &str, + ori_link: &MarkdownLinkRange, + item: &Item, + ) { + let span = super::source_span_for_markdown_range( + self.cx.tcx, + dox, + ori_link.inner_range(), + &item.attrs, + ) + .unwrap_or_else(|| item.attr_span(self.cx.tcx)); rustc_session::parse::feature_err( &self.cx.tcx.sess.parse_sess, sym::intra_doc_pointers, @@ -1371,16 +1375,23 @@ impl LinkCollector<'_, '_> { /// [`Foo`] /// ^^^ /// ``` -fn range_between_backticks(ori_link: &MarkdownLink) -> Range { - let after_first_backtick_group = ori_link.link.bytes().position(|b| b != b'`').unwrap_or(0); - let before_second_backtick_group = ori_link - .link +/// +/// This function does nothing if `ori_link.range` is a `MarkdownLinkRange::WholeLink`. 
+fn range_between_backticks(ori_link_range: &MarkdownLinkRange, dox: &str) -> MarkdownLinkRange { + let range = match ori_link_range { + mdlr @ MarkdownLinkRange::WholeLink(_) => return mdlr.clone(), + MarkdownLinkRange::Destination(inner) => inner.clone(), + }; + let ori_link_text = &dox[range.clone()]; + let after_first_backtick_group = ori_link_text.bytes().position(|b| b != b'`').unwrap_or(0); + let before_second_backtick_group = ori_link_text .bytes() .skip(after_first_backtick_group) .position(|b| b == b'`') - .unwrap_or(ori_link.link.len()); - (ori_link.range.start + after_first_backtick_group) - ..(ori_link.range.start + before_second_backtick_group) + .unwrap_or(ori_link_text.len()); + MarkdownLinkRange::Destination( + (range.start + after_first_backtick_group)..(range.start + before_second_backtick_group), + ) } /// Returns true if we should ignore `link` due to it being unlikely @@ -1530,14 +1541,23 @@ impl Suggestion { sp: rustc_span::Span, ) -> Vec<(rustc_span::Span, String)> { let inner_sp = match ori_link.find('(') { + Some(index) if index != 0 && ori_link.as_bytes()[index - 1] == b'\\' => { + sp.with_hi(sp.lo() + BytePos((index - 1) as _)) + } Some(index) => sp.with_hi(sp.lo() + BytePos(index as _)), None => sp, }; let inner_sp = match ori_link.find('!') { + Some(index) if index != 0 && ori_link.as_bytes()[index - 1] == b'\\' => { + sp.with_hi(sp.lo() + BytePos((index - 1) as _)) + } Some(index) => inner_sp.with_hi(inner_sp.lo() + BytePos(index as _)), None => inner_sp, }; let inner_sp = match ori_link.find('@') { + Some(index) if index != 0 && ori_link.as_bytes()[index - 1] == b'\\' => { + sp.with_hi(sp.lo() + BytePos((index - 1) as _)) + } Some(index) => inner_sp.with_lo(inner_sp.lo() + BytePos(index as u32 + 1)), None => inner_sp, }; @@ -1584,7 +1604,7 @@ fn report_diagnostic( lint: &'static Lint, msg: impl Into + Display, DiagnosticInfo { item, ori_link: _, dox, link_range }: &DiagnosticInfo<'_>, - decorate: impl FnOnce(&mut Diagnostic, Option), + decorate: impl FnOnce(&mut Diagnostic, Option, MarkdownLinkRange), ) { let Some(hir_id) = DocContext::as_local_hir_id(tcx, item.item_id) else { @@ -1596,16 +1616,32 @@ fn report_diagnostic( let sp = item.attr_span(tcx); tcx.struct_span_lint_hir(lint, hir_id, sp, msg, |lint| { - let span = - super::source_span_for_markdown_range(tcx, dox, link_range, &item.attrs).map(|sp| { - if dox.as_bytes().get(link_range.start) == Some(&b'`') - && dox.as_bytes().get(link_range.end - 1) == Some(&b'`') - { - sp.with_lo(sp.lo() + BytePos(1)).with_hi(sp.hi() - BytePos(1)) - } else { - sp - } - }); + let (span, link_range) = match link_range { + MarkdownLinkRange::Destination(md_range) => { + let mut md_range = md_range.clone(); + let sp = super::source_span_for_markdown_range(tcx, dox, &md_range, &item.attrs) + .map(|mut sp| { + while dox.as_bytes().get(md_range.start) == Some(&b' ') + || dox.as_bytes().get(md_range.start) == Some(&b'`') + { + md_range.start += 1; + sp = sp.with_lo(sp.lo() + BytePos(1)); + } + while dox.as_bytes().get(md_range.end - 1) == Some(&b' ') + || dox.as_bytes().get(md_range.end - 1) == Some(&b'`') + { + md_range.end -= 1; + sp = sp.with_hi(sp.hi() - BytePos(1)); + } + sp + }); + (sp, MarkdownLinkRange::Destination(md_range)) + } + MarkdownLinkRange::WholeLink(md_range) => ( + super::source_span_for_markdown_range(tcx, dox, &md_range, &item.attrs), + link_range.clone(), + ), + }; if let Some(sp) = span { lint.set_span(sp); @@ -1614,21 +1650,22 @@ fn report_diagnostic( // ^ ~~~~ // | link_range // last_new_line_offset - 
let last_new_line_offset = dox[..link_range.start].rfind('\n').map_or(0, |n| n + 1); + let md_range = link_range.inner_range().clone(); + let last_new_line_offset = dox[..md_range.start].rfind('\n').map_or(0, |n| n + 1); let line = dox[last_new_line_offset..].lines().next().unwrap_or(""); - // Print the line containing the `link_range` and manually mark it with '^'s. + // Print the line containing the `md_range` and manually mark it with '^'s. lint.note(format!( "the link appears in this line:\n\n{line}\n\ {indicator: unreachable!("handled above"), ResolutionFailure::WrongNamespace { res, expected_ns } => { - suggest_disambiguator(res, diag, path_str, diag_info.ori_link, sp); + suggest_disambiguator( + res, + diag, + path_str, + link_range.clone(), + sp, + &diag_info, + ); format!( "this link resolves to {}, which is not in the {} namespace", @@ -1882,7 +1926,7 @@ fn anchor_failure( msg: String, anchor_idx: usize, ) { - report_diagnostic(cx.tcx, BROKEN_INTRA_DOC_LINKS, msg, &diag_info, |diag, sp| { + report_diagnostic(cx.tcx, BROKEN_INTRA_DOC_LINKS, msg, &diag_info, |diag, sp, _link_range| { if let Some(mut sp) = sp { if let Some((fragment_offset, _)) = diag_info.ori_link.char_indices().filter(|(_, x)| *x == '#').nth(anchor_idx) @@ -1898,11 +1942,11 @@ fn anchor_failure( fn disambiguator_error( cx: &DocContext<'_>, mut diag_info: DiagnosticInfo<'_>, - disambiguator_range: Range, + disambiguator_range: MarkdownLinkRange, msg: impl Into + Display, ) { diag_info.link_range = disambiguator_range; - report_diagnostic(cx.tcx, BROKEN_INTRA_DOC_LINKS, msg, &diag_info, |diag, _sp| { + report_diagnostic(cx.tcx, BROKEN_INTRA_DOC_LINKS, msg, &diag_info, |diag, _sp, _link_range| { let msg = format!( "see {}/rustdoc/write-documentation/linking-to-items-by-name.html#namespaces-and-disambiguators for more info about disambiguators", crate::DOC_RUST_LANG_ORG_CHANNEL @@ -1922,7 +1966,7 @@ fn report_malformed_generics( BROKEN_INTRA_DOC_LINKS, format!("unresolved link to `{}`", path_str), &diag_info, - |diag, sp| { + |diag, sp, _link_range| { let note = match err { MalformedGenerics::UnbalancedAngleBrackets => "unbalanced angle brackets", MalformedGenerics::MissingType => "missing type for generic parameters", @@ -1995,7 +2039,7 @@ fn ambiguity_error( } } - report_diagnostic(cx.tcx, BROKEN_INTRA_DOC_LINKS, msg, diag_info, |diag, sp| { + report_diagnostic(cx.tcx, BROKEN_INTRA_DOC_LINKS, msg, diag_info, |diag, sp, link_range| { if let Some(sp) = sp { diag.span_label(sp, "ambiguous link"); } else { @@ -2003,7 +2047,7 @@ fn ambiguity_error( } for res in kinds { - suggest_disambiguator(res, diag, path_str, diag_info.ori_link, sp); + suggest_disambiguator(res, diag, path_str, link_range.clone(), sp, diag_info); } }); true @@ -2015,13 +2059,19 @@ fn suggest_disambiguator( res: Res, diag: &mut Diagnostic, path_str: &str, - ori_link: &str, + link_range: MarkdownLinkRange, sp: Option, + diag_info: &DiagnosticInfo<'_>, ) { let suggestion = res.disambiguator_suggestion(); let help = format!("to link to the {}, {}", res.descr(), suggestion.descr()); - if let Some(sp) = sp { + let ori_link = match link_range { + MarkdownLinkRange::Destination(range) => Some(&diag_info.dox[range]), + MarkdownLinkRange::WholeLink(_) => None, + }; + + if let (Some(sp), Some(ori_link)) = (sp, ori_link) { let mut spans = suggestion.as_help_span(path_str, ori_link, sp); if spans.len() > 1 { diag.multipart_suggestion(help, spans, Applicability::MaybeIncorrect); @@ -2047,7 +2097,7 @@ fn privacy_error(cx: &DocContext<'_>, diag_info: 
&DiagnosticInfo<'_>, path_str: let msg = format!("public documentation for `{}` links to private item `{}`", item_name, path_str); - report_diagnostic(cx.tcx, PRIVATE_INTRA_DOC_LINKS, msg, diag_info, |diag, sp| { + report_diagnostic(cx.tcx, PRIVATE_INTRA_DOC_LINKS, msg, diag_info, |diag, sp, _link_range| { if let Some(sp) = sp { diag.span_label(sp, "this item is private"); } diff --git a/tests/rustdoc-ui/intra-doc/issue-110495-suffix-with-space.stderr b/tests/rustdoc-ui/intra-doc/issue-110495-suffix-with-space.stderr index 8669b0c20865c..6c834fd0a1b61 100644 --- a/tests/rustdoc-ui/intra-doc/issue-110495-suffix-with-space.stderr +++ b/tests/rustdoc-ui/intra-doc/issue-110495-suffix-with-space.stderr @@ -36,7 +36,7 @@ LL | //! [`Clone ()`]. help: to link to the trait, prefix with `trait@` | LL - //! [`Clone ()`]. -LL + //! [`trait@Clone (`]. +LL + //! [`trait@Clone `]. | error: incompatible link kind for `Clone` @@ -47,8 +47,9 @@ LL | //! [`Clone !`]. | help: to link to the derive macro, prefix with `derive@` | -LL | //! [`derive@Clone !`]. - | +++++++ +LL - //! [`Clone !`]. +LL + //! [`derive@Clone `]. + | error: aborting due to 4 previous errors diff --git a/tests/rustdoc-ui/intra-doc/weird-syntax.rs b/tests/rustdoc-ui/intra-doc/weird-syntax.rs new file mode 100644 index 0000000000000..ca18842fb21c5 --- /dev/null +++ b/tests/rustdoc-ui/intra-doc/weird-syntax.rs @@ -0,0 +1,140 @@ +// Many examples are from +// https://github.com/rust-lang/rust/issues/110111#issuecomment-1517800781 +#![deny(rustdoc::broken_intra_doc_links)] + +//! This test case is closely linked to [raphlinus/pulldown-cmark#441], getting offsets of +//! link components. In particular, pulldown-cmark doesn't provide the offsets of the contents +//! of a link. +//! +//! To work around this, rustdoc parses parts of a link definition itself. This is basically a +//! test suite for that link syntax parser. +//! +//! 
[raphlinus/pulldown-cmark#441]: https://github.com/raphlinus/pulldown-cmark/issues/441 + +use std::clone::Clone; + +// Basic version // + +/// [`struct@Clone`] //~ERROR link +pub struct LinkToCloneWithBackquotes; + +/// [```struct@Clone```] //~ERROR link +pub struct LinkToCloneWithMultipleBackquotes; + +/// [ ` struct@Clone ` ] //~ERROR link +pub struct LinkToCloneWithSpacesAndBackquotes; + +/// [ `Clone ()` ] //~ERROR link +pub struct LinkToCloneWithSpacesBackquotesAndParens; + +/// [`Clone ()` ] //~ERROR link +pub struct LinkToCloneWithSpacesEndBackquotesAndParens; + +/// [ `Clone ()`] //~ERROR link +pub struct LinkToCloneWithSpacesStartBackquotesAndParens; + +/// [```Clone ()```] //~ERROR link +pub struct LinkToCloneWithMultipleBackquotesAndParens; + +/// [```Clone \(\)```] // not URL-shaped enough +pub struct LinkToCloneWithMultipleBackquotesAndEscapedParens; + +/// [ ``` Clone () ``` ] //~ERROR link +pub struct LinkToCloneWithSpacesMultipleBackquotesAndParens; + +/// [ x \] ] // not URL-shaped enough +pub struct LinkWithEscapedCloseBrace; + +/// [ x \[ ] // not URL-shaped enough +pub struct LinkWithEscapedOpenBrace; + +/// [ x \( ] // not URL-shaped enough +pub struct LinkWithEscapedCloseParen; + +/// [ x \) ] // not URL-shaped enough +pub struct LinkWithEscapedOpenParen; + +/// [ Clone \(\) ] // not URL-shaped enough +pub struct LinkWithEscapedParens; + +// [][] version // + +/// [x][ struct@Clone] //~ERROR link +pub struct XLinkToCloneWithStartSpace; + +/// [x][struct@Clone ] //~ERROR link +pub struct XLinkToCloneWithEndSpace; + +/// [x][Clone\(\)] not URL-shaped enough +pub struct XLinkToCloneWithEscapedParens; + +/// [x][`Clone`] not URL-shaped enough +pub struct XLinkToCloneWithBackquotes; + +/// [x][Clone()] //~ERROR link +pub struct XLinkToCloneWithUnescapedParens; + +/// [x][Clone ()] //~ERROR link +pub struct XLinkToCloneWithUnescapedParensAndDoubleSpace; + +/// [x][Clone [] //~ERROR unresolved link to `x` +pub struct XLinkToCloneWithUnmatchedOpenParenAndDoubleSpace; + +/// [x][Clone \[] // not URL-shaped enough +pub struct XLinkToCloneWithUnmatchedEscapedOpenParenAndDoubleSpace; + +/// [x][Clone \]] // not URL-shaped enough +pub struct XLinkToCloneWithUnmatchedEscapedCloseParenAndDoubleSpace; + +// []() version // + +/// [w]( struct@Clone) //~ERROR link +pub struct WLinkToCloneWithStartSpace; + +/// [w](struct@Clone ) //~ERROR link +pub struct WLinkToCloneWithEndSpace; + +/// [w](Clone\(\)) //~ERROR link +pub struct WLinkToCloneWithEscapedParens; + +/// [w](`Clone`) not URL-shaped enough +pub struct WLinkToCloneWithBackquotes; + +/// [w](Clone()) //~ERROR link +pub struct WLinkToCloneWithUnescapedParens; + +/// [w](Clone ()) not URL-shaped enough +pub struct WLinkToCloneWithUnescapedParensAndDoubleSpace; + +/// [w](Clone () //~ERROR unresolved link to `w` +pub struct WLinkToCloneWithUnmatchedOpenParenAndDoubleSpace; + +/// [w](Clone \() //~ERROR unresolved link to `w` +pub struct WLinkToCloneWithUnmatchedEscapedOpenParenAndDoubleSpace; + +/// [w](Clone \)) //~ERROR unresolved link to `w` +pub struct WLinkToCloneWithUnmatchedEscapedCloseParenAndDoubleSpace; + +// References + +/// The [cln][] link here is going to be unresolved, because `Clone()` gets rejected //~ERROR link +/// in Markdown for not being URL-shaped enough. +/// +/// [cln]: Clone() //~ERROR link +pub struct LinkToCloneWithParensInReference; + +/// The [cln][] link here is going to be unresolved, because `struct@Clone` gets //~ERROR link +/// rejected in Markdown for not being URL-shaped enough. 
+/// +/// [cln]: struct@Clone //~ERROR link +pub struct LinkToCloneWithWrongPrefix; + +/// The [cln][] link here will produce a plain text suggestion //~ERROR link +/// +/// [cln]: Clone\(\) +pub struct LinkToCloneWithEscapedParensInReference; + +/// The [cln][] link here will produce a plain text suggestion //~ERROR link +/// +/// [cln]: struct\@Clone +pub struct LinkToCloneWithEscapedAtsInReference; diff --git a/tests/rustdoc-ui/intra-doc/weird-syntax.stderr b/tests/rustdoc-ui/intra-doc/weird-syntax.stderr new file mode 100644 index 0000000000000..f50feb57fccf2 --- /dev/null +++ b/tests/rustdoc-ui/intra-doc/weird-syntax.stderr @@ -0,0 +1,272 @@ +error: incompatible link kind for `Clone` + --> $DIR/weird-syntax.rs:18:7 + | +LL | /// [`struct@Clone`] + | ^^^^^^^^^^^^ this link resolved to a trait, which is not a struct + | +note: the lint level is defined here + --> $DIR/weird-syntax.rs:3:9 + | +LL | #![deny(rustdoc::broken_intra_doc_links)] + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +help: to link to the trait, prefix with `trait@` + | +LL | /// [`trait@Clone`] + | ~~~~~~ + +error: incompatible link kind for `Clone` + --> $DIR/weird-syntax.rs:21:9 + | +LL | /// [```struct@Clone```] + | ^^^^^^^^^^^^ this link resolved to a trait, which is not a struct + | +help: to link to the trait, prefix with `trait@` + | +LL | /// [```trait@Clone```] + | ~~~~~~ + +error: incompatible link kind for `Clone` + --> $DIR/weird-syntax.rs:24:11 + | +LL | /// [ ` struct@Clone ` ] + | ^^^^^^^^^^^^ this link resolved to a trait, which is not a struct + | +help: to link to the trait, prefix with `trait@` + | +LL | /// [ ` trait@Clone ` ] + | ~~~~~~ + +error: unresolved link to `Clone` + --> $DIR/weird-syntax.rs:27:9 + | +LL | /// [ `Clone ()` ] + | ^^^^^^^^ this link resolves to the trait `Clone`, which is not in the value namespace + | +help: to link to the trait, prefix with `trait@` + | +LL - /// [ `Clone ()` ] +LL + /// [ `trait@Clone ` ] + | + +error: unresolved link to `Clone` + --> $DIR/weird-syntax.rs:30:7 + | +LL | /// [`Clone ()` ] + | ^^^^^^^^ this link resolves to the trait `Clone`, which is not in the value namespace + | +help: to link to the trait, prefix with `trait@` + | +LL - /// [`Clone ()` ] +LL + /// [`trait@Clone ` ] + | + +error: unresolved link to `Clone` + --> $DIR/weird-syntax.rs:33:9 + | +LL | /// [ `Clone ()`] + | ^^^^^^^^ this link resolves to the trait `Clone`, which is not in the value namespace + | +help: to link to the trait, prefix with `trait@` + | +LL - /// [ `Clone ()`] +LL + /// [ `trait@Clone `] + | + +error: unresolved link to `Clone` + --> $DIR/weird-syntax.rs:36:9 + | +LL | /// [```Clone ()```] + | ^^^^^^^^ this link resolves to the trait `Clone`, which is not in the value namespace + | +help: to link to the trait, prefix with `trait@` + | +LL - /// [```Clone ()```] +LL + /// [```trait@Clone ```] + | + +error: unresolved link to `Clone` + --> $DIR/weird-syntax.rs:42:13 + | +LL | /// [ ``` Clone () ``` ] + | ^^^^^^^^ this link resolves to the trait `Clone`, which is not in the value namespace + | +help: to link to the trait, prefix with `trait@` + | +LL - /// [ ``` Clone () ``` ] +LL + /// [ ``` trait@Clone ``` ] + | + +error: incompatible link kind for `Clone` + --> $DIR/weird-syntax.rs:62:10 + | +LL | /// [x][ struct@Clone] + | ^^^^^^^^^^^^ this link resolved to a trait, which is not a struct + | +help: to link to the trait, prefix with `trait@` + | +LL | /// [x][ trait@Clone] + | ~~~~~~ + +error: incompatible link kind for `Clone` + --> $DIR/weird-syntax.rs:65:9 + | +LL | /// 
[x][struct@Clone ] + | ^^^^^^^^^^^^ this link resolved to a trait, which is not a struct + | +help: to link to the trait, prefix with `trait@` + | +LL | /// [x][trait@Clone ] + | ~~~~~~ + +error: unresolved link to `Clone` + --> $DIR/weird-syntax.rs:74:9 + | +LL | /// [x][Clone()] + | ^^^^^^^ this link resolves to the trait `Clone`, which is not in the value namespace + | +help: to link to the trait, prefix with `trait@` + | +LL - /// [x][Clone()] +LL + /// [x][trait@Clone] + | + +error: unresolved link to `Clone` + --> $DIR/weird-syntax.rs:77:9 + | +LL | /// [x][Clone ()] + | ^^^^^^^^^ this link resolves to the trait `Clone`, which is not in the value namespace + | +help: to link to the trait, prefix with `trait@` + | +LL - /// [x][Clone ()] +LL + /// [x][trait@Clone ] + | + +error: unresolved link to `x` + --> $DIR/weird-syntax.rs:80:6 + | +LL | /// [x][Clone [] + | ^ no item named `x` in scope + | + = help: to escape `[` and `]` characters, add '\' before them like `\[` or `\]` + +error: incompatible link kind for `Clone` + --> $DIR/weird-syntax.rs:91:10 + | +LL | /// [w]( struct@Clone) + | ^^^^^^^^^^^^ this link resolved to a trait, which is not a struct + | +help: to link to the trait, prefix with `trait@` + | +LL | /// [w]( trait@Clone) + | ~~~~~~ + +error: incompatible link kind for `Clone` + --> $DIR/weird-syntax.rs:94:9 + | +LL | /// [w](struct@Clone ) + | ^^^^^^^^^^^^ this link resolved to a trait, which is not a struct + | +help: to link to the trait, prefix with `trait@` + | +LL | /// [w](trait@Clone ) + | ~~~~~~ + +error: unresolved link to `Clone` + --> $DIR/weird-syntax.rs:97:9 + | +LL | /// [w](Clone\(\)) + | ^^^^^^^^^ this link resolves to the trait `Clone`, which is not in the value namespace + | +help: to link to the trait, prefix with `trait@` + | +LL - /// [w](Clone\(\)) +LL + /// [w](trait@Clone) + | + +error: unresolved link to `Clone` + --> $DIR/weird-syntax.rs:103:9 + | +LL | /// [w](Clone()) + | ^^^^^^^ this link resolves to the trait `Clone`, which is not in the value namespace + | +help: to link to the trait, prefix with `trait@` + | +LL - /// [w](Clone()) +LL + /// [w](trait@Clone) + | + +error: unresolved link to `w` + --> $DIR/weird-syntax.rs:109:6 + | +LL | /// [w](Clone () + | ^ no item named `w` in scope + | + = help: to escape `[` and `]` characters, add '\' before them like `\[` or `\]` + +error: unresolved link to `w` + --> $DIR/weird-syntax.rs:112:6 + | +LL | /// [w](Clone \() + | ^ no item named `w` in scope + | + = help: to escape `[` and `]` characters, add '\' before them like `\[` or `\]` + +error: unresolved link to `w` + --> $DIR/weird-syntax.rs:115:6 + | +LL | /// [w](Clone \)) + | ^ no item named `w` in scope + | + = help: to escape `[` and `]` characters, add '\' before them like `\[` or `\]` + +error: unresolved link to `cln` + --> $DIR/weird-syntax.rs:120:10 + | +LL | /// The [cln][] link here is going to be unresolved, because `Clone()` gets rejected + | ^^^ no item named `cln` in scope + | + = help: to escape `[` and `]` characters, add '\' before them like `\[` or `\]` + +error: unresolved link to `cln` + --> $DIR/weird-syntax.rs:123:6 + | +LL | /// [cln]: Clone() + | ^^^ no item named `cln` in scope + | + = help: to escape `[` and `]` characters, add '\' before them like `\[` or `\]` + +error: unresolved link to `cln` + --> $DIR/weird-syntax.rs:126:10 + | +LL | /// The [cln][] link here is going to be unresolved, because `struct@Clone` gets + | ^^^ no item named `cln` in scope + | + = help: to escape `[` and `]` characters, add '\' 
before them like `\[` or `\]` + +error: unresolved link to `cln` + --> $DIR/weird-syntax.rs:129:6 + | +LL | /// [cln]: struct@Clone + | ^^^ no item named `cln` in scope + | + = help: to escape `[` and `]` characters, add '\' before them like `\[` or `\]` + +error: unresolved link to `Clone` + --> $DIR/weird-syntax.rs:132:9 + | +LL | /// The [cln][] link here will produce a plain text suggestion + | ^^^^^ this link resolves to the trait `Clone`, which is not in the value namespace + | + = help: to link to the trait, prefix with `trait@`: trait@Clone + +error: incompatible link kind for `Clone` + --> $DIR/weird-syntax.rs:137:9 + | +LL | /// The [cln][] link here will produce a plain text suggestion + | ^^^^^ this link resolved to a trait, which is not a struct + | + = help: to link to the trait, prefix with `trait@`: trait@Clone + +error: aborting due to 26 previous errors + From a25aee19575d59709e51b5c214fe49af7090e69d Mon Sep 17 00:00:00 2001 From: Michael Goulet Date: Thu, 25 May 2023 17:48:19 +0000 Subject: [PATCH 12/14] Perform MIR type ops locally in new solver --- .../traits/query/type_op/ascribe_user_type.rs | 8 ++++++ .../src/traits/query/type_op/eq.rs | 10 ++++++++ .../query/type_op/implied_outlives_bounds.rs | 8 ++++++ .../src/traits/query/type_op/mod.rs | 25 ++++++++++++++++++- .../src/traits/query/type_op/normalize.rs | 10 ++++++++ .../src/traits/query/type_op/outlives.rs | 8 ++++++ .../traits/query/type_op/prove_predicate.rs | 16 ++++++++++++ .../src/traits/query/type_op/subtype.rs | 10 ++++++++ 8 files changed, 94 insertions(+), 1 deletion(-) diff --git a/compiler/rustc_trait_selection/src/traits/query/type_op/ascribe_user_type.rs b/compiler/rustc_trait_selection/src/traits/query/type_op/ascribe_user_type.rs index c61f5454ec52e..a2cfdeefd6f30 100644 --- a/compiler/rustc_trait_selection/src/traits/query/type_op/ascribe_user_type.rs +++ b/compiler/rustc_trait_selection/src/traits/query/type_op/ascribe_user_type.rs @@ -1,4 +1,5 @@ use crate::infer::canonical::{Canonical, CanonicalQueryResponse}; +use crate::traits::ObligationCtxt; use rustc_middle::traits::query::NoSolution; use rustc_middle::ty::{ParamEnvAnd, TyCtxt}; @@ -20,4 +21,11 @@ impl<'tcx> super::QueryTypeOp<'tcx> for AscribeUserType<'tcx> { ) -> Result, NoSolution> { tcx.type_op_ascribe_user_type(canonicalized) } + + fn perform_locally_in_new_solver( + _ocx: &ObligationCtxt<'_, 'tcx>, + _key: ParamEnvAnd<'tcx, Self>, + ) -> Result { + todo!() + } } diff --git a/compiler/rustc_trait_selection/src/traits/query/type_op/eq.rs b/compiler/rustc_trait_selection/src/traits/query/type_op/eq.rs index 40f8ecfd4ce10..f65893088066e 100644 --- a/compiler/rustc_trait_selection/src/traits/query/type_op/eq.rs +++ b/compiler/rustc_trait_selection/src/traits/query/type_op/eq.rs @@ -1,5 +1,7 @@ use crate::infer::canonical::{Canonical, CanonicalQueryResponse}; +use crate::traits::ObligationCtxt; use rustc_middle::traits::query::NoSolution; +use rustc_middle::traits::ObligationCause; use rustc_middle::ty::{ParamEnvAnd, TyCtxt}; pub use rustc_middle::traits::query::type_op::Eq; @@ -20,4 +22,12 @@ impl<'tcx> super::QueryTypeOp<'tcx> for Eq<'tcx> { ) -> Result, NoSolution> { tcx.type_op_eq(canonicalized) } + + fn perform_locally_in_new_solver( + ocx: &ObligationCtxt<'_, 'tcx>, + key: ParamEnvAnd<'tcx, Self>, + ) -> Result { + ocx.eq(&ObligationCause::dummy(), key.param_env, key.value.a, key.value.b)?; + Ok(()) + } } diff --git a/compiler/rustc_trait_selection/src/traits/query/type_op/implied_outlives_bounds.rs 
b/compiler/rustc_trait_selection/src/traits/query/type_op/implied_outlives_bounds.rs index 26f0d554d3508..9054bafc4a67a 100644 --- a/compiler/rustc_trait_selection/src/traits/query/type_op/implied_outlives_bounds.rs +++ b/compiler/rustc_trait_selection/src/traits/query/type_op/implied_outlives_bounds.rs @@ -1,4 +1,5 @@ use crate::infer::canonical::{Canonical, CanonicalQueryResponse}; +use crate::traits::ObligationCtxt; use rustc_infer::traits::query::OutlivesBound; use rustc_middle::traits::query::NoSolution; use rustc_middle::ty::{self, ParamEnvAnd, Ty, TyCtxt}; @@ -39,4 +40,11 @@ impl<'tcx> super::QueryTypeOp<'tcx> for ImpliedOutlivesBounds<'tcx> { tcx.implied_outlives_bounds(canonicalized) } + + fn perform_locally_in_new_solver( + _ocx: &ObligationCtxt<'_, 'tcx>, + _key: ParamEnvAnd<'tcx, Self>, + ) -> Result { + todo!() + } } diff --git a/compiler/rustc_trait_selection/src/traits/query/type_op/mod.rs b/compiler/rustc_trait_selection/src/traits/query/type_op/mod.rs index 6423265984804..642fdec2d9ae3 100644 --- a/compiler/rustc_trait_selection/src/traits/query/type_op/mod.rs +++ b/compiler/rustc_trait_selection/src/traits/query/type_op/mod.rs @@ -2,7 +2,7 @@ use crate::infer::canonical::{ Canonical, CanonicalQueryResponse, OriginalQueryValues, QueryRegionConstraints, }; use crate::infer::{InferCtxt, InferOk}; -use crate::traits::ObligationCause; +use crate::traits::{ObligationCause, ObligationCtxt}; use rustc_errors::ErrorGuaranteed; use rustc_infer::infer::canonical::Certainty; use rustc_infer::traits::PredicateObligations; @@ -23,6 +23,8 @@ pub mod subtype; pub use rustc_middle::traits::query::type_op::*; +use self::custom::scrape_region_constraints; + /// "Type ops" are used in NLL to perform some particular action and /// extract out the resulting region constraints (or an error if it /// cannot be completed). @@ -81,6 +83,17 @@ pub trait QueryTypeOp<'tcx>: fmt::Debug + Copy + TypeFoldable> + 't canonicalized: Canonical<'tcx, ParamEnvAnd<'tcx, Self>>, ) -> Result, NoSolution>; + /// In the new trait solver, we already do caching in the solver itself, + /// so there's no need to canonicalize and cache via the query system. + /// Additionally, even if we were to canonicalize, we'd still need to + /// make sure to feed it predefined opaque types and the defining anchor + /// and that would require duplicating all of the tcx queries. Instead, + /// just perform these ops locally. + fn perform_locally_in_new_solver( + ocx: &ObligationCtxt<'_, 'tcx>, + key: ParamEnvAnd<'tcx, Self>, + ) -> Result; + fn fully_perform_into( query_key: ParamEnvAnd<'tcx, Self>, infcx: &InferCtxt<'tcx>, @@ -133,6 +146,16 @@ where infcx: &InferCtxt<'tcx>, span: Span, ) -> Result, ErrorGuaranteed> { + if infcx.tcx.trait_solver_next() { + return Ok(scrape_region_constraints( + infcx, + |ocx| QueryTypeOp::perform_locally_in_new_solver(ocx, self), + "query type op", + span, + )? 
+ .0); + } + let mut region_constraints = QueryRegionConstraints::default(); let (output, error_info, mut obligations, _) = Q::fully_perform_into(self, infcx, &mut region_constraints).map_err(|_| { diff --git a/compiler/rustc_trait_selection/src/traits/query/type_op/normalize.rs b/compiler/rustc_trait_selection/src/traits/query/type_op/normalize.rs index 776c74fdfae4b..57ca14aa492ff 100644 --- a/compiler/rustc_trait_selection/src/traits/query/type_op/normalize.rs +++ b/compiler/rustc_trait_selection/src/traits/query/type_op/normalize.rs @@ -1,5 +1,7 @@ use crate::infer::canonical::{Canonical, CanonicalQueryResponse}; +use crate::traits::ObligationCtxt; use rustc_middle::traits::query::NoSolution; +use rustc_middle::traits::ObligationCause; use rustc_middle::ty::fold::TypeFoldable; use rustc_middle::ty::{self, Lift, ParamEnvAnd, Ty, TyCtxt, TypeVisitableExt}; use std::fmt; @@ -22,6 +24,14 @@ where ) -> Result, NoSolution> { T::type_op_method(tcx, canonicalized) } + + fn perform_locally_in_new_solver( + ocx: &ObligationCtxt<'_, 'tcx>, + key: ParamEnvAnd<'tcx, Self>, + ) -> Result { + // FIXME(-Ztrait-solver=next): shouldn't be using old normalizer + Ok(ocx.normalize(&ObligationCause::dummy(), key.param_env, key.value.value)) + } } pub trait Normalizable<'tcx>: fmt::Debug + TypeFoldable> + Lift<'tcx> + Copy { diff --git a/compiler/rustc_trait_selection/src/traits/query/type_op/outlives.rs b/compiler/rustc_trait_selection/src/traits/query/type_op/outlives.rs index 7ce09bbdb7af3..8b3a20a88f03e 100644 --- a/compiler/rustc_trait_selection/src/traits/query/type_op/outlives.rs +++ b/compiler/rustc_trait_selection/src/traits/query/type_op/outlives.rs @@ -1,5 +1,6 @@ use crate::infer::canonical::{Canonical, CanonicalQueryResponse}; use crate::traits::query::dropck_outlives::{trivial_dropck_outlives, DropckOutlivesResult}; +use crate::traits::ObligationCtxt; use rustc_middle::traits::query::NoSolution; use rustc_middle::ty::{ParamEnvAnd, Ty, TyCtxt}; @@ -48,4 +49,11 @@ impl<'tcx> super::QueryTypeOp<'tcx> for DropckOutlives<'tcx> { tcx.dropck_outlives(canonicalized) } + + fn perform_locally_in_new_solver( + _ocx: &ObligationCtxt<'_, 'tcx>, + _key: ParamEnvAnd<'tcx, Self>, + ) -> Result { + todo!() + } } diff --git a/compiler/rustc_trait_selection/src/traits/query/type_op/prove_predicate.rs b/compiler/rustc_trait_selection/src/traits/query/type_op/prove_predicate.rs index 7c02f363960d7..47850bc330dab 100644 --- a/compiler/rustc_trait_selection/src/traits/query/type_op/prove_predicate.rs +++ b/compiler/rustc_trait_selection/src/traits/query/type_op/prove_predicate.rs @@ -1,5 +1,8 @@ use crate::infer::canonical::{Canonical, CanonicalQueryResponse}; +use crate::traits::ObligationCtxt; +use rustc_infer::traits::Obligation; use rustc_middle::traits::query::NoSolution; +use rustc_middle::traits::ObligationCause; use rustc_middle::ty::{self, ParamEnvAnd, TyCtxt}; pub use rustc_middle::traits::query::type_op::ProvePredicate; @@ -36,4 +39,17 @@ impl<'tcx> super::QueryTypeOp<'tcx> for ProvePredicate<'tcx> { ) -> Result, NoSolution> { tcx.type_op_prove_predicate(canonicalized) } + + fn perform_locally_in_new_solver( + ocx: &ObligationCtxt<'_, 'tcx>, + key: ParamEnvAnd<'tcx, Self>, + ) -> Result { + ocx.register_obligation(Obligation::new( + ocx.infcx.tcx, + ObligationCause::dummy(), + key.param_env, + key.value.predicate, + )); + Ok(()) + } } diff --git a/compiler/rustc_trait_selection/src/traits/query/type_op/subtype.rs b/compiler/rustc_trait_selection/src/traits/query/type_op/subtype.rs index 
2f2b931afcff4..10976d5cd7162 100644 --- a/compiler/rustc_trait_selection/src/traits/query/type_op/subtype.rs +++ b/compiler/rustc_trait_selection/src/traits/query/type_op/subtype.rs @@ -1,5 +1,7 @@ use crate::infer::canonical::{Canonical, CanonicalQueryResponse}; +use crate::traits::ObligationCtxt; use rustc_middle::traits::query::NoSolution; +use rustc_middle::traits::ObligationCause; use rustc_middle::ty::{ParamEnvAnd, TyCtxt}; pub use rustc_middle::traits::query::type_op::Subtype; @@ -17,4 +19,12 @@ impl<'tcx> super::QueryTypeOp<'tcx> for Subtype<'tcx> { ) -> Result, NoSolution> { tcx.type_op_subtype(canonicalized) } + + fn perform_locally_in_new_solver( + ocx: &ObligationCtxt<'_, 'tcx>, + key: ParamEnvAnd<'tcx, Self>, + ) -> Result { + ocx.sub(&ObligationCause::dummy(), key.param_env, key.value.sub, key.value.sup)?; + Ok(()) + } } From d7a2fdd4dba976fddfebe4b3be95a327bae39423 Mon Sep 17 00:00:00 2001 From: Michael Goulet Date: Thu, 25 May 2023 18:25:44 +0000 Subject: [PATCH 13/14] Uplift complex type ops back into typeck so we can call them locally --- .../src/type_check/liveness/trace.rs | 2 +- .../src/traits/query/dropck_outlives.rs | 269 +++++++++++++++++- .../traits/query/type_op/ascribe_user_type.rs | 117 +++++++- .../query/type_op/implied_outlives_bounds.rs | 177 +++++++++++- .../src/traits/query/type_op/outlives.rs | 12 +- compiler/rustc_traits/src/dropck_outlives.rs | 266 +---------------- .../src/implied_outlives_bounds.rs | 169 +---------- compiler/rustc_traits/src/lib.rs | 3 +- compiler/rustc_traits/src/type_op.rs | 117 +------- 9 files changed, 574 insertions(+), 558 deletions(-) diff --git a/compiler/rustc_borrowck/src/type_check/liveness/trace.rs b/compiler/rustc_borrowck/src/type_check/liveness/trace.rs index fd94ac86d7dc2..eb02604b9d925 100644 --- a/compiler/rustc_borrowck/src/type_check/liveness/trace.rs +++ b/compiler/rustc_borrowck/src/type_check/liveness/trace.rs @@ -3,9 +3,9 @@ use rustc_index::bit_set::HybridBitSet; use rustc_index::interval::IntervalSet; use rustc_infer::infer::canonical::QueryRegionConstraints; use rustc_middle::mir::{BasicBlock, Body, ConstraintCategory, Local, Location}; +use rustc_middle::traits::query::DropckOutlivesResult; use rustc_middle::ty::{Ty, TyCtxt, TypeVisitable, TypeVisitableExt}; use rustc_span::DUMMY_SP; -use rustc_trait_selection::traits::query::dropck_outlives::DropckOutlivesResult; use rustc_trait_selection::traits::query::type_op::outlives::DropckOutlives; use rustc_trait_selection::traits::query::type_op::{TypeOp, TypeOpOutput}; use std::rc::Rc; diff --git a/compiler/rustc_trait_selection/src/traits/query/dropck_outlives.rs b/compiler/rustc_trait_selection/src/traits/query/dropck_outlives.rs index 455b53bfb7d8f..4e4172e7f41ec 100644 --- a/compiler/rustc_trait_selection/src/traits/query/dropck_outlives.rs +++ b/compiler/rustc_trait_selection/src/traits/query/dropck_outlives.rs @@ -1,6 +1,11 @@ -use rustc_middle::ty::{self, Ty, TyCtxt}; +use crate::traits::query::normalize::QueryNormalizeExt; +use crate::traits::query::NoSolution; +use crate::traits::{Normalized, ObligationCause, ObligationCtxt}; -pub use rustc_middle::traits::query::{DropckConstraint, DropckOutlivesResult}; +use rustc_data_structures::fx::FxHashSet; +use rustc_middle::traits::query::{DropckConstraint, DropckOutlivesResult}; +use rustc_middle::ty::{self, EarlyBinder, ParamEnvAnd, Ty, TyCtxt}; +use rustc_span::source_map::{Span, DUMMY_SP}; /// This returns true if the type `ty` is "trivial" for /// dropck-outlives -- that is, if it doesn't require any types to 
@@ -71,3 +76,263 @@ pub fn trivial_dropck_outlives<'tcx>(tcx: TyCtxt<'tcx>, ty: Ty<'tcx>) -> bool { | ty::Generator(..) => false, } } + +pub fn compute_dropck_outlives_inner<'tcx>( + ocx: &ObligationCtxt<'_, 'tcx>, + goal: ParamEnvAnd<'tcx, Ty<'tcx>>, +) -> Result, NoSolution> { + let tcx = ocx.infcx.tcx; + let ParamEnvAnd { param_env, value: for_ty } = goal; + + let mut result = DropckOutlivesResult { kinds: vec![], overflows: vec![] }; + + // A stack of types left to process. Each round, we pop + // something from the stack and invoke + // `dtorck_constraint_for_ty_inner`. This may produce new types that + // have to be pushed on the stack. This continues until we have explored + // all the reachable types from the type `for_ty`. + // + // Example: Imagine that we have the following code: + // + // ```rust + // struct A { + // value: B, + // children: Vec, + // } + // + // struct B { + // value: u32 + // } + // + // fn f() { + // let a: A = ...; + // .. + // } // here, `a` is dropped + // ``` + // + // at the point where `a` is dropped, we need to figure out + // which types inside of `a` contain region data that may be + // accessed by any destructors in `a`. We begin by pushing `A` + // onto the stack, as that is the type of `a`. We will then + // invoke `dtorck_constraint_for_ty_inner` which will expand `A` + // into the types of its fields `(B, Vec)`. These will get + // pushed onto the stack. Eventually, expanding `Vec` will + // lead to us trying to push `A` a second time -- to prevent + // infinite recursion, we notice that `A` was already pushed + // once and stop. + let mut ty_stack = vec![(for_ty, 0)]; + + // Set used to detect infinite recursion. + let mut ty_set = FxHashSet::default(); + + let cause = ObligationCause::dummy(); + let mut constraints = DropckConstraint::empty(); + while let Some((ty, depth)) = ty_stack.pop() { + debug!( + "{} kinds, {} overflows, {} ty_stack", + result.kinds.len(), + result.overflows.len(), + ty_stack.len() + ); + dtorck_constraint_for_ty_inner(tcx, DUMMY_SP, for_ty, depth, ty, &mut constraints)?; + + // "outlives" represent types/regions that may be touched + // by a destructor. + result.kinds.append(&mut constraints.outlives); + result.overflows.append(&mut constraints.overflows); + + // If we have even one overflow, we should stop trying to evaluate further -- + // chances are, the subsequent overflows for this evaluation won't provide useful + // information and will just decrease the speed at which we can emit these errors + // (since we'll be printing for just that much longer for the often enormous types + // that result here). + if !result.overflows.is_empty() { + break; + } + + // dtorck types are "types that will get dropped but which + // do not themselves define a destructor", more or less. We have + // to push them onto the stack to be expanded. + for ty in constraints.dtorck_types.drain(..) { + let Normalized { value: ty, obligations } = + ocx.infcx.at(&cause, param_env).query_normalize(ty)?; + ocx.register_obligations(obligations); + + debug!("dropck_outlives: ty from dtorck_types = {:?}", ty); + + match ty.kind() { + // All parameters live for the duration of the + // function. + ty::Param(..) => {} + + // A projection that we couldn't resolve - it + // might have a destructor. + ty::Alias(..) 
=> { + result.kinds.push(ty.into()); + } + + _ => { + if ty_set.insert(ty) { + ty_stack.push((ty, depth + 1)); + } + } + } + } + } + + debug!("dropck_outlives: result = {:#?}", result); + Ok(result) +} + +/// Returns a set of constraints that needs to be satisfied in +/// order for `ty` to be valid for destruction. +pub fn dtorck_constraint_for_ty_inner<'tcx>( + tcx: TyCtxt<'tcx>, + span: Span, + for_ty: Ty<'tcx>, + depth: usize, + ty: Ty<'tcx>, + constraints: &mut DropckConstraint<'tcx>, +) -> Result<(), NoSolution> { + debug!("dtorck_constraint_for_ty_inner({:?}, {:?}, {:?}, {:?})", span, for_ty, depth, ty); + + if !tcx.recursion_limit().value_within_limit(depth) { + constraints.overflows.push(ty); + return Ok(()); + } + + if trivial_dropck_outlives(tcx, ty) { + return Ok(()); + } + + match ty.kind() { + ty::Bool + | ty::Char + | ty::Int(_) + | ty::Uint(_) + | ty::Float(_) + | ty::Str + | ty::Never + | ty::Foreign(..) + | ty::RawPtr(..) + | ty::Ref(..) + | ty::FnDef(..) + | ty::FnPtr(_) + | ty::GeneratorWitness(..) + | ty::GeneratorWitnessMIR(..) => { + // these types never have a destructor + } + + ty::Array(ety, _) | ty::Slice(ety) => { + // single-element containers, behave like their element + rustc_data_structures::stack::ensure_sufficient_stack(|| { + dtorck_constraint_for_ty_inner(tcx, span, for_ty, depth + 1, *ety, constraints) + })?; + } + + ty::Tuple(tys) => rustc_data_structures::stack::ensure_sufficient_stack(|| { + for ty in tys.iter() { + dtorck_constraint_for_ty_inner(tcx, span, for_ty, depth + 1, ty, constraints)?; + } + Ok::<_, NoSolution>(()) + })?, + + ty::Closure(_, substs) => { + if !substs.as_closure().is_valid() { + // By the time this code runs, all type variables ought to + // be fully resolved. + + tcx.sess.delay_span_bug( + span, + format!("upvar_tys for closure not found. Expected capture information for closure {ty}",), + ); + return Err(NoSolution); + } + + rustc_data_structures::stack::ensure_sufficient_stack(|| { + for ty in substs.as_closure().upvar_tys() { + dtorck_constraint_for_ty_inner(tcx, span, for_ty, depth + 1, ty, constraints)?; + } + Ok::<_, NoSolution>(()) + })? + } + + ty::Generator(_, substs, _movability) => { + // rust-lang/rust#49918: types can be constructed, stored + // in the interior, and sit idle when generator yields + // (and is subsequently dropped). + // + // It would be nice to descend into interior of a + // generator to determine what effects dropping it might + // have (by looking at any drop effects associated with + // its interior). + // + // However, the interior's representation uses things like + // GeneratorWitness that explicitly assume they are not + // traversed in such a manner. So instead, we will + // simplify things for now by treating all generators as + // if they were like trait objects, where its upvars must + // all be alive for the generator's (potential) + // destructor. + // + // In particular, skipping over `_interior` is safe + // because any side-effects from dropping `_interior` can + // only take place through references with lifetimes + // derived from lifetimes attached to the upvars and resume + // argument, and we *do* incorporate those here. + + if !substs.as_generator().is_valid() { + // By the time this code runs, all type variables ought to + // be fully resolved. + tcx.sess.delay_span_bug( + span, + format!("upvar_tys for generator not found. 
Expected capture information for generator {ty}",), + ); + return Err(NoSolution); + } + + constraints.outlives.extend( + substs + .as_generator() + .upvar_tys() + .map(|t| -> ty::subst::GenericArg<'tcx> { t.into() }), + ); + constraints.outlives.push(substs.as_generator().resume_ty().into()); + } + + ty::Adt(def, substs) => { + let DropckConstraint { dtorck_types, outlives, overflows } = + tcx.at(span).adt_dtorck_constraint(def.did())?; + // FIXME: we can try to recursively `dtorck_constraint_on_ty` + // there, but that needs some way to handle cycles. + constraints + .dtorck_types + .extend(dtorck_types.iter().map(|t| EarlyBinder(*t).subst(tcx, substs))); + constraints + .outlives + .extend(outlives.iter().map(|t| EarlyBinder(*t).subst(tcx, substs))); + constraints + .overflows + .extend(overflows.iter().map(|t| EarlyBinder(*t).subst(tcx, substs))); + } + + // Objects must be alive in order for their destructor + // to be called. + ty::Dynamic(..) => { + constraints.outlives.push(ty.into()); + } + + // Types that can't be resolved. Pass them forward. + ty::Alias(..) | ty::Param(..) => { + constraints.dtorck_types.push(ty); + } + + ty::Placeholder(..) | ty::Bound(..) | ty::Infer(..) | ty::Error(_) => { + // By the time this code runs, all type variables ought to + // be fully resolved. + return Err(NoSolution); + } + } + + Ok(()) +} diff --git a/compiler/rustc_trait_selection/src/traits/query/type_op/ascribe_user_type.rs b/compiler/rustc_trait_selection/src/traits/query/type_op/ascribe_user_type.rs index a2cfdeefd6f30..01d7a1e7913b5 100644 --- a/compiler/rustc_trait_selection/src/traits/query/type_op/ascribe_user_type.rs +++ b/compiler/rustc_trait_selection/src/traits/query/type_op/ascribe_user_type.rs @@ -1,9 +1,13 @@ use crate::infer::canonical::{Canonical, CanonicalQueryResponse}; use crate::traits::ObligationCtxt; +use rustc_hir::def_id::{DefId, CRATE_DEF_ID}; +use rustc_infer::traits::Obligation; use rustc_middle::traits::query::NoSolution; -use rustc_middle::ty::{ParamEnvAnd, TyCtxt}; +use rustc_middle::traits::{ObligationCause, ObligationCauseCode}; +use rustc_middle::ty::{self, ParamEnvAnd, Ty, TyCtxt, UserSelfTy, UserSubsts, UserType}; pub use rustc_middle::traits::query::type_op::AscribeUserType; +use rustc_span::{Span, DUMMY_SP}; impl<'tcx> super::QueryTypeOp<'tcx> for AscribeUserType<'tcx> { type QueryResponse = (); @@ -23,9 +27,114 @@ impl<'tcx> super::QueryTypeOp<'tcx> for AscribeUserType<'tcx> { } fn perform_locally_in_new_solver( - _ocx: &ObligationCtxt<'_, 'tcx>, - _key: ParamEnvAnd<'tcx, Self>, + ocx: &ObligationCtxt<'_, 'tcx>, + key: ParamEnvAnd<'tcx, Self>, ) -> Result { - todo!() + type_op_ascribe_user_type_with_span(ocx, key, None) } } + +/// The core of the `type_op_ascribe_user_type` query: for diagnostics purposes in NLL HRTB errors, +/// this query can be re-run to better track the span of the obligation cause, and improve the error +/// message. Do not call directly unless you're in that very specific context. 
+pub fn type_op_ascribe_user_type_with_span<'tcx>( + ocx: &ObligationCtxt<'_, 'tcx>, + key: ParamEnvAnd<'tcx, AscribeUserType<'tcx>>, + span: Option, +) -> Result<(), NoSolution> { + let (param_env, AscribeUserType { mir_ty, user_ty }) = key.into_parts(); + debug!("type_op_ascribe_user_type: mir_ty={:?} user_ty={:?}", mir_ty, user_ty); + let span = span.unwrap_or(DUMMY_SP); + match user_ty { + UserType::Ty(user_ty) => relate_mir_and_user_ty(ocx, param_env, span, mir_ty, user_ty)?, + UserType::TypeOf(def_id, user_substs) => { + relate_mir_and_user_substs(ocx, param_env, span, mir_ty, def_id, user_substs)? + } + }; + Ok(()) +} + +#[instrument(level = "debug", skip(ocx, param_env, span))] +fn relate_mir_and_user_ty<'tcx>( + ocx: &ObligationCtxt<'_, 'tcx>, + param_env: ty::ParamEnv<'tcx>, + span: Span, + mir_ty: Ty<'tcx>, + user_ty: Ty<'tcx>, +) -> Result<(), NoSolution> { + let cause = ObligationCause::dummy_with_span(span); + let user_ty = ocx.normalize(&cause, param_env, user_ty); + ocx.eq(&cause, param_env, mir_ty, user_ty)?; + + // FIXME(#104764): We should check well-formedness before normalization. + let predicate = ty::Binder::dummy(ty::PredicateKind::WellFormed(user_ty.into())); + ocx.register_obligation(Obligation::new(ocx.infcx.tcx, cause, param_env, predicate)); + Ok(()) +} + +#[instrument(level = "debug", skip(ocx, param_env, span))] +fn relate_mir_and_user_substs<'tcx>( + ocx: &ObligationCtxt<'_, 'tcx>, + param_env: ty::ParamEnv<'tcx>, + span: Span, + mir_ty: Ty<'tcx>, + def_id: DefId, + user_substs: UserSubsts<'tcx>, +) -> Result<(), NoSolution> { + let param_env = param_env.without_const(); + let UserSubsts { user_self_ty, substs } = user_substs; + let tcx = ocx.infcx.tcx; + let cause = ObligationCause::dummy_with_span(span); + + let ty = tcx.type_of(def_id).subst(tcx, substs); + let ty = ocx.normalize(&cause, param_env, ty); + debug!("relate_type_and_user_type: ty of def-id is {:?}", ty); + + ocx.eq(&cause, param_env, mir_ty, ty)?; + + // Prove the predicates coming along with `def_id`. + // + // Also, normalize the `instantiated_predicates` + // because otherwise we wind up with duplicate "type + // outlives" error messages. + let instantiated_predicates = tcx.predicates_of(def_id).instantiate(tcx, substs); + + debug!(?instantiated_predicates); + for (instantiated_predicate, predicate_span) in instantiated_predicates { + let span = if span == DUMMY_SP { predicate_span } else { span }; + let cause = ObligationCause::new( + span, + CRATE_DEF_ID, + ObligationCauseCode::AscribeUserTypeProvePredicate(predicate_span), + ); + let instantiated_predicate = + ocx.normalize(&cause.clone(), param_env, instantiated_predicate); + + ocx.register_obligation(Obligation::new(tcx, cause, param_env, instantiated_predicate)); + } + + if let Some(UserSelfTy { impl_def_id, self_ty }) = user_self_ty { + let self_ty = ocx.normalize(&cause, param_env, self_ty); + let impl_self_ty = tcx.type_of(impl_def_id).subst(tcx, substs); + let impl_self_ty = ocx.normalize(&cause, param_env, impl_self_ty); + + ocx.eq(&cause, param_env, self_ty, impl_self_ty)?; + let predicate = ty::Binder::dummy(ty::PredicateKind::WellFormed(impl_self_ty.into())); + ocx.register_obligation(Obligation::new(tcx, cause.clone(), param_env, predicate)); + } + + // In addition to proving the predicates, we have to + // prove that `ty` is well-formed -- this is because + // the WF of `ty` is predicated on the substs being + // well-formed, and we haven't proven *that*. 
We don't + // want to prove the WF of types from `substs` directly because they + // haven't been normalized. + // + // FIXME(nmatsakis): Well, perhaps we should normalize + // them? This would only be relevant if some input + // type were ill-formed but did not appear in `ty`, + // which...could happen with normalization... + let predicate = ty::Binder::dummy(ty::PredicateKind::WellFormed(ty.into())); + ocx.register_obligation(Obligation::new(tcx, cause, param_env, predicate)); + Ok(()) +} diff --git a/compiler/rustc_trait_selection/src/traits/query/type_op/implied_outlives_bounds.rs b/compiler/rustc_trait_selection/src/traits/query/type_op/implied_outlives_bounds.rs index 9054bafc4a67a..9989fc9c479a3 100644 --- a/compiler/rustc_trait_selection/src/traits/query/type_op/implied_outlives_bounds.rs +++ b/compiler/rustc_trait_selection/src/traits/query/type_op/implied_outlives_bounds.rs @@ -1,8 +1,15 @@ -use crate::infer::canonical::{Canonical, CanonicalQueryResponse}; +use crate::traits::query::NoSolution; +use crate::traits::wf; use crate::traits::ObligationCtxt; + +use rustc_infer::infer::canonical::Canonical; +use rustc_infer::infer::outlives::components::{push_outlives_components, Component}; use rustc_infer::traits::query::OutlivesBound; -use rustc_middle::traits::query::NoSolution; -use rustc_middle::ty::{self, ParamEnvAnd, Ty, TyCtxt}; +use rustc_middle::infer::canonical::CanonicalQueryResponse; +use rustc_middle::ty::{self, ParamEnvAnd, Ty, TyCtxt, TypeVisitableExt}; +use rustc_span::def_id::CRATE_DEF_ID; +use rustc_span::source_map::DUMMY_SP; +use smallvec::{smallvec, SmallVec}; #[derive(Copy, Clone, Debug, HashStable, TypeFoldable, TypeVisitable, Lift)] pub struct ImpliedOutlivesBounds<'tcx> { @@ -42,9 +49,167 @@ impl<'tcx> super::QueryTypeOp<'tcx> for ImpliedOutlivesBounds<'tcx> { } fn perform_locally_in_new_solver( - _ocx: &ObligationCtxt<'_, 'tcx>, - _key: ParamEnvAnd<'tcx, Self>, + ocx: &ObligationCtxt<'_, 'tcx>, + key: ParamEnvAnd<'tcx, Self>, ) -> Result { - todo!() + compute_implied_outlives_bounds_inner(ocx, key.param_env, key.value.ty) + } +} + +pub fn compute_implied_outlives_bounds_inner<'tcx>( + ocx: &ObligationCtxt<'_, 'tcx>, + param_env: ty::ParamEnv<'tcx>, + ty: Ty<'tcx>, +) -> Result>, NoSolution> { + let tcx = ocx.infcx.tcx; + + // Sometimes when we ask what it takes for T: WF, we get back that + // U: WF is required; in that case, we push U onto this stack and + // process it next. Because the resulting predicates aren't always + // guaranteed to be a subset of the original type, so we need to store the + // WF args we've computed in a set. + let mut checked_wf_args = rustc_data_structures::fx::FxHashSet::default(); + let mut wf_args = vec![ty.into()]; + + let mut outlives_bounds: Vec, ty::Region<'tcx>>> = + vec![]; + + while let Some(arg) = wf_args.pop() { + if !checked_wf_args.insert(arg) { + continue; + } + + // Compute the obligations for `arg` to be well-formed. If `arg` is + // an unresolved inference variable, just substituted an empty set + // -- because the return type here is going to be things we *add* + // to the environment, it's always ok for this set to be smaller + // than the ultimate set. (Note: normally there won't be + // unresolved inference variables here anyway, but there might be + // during typeck under some circumstances.) + // + // FIXME(@lcnr): It's not really "always fine", having fewer implied + // bounds can be backward incompatible, e.g. #101951 was caused by + // us not dealing with inference vars in `TypeOutlives` predicates. 
+ let obligations = wf::obligations(ocx.infcx, param_env, CRATE_DEF_ID, 0, arg, DUMMY_SP) + .unwrap_or_default(); + + for obligation in obligations { + debug!(?obligation); + assert!(!obligation.has_escaping_bound_vars()); + + // While these predicates should all be implied by other parts of + // the program, they are still relevant as they may constrain + // inference variables, which is necessary to add the correct + // implied bounds in some cases, mostly when dealing with projections. + // + // Another important point here: we only register `Projection` + // predicates, since otherwise we might register outlives + // predicates containing inference variables, and we don't + // learn anything new from those. + if obligation.predicate.has_non_region_infer() { + match obligation.predicate.kind().skip_binder() { + ty::PredicateKind::Clause(ty::Clause::Projection(..)) + | ty::PredicateKind::AliasRelate(..) => { + ocx.register_obligation(obligation.clone()); + } + _ => {} + } + } + + let pred = match obligation.predicate.kind().no_bound_vars() { + None => continue, + Some(pred) => pred, + }; + match pred { + ty::PredicateKind::Clause(ty::Clause::Trait(..)) + // FIXME(const_generics): Make sure that `<'a, 'b, const N: &'a &'b u32>` is sound + // if we ever support that + | ty::PredicateKind::Clause(ty::Clause::ConstArgHasType(..)) + | ty::PredicateKind::Subtype(..) + | ty::PredicateKind::Coerce(..) + | ty::PredicateKind::Clause(ty::Clause::Projection(..)) + | ty::PredicateKind::ClosureKind(..) + | ty::PredicateKind::ObjectSafe(..) + | ty::PredicateKind::ConstEvaluatable(..) + | ty::PredicateKind::ConstEquate(..) + | ty::PredicateKind::Ambiguous + | ty::PredicateKind::AliasRelate(..) + | ty::PredicateKind::TypeWellFormedFromEnv(..) => {} + + // We need to search through *all* WellFormed predicates + ty::PredicateKind::WellFormed(arg) => { + wf_args.push(arg); + } + + // We need to register region relationships + ty::PredicateKind::Clause(ty::Clause::RegionOutlives(ty::OutlivesPredicate( + r_a, + r_b, + ))) => outlives_bounds.push(ty::OutlivesPredicate(r_a.into(), r_b)), + + ty::PredicateKind::Clause(ty::Clause::TypeOutlives(ty::OutlivesPredicate( + ty_a, + r_b, + ))) => outlives_bounds.push(ty::OutlivesPredicate(ty_a.into(), r_b)), + } + } + } + + // This call to `select_all_or_error` is necessary to constrain inference variables, which we + // use further down when computing the implied bounds. + match ocx.select_all_or_error().as_slice() { + [] => (), + _ => return Err(NoSolution), } + + // We lazily compute the outlives components as + // `select_all_or_error` constrains inference variables. + let implied_bounds = outlives_bounds + .into_iter() + .flat_map(|ty::OutlivesPredicate(a, r_b)| match a.unpack() { + ty::GenericArgKind::Lifetime(r_a) => vec![OutlivesBound::RegionSubRegion(r_b, r_a)], + ty::GenericArgKind::Type(ty_a) => { + let ty_a = ocx.infcx.resolve_vars_if_possible(ty_a); + let mut components = smallvec![]; + push_outlives_components(tcx, ty_a, &mut components); + implied_bounds_from_components(r_b, components) + } + ty::GenericArgKind::Const(_) => unreachable!(), + }) + .collect(); + + Ok(implied_bounds) +} + +/// When we have an implied bound that `T: 'a`, we can further break +/// this down to determine what relationships would have to hold for +/// `T: 'a` to hold. We get to assume that the caller has validated +/// those relationships. 
+fn implied_bounds_from_components<'tcx>( + sub_region: ty::Region<'tcx>, + sup_components: SmallVec<[Component<'tcx>; 4]>, +) -> Vec> { + sup_components + .into_iter() + .filter_map(|component| { + match component { + Component::Region(r) => Some(OutlivesBound::RegionSubRegion(sub_region, r)), + Component::Param(p) => Some(OutlivesBound::RegionSubParam(sub_region, p)), + Component::Alias(p) => Some(OutlivesBound::RegionSubAlias(sub_region, p)), + Component::EscapingAlias(_) => + // If the projection has escaping regions, don't + // try to infer any implied bounds even for its + // free components. This is conservative, because + // the caller will still have to prove that those + // free components outlive `sub_region`. But the + // idea is that the WAY that the caller proves + // that may change in the future and we want to + // give ourselves room to get smarter here. + { + None + } + Component::UnresolvedInferenceVariable(..) => None, + } + }) + .collect() } diff --git a/compiler/rustc_trait_selection/src/traits/query/type_op/outlives.rs b/compiler/rustc_trait_selection/src/traits/query/type_op/outlives.rs index 8b3a20a88f03e..9889426337476 100644 --- a/compiler/rustc_trait_selection/src/traits/query/type_op/outlives.rs +++ b/compiler/rustc_trait_selection/src/traits/query/type_op/outlives.rs @@ -1,7 +1,9 @@ use crate::infer::canonical::{Canonical, CanonicalQueryResponse}; -use crate::traits::query::dropck_outlives::{trivial_dropck_outlives, DropckOutlivesResult}; +use crate::traits::query::dropck_outlives::{ + compute_dropck_outlives_inner, trivial_dropck_outlives, +}; use crate::traits::ObligationCtxt; -use rustc_middle::traits::query::NoSolution; +use rustc_middle::traits::query::{DropckOutlivesResult, NoSolution}; use rustc_middle::ty::{ParamEnvAnd, Ty, TyCtxt}; #[derive(Copy, Clone, Debug, HashStable, TypeFoldable, TypeVisitable, Lift)] @@ -51,9 +53,9 @@ impl<'tcx> super::QueryTypeOp<'tcx> for DropckOutlives<'tcx> { } fn perform_locally_in_new_solver( - _ocx: &ObligationCtxt<'_, 'tcx>, - _key: ParamEnvAnd<'tcx, Self>, + ocx: &ObligationCtxt<'_, 'tcx>, + key: ParamEnvAnd<'tcx, Self>, ) -> Result { - todo!() + compute_dropck_outlives_inner(ocx, key.param_env.and(key.value.dropped_ty)) } } diff --git a/compiler/rustc_traits/src/dropck_outlives.rs b/compiler/rustc_traits/src/dropck_outlives.rs index 83f6c7d07fe78..f35c14eeac801 100644 --- a/compiler/rustc_traits/src/dropck_outlives.rs +++ b/compiler/rustc_traits/src/dropck_outlives.rs @@ -3,17 +3,14 @@ use rustc_hir::def_id::DefId; use rustc_infer::infer::canonical::{Canonical, QueryResponse}; use rustc_infer::infer::TyCtxtInferExt; use rustc_middle::query::Providers; +use rustc_middle::traits::query::{DropckConstraint, DropckOutlivesResult}; use rustc_middle::ty::InternalSubsts; -use rustc_middle::ty::{self, EarlyBinder, ParamEnvAnd, Ty, TyCtxt}; -use rustc_span::source_map::{Span, DUMMY_SP}; +use rustc_middle::ty::TyCtxt; use rustc_trait_selection::infer::InferCtxtBuilderExt; -use rustc_trait_selection::traits::query::dropck_outlives::trivial_dropck_outlives; use rustc_trait_selection::traits::query::dropck_outlives::{ - DropckConstraint, DropckOutlivesResult, + compute_dropck_outlives_inner, dtorck_constraint_for_ty_inner, }; -use rustc_trait_selection::traits::query::normalize::QueryNormalizeExt; use rustc_trait_selection::traits::query::{CanonicalTyGoal, NoSolution}; -use rustc_trait_selection::traits::{Normalized, ObligationCause}; pub(crate) fn provide(p: &mut Providers) { *p = Providers { dropck_outlives, 
adt_dtorck_constraint, ..*p }; @@ -26,263 +23,10 @@ fn dropck_outlives<'tcx>( debug!("dropck_outlives(goal={:#?})", canonical_goal); tcx.infer_ctxt().enter_canonical_trait_query(&canonical_goal, |ocx, goal| { - let tcx = ocx.infcx.tcx; - let ParamEnvAnd { param_env, value: for_ty } = goal; - - let mut result = DropckOutlivesResult { kinds: vec![], overflows: vec![] }; - - // A stack of types left to process. Each round, we pop - // something from the stack and invoke - // `dtorck_constraint_for_ty`. This may produce new types that - // have to be pushed on the stack. This continues until we have explored - // all the reachable types from the type `for_ty`. - // - // Example: Imagine that we have the following code: - // - // ```rust - // struct A { - // value: B, - // children: Vec, - // } - // - // struct B { - // value: u32 - // } - // - // fn f() { - // let a: A = ...; - // .. - // } // here, `a` is dropped - // ``` - // - // at the point where `a` is dropped, we need to figure out - // which types inside of `a` contain region data that may be - // accessed by any destructors in `a`. We begin by pushing `A` - // onto the stack, as that is the type of `a`. We will then - // invoke `dtorck_constraint_for_ty` which will expand `A` - // into the types of its fields `(B, Vec)`. These will get - // pushed onto the stack. Eventually, expanding `Vec` will - // lead to us trying to push `A` a second time -- to prevent - // infinite recursion, we notice that `A` was already pushed - // once and stop. - let mut ty_stack = vec![(for_ty, 0)]; - - // Set used to detect infinite recursion. - let mut ty_set = FxHashSet::default(); - - let cause = ObligationCause::dummy(); - let mut constraints = DropckConstraint::empty(); - while let Some((ty, depth)) = ty_stack.pop() { - debug!( - "{} kinds, {} overflows, {} ty_stack", - result.kinds.len(), - result.overflows.len(), - ty_stack.len() - ); - dtorck_constraint_for_ty(tcx, DUMMY_SP, for_ty, depth, ty, &mut constraints)?; - - // "outlives" represent types/regions that may be touched - // by a destructor. - result.kinds.append(&mut constraints.outlives); - result.overflows.append(&mut constraints.overflows); - - // If we have even one overflow, we should stop trying to evaluate further -- - // chances are, the subsequent overflows for this evaluation won't provide useful - // information and will just decrease the speed at which we can emit these errors - // (since we'll be printing for just that much longer for the often enormous types - // that result here). - if !result.overflows.is_empty() { - break; - } - - // dtorck types are "types that will get dropped but which - // do not themselves define a destructor", more or less. We have - // to push them onto the stack to be expanded. - for ty in constraints.dtorck_types.drain(..) { - let Normalized { value: ty, obligations } = - ocx.infcx.at(&cause, param_env).query_normalize(ty)?; - ocx.register_obligations(obligations); - - debug!("dropck_outlives: ty from dtorck_types = {:?}", ty); - - match ty.kind() { - // All parameters live for the duration of the - // function. - ty::Param(..) => {} - - // A projection that we couldn't resolve - it - // might have a destructor. - ty::Alias(..) 
=> { - result.kinds.push(ty.into()); - } - - _ => { - if ty_set.insert(ty) { - ty_stack.push((ty, depth + 1)); - } - } - } - } - } - - debug!("dropck_outlives: result = {:#?}", result); - Ok(result) + compute_dropck_outlives_inner(ocx, goal) }) } -/// Returns a set of constraints that needs to be satisfied in -/// order for `ty` to be valid for destruction. -fn dtorck_constraint_for_ty<'tcx>( - tcx: TyCtxt<'tcx>, - span: Span, - for_ty: Ty<'tcx>, - depth: usize, - ty: Ty<'tcx>, - constraints: &mut DropckConstraint<'tcx>, -) -> Result<(), NoSolution> { - debug!("dtorck_constraint_for_ty({:?}, {:?}, {:?}, {:?})", span, for_ty, depth, ty); - - if !tcx.recursion_limit().value_within_limit(depth) { - constraints.overflows.push(ty); - return Ok(()); - } - - if trivial_dropck_outlives(tcx, ty) { - return Ok(()); - } - - match ty.kind() { - ty::Bool - | ty::Char - | ty::Int(_) - | ty::Uint(_) - | ty::Float(_) - | ty::Str - | ty::Never - | ty::Foreign(..) - | ty::RawPtr(..) - | ty::Ref(..) - | ty::FnDef(..) - | ty::FnPtr(_) - | ty::GeneratorWitness(..) - | ty::GeneratorWitnessMIR(..) => { - // these types never have a destructor - } - - ty::Array(ety, _) | ty::Slice(ety) => { - // single-element containers, behave like their element - rustc_data_structures::stack::ensure_sufficient_stack(|| { - dtorck_constraint_for_ty(tcx, span, for_ty, depth + 1, *ety, constraints) - })?; - } - - ty::Tuple(tys) => rustc_data_structures::stack::ensure_sufficient_stack(|| { - for ty in tys.iter() { - dtorck_constraint_for_ty(tcx, span, for_ty, depth + 1, ty, constraints)?; - } - Ok::<_, NoSolution>(()) - })?, - - ty::Closure(_, substs) => { - if !substs.as_closure().is_valid() { - // By the time this code runs, all type variables ought to - // be fully resolved. - - tcx.sess.delay_span_bug( - span, - format!("upvar_tys for closure not found. Expected capture information for closure {ty}",), - ); - return Err(NoSolution); - } - - rustc_data_structures::stack::ensure_sufficient_stack(|| { - for ty in substs.as_closure().upvar_tys() { - dtorck_constraint_for_ty(tcx, span, for_ty, depth + 1, ty, constraints)?; - } - Ok::<_, NoSolution>(()) - })? - } - - ty::Generator(_, substs, _movability) => { - // rust-lang/rust#49918: types can be constructed, stored - // in the interior, and sit idle when generator yields - // (and is subsequently dropped). - // - // It would be nice to descend into interior of a - // generator to determine what effects dropping it might - // have (by looking at any drop effects associated with - // its interior). - // - // However, the interior's representation uses things like - // GeneratorWitness that explicitly assume they are not - // traversed in such a manner. So instead, we will - // simplify things for now by treating all generators as - // if they were like trait objects, where its upvars must - // all be alive for the generator's (potential) - // destructor. - // - // In particular, skipping over `_interior` is safe - // because any side-effects from dropping `_interior` can - // only take place through references with lifetimes - // derived from lifetimes attached to the upvars and resume - // argument, and we *do* incorporate those here. - - if !substs.as_generator().is_valid() { - // By the time this code runs, all type variables ought to - // be fully resolved. - tcx.sess.delay_span_bug( - span, - format!("upvar_tys for generator not found. 
Expected capture information for generator {ty}",), - ); - return Err(NoSolution); - } - - constraints.outlives.extend( - substs - .as_generator() - .upvar_tys() - .map(|t| -> ty::subst::GenericArg<'tcx> { t.into() }), - ); - constraints.outlives.push(substs.as_generator().resume_ty().into()); - } - - ty::Adt(def, substs) => { - let DropckConstraint { dtorck_types, outlives, overflows } = - tcx.at(span).adt_dtorck_constraint(def.did())?; - // FIXME: we can try to recursively `dtorck_constraint_on_ty` - // there, but that needs some way to handle cycles. - constraints - .dtorck_types - .extend(dtorck_types.iter().map(|t| EarlyBinder(*t).subst(tcx, substs))); - constraints - .outlives - .extend(outlives.iter().map(|t| EarlyBinder(*t).subst(tcx, substs))); - constraints - .overflows - .extend(overflows.iter().map(|t| EarlyBinder(*t).subst(tcx, substs))); - } - - // Objects must be alive in order for their destructor - // to be called. - ty::Dynamic(..) => { - constraints.outlives.push(ty.into()); - } - - // Types that can't be resolved. Pass them forward. - ty::Alias(..) | ty::Param(..) => { - constraints.dtorck_types.push(ty); - } - - ty::Placeholder(..) | ty::Bound(..) | ty::Infer(..) | ty::Error(_) => { - // By the time this code runs, all type variables ought to - // be fully resolved. - return Err(NoSolution); - } - } - - Ok(()) -} - /// Calculates the dtorck constraint for a type. pub(crate) fn adt_dtorck_constraint( tcx: TyCtxt<'_>, @@ -311,7 +55,7 @@ pub(crate) fn adt_dtorck_constraint( let mut result = DropckConstraint::empty(); for field in def.all_fields() { let fty = tcx.type_of(field.did).subst_identity(); - dtorck_constraint_for_ty(tcx, span, fty, 0, fty, &mut result)?; + dtorck_constraint_for_ty_inner(tcx, span, fty, 0, fty, &mut result)?; } result.outlives.extend(tcx.destructor_constraints(def)); dedup_dtorck_constraint(&mut result); diff --git a/compiler/rustc_traits/src/implied_outlives_bounds.rs b/compiler/rustc_traits/src/implied_outlives_bounds.rs index 49cbf9efa7493..959838ab348fd 100644 --- a/compiler/rustc_traits/src/implied_outlives_bounds.rs +++ b/compiler/rustc_traits/src/implied_outlives_bounds.rs @@ -3,18 +3,13 @@ //! [`rustc_trait_selection::traits::query::type_op::implied_outlives_bounds`]. 
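Stepping back from the dropck machinery moved above: what these constraints enforce is easiest to see in plain Rust. The sketch below is illustrative only (the `PrintOnDrop` type and `main` body are invented for this example and are not part of the patch); it shows the user-visible rule that a destructor may observe borrowed data, so that data must still be alive when the value is dropped.

```rust
// Dropck in user terms: the destructor reads the borrowed string, so the
// borrow must outlive the value being dropped.
struct PrintOnDrop<'a>(&'a str);

impl<'a> Drop for PrintOnDrop<'a> {
    fn drop(&mut self) {
        println!("dropping {}", self.0);
    }
}

fn main() {
    let message = String::from("borrowed");
    let _guard = PrintOnDrop(&message);
    // Locals drop in reverse declaration order, so `_guard` is dropped while
    // `message` is still alive and this compiles. If `message` were declared
    // after `_guard`, it would be dropped first and dropck would reject the
    // program, since the destructor could then observe freed data.
}
```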
 use rustc_infer::infer::canonical::{self, Canonical};
-use rustc_infer::infer::outlives::components::{push_outlives_components, Component};
 use rustc_infer::infer::TyCtxtInferExt;
 use rustc_infer::traits::query::OutlivesBound;
 use rustc_middle::query::Providers;
-use rustc_middle::ty::{self, Ty, TyCtxt, TypeVisitableExt};
-use rustc_span::def_id::CRATE_DEF_ID;
-use rustc_span::source_map::DUMMY_SP;
+use rustc_middle::ty::TyCtxt;
 use rustc_trait_selection::infer::InferCtxtBuilderExt;
+use rustc_trait_selection::traits::query::type_op::implied_outlives_bounds::compute_implied_outlives_bounds_inner;
 use rustc_trait_selection::traits::query::{CanonicalTyGoal, NoSolution};
-use rustc_trait_selection::traits::wf;
-use rustc_trait_selection::traits::ObligationCtxt;
-use smallvec::{smallvec, SmallVec};
 pub(crate) fn provide(p: &mut Providers) {
 *p = Providers { implied_outlives_bounds, ..*p };
@@ -29,164 +24,6 @@ fn implied_outlives_bounds<'tcx>(
 > {
 tcx.infer_ctxt().enter_canonical_trait_query(&goal, |ocx, key| {
 let (param_env, ty) = key.into_parts();
- compute_implied_outlives_bounds(ocx, param_env, ty)
+ compute_implied_outlives_bounds_inner(ocx, param_env, ty)
 })
 }
-
-fn compute_implied_outlives_bounds<'tcx>(
- ocx: &ObligationCtxt<'_, 'tcx>,
- param_env: ty::ParamEnv<'tcx>,
- ty: Ty<'tcx>,
-) -> Result<Vec<OutlivesBound<'tcx>>, NoSolution> {
- let tcx = ocx.infcx.tcx;
-
- // Sometimes when we ask what it takes for T: WF, we get back that
- // U: WF is required; in that case, we push U onto this stack and
- // process it next. Because the resulting predicates aren't always
- // guaranteed to be a subset of the original type, so we need to store the
- // WF args we've computed in a set.
- let mut checked_wf_args = rustc_data_structures::fx::FxHashSet::default();
- let mut wf_args = vec![ty.into()];
-
- let mut outlives_bounds: Vec<ty::OutlivesPredicate<ty::GenericArg<'tcx>, ty::Region<'tcx>>> =
- vec![];
-
- while let Some(arg) = wf_args.pop() {
- if !checked_wf_args.insert(arg) {
- continue;
- }
-
- // Compute the obligations for `arg` to be well-formed. If `arg` is
- // an unresolved inference variable, just substituted an empty set
- // -- because the return type here is going to be things we *add*
- // to the environment, it's always ok for this set to be smaller
- // than the ultimate set. (Note: normally there won't be
- // unresolved inference variables here anyway, but there might be
- // during typeck under some circumstances.)
- //
- // FIXME(@lcnr): It's not really "always fine", having fewer implied
- // bounds can be backward incompatible, e.g. #101951 was caused by
- // us not dealing with inference vars in `TypeOutlives` predicates.
- let obligations = wf::obligations(ocx.infcx, param_env, CRATE_DEF_ID, 0, arg, DUMMY_SP)
- .unwrap_or_default();
-
- for obligation in obligations {
- debug!(?obligation);
- assert!(!obligation.has_escaping_bound_vars());
-
- // While these predicates should all be implied by other parts of
- // the program, they are still relevant as they may constrain
- // inference variables, which is necessary to add the correct
- // implied bounds in some cases, mostly when dealing with projections.
- //
- // Another important point here: we only register `Projection`
- // predicates, since otherwise we might register outlives
- // predicates containing inference variables, and we don't
- // learn anything new from those.
- if obligation.predicate.has_non_region_infer() {
- match obligation.predicate.kind().skip_binder() {
- ty::PredicateKind::Clause(ty::Clause::Projection(..))
- | ty::PredicateKind::AliasRelate(..)
=> { - ocx.register_obligation(obligation.clone()); - } - _ => {} - } - } - - let pred = match obligation.predicate.kind().no_bound_vars() { - None => continue, - Some(pred) => pred, - }; - match pred { - ty::PredicateKind::Clause(ty::Clause::Trait(..)) - // FIXME(const_generics): Make sure that `<'a, 'b, const N: &'a &'b u32>` is sound - // if we ever support that - | ty::PredicateKind::Clause(ty::Clause::ConstArgHasType(..)) - | ty::PredicateKind::Subtype(..) - | ty::PredicateKind::Coerce(..) - | ty::PredicateKind::Clause(ty::Clause::Projection(..)) - | ty::PredicateKind::ClosureKind(..) - | ty::PredicateKind::ObjectSafe(..) - | ty::PredicateKind::ConstEvaluatable(..) - | ty::PredicateKind::ConstEquate(..) - | ty::PredicateKind::Ambiguous - | ty::PredicateKind::AliasRelate(..) - | ty::PredicateKind::TypeWellFormedFromEnv(..) => {} - - // We need to search through *all* WellFormed predicates - ty::PredicateKind::WellFormed(arg) => { - wf_args.push(arg); - } - - // We need to register region relationships - ty::PredicateKind::Clause(ty::Clause::RegionOutlives(ty::OutlivesPredicate( - r_a, - r_b, - ))) => outlives_bounds.push(ty::OutlivesPredicate(r_a.into(), r_b)), - - ty::PredicateKind::Clause(ty::Clause::TypeOutlives(ty::OutlivesPredicate( - ty_a, - r_b, - ))) => outlives_bounds.push(ty::OutlivesPredicate(ty_a.into(), r_b)), - } - } - } - - // This call to `select_all_or_error` is necessary to constrain inference variables, which we - // use further down when computing the implied bounds. - match ocx.select_all_or_error().as_slice() { - [] => (), - _ => return Err(NoSolution), - } - - // We lazily compute the outlives components as - // `select_all_or_error` constrains inference variables. - let implied_bounds = outlives_bounds - .into_iter() - .flat_map(|ty::OutlivesPredicate(a, r_b)| match a.unpack() { - ty::GenericArgKind::Lifetime(r_a) => vec![OutlivesBound::RegionSubRegion(r_b, r_a)], - ty::GenericArgKind::Type(ty_a) => { - let ty_a = ocx.infcx.resolve_vars_if_possible(ty_a); - let mut components = smallvec![]; - push_outlives_components(tcx, ty_a, &mut components); - implied_bounds_from_components(r_b, components) - } - ty::GenericArgKind::Const(_) => unreachable!(), - }) - .collect(); - - Ok(implied_bounds) -} - -/// When we have an implied bound that `T: 'a`, we can further break -/// this down to determine what relationships would have to hold for -/// `T: 'a` to hold. We get to assume that the caller has validated -/// those relationships. -fn implied_bounds_from_components<'tcx>( - sub_region: ty::Region<'tcx>, - sup_components: SmallVec<[Component<'tcx>; 4]>, -) -> Vec> { - sup_components - .into_iter() - .filter_map(|component| { - match component { - Component::Region(r) => Some(OutlivesBound::RegionSubRegion(sub_region, r)), - Component::Param(p) => Some(OutlivesBound::RegionSubParam(sub_region, p)), - Component::Alias(p) => Some(OutlivesBound::RegionSubAlias(sub_region, p)), - Component::EscapingAlias(_) => - // If the projection has escaping regions, don't - // try to infer any implied bounds even for its - // free components. This is conservative, because - // the caller will still have to prove that those - // free components outlive `sub_region`. But the - // idea is that the WAY that the caller proves - // that may change in the future and we want to - // give ourselves room to get smarter here. - { - None - } - Component::UnresolvedInferenceVariable(..) 
=> None, - } - }) - .collect() -} diff --git a/compiler/rustc_traits/src/lib.rs b/compiler/rustc_traits/src/lib.rs index b0f9c57154f1b..907e2d39c518f 100644 --- a/compiler/rustc_traits/src/lib.rs +++ b/compiler/rustc_traits/src/lib.rs @@ -21,7 +21,8 @@ mod normalize_erasing_regions; mod normalize_projection_ty; mod type_op; -pub use type_op::{type_op_ascribe_user_type_with_span, type_op_prove_predicate_with_cause}; +pub use rustc_trait_selection::traits::query::type_op::ascribe_user_type::type_op_ascribe_user_type_with_span; +pub use type_op::type_op_prove_predicate_with_cause; use rustc_middle::query::Providers; diff --git a/compiler/rustc_traits/src/type_op.rs b/compiler/rustc_traits/src/type_op.rs index faf985169deff..9904acb1c0d51 100644 --- a/compiler/rustc_traits/src/type_op.rs +++ b/compiler/rustc_traits/src/type_op.rs @@ -1,17 +1,15 @@ -use rustc_hir as hir; use rustc_infer::infer::canonical::{Canonical, QueryResponse}; use rustc_infer::infer::TyCtxtInferExt; use rustc_middle::query::Providers; use rustc_middle::traits::query::NoSolution; -use rustc_middle::traits::{DefiningAnchor, ObligationCauseCode}; -use rustc_middle::ty::{self, FnSig, Lift, PolyFnSig, Ty, TyCtxt, TypeFoldable}; +use rustc_middle::traits::DefiningAnchor; +use rustc_middle::ty::{FnSig, Lift, PolyFnSig, Ty, TyCtxt, TypeFoldable}; use rustc_middle::ty::{ParamEnvAnd, Predicate}; -use rustc_middle::ty::{UserSelfTy, UserSubsts, UserType}; -use rustc_span::def_id::CRATE_DEF_ID; -use rustc_span::{Span, DUMMY_SP}; use rustc_trait_selection::infer::InferCtxtBuilderExt; use rustc_trait_selection::traits::query::normalize::QueryNormalizeExt; -use rustc_trait_selection::traits::query::type_op::ascribe_user_type::AscribeUserType; +use rustc_trait_selection::traits::query::type_op::ascribe_user_type::{ + type_op_ascribe_user_type_with_span, AscribeUserType, +}; use rustc_trait_selection::traits::query::type_op::eq::Eq; use rustc_trait_selection::traits::query::type_op::normalize::Normalize; use rustc_trait_selection::traits::query::type_op::prove_predicate::ProvePredicate; @@ -42,111 +40,6 @@ fn type_op_ascribe_user_type<'tcx>( }) } -/// The core of the `type_op_ascribe_user_type` query: for diagnostics purposes in NLL HRTB errors, -/// this query can be re-run to better track the span of the obligation cause, and improve the error -/// message. Do not call directly unless you're in that very specific context. -pub fn type_op_ascribe_user_type_with_span<'tcx>( - ocx: &ObligationCtxt<'_, 'tcx>, - key: ParamEnvAnd<'tcx, AscribeUserType<'tcx>>, - span: Option, -) -> Result<(), NoSolution> { - let (param_env, AscribeUserType { mir_ty, user_ty }) = key.into_parts(); - debug!("type_op_ascribe_user_type: mir_ty={:?} user_ty={:?}", mir_ty, user_ty); - let span = span.unwrap_or(DUMMY_SP); - match user_ty { - UserType::Ty(user_ty) => relate_mir_and_user_ty(ocx, param_env, span, mir_ty, user_ty)?, - UserType::TypeOf(def_id, user_substs) => { - relate_mir_and_user_substs(ocx, param_env, span, mir_ty, def_id, user_substs)? - } - }; - Ok(()) -} - -#[instrument(level = "debug", skip(ocx, param_env, span))] -fn relate_mir_and_user_ty<'tcx>( - ocx: &ObligationCtxt<'_, 'tcx>, - param_env: ty::ParamEnv<'tcx>, - span: Span, - mir_ty: Ty<'tcx>, - user_ty: Ty<'tcx>, -) -> Result<(), NoSolution> { - let cause = ObligationCause::dummy_with_span(span); - let user_ty = ocx.normalize(&cause, param_env, user_ty); - ocx.eq(&cause, param_env, mir_ty, user_ty)?; - - // FIXME(#104764): We should check well-formedness before normalization. 
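For context on what the user type ascription path being moved here actually relates: the user-written type may mention an associated type and only matches the inferred MIR type after normalization, which is why `relate_mir_and_user_ty` normalizes before equating. A small, self-contained illustration in surface Rust (not compiler-internal code):

```rust
// The annotation mentions an associated type; it only matches the inferred
// type of `1u8` after `<Vec<u8> as IntoIterator>::Item` normalizes to `u8`.
fn main() {
    let x: <Vec<u8> as IntoIterator>::Item = 1u8;
    assert_eq!(x, 1);
}
```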
- let predicate = ty::Binder::dummy(ty::PredicateKind::WellFormed(user_ty.into())); - ocx.register_obligation(Obligation::new(ocx.infcx.tcx, cause, param_env, predicate)); - Ok(()) -} - -#[instrument(level = "debug", skip(ocx, param_env, span))] -fn relate_mir_and_user_substs<'tcx>( - ocx: &ObligationCtxt<'_, 'tcx>, - param_env: ty::ParamEnv<'tcx>, - span: Span, - mir_ty: Ty<'tcx>, - def_id: hir::def_id::DefId, - user_substs: UserSubsts<'tcx>, -) -> Result<(), NoSolution> { - let param_env = param_env.without_const(); - let UserSubsts { user_self_ty, substs } = user_substs; - let tcx = ocx.infcx.tcx; - let cause = ObligationCause::dummy_with_span(span); - - let ty = tcx.type_of(def_id).subst(tcx, substs); - let ty = ocx.normalize(&cause, param_env, ty); - debug!("relate_type_and_user_type: ty of def-id is {:?}", ty); - - ocx.eq(&cause, param_env, mir_ty, ty)?; - - // Prove the predicates coming along with `def_id`. - // - // Also, normalize the `instantiated_predicates` - // because otherwise we wind up with duplicate "type - // outlives" error messages. - let instantiated_predicates = tcx.predicates_of(def_id).instantiate(tcx, substs); - - debug!(?instantiated_predicates); - for (instantiated_predicate, predicate_span) in instantiated_predicates { - let span = if span == DUMMY_SP { predicate_span } else { span }; - let cause = ObligationCause::new( - span, - CRATE_DEF_ID, - ObligationCauseCode::AscribeUserTypeProvePredicate(predicate_span), - ); - let instantiated_predicate = - ocx.normalize(&cause.clone(), param_env, instantiated_predicate); - - ocx.register_obligation(Obligation::new(tcx, cause, param_env, instantiated_predicate)); - } - - if let Some(UserSelfTy { impl_def_id, self_ty }) = user_self_ty { - let self_ty = ocx.normalize(&cause, param_env, self_ty); - let impl_self_ty = tcx.type_of(impl_def_id).subst(tcx, substs); - let impl_self_ty = ocx.normalize(&cause, param_env, impl_self_ty); - - ocx.eq(&cause, param_env, self_ty, impl_self_ty)?; - let predicate = ty::Binder::dummy(ty::PredicateKind::WellFormed(impl_self_ty.into())); - ocx.register_obligation(Obligation::new(tcx, cause.clone(), param_env, predicate)); - } - - // In addition to proving the predicates, we have to - // prove that `ty` is well-formed -- this is because - // the WF of `ty` is predicated on the substs being - // well-formed, and we haven't proven *that*. We don't - // want to prove the WF of types from `substs` directly because they - // haven't been normalized. - // - // FIXME(nmatsakis): Well, perhaps we should normalize - // them? This would only be relevant if some input - // type were ill-formed but did not appear in `ty`, - // which...could happen with normalization... 
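Similarly, `relate_mir_and_user_substs` is the part that proves a definition's predicates for explicitly written substitutions on top of equating the types. A hedged, standalone sketch of the user-facing behaviour (the `Pair` type and `make` function are invented for illustration and are not part of this patch):

```rust
// Ascribing a path with explicit generic arguments obliges the compiler to
// prove the definition's predicates for those arguments, not just the types.
struct Pair<T: Clone>(T, T);

fn make(x: &u32) -> Pair<&u32> {
    // Writing `Pair::<&u32>` requires proving `&u32: Clone` (an instantiated
    // predicate of `Pair`) and the well-formedness of the resulting type.
    Pair::<&u32>(x, x)
}

fn main() {
    let n = 7;
    let p = make(&n);
    assert_eq!(*p.0 + *p.1, 14);
}
```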
- let predicate = ty::Binder::dummy(ty::PredicateKind::WellFormed(ty.into())); - ocx.register_obligation(Obligation::new(tcx, cause, param_env, predicate)); - Ok(()) -} - fn type_op_eq<'tcx>( tcx: TyCtxt<'tcx>, canonicalized: Canonical<'tcx, ParamEnvAnd<'tcx, Eq<'tcx>>>, From c4e8a86d9ee6b83bacadd3983122e56c323fd7b2 Mon Sep 17 00:00:00 2001 From: Michael Goulet Date: Thu, 25 May 2023 19:46:52 +0000 Subject: [PATCH 14/14] Don't use outlives type op outside of MIR typeck --- .../src/traits/outlives_bounds.rs | 37 ++++++++++++------- 1 file changed, 23 insertions(+), 14 deletions(-) diff --git a/compiler/rustc_trait_selection/src/traits/outlives_bounds.rs b/compiler/rustc_trait_selection/src/traits/outlives_bounds.rs index 0e797a1cb60d5..f8d056e321e65 100644 --- a/compiler/rustc_trait_selection/src/traits/outlives_bounds.rs +++ b/compiler/rustc_trait_selection/src/traits/outlives_bounds.rs @@ -1,9 +1,9 @@ use crate::infer::InferCtxt; -use crate::traits::query::type_op::{self, TypeOp, TypeOpOutput}; use crate::traits::{ObligationCause, ObligationCtxt}; use rustc_data_structures::fx::FxIndexSet; -use rustc_errors::ErrorGuaranteed; use rustc_infer::infer::resolve::OpportunisticRegionResolver; +use rustc_infer::infer::InferOk; +use rustc_middle::infer::canonical::{OriginalQueryValues, QueryRegionConstraints}; use rustc_middle::ty::{self, ParamEnv, Ty, TypeFolder, TypeVisitableExt}; use rustc_span::def_id::LocalDefId; @@ -68,20 +68,29 @@ impl<'a, 'tcx: 'a> InferCtxtExt<'a, 'tcx> for InferCtxt<'tcx> { return vec![]; } - let span = self.tcx.def_span(body_id); - let result: Result<_, ErrorGuaranteed> = param_env - .and(type_op::implied_outlives_bounds::ImpliedOutlivesBounds { ty }) - .fully_perform(self, span); - let result = match result { - Ok(r) => r, - Err(_) => { - return vec![]; - } + let mut canonical_var_values = OriginalQueryValues::default(); + let canonical_ty = + self.canonicalize_query_keep_static(param_env.and(ty), &mut canonical_var_values); + let Ok(canonical_result) = self.tcx.implied_outlives_bounds(canonical_ty) else { + return vec![]; + }; + + let mut constraints = QueryRegionConstraints::default(); + let Ok(InferOk { value, obligations }) = self + .instantiate_nll_query_response_and_region_obligations( + &ObligationCause::dummy(), + param_env, + &canonical_var_values, + canonical_result, + &mut constraints, + ) else { + return vec![]; }; + assert_eq!(&obligations, &[]); - let TypeOpOutput { output, constraints, .. } = result; + if !constraints.is_empty() { + let span = self.tcx.def_span(body_id); - if let Some(constraints) = constraints { debug!(?constraints); if !constraints.member_constraints.is_empty() { span_bug!(span, "{:#?}", constraints.member_constraints); @@ -108,7 +117,7 @@ impl<'a, 'tcx: 'a> InferCtxtExt<'a, 'tcx> for InferCtxt<'tcx> { } }; - output + value } fn implied_bounds_tys(