Auto merge of rust-lang#133566 - lcnr:fast-reject-perf, r=compiler-errors

fast-reject: add cache

slightly modified version of rust-lang#133524

I tried a few alternatives:
- add a cache: this results in a bigger performance impact
- simply bail after recursing a certain number of times; however, looking at the number of steps taken while compiling different crates, we get the following results[^1] (a toy sketch of the cutoff idea is included after the data):

typenum
```text
1098842 counts
(  1)   670511 (61.0%, 61.0%): dropping after 1
(  2)   358785 (32.7%, 93.7%): dropping after 0
(  3)    25191 ( 2.3%, 96.0%): dropping after 2
(  4)    10912 ( 1.0%, 97.0%): dropping after 4
(  5)     6461 ( 0.6%, 97.5%): dropping after 3
(  6)     5239 ( 0.5%, 98.0%): dropping after 5
(  7)     2528 ( 0.2%, 98.3%): dropping after 8
(  8)     2188 ( 0.2%, 98.5%): dropping after 1094
(  9)     2097 ( 0.2%, 98.6%): dropping after 6
( 10)     1179 ( 0.1%, 98.7%): dropping after 34
( 11)     1148 ( 0.1%, 98.9%): dropping after 7
( 12)      822 ( 0.1%, 98.9%): dropping after 10
```
bitmaps
```text
533346 counts
(  1)   526166 (98.7%, 98.7%): dropping after 1
(  2)     4562 ( 0.9%, 99.5%): dropping after 0
(  3)     2072 ( 0.4%, 99.9%): dropping after 1024
(  4)      305 ( 0.1%,100.0%): dropping after 2
(  5)      106 ( 0.0%,100.0%): dropping after 4
(  6)       30 ( 0.0%,100.0%): dropping after 8
(  7)       18 ( 0.0%,100.0%): dropping after 3
(  8)       17 ( 0.0%,100.0%): dropping after 44
(  9)       15 ( 0.0%,100.0%): dropping after 168
( 10)        8 ( 0.0%,100.0%): dropping after 14
( 11)        7 ( 0.0%,100.0%): dropping after 13
( 12)        7 ( 0.0%,100.0%): dropping after 24
```
The stage 2 compiler is mostly trivial, but it has a few cases where we get >5000:
```text
12987156 counts
(  1)  9280476 (71.5%, 71.5%): dropping after 0
(  2)  2277841 (17.5%, 89.0%): dropping after 1
(  3)   724888 ( 5.6%, 94.6%): dropping after 2
(  4)   204005 ( 1.6%, 96.2%): dropping after 4
(  5)   146537 ( 1.1%, 97.3%): dropping after 3
(  6)    64287 ( 0.5%, 97.8%): dropping after 5
(  7)    43938 ( 0.3%, 98.1%): dropping after 6
(  8)    43758 ( 0.3%, 98.4%): dropping after 8
(  9)    27220 ( 0.2%, 98.7%): dropping after 7
( 10)    17374 ( 0.1%, 98.8%): dropping after 9
( 11)    16015 ( 0.1%, 98.9%): dropping after 10
( 12)    12855 ( 0.1%, 99.0%): dropping after 12
( 13)    10494 ( 0.1%, 99.1%): dropping after 11
( 14)     7553 ( 0.1%, 99.2%): dropping after 14
```
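
To make the cutoff idea concrete, here is a minimal, self-contained sketch of a depth-budgeted structural check over a made-up `Shape` type. It is purely illustrative and does not use the compiler's `DeepRejectCtxt` or `Interner` APIs; the one property it shares with the real code is that running out of budget conservatively answers `true` ("may unify"), so the check can only over-approximate, never wrongly reject.

```rust
// Toy model of the depth cutoff: recurse structurally, but give up and keep
// the candidate once a fixed budget of steps has been spent.
enum Shape {
    Leaf(u32),
    Pair(Box<Shape>, Box<Shape>),
}

fn may_match(lhs: &Shape, rhs: &Shape, depth: usize) -> bool {
    // Budget exhausted: stop recursing and conservatively report a possible
    // match, mirroring `depth.checked_sub(1)` in the committed change.
    let Some(depth) = depth.checked_sub(1) else {
        return true;
    };
    match (lhs, rhs) {
        (Shape::Leaf(a), Shape::Leaf(b)) => a == b,
        (Shape::Pair(a1, a2), Shape::Pair(b1, b2)) => {
            may_match(a1, b1, depth) && may_match(a2, b2, depth)
        }
        _ => false,
    }
}

fn main() {
    let a = Shape::Pair(Box::new(Shape::Leaf(1)), Box::new(Shape::Leaf(2)));
    let b = Shape::Pair(Box::new(Shape::Leaf(1)), Box::new(Shape::Leaf(3)));
    assert!(!may_match(&a, &b, 8)); // enough budget: the mismatch is found
    assert!(may_match(&a, &b, 1)); // budget spent: conservatively "may match"
}
```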

[^1]: I incremented a counter at the place where I now decrement the depth, and then printed it on drop.
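
The commit itself does not include that instrumentation; a hypothetical sketch of the count-and-print-on-drop pattern the footnote describes could look like the following, with the printed lines then aggregated into the "dropping after N" histograms shown above (presumably by a `counts`-style tool).

```rust
// Hypothetical instrumentation sketch (not part of the committed change):
// bump a counter wherever the depth is now decremented, and print the total
// when the guard is dropped at the end of the outermost check.
struct StepCounter {
    steps: usize,
}

impl StepCounter {
    fn record_step(&mut self) {
        // Called from the same spot where `depth.checked_sub(1)` lives now.
        self.steps += 1;
    }
}

impl Drop for StepCounter {
    fn drop(&mut self) {
        // One line per check; an external aggregator turns these lines into
        // the "dropping after N" histograms.
        eprintln!("dropping after {}", self.steps);
    }
}

fn main() {
    let mut counter = StepCounter { steps: 0 };
    for _ in 0..3 {
        counter.record_step();
    }
    // Prints "dropping after 3" when `counter` goes out of scope.
}
```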

r? `@compiler-errors`
bors committed Dec 2, 2024
2 parents caa8172 + 3465ce5 commit bd36e69
Showing 1 changed file with 53 additions and 17 deletions.
compiler/rustc_type_ir/src/fast_reject.rs (70 changes: 53 additions & 17 deletions)
@@ -214,27 +214,47 @@ impl<I: Interner> DeepRejectCtxt<I, false, true> {
impl<I: Interner, const INSTANTIATE_LHS_WITH_INFER: bool, const INSTANTIATE_RHS_WITH_INFER: bool>
DeepRejectCtxt<I, INSTANTIATE_LHS_WITH_INFER, INSTANTIATE_RHS_WITH_INFER>
{
// Quite arbitrary. Large enough to only affect a very tiny amount of impls/crates
// and small enough to prevent hangs.
const STARTING_DEPTH: usize = 8;

pub fn args_may_unify(
self,
obligation_args: I::GenericArgs,
impl_args: I::GenericArgs,
) -> bool {
self.args_may_unify_inner(obligation_args, impl_args, Self::STARTING_DEPTH)
}

pub fn types_may_unify(self, lhs: I::Ty, rhs: I::Ty) -> bool {
self.types_may_unify_inner(lhs, rhs, Self::STARTING_DEPTH)
}

fn args_may_unify_inner(
self,
obligation_args: I::GenericArgs,
impl_args: I::GenericArgs,
depth: usize,
) -> bool {
// No need to decrement the depth here as this function is only
// recursively reachable via `types_may_unify_inner` which already
// decrements the depth for us.
iter::zip(obligation_args.iter(), impl_args.iter()).all(|(obl, imp)| {
match (obl.kind(), imp.kind()) {
// We don't fast reject based on regions.
(ty::GenericArgKind::Lifetime(_), ty::GenericArgKind::Lifetime(_)) => true,
(ty::GenericArgKind::Type(obl), ty::GenericArgKind::Type(imp)) => {
self.types_may_unify(obl, imp)
self.types_may_unify_inner(obl, imp, depth)
}
(ty::GenericArgKind::Const(obl), ty::GenericArgKind::Const(imp)) => {
self.consts_may_unify(obl, imp)
self.consts_may_unify_inner(obl, imp)
}
_ => panic!("kind mismatch: {obl:?} {imp:?}"),
}
})
}

pub fn types_may_unify(self, lhs: I::Ty, rhs: I::Ty) -> bool {
fn types_may_unify_inner(self, lhs: I::Ty, rhs: I::Ty, depth: usize) -> bool {
match rhs.kind() {
// Start by checking whether the `rhs` type may unify with
// pretty much everything. Just return `true` in that case.
@@ -273,18 +293,31 @@ impl<I: Interner, const INSTANTIATE_LHS_WITH_INFER: bool, const INSTANTIATE_RHS_
| ty::Placeholder(_) => {}
};

// The type system needs to support exponentially large types
// as long as they are self-similar. While most other folders
// use caching to handle them, this folder exists purely as a
// perf optimization and is incredibly hot. In pretty much all
// uses checking the cache is slower than simply recursing, so
// we instead just add an arbitrary depth cutoff.
//
// We only decrement the depth here as the match on `rhs`
// does not recurse.
let Some(depth) = depth.checked_sub(1) else {
return true;
};

// For purely rigid types, use structural equivalence.
match lhs.kind() {
ty::Ref(_, lhs_ty, lhs_mutbl) => match rhs.kind() {
ty::Ref(_, rhs_ty, rhs_mutbl) => {
lhs_mutbl == rhs_mutbl && self.types_may_unify(lhs_ty, rhs_ty)
lhs_mutbl == rhs_mutbl && self.types_may_unify_inner(lhs_ty, rhs_ty, depth)
}
_ => false,
},

ty::Adt(lhs_def, lhs_args) => match rhs.kind() {
ty::Adt(rhs_def, rhs_args) => {
lhs_def == rhs_def && self.args_may_unify(lhs_args, rhs_args)
lhs_def == rhs_def && self.args_may_unify_inner(lhs_args, rhs_args, depth)
}
_ => false,
},
@@ -326,27 +359,28 @@ impl<I: Interner, const INSTANTIATE_LHS_WITH_INFER: bool, const INSTANTIATE_RHS_
ty::Tuple(rhs) => {
lhs.len() == rhs.len()
&& iter::zip(lhs.iter(), rhs.iter())
.all(|(lhs, rhs)| self.types_may_unify(lhs, rhs))
.all(|(lhs, rhs)| self.types_may_unify_inner(lhs, rhs, depth))
}
_ => false,
},

ty::Array(lhs_ty, lhs_len) => match rhs.kind() {
ty::Array(rhs_ty, rhs_len) => {
self.types_may_unify(lhs_ty, rhs_ty) && self.consts_may_unify(lhs_len, rhs_len)
self.types_may_unify_inner(lhs_ty, rhs_ty, depth)
&& self.consts_may_unify_inner(lhs_len, rhs_len)
}
_ => false,
},

ty::RawPtr(lhs_ty, lhs_mutbl) => match rhs.kind() {
ty::RawPtr(rhs_ty, rhs_mutbl) => {
lhs_mutbl == rhs_mutbl && self.types_may_unify(lhs_ty, rhs_ty)
lhs_mutbl == rhs_mutbl && self.types_may_unify_inner(lhs_ty, rhs_ty, depth)
}
_ => false,
},

ty::Slice(lhs_ty) => {
matches!(rhs.kind(), ty::Slice(rhs_ty) if self.types_may_unify(lhs_ty, rhs_ty))
matches!(rhs.kind(), ty::Slice(rhs_ty) if self.types_may_unify_inner(lhs_ty, rhs_ty, depth))
}

ty::Dynamic(lhs_preds, ..) => {
@@ -366,7 +400,7 @@ impl<I: Interner, const INSTANTIATE_LHS_WITH_INFER: bool, const INSTANTIATE_RHS_
lhs_hdr == rhs_hdr
&& lhs_sig_tys.len() == rhs_sig_tys.len()
&& iter::zip(lhs_sig_tys.iter(), rhs_sig_tys.iter())
.all(|(lhs, rhs)| self.types_may_unify(lhs, rhs))
.all(|(lhs, rhs)| self.types_may_unify_inner(lhs, rhs, depth))
}
_ => false,
},
Expand All @@ -375,49 +409,51 @@ impl<I: Interner, const INSTANTIATE_LHS_WITH_INFER: bool, const INSTANTIATE_RHS_

ty::FnDef(lhs_def_id, lhs_args) => match rhs.kind() {
ty::FnDef(rhs_def_id, rhs_args) => {
lhs_def_id == rhs_def_id && self.args_may_unify(lhs_args, rhs_args)
lhs_def_id == rhs_def_id && self.args_may_unify_inner(lhs_args, rhs_args, depth)
}
_ => false,
},

ty::Closure(lhs_def_id, lhs_args) => match rhs.kind() {
ty::Closure(rhs_def_id, rhs_args) => {
lhs_def_id == rhs_def_id && self.args_may_unify(lhs_args, rhs_args)
lhs_def_id == rhs_def_id && self.args_may_unify_inner(lhs_args, rhs_args, depth)
}
_ => false,
},

ty::CoroutineClosure(lhs_def_id, lhs_args) => match rhs.kind() {
ty::CoroutineClosure(rhs_def_id, rhs_args) => {
lhs_def_id == rhs_def_id && self.args_may_unify(lhs_args, rhs_args)
lhs_def_id == rhs_def_id && self.args_may_unify_inner(lhs_args, rhs_args, depth)
}
_ => false,
},

ty::Coroutine(lhs_def_id, lhs_args) => match rhs.kind() {
ty::Coroutine(rhs_def_id, rhs_args) => {
lhs_def_id == rhs_def_id && self.args_may_unify(lhs_args, rhs_args)
lhs_def_id == rhs_def_id && self.args_may_unify_inner(lhs_args, rhs_args, depth)
}
_ => false,
},

ty::CoroutineWitness(lhs_def_id, lhs_args) => match rhs.kind() {
ty::CoroutineWitness(rhs_def_id, rhs_args) => {
lhs_def_id == rhs_def_id && self.args_may_unify(lhs_args, rhs_args)
lhs_def_id == rhs_def_id && self.args_may_unify_inner(lhs_args, rhs_args, depth)
}
_ => false,
},

ty::Pat(lhs_ty, _) => {
// FIXME(pattern_types): take pattern into account
matches!(rhs.kind(), ty::Pat(rhs_ty, _) if self.types_may_unify(lhs_ty, rhs_ty))
matches!(rhs.kind(), ty::Pat(rhs_ty, _) if self.types_may_unify_inner(lhs_ty, rhs_ty, depth))
}

ty::Error(..) => true,
}
}

pub fn consts_may_unify(self, lhs: I::Const, rhs: I::Const) -> bool {
// Unlike `types_may_unify_inner`, this does not take a depth as
// we never recurse from this function.
fn consts_may_unify_inner(self, lhs: I::Const, rhs: I::Const) -> bool {
match rhs.kind() {
ty::ConstKind::Param(_) => {
if INSTANTIATE_RHS_WITH_INFER {
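
As background for the comment about exponentially large, self-similar types: a type whose fully expanded form doubles at every nesting level stays tiny at the source level, so a structural walk without a cutoff (or a cache keyed on such types) can become far more expensive than the type looks. The snippet below is an illustrative toy, not code from the compiler or from `typenum` (one of the crates measured above):

```rust
// Each alias doubles the expanded size of the previous one; `T4` already
// expands to a tuple tree with 16 leaves, while its written form stays small.
type T0 = u8;
type T1 = (T0, T0);
type T2 = (T1, T1);
type T3 = (T2, T2);
type T4 = (T3, T3);

fn main() {
    // Depth grows linearly while the expanded size grows exponentially,
    // which is why a small fixed recursion budget already covers the vast
    // majority of the step counts measured above.
    println!("{}", std::mem::size_of::<T4>()); // prints 16
}
```

In the change above, only the public `args_may_unify` and `types_may_unify` wrappers seed this budget with `STARTING_DEPTH`, and only `types_may_unify_inner` spends it; `args_may_unify_inner` relies on its caller having decremented the depth, and `consts_may_unify_inner` never recurses at all.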
