Extend name resolution to support qualified paths
This is the first part needed to add support for stubbing
trait implementations and primitive types.

After this change, stubs need to be updated to accept a `FnResolution`
instead of a `FnDef`. We also need to figure out how to retrieve methods
of primitive types.
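
For illustration, the kind of harness this change is building toward might look like the sketch
below (hypothetical names; stubbing a trait implementation is still rejected after this commit,
but the qualified path itself can now be parsed and resolved):

    // Hypothetical example: `MyTrait`, `MyType`, and `my_stub` are made-up names.
    #[kani::proof]
    #[kani::stub(<MyType as MyTrait>::method, my_stub)] // qualified path, now resolvable
    fn my_harness() {
        // ...
    }
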
celinval committed Aug 22, 2024
1 parent 0ed9a62 commit b590998
Showing 11 changed files with 411 additions and 138 deletions.
2 changes: 2 additions & 0 deletions Cargo.lock
@@ -467,12 +467,14 @@ dependencies = [
"kani_metadata",
"lazy_static",
"num",
"quote",
"regex",
"serde",
"serde_json",
"shell-words",
"strum",
"strum_macros",
"syn 2.0.74",
"tracing",
"tracing-subscriber",
]
10 changes: 6 additions & 4 deletions kani-compiler/Cargo.toml
@@ -13,17 +13,19 @@ cbmc = { path = "../cprover_bindings", package = "cprover_bindings", optional =
clap = { version = "4.4.11", features = ["derive", "cargo"] }
home = "0.5"
itertools = "0.13"
kani_metadata = {path = "../kani_metadata"}
kani_metadata = { path = "../kani_metadata" }
lazy_static = "1.4.0"
num = { version = "0.4.0", optional = true }
quote = "1.0.36"
regex = "1.7.0"
serde = { version = "1", optional = true }
serde_json = "1"
strum = "0.26"
strum_macros = "0.26"
syn = { version = "2.0.72", features = ["parsing", "extra-traits"] }
shell-words = "1.0.0"
tracing = {version = "0.1", features = ["max_level_trace", "release_max_level_debug"]}
tracing-subscriber = {version = "0.3.8", features = ["env-filter", "json", "fmt"]}
tracing = { version = "0.1", features = ["max_level_trace", "release_max_level_debug"] }
tracing-subscriber = { version = "0.3.8", features = ["env-filter", "json", "fmt"] }

# Future proofing: enable backend dependencies using feature.
[features]
@@ -33,4 +35,4 @@ write_json_symtab = []

[package.metadata.rust-analyzer]
# This package uses rustc crates.
rustc_private=true
rustc_private = true
162 changes: 107 additions & 55 deletions kani-compiler/src/kani_middle/attributes.rs
@@ -5,9 +5,9 @@
use std::collections::BTreeMap;

use kani_metadata::{CbmcSolver, HarnessAttributes, HarnessKind, Stub};
use quote::ToTokens;
use rustc_ast::{
attr, AttrArgs, AttrArgsEq, AttrKind, Attribute, ExprKind, LitKind, MetaItem, MetaItemKind,
NestedMetaItem,
};
use rustc_errors::ErrorGuaranteed;
use rustc_hir::{def::DefKind, def_id::DefId};
@@ -19,10 +19,13 @@ use stable_mir::mir::mono::Instance as InstanceStable;
use stable_mir::{CrateDef, DefId as StableDefId};
use std::str::FromStr;
use strum_macros::{AsRefStr, EnumString};
use syn::parse::Parser;
use syn::punctuated::Punctuated;
use syn::{PathSegment, TypePath};

use tracing::{debug, trace};

use super::resolve::{self, resolve_fn, ResolveError};
use super::resolve::{resolve_fn, resolve_fn_path, FnResolution, ResolveError};

#[derive(Debug, Clone, Copy, AsRefStr, EnumString, PartialEq, Eq, PartialOrd, Ord)]
#[strum(serialize_all = "snake_case")]
@@ -555,19 +558,19 @@ impl<'tcx> KaniAttributes<'tcx> {
for (name, def_id, span) in self.interpret_stub_verified_attribute() {
if KaniAttributes::for_item(self.tcx, def_id).contract_attributes().is_none() {
dcx.struct_span_err(
span,
span,
format!(
"Failed to generate verified stub: Function `{}` has no contract.",
self.item_name(),
),
)
.with_span_note(
self.tcx.def_span(def_id),
format!(
"Failed to generate verified stub: Function `{}` has no contract.",
self.item_name(),
"Try adding a contract to this function or use the unsound `{}` attribute instead.",
KaniAttributeKind::Stub.as_ref(),
),
)
.with_span_note(
self.tcx.def_span(def_id),
format!(
"Try adding a contract to this function or use the unsound `{}` attribute instead.",
KaniAttributeKind::Stub.as_ref(),
),
)
.emit();
return;
}
@@ -787,20 +790,47 @@ fn parse_unwind(tcx: TyCtxt, attr: &Attribute) -> Option<u32> {

fn parse_stubs(tcx: TyCtxt, harness: DefId, attributes: &[&Attribute]) -> Vec<Stub> {
let current_module = tcx.parent_module_from_def_id(harness.expect_local());
let check_resolve = |attr: &Attribute, name: &str| {
let result = resolve::resolve_fn(tcx, current_module.to_local_def_id(), name);
if let Err(err) = result {
tcx.dcx().span_err(attr.span, format!("failed to resolve `{name}`: {err}"));
let check_resolve = |attr: &Attribute, path: &TypePath| {
let result = resolve_fn_path(tcx, current_module.to_local_def_id(), path);
match result {
Ok(FnResolution::Fn(_)) => { /* no-op */ }
Ok(FnResolution::FnImpl { .. }) => {
tcx.dcx().span_err(
attr.span,
"Kani currently does not support stubbing trait implementation.",
);
}
Err(err) => {
tcx.dcx().span_err(
attr.span,
format!("failed to resolve `{}`: {err}", pretty_type_path(path)),
);
}
}
};
attributes
.iter()
.filter_map(|attr| match parse_paths(attr) {
Ok(paths) => match paths.as_slice() {
.filter_map(|attr| {
let paths = parse_paths(attr).unwrap_or_else(|_| {
tcx.dcx().span_err(
attr.span,
"attribute `kani::stub` takes two path arguments; found argument that is not \
a path",
);
vec![]
});
match paths.as_slice() {
[orig, replace] => {
check_resolve(attr, orig);
check_resolve(attr, replace);
Some(Stub { original: orig.clone(), replacement: replace.clone() })
Some(Stub {
original: orig.to_token_stream().to_string(),
replacement: replace.to_token_stream().to_string(),
})
}
[] => {
/* Error was already emitted */
None
}
_ => {
tcx.dcx().span_err(
@@ -812,13 +842,6 @@ fn parse_stubs(tcx: TyCtxt, harness: DefId, attributes: &[&Attribute]) -> Vec<St
);
None
}
},
Err(error_span) => {
tcx.dcx().span_err(
error_span,
"attribute `kani::stub` takes two path arguments; found argument that is not a path",
);
None
}
})
.collect()
@@ -896,35 +919,13 @@ fn parse_integer(attr: &Attribute) -> Option<u128> {
}

/// Extracts a vector with the path arguments of an attribute.
/// Emits an error if it couldn't convert any of the arguments.
fn parse_paths(attr: &Attribute) -> Result<Vec<String>, Span> {
let attr_args = attr.meta_item_list();
attr_args
.unwrap_or_default()
.iter()
.map(|arg| match arg {
NestedMetaItem::Lit(item) => Err(item.span),
NestedMetaItem::MetaItem(item) => parse_path(item).ok_or(item.span),
})
.collect()
}

/// Extracts a path from an attribute item, returning `None` if the item is not
/// syntactically a path.
fn parse_path(meta_item: &MetaItem) -> Option<String> {
if meta_item.is_word() {
Some(
meta_item
.path
.segments
.iter()
.map(|seg| seg.ident.as_str())
.collect::<Vec<&str>>()
.join("::"),
)
} else {
None
}
///
/// Returns an error if any of the arguments cannot be parsed as a path.
fn parse_paths(attr: &Attribute) -> Result<Vec<TypePath>, syn::Error> {
let syn_attr = syn_attr(attr);
let parser = Punctuated::<TypePath, syn::Token![,]>::parse_terminated;
let paths = syn_attr.parse_args_with(parser)?;
Ok(paths.into_iter().collect())
}
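
As a standalone sketch of what `parse_paths` does (outside the compiler, so the stub arguments
are given as a plain string and the paths are made up):

    use syn::parse::Parser;
    use syn::punctuated::Punctuated;
    use syn::TypePath;

    fn main() -> syn::Result<()> {
        // The argument list of a `#[kani::stub(...)]` attribute, as plain text.
        let args = "<MyType as MyTrait>::method, my_crate::stubs::method";
        // Same parser used above: a comma-separated list of type paths.
        let parser = Punctuated::<TypePath, syn::Token![,]>::parse_terminated;
        let paths: Vec<TypePath> = parser.parse_str(args)?.into_iter().collect();
        assert_eq!(paths.len(), 2);
        // The qualified path keeps its self type in `qself`, which the resolver consumes.
        assert!(paths[0].qself.is_some());
        Ok(())
    }
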

/// Parse the arguments of the attribute into a (key, value) map.
@@ -989,3 +990,54 @@ pub fn matches_diagnostic<T: CrateDef>(tcx: TyCtxt, def: T, attr_name: &str) ->
}
false
}

/// Parse an attribute using `syn`.
///
/// This provides a more user-friendly interface for manipulating attributes than the internal compiler AST.
fn syn_attr(attr: &Attribute) -> syn::Attribute {
let attr_str = rustc_ast_pretty::pprust::attribute_to_string(attr);
let parser = syn::Attribute::parse_outer;
parser.parse_str(&attr_str).unwrap().pop().unwrap()
}
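
A minimal sketch of the same round trip outside the compiler, where the pretty-printed attribute
is replaced by a hard-coded string (since `rustc_ast_pretty` is only available with `rustc_private`):

    use syn::parse::Parser;

    fn main() -> syn::Result<()> {
        // Stand-in for `rustc_ast_pretty::pprust::attribute_to_string(attr)`.
        let attr_str = r#"#[kani::stub(zeroed, mock_zeroed)]"#;
        let attr = syn::Attribute::parse_outer.parse_str(attr_str)?.pop().unwrap();
        assert_eq!(attr.path().segments.len(), 2); // the attribute path is `kani::stub`
        Ok(())
    }
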

/// Return a more user-friendly string for a path by trying to remove unneeded whitespace.
///
/// `quote!()` and `TokenStream::to_string()` introduce unnecessary spaces around separators.
/// This happens because these methods end up using the `TokenStream` `Display` implementation,
/// which makes no guarantees about the printed format.
/// <https://doc.rust-lang.org/proc_macro/struct.TokenStream.html#impl-Display-for-TokenStream>
///
/// E.g.: The path `<[char; 10]>::foo` printed via the token stream becomes `< [ char ; 10 ] > :: foo`,
/// while this function turns it into `<[char ; 10]>::foo`.
///
/// That said, this can still be improved to handle the `qself.ty`.
///
/// We also don't handle generic arguments in path segments, but users shouldn't pass
/// generic arguments to our attributes anyway.
fn pretty_type_path(path: &TypePath) -> String {
fn segments_str<'a, I>(segments: I) -> String
where
I: IntoIterator<Item = &'a PathSegment>,
{
// We don't bother with path arguments for now since users shouldn't provide them.
segments
.into_iter()
.map(|segment| segment.to_token_stream().to_string())
.intersperse("::".to_string())
.collect()
}
let leading = if path.path.leading_colon.is_some() { "::" } else { "" };
if let Some(qself) = &path.qself {
let pos = qself.position;
let qself_str = qself.ty.to_token_stream().to_string();
if pos == 0 {
format!("<{qself_str}>::{}", segments_str(&path.path.segments))
} else {
let before = segments_str(path.path.segments.iter().take(pos));
let after = segments_str(path.path.segments.iter().skip(pos));
format!("<{qself_str} as {before}>::{after}")
}
} else {
format!("{leading}{}", segments_str(&path.path.segments))
}
}
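
For reference, this is how `syn` represents a qualified path like the ones handled above
(a standalone sketch with made-up names):

    use syn::TypePath;

    fn main() {
        // `<Foo as Bar>::method`: the self type lives in `qself`, the rest in `path`.
        let path: TypePath = syn::parse_str("<Foo as Bar>::method").unwrap();
        let qself = path.qself.as_ref().unwrap();
        // `position` counts how many leading segments of `path` belong to the trait (`Bar`).
        assert_eq!(qself.position, 1);
        assert_eq!(path.path.segments.len(), 2); // `Bar`, then `method`
    }
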
20 changes: 13 additions & 7 deletions kani-compiler/src/kani_middle/mod.rs
@@ -6,7 +6,7 @@
use std::collections::HashSet;

use crate::kani_queries::QueryDb;
use rustc_hir::{def::DefKind, def_id::LOCAL_CRATE};
use rustc_hir::{def::DefKind, def_id::DefId as InternalDefId, def_id::LOCAL_CRATE};
use rustc_middle::span_bug;
use rustc_middle::ty::layout::{
FnAbiError, FnAbiOf, FnAbiOfHelpers, FnAbiRequest, HasParamEnv, HasTyCtxt, LayoutError,
@@ -225,10 +225,16 @@ fn find_fn_def(tcx: TyCtxt, diagnostic: &str) -> Option<FnDef> {
.all_diagnostic_items(())
.name_to_id
.get(&rustc_span::symbol::Symbol::intern(diagnostic))?;
let TyKind::RigidTy(RigidTy::FnDef(def, _)) =
rustc_internal::stable(tcx.type_of(attr_id)).value.kind()
else {
return None;
};
Some(def)
stable_fn_def(tcx, *attr_id)
}

/// Try to convert an internal `DefId` to a `FnDef`.
pub fn stable_fn_def(tcx: TyCtxt, def_id: InternalDefId) -> Option<FnDef> {
if let TyKind::RigidTy(RigidTy::FnDef(def, _)) =
rustc_internal::stable(tcx.type_of(def_id)).value.kind()
{
Some(def)
} else {
None
}
}