diff --git a/src/cargo/ops/cargo_compile/compile_filter.rs b/src/cargo/ops/cargo_compile/compile_filter.rs
index a537a9a8a82..2cbb149aba2 100644
--- a/src/cargo/ops/cargo_compile/compile_filter.rs
+++ b/src/cargo/ops/cargo_compile/compile_filter.rs
@@ -1,9 +1,8 @@
 //! Filters and their rules to select which Cargo targets will be built.
 
 use crate::core::compiler::CompileMode;
-use crate::core::dependency::DepKind;
-use crate::core::resolver::HasDevUnits;
-use crate::core::{Package, Target, TargetKind};
+
+use crate::core::{Target, TargetKind};
 use crate::util::restricted_names::is_glob_pattern;
 
 #[derive(Debug, PartialEq, Eq, Clone)]
@@ -301,67 +300,4 @@ impl CompileFilter {
             }
         }
     }
-
-    /// Generate a CompileFilter that represents the maximal set of targets
-    /// that should be considered for scraped examples.
-    pub(super) fn refine_for_docscrape(
-        &self,
-        to_builds: &[&Package],
-        has_dev_units: HasDevUnits,
-    ) -> CompileFilter {
-        // In general, the goal is to scrape examples from (a) whatever targets
-        // the user is documenting, and (b) Example targets. However, if the user
-        // is documenting a library with dev-dependencies, those dev-deps are not
-        // needed for the library, while dev-deps are needed for the examples.
-        //
-        // If scrape-examples caused `cargo doc` to start requiring dev-deps, this
-        // would be a breaking change to crates whose dev-deps don't compile.
-        // Therefore we ONLY want to scrape Example targets if either:
-        // (1) No package has dev-dependencies, so this is a moot issue, OR
-        // (2) The provided CompileFilter requires dev-dependencies anyway.
-        //
-        // The next two variables represent these two conditions.
-
-        let no_pkg_has_dev_deps = to_builds.iter().all(|pkg| {
-            pkg.summary()
-                .dependencies()
-                .iter()
-                .all(|dep| !matches!(dep.kind(), DepKind::Development))
-        });
-
-        let reqs_dev_deps = matches!(has_dev_units, HasDevUnits::Yes);
-
-        let example_filter = if no_pkg_has_dev_deps || reqs_dev_deps {
-            FilterRule::All
-        } else {
-            FilterRule::none()
-        };
-
-        match self {
-            CompileFilter::Only {
-                all_targets,
-                lib,
-                bins,
-                tests,
-                benches,
-                ..
-            } => CompileFilter::Only {
-                all_targets: *all_targets,
-                lib: lib.clone(),
-                bins: bins.clone(),
-                examples: example_filter,
-                tests: tests.clone(),
-                benches: benches.clone(),
-            },
-
-            CompileFilter::Default { .. } => CompileFilter::Only {
-                all_targets: false,
-                lib: LibRule::Default,
-                bins: FilterRule::none(),
-                examples: example_filter,
-                tests: FilterRule::none(),
-                benches: FilterRule::none(),
-            },
-        }
-    }
 }
diff --git a/src/cargo/ops/cargo_compile/mod.rs b/src/cargo/ops/cargo_compile/mod.rs
index 622255f893c..b451b3b734d 100644
--- a/src/cargo/ops/cargo_compile/mod.rs
+++ b/src/cargo/ops/cargo_compile/mod.rs
@@ -41,6 +41,7 @@ use crate::core::compiler::{standard_lib, CrateType, TargetInfo};
 use crate::core::compiler::{BuildConfig, BuildContext, Compilation, Context};
 use crate::core::compiler::{CompileKind, CompileMode, CompileTarget, RustcTargetData, Unit};
 use crate::core::compiler::{DefaultExecutor, Executor, UnitInterner};
+use crate::core::dependency::DepKind;
 use crate::core::profiles::{Profiles, UnitFor};
 use crate::core::resolver::features::{self, CliFeatures, FeaturesFor};
 use crate::core::resolver::{HasDevUnits, Resolve};
@@ -361,6 +362,7 @@ pub fn create_bcx<'a, 'cfg>(
         &pkg_set,
         &profiles,
         interner,
+        has_dev_units,
     )?;
 
     if let Some(args) = target_rustc_crate_types {
@@ -369,11 +371,10 @@ pub fn create_bcx<'a, 'cfg>(
     let should_scrape =
         build_config.mode.is_doc() && config.cli_unstable().rustdoc_scrape_examples;
     let mut scrape_units = if should_scrape {
-        let scrape_filter = filter.refine_for_docscrape(&to_builds, has_dev_units);
         let all_units = generate_targets(
             ws,
             &to_builds,
-            &scrape_filter,
+            &filter,
             &build_config.requested_kinds,
             explicit_host_kind,
             CompileMode::Docscrape,
@@ -383,24 +384,17 @@ pub fn create_bcx<'a, 'cfg>(
             &pkg_set,
             &profiles,
             interner,
+            has_dev_units,
         )?;
 
-        // The set of scraped targets should be a strict subset of the set of documented targets,
-        // except in the special case of examples targets.
-        if cfg!(debug_assertions) {
-            let valid_targets = units.iter().map(|u| &u.target).collect::<HashSet<_>>();
-            for unit in &all_units {
-                assert!(unit.target.is_example() || valid_targets.contains(&unit.target));
-            }
-        }
-
         let valid_units = all_units
             .into_iter()
             .filter(|unit| {
-                !matches!(
-                    unit.target.doc_scrape_examples(),
-                    RustdocScrapeExamples::Disabled
-                )
+                ws.unit_needs_doc_scrape(unit)
+                    && !matches!(
+                        unit.target.doc_scrape_examples(),
+                        RustdocScrapeExamples::Disabled
+                    )
             })
             .collect::<Vec<_>>();
         valid_units
@@ -597,6 +591,7 @@ fn generate_targets(
     package_set: &PackageSet<'_>,
     profiles: &Profiles,
     interner: &UnitInterner,
+    has_dev_units: HasDevUnits,
 ) -> CargoResult<Vec<Unit>> {
     let config = ws.config();
     // Helper for creating a list of `Unit` structures
@@ -828,6 +823,52 @@ fn generate_targets(
         }
     }
 
+    if mode.is_doc_scrape() {
+        // In general, the goal is to scrape examples from (a) whatever targets
+        // the user is documenting, and (b) Example targets. However, if the user
+        // is documenting a library with dev-dependencies, those dev-deps are not
+        // needed for the library, while dev-deps are needed for the examples.
+        //
+        // If scrape-examples caused `cargo doc` to start requiring dev-deps, this
+        // would be a breaking change to crates whose dev-deps don't compile.
+        // Therefore we ONLY want to scrape Example targets if either:
+        // (1) No package has dev-dependencies, so this is a moot issue, OR
+        // (2) The provided CompileFilter requires dev-dependencies anyway.
+        //
+        // The next two variables represent these two conditions.
+        let no_pkg_has_dev_deps = packages.iter().all(|pkg| {
+            pkg.summary()
+                .dependencies()
+                .iter()
+                .all(|dep| !matches!(dep.kind(), DepKind::Development))
+        });
+        let reqs_dev_deps = matches!(has_dev_units, HasDevUnits::Yes);
+        let safe_to_scrape_example_targets = no_pkg_has_dev_deps || reqs_dev_deps;
+
+        let proposed_targets: HashSet<&Target> = proposals.iter().map(|p| p.target).collect();
+        let can_scrape = |target: &Target| {
+            let not_redundant = !proposed_targets.contains(target);
+            not_redundant
+                && match (target.doc_scrape_examples(), target.is_example()) {
+                    // Targets configured by the user to not be scraped should never be scraped
+                    (RustdocScrapeExamples::Disabled, _) => false,
+                    // Targets configured by the user to be scraped should always be scraped
+                    (RustdocScrapeExamples::Enabled, _) => true,
+                    // Example targets with no configuration should be conditionally scraped if
+                    // it's guaranteed not to break the build
+                    (RustdocScrapeExamples::Unset, true) => safe_to_scrape_example_targets,
+                    // All other targets are ignored for now. This may change in the future!
+                    (RustdocScrapeExamples::Unset, false) => false,
+                }
+        };
+        proposals.extend(filter_targets(
+            packages,
+            can_scrape,
+            false,
+            CompileMode::Docscrape,
+        ));
+    }
+
     // Only include targets that are libraries or have all required
     // features available.
     //
@@ -1074,7 +1115,7 @@ fn filter_default_targets(targets: &[Target], mode: CompileMode) -> Vec<&Target>
             .iter()
             .filter(|t| t.is_bin() || t.is_lib())
             .collect(),
-        CompileMode::Doc { .. } => {
+        CompileMode::Doc { .. } | CompileMode::Docscrape => {
             // `doc` does lib and bins (bin with same name as lib is skipped).
             targets
                 .iter()
@@ -1085,7 +1126,7 @@ fn filter_default_targets(targets: &[Target], mode: CompileMode) -> Vec<&Target>
                 })
                 .collect()
         }
-        CompileMode::Doctest | CompileMode::Docscrape | CompileMode::RunCustomBuild => {
+        CompileMode::Doctest | CompileMode::RunCustomBuild => {
             panic!("Invalid mode {:?}", mode)
         }
     }
diff --git a/tests/testsuite/docscrape.rs b/tests/testsuite/docscrape.rs
index f205e8b61e6..1c55d12b839 100644
--- a/tests/testsuite/docscrape.rs
+++ b/tests/testsuite/docscrape.rs
@@ -196,7 +196,7 @@ fn configure_target() {
         )
         .build();
 
-    p.cargo("doc --lib --bins -Zunstable-options -Zrustdoc-scrape-examples")
+    p.cargo("doc -Zunstable-options -Zrustdoc-scrape-examples")
        .masquerade_as_nightly_cargo(&["rustdoc-scrape-examples"])
        .run();
 
@@ -519,3 +519,64 @@ fn use_dev_deps_if_explicitly_enabled() {
         )
         .run();
 }
+
+#[cargo_test(nightly, reason = "rustdoc scrape examples flags are unstable")]
+fn only_scrape_documented_targets() {
+    // package bar has doc = false and should not be eligible for documentation.
+    let run_with_config = |config: &str, should_scrape: bool| {
+        let p = project()
+            .file(
+                "Cargo.toml",
+                &format!(
+                    r#"
+                    [package]
+                    name = "bar"
+                    version = "0.0.1"
+                    authors = []
+
+                    [lib]
+                    {config}
+
+                    [workspace]
+                    members = ["foo"]
+
+                    [dependencies]
+                    foo = {{ path = "foo" }}
+                    "#
+                ),
+            )
+            .file("src/lib.rs", "pub fn bar() { foo::foo(); }")
+            .file(
+                "foo/Cargo.toml",
+                r#"
+                [package]
+                name = "foo"
+                version = "0.0.1"
+                authors = []
+                "#,
+            )
+            .file("foo/src/lib.rs", "pub fn foo() {}")
+            .build();
+
+        p.cargo("doc --workspace -Zunstable-options -Zrustdoc-scrape-examples")
+            .masquerade_as_nightly_cargo(&["rustdoc-scrape-examples"])
+            .run();
+
+        let doc_html = p.read_file("target/doc/foo/fn.foo.html");
+        let example_found = doc_html.contains("Examples found in repository");
+        if should_scrape {
+            assert!(example_found);
+        } else {
+            assert!(!example_found);
+        }
+    };
+
+    // By default, bar should be scraped.
+    run_with_config("", true);
+    // If bar isn't supposed to be documented, then it is not eligible
+    // for scraping.
+    run_with_config("doc = false", false);
+    // But if the user explicitly says bar should be scraped, then it should
+    // be scraped.
+    run_with_config("doc = false\ndoc-scrape-examples = true", true);
+}
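
For reviewers who want the new scraping policy in isolation, the sketch below restates the decision table added to `generate_targets` as a small standalone program. `ScrapeSetting` and `should_scrape` are illustrative stand-ins for Cargo's internal `RustdocScrapeExamples` enum and the `can_scrape` closure, not part of Cargo's API, and the sketch leaves out the "already proposed for documentation" redundancy check.

// Standalone sketch (not Cargo internals). `ScrapeSetting` mirrors the three states of
// Cargo's `RustdocScrapeExamples`; `should_scrape` mirrors the match inside `can_scrape`.
#[derive(Clone, Copy)]
enum ScrapeSetting {
    Enabled,  // target sets `doc-scrape-examples = true`
    Disabled, // target sets `doc-scrape-examples = false`
    Unset,    // no explicit configuration
}

// `safe_to_scrape_examples` is true when no package has dev-dependencies, or when the
// build already requires dev units, so scraping examples cannot break `cargo doc`.
fn should_scrape(setting: ScrapeSetting, is_example: bool, safe_to_scrape_examples: bool) -> bool {
    match (setting, is_example) {
        // An explicit opt-out always wins.
        (ScrapeSetting::Disabled, _) => false,
        // An explicit opt-in always wins, even for targets with `doc = false`.
        (ScrapeSetting::Enabled, _) => true,
        // Unconfigured example targets are scraped only when it is safe.
        (ScrapeSetting::Unset, true) => safe_to_scrape_examples,
        // Other unconfigured targets are not added as extra scrape targets.
        (ScrapeSetting::Unset, false) => false,
    }
}

fn main() {
    assert!(should_scrape(ScrapeSetting::Unset, true, true));
    assert!(!should_scrape(ScrapeSetting::Unset, true, false));
    assert!(should_scrape(ScrapeSetting::Enabled, false, false));
    assert!(!should_scrape(ScrapeSetting::Disabled, true, true));
    println!("decision table matches the diff");
}

The behavioural points exercised by the new `only_scrape_documented_targets` test correspond to the last three assertions: an explicit `doc-scrape-examples = true` overrides `doc = false`, and unconfigured targets are scraped only when doing so cannot pull dev-dependencies into a plain `cargo doc`.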