Skip to content

Commit

Permalink
feat: add rust benchmark case for code splitting
Browse files Browse the repository at this point in the history
  • Loading branch information
JSerFeng committed Jan 27, 2025
1 parent efc8cba commit c00591e
Show file tree
Hide file tree
Showing 12 changed files with 10,266 additions and 134 deletions.
1 change: 1 addition & 0 deletions Cargo.lock

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

5 changes: 3 additions & 2 deletions crates/rspack_core/src/build_chunk_graph/mod.rs
Original file line number Diff line number Diff line change
Expand Up @@ -10,7 +10,7 @@ pub(crate) mod incremental;
pub(crate) mod new_code_splitter;

#[instrument("Compilation:build_chunk_graph", skip_all)]
pub(crate) fn build_chunk_graph(compilation: &mut Compilation) -> rspack_error::Result<()> {
pub fn build_chunk_graph(compilation: &mut Compilation) -> rspack_error::Result<()> {
let enable_incremental = compilation
.incremental
.can_read_mutations(IncrementalPasses::BUILD_CHUNK_GRAPH);
Expand All @@ -19,6 +19,7 @@ pub(crate) fn build_chunk_graph(compilation: &mut Compilation) -> rspack_error::
} else {
Default::default()
};

splitter.update_with_compilation(compilation)?;

if !enable_incremental || splitter.chunk_group_infos.is_empty() {
Expand Down Expand Up @@ -49,7 +50,7 @@ pub(crate) fn build_chunk_graph(compilation: &mut Compilation) -> rspack_error::
}

#[instrument(skip_all)]
pub(crate) fn build_chunk_graph_new(compilation: &mut Compilation) -> rspack_error::Result<()> {
pub fn build_chunk_graph_new(compilation: &mut Compilation) -> rspack_error::Result<()> {
new_code_splitter::code_split(compilation)?;
Ok(())
}
120 changes: 1 addition & 119 deletions crates/rspack_core/src/build_chunk_graph/new_code_splitter.rs
Original file line number Diff line number Diff line change
Expand Up @@ -828,127 +828,9 @@ impl CodeSplitter {

fn create_chunks(&mut self, compilation: &mut Compilation) -> Result<()> {
let mut errors = vec![];
// let mut roots: Vec<CreateChunkRoot> = vec![];
// for (idx, (name, data)) in entries.iter().enumerate() {
// name_to_idx.insert(name, idx);
// let runtime = if let Some(depend_on) = &data.options.depend_on {
// deps.push((name, depend_on.clone()));
// None
// } else {
// Some(RuntimeSpec::from_entry_options(&data.options).expect("should have runtime"))
// };

// // set runtime later
// roots.push(CreateChunkRoot::Entry(name.clone(), data.clone(), runtime));
// }

// let mut entry_to_deps = HashMap::default();
// for (entry, deps) in deps {
// entry_to_deps.insert(
// entry.as_str(),
// deps
// .into_iter()
// .map(|dep| *name_to_idx.get(&dep).expect("should have idx"))
// .collect::<Vec<_>>(),
// );
// }

// for (entry, _) in entries.iter() {
// let curr = *name_to_idx.get(entry).expect("unreachable");
// if roots[curr].get_runtime().is_some() {
// // already set
// continue;
// }
// let mut visited = Default::default();
// if let Err(dep) =
// set_entry_runtime_and_depend_on(curr, &mut roots, &entry_to_deps, &mut visited, &mut vec![])
// {
// let dep_name = roots[dep].entry_name();
// let dep_rt = RuntimeSpec::from_entry(dep_name, None);
// error_roots.push((curr, Diagnostic::from(error!("Entrypoints '{entry}' and '{dep_name}' use 'dependOn' to depend on each other in a circular way."))));
// roots[dep].set_runtime(dep_rt);
// roots[curr].set_runtime(RuntimeSpec::from_entry(entry, None));
// }
// }

// let mut entry_module_runtime = IdentifierMap::<RuntimeSpec>::default();
// let module_graph: ModuleGraph = compilation.get_module_graph();
// for root in &roots {
// if let CreateChunkRoot::Entry(_name, data, runtime) = root {
// let runtime = runtime
// .as_ref()
// .expect("should have runtime after calculated depend on");
// for dep_id in compilation
// .global_entry
// .all_dependencies()
// .chain(data.all_dependencies())
// {
// let Some(module) = module_graph.module_identifier_by_dependency_id(dep_id) else {
// continue;
// };
// match entry_module_runtime.entry(*module) {
// std::collections::hash_map::Entry::Occupied(mut existing) => {
// let new_runtime = merge_runtime(existing.get(), &runtime);
// existing.insert(new_runtime);
// }
// std::collections::hash_map::Entry::Vacant(vacant) => {
// vacant.insert(runtime.clone());
// }
// }
// }
// }
// }

// let module_graph = compilation.get_module_graph();
// let module_cache = DashMap::default();

// roots.extend(
// self
// .blocks
// .par_iter()
// .map(|(block_id, origin)| {
// let visited = IdentifierDashSet::default();
// let block = module_graph
// .block_by_id(block_id)
// .expect("should have block");
// let runtime = if let Some(group_options) = block.get_group_options()
// && let Some(entry_options) = group_options.entry_options()
// {
// RuntimeSpec::from_entry_options(entry_options).or_else(|| {
// determine_runtime(
// *origin,
// &module_graph,
// &entry_module_runtime,
// &module_cache,
// &visited,
// )
// })
// } else {
// determine_runtime(
// *origin,
// &module_graph,
// &entry_module_runtime,
// &module_cache,
// &visited,
// )
// };

// if runtime.is_none() {
// dbg!(block.identifier());
// panic!()
// }
// CreateChunkRoot::Block(*origin, *block_id, runtime)
// })
// .collect::<Vec<_>>(),
// );

// for root in &roots {
// if let Some(runtime) = root.get_runtime() {
// self.module_deps.insert(runtime.clone(), Default::default());
// }
// }

let roots = self.analyze_module_graph(compilation)?;

// fill chunk with modules in parallel
let chunks = roots
.into_par_iter()
Expand Down
2 changes: 1 addition & 1 deletion crates/rspack_core/src/compiler/mod.rs
Original file line number Diff line number Diff line change
Expand Up @@ -484,7 +484,7 @@ impl Compiler {
Ok(())
}

fn new_compilation_params(&self) -> CompilationParams {
pub fn new_compilation_params(&self) -> CompilationParams {
CompilationParams {
normal_module_factory: Arc::new(NormalModuleFactory::new(
self.options.clone(),
Expand Down
2 changes: 1 addition & 1 deletion crates/rspack_core/src/lib.rs
Original file line number Diff line number Diff line change
Expand Up @@ -75,7 +75,7 @@ use ustr::Ustr;
pub use utils::*;
mod chunk_graph;
pub use chunk_graph::*;
mod build_chunk_graph;
pub mod build_chunk_graph;
mod stats;
pub use stats::*;
mod runtime;
Expand Down
4 changes: 2 additions & 2 deletions crates/rspack_plugin_split_chunks/src/plugin/mod.rs
Original file line number Diff line number Diff line change
Expand Up @@ -49,7 +49,7 @@ impl SplitChunksPlugin {
tracing::trace!("prepared module_group_map {:#?}", module_group_map);
logger.time_end(start);

let start = logger.time("ensure min size fit");
let start: rspack_core::StartTime = logger.time("ensure min size fit");
self.ensure_min_size_fit(compilation, &mut module_group_map);
logger.time_end(start);

Expand Down Expand Up @@ -162,7 +162,7 @@ impl Debug for SplitChunksPlugin {
}

#[plugin_hook(CompilationOptimizeChunks for SplitChunksPlugin, stage = Compilation::OPTIMIZE_CHUNKS_STAGE_ADVANCED)]
fn optimize_chunks(&self, compilation: &mut Compilation) -> Result<Option<bool>> {
pub fn optimize_chunks(&self, compilation: &mut Compilation) -> Result<Option<bool>> {
self.inner_impl(compilation)?;
Ok(None)
}
Expand Down
3 changes: 2 additions & 1 deletion tasks/benchmark/Cargo.toml
Original file line number Diff line number Diff line change
Expand Up @@ -17,7 +17,8 @@ rspack = { workspace = true }
rspack_fs = { workspace = true }
rspack_core = { workspace = true }
tokio = { workspace = true }
serde_json = { workspace = true }

[[bench]]
name = "basic"
name = "benches"
harness = false
17 changes: 9 additions & 8 deletions tasks/benchmark/benches/basic.rs
Original file line number Diff line number Diff line change
@@ -1,18 +1,20 @@
#![allow(clippy::unwrap_used)]
#![feature(trait_upcasting)]
#![allow(unused_attributes)]
#![allow(clippy::unwrap_used)]

use std::sync::Arc;

use criterion::criterion_group;
use rspack::builder::{Builder as _, Devtool};
use rspack_benchmark::{criterion_group, criterion_main, Criterion};
use rspack_benchmark::Criterion;
use rspack_core::Compiler;
use rspack_fs::{MemoryFileSystem, ReadableFileSystem, WritableFileSystem};
use tokio::runtime::Builder;

trait FileSystem: ReadableFileSystem + WritableFileSystem + Send + Sync {}
impl<T: ReadableFileSystem + WritableFileSystem + Send + Sync> FileSystem for T {}

async fn basic(fs: Arc<dyn FileSystem>, sm: bool) {
async fn basic_compile(fs: Arc<dyn FileSystem>, sm: bool) {
let mut builder = Compiler::builder();

builder
Expand All @@ -36,7 +38,7 @@ async fn basic(fs: Arc<dyn FileSystem>, sm: bool) {
.is_empty());
}

pub fn criterion_benchmark(c: &mut Criterion) {
pub fn basic_benchmark(c: &mut Criterion) {
let rt = Builder::new_multi_thread().build().unwrap();

let fs = MemoryFileSystem::default();
Expand All @@ -59,17 +61,16 @@ pub fn criterion_benchmark(c: &mut Criterion) {
c.bench_function("basic", |b| {
b.to_async(&rt).iter(|| {
let fs = fs.clone();
basic(fs, false)
basic_compile(fs, false)
});
});

c.bench_function("basic_sourcemap", |b| {
b.to_async(&rt).iter(|| {
let fs = fs.clone();
basic(fs, true)
basic_compile(fs, true)
});
});
}

criterion_group!(benches, criterion_benchmark);
criterion_main!(benches);
criterion_group!(basic, basic_benchmark);
11 changes: 11 additions & 0 deletions tasks/benchmark/benches/benches.rs
Original file line number Diff line number Diff line change
@@ -0,0 +1,11 @@
// Unified benchmark harness: aggregates every criterion benchmark group
// from the sibling benchmark modules into a single `benches` binary.
#![feature(trait_upcasting)]
#![allow(clippy::unwrap_used)]

// `basic` is the group registered via `criterion_group!(basic, basic_benchmark)`
// in basic.rs; `chunk_graph` is presumably registered the same way in
// build_chunk_graph.rs — TODO confirm (that module's body is not visible here).
use basic::basic;
use build_chunk_graph::chunk_graph;
use criterion::criterion_main;

mod basic;
mod build_chunk_graph;

// Expands to the `main` function that runs both benchmark groups.
criterion_main!(basic, chunk_graph);
Loading

0 comments on commit c00591e

Please sign in to comment.