Skip to content

Commit

Permalink
feat: persistent cache expose error to compilation.diagnostic (#8765)
Browse files · Browse the repository at this point in the history
  • Loading branch information
jerrykingxyz authored Dec 18, 2024
1 parent 8aeb507 commit 0e749f0
Show file tree
Hide file tree
Showing 12 changed files with 167 additions and 132 deletions.
5 changes: 4 additions & 1 deletion crates/rspack_core/src/cache/disable.rs
Original file line number Diff line number Diff line change
@@ -1,3 +1,5 @@
use rspack_error::Result;

use super::Cache;
use crate::make::MakeArtifact;

Expand All @@ -9,7 +11,8 @@ pub struct DisableCache;

#[async_trait::async_trait]
impl Cache for DisableCache {
async fn before_make(&self, make_artifact: &mut MakeArtifact) {
async fn before_make(&self, make_artifact: &mut MakeArtifact) -> Result<()> {
*make_artifact = Default::default();
Ok(())
}
}
39 changes: 20 additions & 19 deletions crates/rspack_core/src/cache/mod.rs
Original file line number Diff line number Diff line change
Expand Up @@ -4,6 +4,7 @@ pub mod persistent;

use std::{fmt::Debug, sync::Arc};

use rspack_error::Result;
use rspack_fs::{FileSystem, IntermediateFileSystem};

pub use self::{disable::DisableCache, memory::MemoryCache, persistent::PersistentCache};
Expand All @@ -23,11 +24,19 @@ use crate::{make::MakeArtifact, Compilation, CompilerOptions, ExperimentCacheOpt
/// We can consider change to Hook when we need to open the API to js side.
#[async_trait::async_trait]
pub trait Cache: Debug + Send + Sync {
async fn before_compile(&self, _compilation: &mut Compilation) {}
fn after_compile(&self, _compilation: &Compilation) {}
async fn before_compile(&self, _compilation: &mut Compilation) -> Result<()> {
Ok(())
}
async fn after_compile(&self, _compilation: &Compilation) -> Result<()> {
Ok(())
}

async fn before_make(&self, _make_artifact: &mut MakeArtifact) {}
fn after_make(&self, _make_artifact: &MakeArtifact) {}
async fn before_make(&self, _make_artifact: &mut MakeArtifact) -> Result<()> {
Ok(())
}
async fn after_make(&self, _make_artifact: &MakeArtifact) -> Result<()> {
Ok(())
}
}

pub fn new_cache(
Expand All @@ -39,20 +48,12 @@ pub fn new_cache(
match &compiler_option.experiments.cache {
ExperimentCacheOptions::Disabled => Arc::new(DisableCache),
ExperimentCacheOptions::Memory => Arc::new(MemoryCache),
ExperimentCacheOptions::Persistent(option) => {
match PersistentCache::new(
compiler_path,
option,
compiler_option.clone(),
input_filesystem,
intermediate_filesystem,
) {
Ok(cache) => Arc::new(cache),
Err(e) => {
tracing::warn!("create persistent cache failed {e:?}");
Arc::new(MemoryCache)
}
}
}
ExperimentCacheOptions::Persistent(option) => Arc::new(PersistentCache::new(
compiler_path,
option,
compiler_option.clone(),
input_filesystem,
intermediate_filesystem,
)),
}
}
36 changes: 19 additions & 17 deletions crates/rspack_core/src/cache/persistent/mod.rs
Original file line number Diff line number Diff line change
Expand Up @@ -7,7 +7,8 @@ use std::{path::PathBuf, sync::Arc};

pub use cacheable_context::{CacheableContext, FromContext};
use occasion::MakeOccasion;
use rspack_fs::{FileSystem, IntermediateFileSystem, Result};
use rspack_error::Result;
use rspack_fs::{FileSystem, IntermediateFileSystem};
use rspack_macros::rspack_version;
use rspack_paths::ArcPath;
use rustc_hash::FxHashSet as HashSet;
Expand Down Expand Up @@ -42,39 +43,40 @@ impl PersistentCache {
compiler_options: Arc<CompilerOptions>,
input_filesystem: Arc<dyn FileSystem>,
intermediate_filesystem: Arc<dyn IntermediateFileSystem>,
) -> Result<Self> {
) -> Self {
let version = version::get_version(
input_filesystem.clone(),
&option.build_dependencies,
vec![compiler_path, &option.version, rspack_version!()],
)?;
);
let storage = create_storage(option.storage.clone(), version, intermediate_filesystem);
let context = Arc::new(CacheableContext {
options: compiler_options,
input_filesystem: input_filesystem.clone(),
});
let make_occasion = MakeOccasion::new(storage.clone(), context);
Ok(Self {
Self {
snapshot: Snapshot::new(option.snapshot.clone(), input_filesystem, storage.clone()),
storage,
make_occasion,
})
}
}
}

#[async_trait::async_trait]
impl Cache for PersistentCache {
async fn before_compile(&self, compilation: &mut Compilation) {
async fn before_compile(&self, compilation: &mut Compilation) -> Result<()> {
if compilation.modified_files.is_empty() && compilation.removed_files.is_empty() {
// inject modified_files and removed_files
let (modified_paths, removed_paths) = self.snapshot.calc_modified_paths().await;
let (modified_paths, removed_paths) = self.snapshot.calc_modified_paths().await?;
tracing::info!("cache::snapshot recovery {modified_paths:?} {removed_paths:?}",);
compilation.modified_files = modified_paths;
compilation.removed_files = removed_paths;
}
Ok(())
}

fn after_compile(&self, compilation: &Compilation) {
async fn after_compile(&self, compilation: &Compilation) -> Result<()> {
// TODO add a all_dependencies to collect dependencies
let (_, file_added, file_removed) = compilation.file_dependencies();
let (_, context_added, context_removed) = compilation.context_dependencies();
Expand Down Expand Up @@ -103,24 +105,24 @@ impl Cache for PersistentCache {
.remove(removed_paths.iter().map(|item| item.as_ref()));
self
.snapshot
.add(modified_paths.iter().map(|item| item.as_ref()));
.add(modified_paths.iter().map(|item| item.as_ref()))
.await;

// TODO listen for storage finish in build mode
let _ = self.storage.trigger_save();

Ok(())
}

async fn before_make(&self, make_artifact: &mut MakeArtifact) {
async fn before_make(&self, make_artifact: &mut MakeArtifact) -> Result<()> {
if !make_artifact.initialized {
match self.make_occasion.recovery().await {
Ok(artifact) => *make_artifact = artifact,
Err(err) => {
tracing::warn!("recovery error with {err:?}")
}
}
*make_artifact = self.make_occasion.recovery().await?;
}
Ok(())
}

fn after_make(&self, make_artifact: &MakeArtifact) {
async fn after_make(&self, make_artifact: &MakeArtifact) -> Result<()> {
self.make_occasion.save(make_artifact);
Ok(())
}
}
Original file line number Diff line number Diff line change
@@ -1,9 +1,8 @@
use std::sync::{Arc, Mutex};

use rayon::prelude::*;
use rspack_cacheable::{
cacheable, from_bytes, to_bytes, with::Inline, DeserializeError, SerializeError,
};
use rspack_cacheable::{cacheable, from_bytes, to_bytes, with::Inline};
use rspack_error::Result;
use rspack_paths::ArcPath;
use rustc_hash::FxHashMap as HashMap;

Expand Down Expand Up @@ -45,7 +44,7 @@ pub fn save_dependencies_info(
missing_dependencies: &FileCounter,
build_dependencies: &FileCounter,
storage: &Arc<dyn Storage>,
) -> Result<(), SerializeError> {
) {
let f = file_dependencies
.updated_files_count_info()
.map(|(path, count)| {
Expand Down Expand Up @@ -98,37 +97,35 @@ pub fn save_dependencies_info(
.chain(m)
.chain(b)
.par_bridge()
.try_for_each(|(dep_ref, count)| {
let dep_ref = to_bytes(&dep_ref, &())?;
.for_each(|(dep_ref, count)| {
let dep_ref = to_bytes(&dep_ref, &()).expect("should to bytes success");
if count == 0 {
storage.remove(SCOPE, &dep_ref);
} else {
storage.set(SCOPE, dep_ref, count.to_ne_bytes().to_vec());
}
Ok(())
})
});
}

pub async fn recovery_dependencies_info(
storage: &Arc<dyn Storage>,
) -> Result<(FileCounter, FileCounter, FileCounter, FileCounter), DeserializeError> {
) -> Result<(FileCounter, FileCounter, FileCounter, FileCounter)> {
let file_dep = Mutex::new(HashMap::default());
let context_dep = Mutex::new(HashMap::default());
let missing_dep = Mutex::new(HashMap::default());
let build_dep = Mutex::new(HashMap::default());
storage
.load(SCOPE)
.await
.unwrap_or_default()
.await?
.into_par_iter()
.try_for_each(|(k, v)| {
.for_each(|(k, v)| {
let count = usize::from_ne_bytes(
v.as_ref()
.clone()
.try_into()
.map_err(|_| DeserializeError::MessageError("deserialize count failed"))?,
.expect("should parse count success"),
);
let Dependency { r#type, path } = from_bytes(&k, &())?;
let Dependency { r#type, path } = from_bytes(&k, &()).expect("should from bytes success");
match r#type {
DepType::File => file_dep
.lock()
Expand All @@ -147,8 +144,7 @@ pub async fn recovery_dependencies_info(
.expect("should get build dep")
.insert(path, count),
};
Ok(())
})?;
});

Ok((
FileCounter::new(file_dep.into_inner().expect("into_inner should be success")),
Expand Down
22 changes: 12 additions & 10 deletions crates/rspack_core/src/cache/persistent/occasion/make/meta.rs
Original file line number Diff line number Diff line change
@@ -1,9 +1,8 @@
use std::sync::{atomic::Ordering::Relaxed, Arc};

use rspack_cacheable::{
cacheable, from_bytes, to_bytes, with::Inline, DeserializeError, SerializeError,
};
use rspack_cacheable::{cacheable, from_bytes, to_bytes, with::Inline};
use rspack_collections::IdentifierSet;
use rspack_error::Result;
use rustc_hash::FxHashSet as HashSet;

use super::Storage;
Expand Down Expand Up @@ -34,23 +33,26 @@ pub fn save_meta(
make_failed_dependencies: &HashSet<BuildDependency>,
make_failed_module: &IdentifierSet,
storage: &Arc<dyn Storage>,
) -> Result<(), SerializeError> {
) {
let meta = MetaRef {
make_failed_dependencies,
make_failed_module,
next_dependencies_id: DEPENDENCY_ID.load(Relaxed),
};
storage.set(SCOPE, "default".as_bytes().to_vec(), to_bytes(&meta, &())?);
Ok(())
storage.set(
SCOPE,
"default".as_bytes().to_vec(),
to_bytes(&meta, &()).expect("should to bytes success"),
);
}

pub async fn recovery_meta(
storage: &Arc<dyn Storage>,
) -> Result<(HashSet<BuildDependency>, IdentifierSet), DeserializeError> {
let Some((_, value)) = storage.load(SCOPE).await.unwrap_or_default().pop() else {
return Err(DeserializeError::MessageError("can not get meta data"));
) -> Result<(HashSet<BuildDependency>, IdentifierSet)> {
let Some((_, value)) = storage.load(SCOPE).await?.pop() else {
return Ok(Default::default());
};
let meta: Meta = from_bytes(&value, &())?;
let meta: Meta = from_bytes(&value, &()).expect("should from bytes success");
// TODO make dependency id to string like module id
if DEPENDENCY_ID.load(Relaxed) < meta.next_dependencies_id {
DEPENDENCY_ID.store(meta.next_dependencies_id, Relaxed);
Expand Down
12 changes: 6 additions & 6 deletions crates/rspack_core/src/cache/persistent/occasion/make/mod.rs
Original file line number Diff line number Diff line change
Expand Up @@ -4,7 +4,7 @@ mod module_graph;

use std::sync::Arc;

use rspack_cacheable::DeserializeError;
use rspack_error::Result;

use super::super::{cacheable_context::CacheableContext, Storage};
use crate::make::MakeArtifact;
Expand Down Expand Up @@ -48,8 +48,7 @@ impl MakeOccasion {
missing_dependencies,
build_dependencies,
&self.storage,
)
.expect("should save dependencies success");
);

module_graph::save_module_graph(
module_graph_partial,
Expand All @@ -59,14 +58,15 @@ impl MakeOccasion {
&self.context,
);

meta::save_meta(make_failed_dependencies, make_failed_module, &self.storage)
.expect("should save make meta");
meta::save_meta(make_failed_dependencies, make_failed_module, &self.storage);
}

#[tracing::instrument(name = "MakeOccasion::recovery", skip_all)]
pub async fn recovery(&self) -> Result<MakeArtifact, DeserializeError> {
pub async fn recovery(&self) -> Result<MakeArtifact> {
let mut artifact = MakeArtifact::default();

// TODO can call recovery with multi thread
// TODO return DeserializeError not panic
let (file_dependencies, context_dependencies, missing_dependencies, build_dependencies) =
dependencies::recovery_dependencies_info(&self.storage).await?;
artifact.file_dependencies = file_dependencies;
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -4,9 +4,10 @@ use rayon::prelude::*;
use rspack_cacheable::{
cacheable, from_bytes, to_bytes,
with::{AsOption, AsTuple2, AsVec, Inline},
DeserializeError, SerializeError,
SerializeError,
};
use rspack_collections::IdentifierSet;
use rspack_error::Result;
use rustc_hash::FxHashSet as HashSet;

use super::Storage;
Expand Down Expand Up @@ -125,11 +126,11 @@ pub fn save_module_graph(
pub async fn recovery_module_graph(
storage: &Arc<dyn Storage>,
context: &CacheableContext,
) -> Result<(ModuleGraphPartial, HashSet<BuildDependency>), DeserializeError> {
) -> Result<(ModuleGraphPartial, HashSet<BuildDependency>)> {
let mut need_check_dep = vec![];
let mut partial = ModuleGraphPartial::default();
let mut mg = ModuleGraph::new(vec![], Some(&mut partial));
for (_, v) in storage.load(SCOPE).await.unwrap_or_default() {
for (_, v) in storage.load(SCOPE).await? {
let mut node: Node =
from_bytes(&v, context).expect("unexpected module graph deserialize failed");
for (dep, parent_block) in node.dependencies {
Expand Down
Loading

2 comments on commit 0e749f0

@rspack-bot
Copy link

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

📝 Ran ecosystem CI: Open

suite result
modernjs ❌ failure
_selftest ✅ success
rsdoctor ❌ failure
rspress ✅ success
rslib ✅ success
rsbuild ❌ failure
examples ❌ failure
devserver ✅ success
nuxt ✅ success

@rspack-bot
Copy link

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

📝 Benchmark detail: Open

Name | Base (2024-12-18 8a7084e) | Current | Change
10000_big_production-mode_disable-minimize + exec 37.9 s ± 525 ms 37.9 s ± 373 ms +0.08 %
10000_development-mode + exec 1.86 s ± 24 ms 1.84 s ± 24 ms -1.29 %
10000_development-mode_hmr + exec 683 ms ± 26 ms 684 ms ± 25 ms +0.18 %
10000_production-mode + exec 2.45 s ± 80 ms 2.39 s ± 44 ms -2.26 %
arco-pro_development-mode + exec 1.79 s ± 95 ms 1.8 s ± 140 ms +0.82 %
arco-pro_development-mode_hmr + exec 378 ms ± 1.1 ms 378 ms ± 2.4 ms -0.04 %
arco-pro_production-mode + exec 3.26 s ± 89 ms 3.28 s ± 71 ms +0.48 %
arco-pro_production-mode_generate-package-json-webpack-plugin + exec 3.32 s ± 126 ms 3.3 s ± 63 ms -0.33 %
arco-pro_production-mode_traverse-chunk-modules + exec 3.29 s ± 91 ms 3.27 s ± 60 ms -0.68 %
threejs_development-mode_10x + exec 1.64 s ± 20 ms 1.64 s ± 17 ms +0.04 %
threejs_development-mode_10x_hmr + exec 817 ms ± 17 ms 818 ms ± 36 ms +0.14 %
threejs_production-mode_10x + exec 5.4 s ± 134 ms 5.42 s ± 203 ms +0.38 %
10000_big_production-mode_disable-minimize + rss memory 9531 MiB ± 380 MiB 9446 MiB ± 52.8 MiB -0.89 %
10000_development-mode + rss memory 671 MiB ± 19.2 MiB 702 MiB ± 41.1 MiB +4.70 %
10000_development-mode_hmr + rss memory 1494 MiB ± 296 MiB 1555 MiB ± 275 MiB +4.09 %
10000_production-mode + rss memory 644 MiB ± 24.1 MiB 683 MiB ± 34.4 MiB +5.91 %
arco-pro_development-mode + rss memory 572 MiB ± 28.6 MiB 607 MiB ± 45.6 MiB +6.12 %
arco-pro_development-mode_hmr + rss memory 623 MiB ± 53.8 MiB 631 MiB ± 53.7 MiB +1.25 %
arco-pro_production-mode + rss memory 774 MiB ± 49.9 MiB 778 MiB ± 44.3 MiB +0.50 %
arco-pro_production-mode_generate-package-json-webpack-plugin + rss memory 752 MiB ± 54.9 MiB 750 MiB ± 35.9 MiB -0.31 %
arco-pro_production-mode_traverse-chunk-modules + rss memory 774 MiB ± 82.3 MiB 772 MiB ± 51.5 MiB -0.23 %
threejs_development-mode_10x + rss memory 654 MiB ± 29.2 MiB 675 MiB ± 40.1 MiB +3.29 %
threejs_development-mode_10x_hmr + rss memory 1203 MiB ± 127 MiB 1176 MiB ± 226 MiB -2.20 %
threejs_production-mode_10x + rss memory 971 MiB ± 66.9 MiB 978 MiB ± 76.4 MiB +0.75 %

Please sign in to comment.