Merge branch 'swc_loader_options_0422' of github.com:modern-js-dev/rspack into swc_loader_options_0422

chenjiahan committed Apr 22, 2024
2 parents b866441 + 8857eef commit 797a8da
Showing 5 changed files with 57 additions and 46 deletions.
22 changes: 20 additions & 2 deletions .github/workflows/ci.yml
@@ -329,6 +329,8 @@ jobs:
if: github.ref_name == 'main' && github.repository_owner == 'web-infra-dev'
steps:
- name: Run Ecosystem CI
id: eco_ci
continue-on-error: true
uses: convictional/trigger-workflow-and-wait@v1.6.5
with:
owner: ${{ github.repository_owner }}
@@ -338,6 +340,23 @@ jobs:
ref: "main"
client_payload: '{"commitSHA":"${{ github.sha }}","repo":"web-infra-dev/rspack","suite":"-","suiteRefType":"precoded","suiteRef":"precoded"}'

- if: steps.eco_ci.outcome == 'failure'
uses: actions/checkout@v4
- if: steps.eco_ci.outcome == 'failure'
uses: actions/setup-node@v4
with:
node-version: 20
- name: Send Failure Notification
if: steps.eco_ci.outcome == 'failure'
shell: bash
run: ./scripts/alert/lark.js
env:
TITLE: Ecosystem CI failed on main branch
DESCRIPTION: |
commitID: [${{github.sha}}](${{github.server_url}}/${{github.repository}}/commit/${{github.sha}})
URL: ${{github.server_url}}/${{github.repository}}/actions/runs/${{github.run_id}}
LARK_WEBHOOK_URL: ${{secrets.LARK_WEBHOOK_URL}}

failure_notification:
name: Failure Notification
needs:
@@ -347,8 +366,7 @@ jobs:
test-mac,
rust_check,
rust_test,
run_benchmark,
run_ecosystem_ci
run_benchmark
]
if: ${{ failure() && !cancelled() && github.ref_name == 'main' && github.repository_owner == 'web-infra-dev' }}
runs-on: ubuntu-latest
9 changes: 3 additions & 6 deletions crates/rspack_binding_values/src/compilation.rs
@@ -432,7 +432,7 @@ impl JsCompilation {
#[allow(clippy::too_many_arguments)]
#[napi]
pub fn import_module(
&'static mut self,
&'static self,
env: Env,
request: String,
public_path: Option<String>,
@@ -452,7 +452,7 @@ impl JsCompilation {
let module_executor = self
.0
.module_executor
.as_mut()
.as_ref()
.expect("should have module executor");
let result = module_executor
.import_module(
@@ -491,12 +491,9 @@ impl JsCompilation {
.into_iter()
.map(|d| d.to_string_lossy().to_string())
.collect(),
assets: res.assets.keys().cloned().collect(),
assets: res.assets.into_iter().collect(),
id: res.id,
};
for (filename, asset) in res.assets {
self.0.emit_asset(filename, asset)
}
Ok(js_result)
}
Err(e) => Err(Error::new(napi::Status::GenericFailure, format!("{e}"))),
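The switch from `&'static mut self` to `&'static self` (and from `.as_mut()` to `.as_ref()`) works because asset collection no longer mutates the executor through an exclusive reference: as the module_executor.rs diff further down shows, the pending assets now live in a `DashMap`, which allows insertion through a shared reference. A minimal, standalone sketch of that interior-mutability pattern, assuming the `dashmap` crate and using simplified stand-in types (the `ExecutorSketch` struct and `record_asset` helper are hypothetical, not the real rspack items):

```rust
// Minimal sketch: DashMap's sharded locking lets a method insert entries
// through `&self`, so recording assets no longer requires `&mut self`.
// `ExecutorSketch` and `record_asset` are illustrative stand-ins only.
use dashmap::DashMap;

#[derive(Debug, Default)]
struct ExecutorSketch {
    // Stand-in for `DashMap<String, CompilationAsset>` in the real executor.
    assets: DashMap<String, Vec<u8>>,
}

impl ExecutorSketch {
    // Note the shared reference: the map provides the interior mutability.
    fn record_asset(&self, filename: &str, source: &[u8]) {
        self.assets.insert(filename.to_string(), source.to_vec());
    }
}

fn main() {
    let executor = ExecutorSketch::default();
    executor.record_asset("imported.css", b".a { color: red }");
    assert_eq!(executor.assets.len(), 1);
}
```

Dropping the in-binding `emit_asset` loop also means the napi layer only hands filenames back to JS; the actual emission now happens once, inside `Compilation` (the compilation.rs hunk below).
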
10 changes: 10 additions & 0 deletions crates/rspack_core/src/compiler/compilation.rs
@@ -1009,6 +1009,16 @@ impl Compilation {
self.create_chunk_assets(plugin_driver.clone()).await?;
logger.time_end(start);

// sync assets to compilation from module_executor
let assets = self
.module_executor
.as_mut()
.map(|module_executor| std::mem::take(&mut module_executor.assets))
.unwrap_or_default();
for (filename, asset) in assets {
self.emit_asset(filename, asset)
}

let start = logger.time("process assets");
plugin_driver
.compilation_hooks
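The added block follows a common drain pattern: `std::mem::take` moves the executor's pending assets out (leaving an empty map behind), so the loop can pass owned values to `emit_asset` without cloning and without holding a borrow of `self.module_executor` while a `&mut self` method runs. A minimal sketch of the same shape, with plain `HashMap`s standing in for the real `DashMap<String, CompilationAsset>` and compilation types:

```rust
// Sketch of the take-then-emit pattern: move the assets out of the
// optional executor first, then iterate, so the later `&mut self` call
// does not overlap with a borrow of the field. Stand-in types only.
use std::collections::HashMap;

#[derive(Default)]
struct ExecutorSketch {
    assets: HashMap<String, String>,
}

#[derive(Default)]
struct CompilationSketch {
    module_executor: Option<ExecutorSketch>,
    emitted: HashMap<String, String>,
}

impl CompilationSketch {
    fn emit_asset(&mut self, filename: String, asset: String) {
        self.emitted.insert(filename, asset);
    }

    fn sync_executor_assets(&mut self) {
        // `std::mem::take` swaps in an empty map and hands back ownership.
        let assets = self
            .module_executor
            .as_mut()
            .map(|executor| std::mem::take(&mut executor.assets))
            .unwrap_or_default();
        for (filename, asset) in assets {
            self.emit_asset(filename, asset);
        }
    }
}

fn main() {
    let mut compilation = CompilationSketch {
        module_executor: Some(ExecutorSketch {
            assets: HashMap::from([("imported.js".into(), "...".into())]),
        }),
        ..Default::default()
    };
    compilation.sync_executor_assets();
    assert!(compilation.emitted.contains_key("imported.js"));
    assert!(compilation.module_executor.unwrap().assets.is_empty());
}
```
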
4 changes: 2 additions & 2 deletions crates/rspack_core/src/compiler/mod.rs
@@ -88,7 +88,7 @@ where
let (plugin_driver, options) = PluginDriver::new(options, plugins, resolver_factory.clone());
let cache = Arc::new(Cache::new(options.clone()));
assert!(!(options.is_new_tree_shaking() && options.builtins.tree_shaking.enable()), "Can't enable builtins.tree_shaking and `experiments.rspack_future.new_treeshaking` at the same time");
let module_executor = ModuleExecutor::new(options.is_new_tree_shaking());
let module_executor = ModuleExecutor::default();
Self {
options: options.clone(),
compilation: Compilation::new(
@@ -121,7 +121,7 @@
// TODO: maybe it's better to use external entries.
self.plugin_driver.resolver_factory.clear_cache();

let module_executor = ModuleExecutor::new(self.options.is_new_tree_shaking());
let module_executor = ModuleExecutor::default();
fast_set(
&mut self.compilation,
Compilation::new(
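Both construction sites switch to `ModuleExecutor::default()`: once the executor no longer owns a `ModuleGraphPartial` (see module_executor.rs below), there is no treeshaking flag to thread through, and the `#[derive(Debug, Default)]` added there is enough. A tiny illustration of that derive, assuming every remaining field type implements `Default` (field names below are hypothetical):

```rust
// Illustration only: `#[derive(Default)]` is sufficient when each field
// type already implements `Default`, so the bespoke `new(flag)` constructor
// can be dropped once no configuration needs to be passed in.
use std::collections::HashMap;

#[derive(Debug, Default)]
struct ExecutorSketch {
    requests: HashMap<String, u32>,   // hypothetical field
    assets: HashMap<String, Vec<u8>>, // hypothetical field
}

fn main() {
    let executor = ExecutorSketch::default();
    assert!(executor.requests.is_empty() && executor.assets.is_empty());
}
```
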
58 changes: 22 additions & 36 deletions crates/rspack_core/src/compiler/module_executor.rs
@@ -1,6 +1,7 @@
use std::sync::{atomic::AtomicU32, Arc};
use std::{collections::hash_map, hash::BuildHasherDefault, iter::once};
use std::{hash::BuildHasherDefault, iter::once};

use dashmap::DashMap;
use rayon::prelude::*;
use rspack_error::Result;
use rspack_identifier::{Identifiable, IdentifierSet};
@@ -9,10 +10,10 @@ use rustc_hash::{FxHashMap as HashMap, FxHashSet as HashSet, FxHasher};
use crate::cache::Cache;
use crate::{
Chunk, ChunkGraph, ChunkKind, CodeGenerationDataAssetInfo, CodeGenerationDataFilename,
CodeGenerationResult, CompilationAssets, Dependency, DependencyType, EntryDependency,
EntryOptions, Entrypoint, ModuleFactory, ModuleGraphPartial, SourceType,
CodeGenerationResult, Dependency, DependencyType, EntryDependency, EntryOptions, Entrypoint,
ModuleFactory, SourceType,
};
use crate::{Compilation, CompilationAsset, DependencyId, MakeParam};
use crate::{Compilation, CompilationAsset, MakeParam};
use crate::{CompilerOptions, Context, ResolverFactory, SharedPluginDriver};

static EXECUTE_MODULE_ID: AtomicU32 = AtomicU32::new(0);
@@ -24,27 +25,19 @@ pub struct ExecuteModuleResult {
pub context_dependencies: HashSet<std::path::PathBuf>,
pub missing_dependencies: HashSet<std::path::PathBuf>,
pub build_dependencies: HashSet<std::path::PathBuf>,
pub assets: CompilationAssets,
pub assets: HashSet<String>,
pub id: ExecuteModuleId,
}

#[derive(Debug)]
#[derive(Debug, Default)]
pub struct ModuleExecutor {
pub make_module_graph: ModuleGraphPartial,
request_dep_map: HashMap<String, DependencyId>,
pub assets: DashMap<String, CompilationAsset>,
}

impl ModuleExecutor {
pub fn new(is_new_treeshaking: bool) -> Self {
Self {
make_module_graph: ModuleGraphPartial::new(is_new_treeshaking),
request_dep_map: HashMap::default(),
}
}

#[allow(clippy::too_many_arguments)]
pub async fn import_module(
&mut self,
&self,
options: Arc<CompilerOptions>,
plugin_driver: SharedPluginDriver,
resolver_factory: Arc<ResolverFactory>,
@@ -67,21 +60,16 @@ impl ModuleExecutor {
None,
);
compilation.dependency_factories = dependency_factories;
compilation.swap_make_module_graph(&mut self.make_module_graph);

let mut mg = compilation.get_module_graph_mut();
let dep_id = match self.request_dep_map.entry(request.clone()) {
hash_map::Entry::Vacant(v) => {
let dep = EntryDependency::new(
request,
original_module_context.unwrap_or(Context::from("")),
);
let dep_id = *dep.id();
mg.add_dependency(Box::new(dep));
v.insert(dep_id);
dep_id
}
hash_map::Entry::Occupied(v) => *v.get(),
let dep_id = {
let dep = EntryDependency::new(
request,
original_module_context.unwrap_or(Context::from("")),
);
let dep_id = *dep.id();
mg.add_dependency(Box::new(dep));
dep_id
};

compilation
@@ -278,18 +266,16 @@ impl ModuleExecutor {
};

if let Ok(ref mut result) = execute_result {
std::mem::swap(&mut result.assets, compilation.assets_mut());
let assets = std::mem::take(compilation.assets_mut());
for (key, value) in assets {
result.assets.insert(key.clone());
self.assets.insert(key, value);
}
}

let mut has_error = false;
for error in compilation.get_errors() {
has_error = true;
error.render_report(true)?;
}
if !has_error {
// save make module_graph for next import_module
compilation.swap_make_module_graph(&mut self.make_module_graph);
}

execute_result
}
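At the end of a module execution the inner compilation's assets are now drained once and split: the filenames populate `ExecuteModuleResult::assets` (a `HashSet<String>` the binding returns to JS), while the full `CompilationAsset` values are stashed in the executor's `DashMap` until the outer compilation emits them just before the process-assets stage (the compilation.rs hunk above). A condensed sketch of that drain-and-split step, again with simplified stand-in types and a hypothetical `split_assets` helper:

```rust
// Condensed sketch of the drain-and-split step: keys become the
// lightweight result returned to the caller, values are stashed for the
// outer compilation to emit later. Stand-in types, not the rspack ones.
use std::collections::{HashMap, HashSet};

use dashmap::DashMap;

fn split_assets(
    inner_assets: &mut HashMap<String, Vec<u8>>, // stand-in for the inner compilation's assets
    result_assets: &mut HashSet<String>,         // stand-in for ExecuteModuleResult::assets
    stash: &DashMap<String, Vec<u8>>,            // stand-in for ModuleExecutor::assets
) {
    for (filename, asset) in std::mem::take(inner_assets) {
        result_assets.insert(filename.clone());
        stash.insert(filename, asset);
    }
}

fn main() {
    let mut inner = HashMap::from([("chunk.js".to_string(), b"...".to_vec())]);
    let mut result_assets = HashSet::new();
    let stash = DashMap::new();
    split_assets(&mut inner, &mut result_assets, &stash);
    assert!(inner.is_empty());
    assert!(result_assets.contains("chunk.js"));
    assert!(stash.contains_key("chunk.js"));
}
```
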
