From 57ebb5b38065c50e956222319c7e684fcae4aa47 Mon Sep 17 00:00:00 2001
From: jserfeng <1114550440@qq.com>
Date: Mon, 20 Nov 2023 19:59:46 +0800
Subject: [PATCH] feat(compilation): add importModule and executeModule

---
 .../rspack_core/src/compiler/compilation.rs   | 482 +++++++++++++-----
 crates/rspack_core/src/compiler/queue.rs      |  37 ++
 .../dependency/loader_import_dependency.rs    |  45 ++
 crates/rspack_core/src/dependency/mod.rs      |   2 +
 .../rspack_core/src/loader/loader_runner.rs   |  42 +-
 crates/rspack_core/src/logger.rs              |   1 +
 crates/rspack_core/src/utils/queue.rs         |   2 +-
 crates/rspack_loader_sass/tests/fixtures.rs   |   1 +
 crates/rspack_loader_swc/tests/fixtures.rs    |   1 +
 9 files changed, 497 insertions(+), 116 deletions(-)
 create mode 100644 crates/rspack_core/src/dependency/loader_import_dependency.rs

diff --git a/crates/rspack_core/src/compiler/compilation.rs b/crates/rspack_core/src/compiler/compilation.rs
index 4a98da7b24e4..11e1103ada6d 100644
--- a/crates/rspack_core/src/compiler/compilation.rs
+++ b/crates/rspack_core/src/compiler/compilation.rs
@@ -1,6 +1,7 @@
 use std::{
   fmt::Debug,
   hash::{BuildHasherDefault, Hash},
+  iter::once,
   path::PathBuf,
   sync::{
     atomic::{AtomicBool, Ordering},
@@ -15,7 +16,8 @@ use rayon::prelude::{
   IntoParallelIterator, IntoParallelRefIterator, ParallelBridge, ParallelIterator,
 };
 use rspack_error::{
-  internal_error, CatchUnwindFuture, Diagnostic, Result, Severity, TWithDiagnosticArray,
+  internal_error, CatchUnwindFuture, Diagnostic, Error, InternalError, Result, Severity,
+  TWithDiagnosticArray,
 };
 use rspack_futures::FuturesResults;
 use rspack_hash::{RspackHash, RspackHashDigest};
@@ -23,7 +25,7 @@ use rspack_identifier::{Identifiable, IdentifierMap, IdentifierSet};
 use rspack_sources::{BoxSource, CachedSource, SourceExt};
 use rustc_hash::{FxHashMap as HashMap, FxHashSet as HashSet, FxHasher};
 use swc_core::ecma::ast::ModuleItem;
-use tokio::sync::mpsc::error::TryRecvError;
+use tokio::sync::mpsc::{error::TryRecvError, UnboundedSender};
 use tracing::instrument;

 use super::{
@@ -40,12 +42,13 @@ use crate::{
   BuildTask, BuildTaskResult, CacheCount, CacheOptions, Chunk, ChunkByUkey, ChunkContentHash,
   ChunkGraph, ChunkGroupByUkey, ChunkGroupUkey, ChunkHashArgs, ChunkKind, ChunkUkey, CleanQueue,
   CleanTask, CleanTaskResult, CodeGenerationResult, CodeGenerationResults, CompilationLogger,
-  CompilationLogging, CompilerOptions, ContentHashArgs, ContextDependency, DependencyId,
-  DependencyParents, Entry, EntryData, EntryOptions, Entrypoint, FactorizeQueue, FactorizeTask,
-  FactorizeTaskResult, Filename, Logger, Module, ModuleGraph, ModuleIdentifier, ModuleProfile,
-  ModuleType, PathData, ProcessAssetsArgs, ProcessDependenciesQueue, ProcessDependenciesResult,
-  ProcessDependenciesTask, RenderManifestArgs, Resolve, ResolverFactory, RuntimeGlobals,
-  RuntimeModule, RuntimeSpec, SharedPluginDriver, SourceType, Stats, TaskResult, WorkerTask,
+  CompilationLogging, CompilerOptions, ContentHashArgs, ContextDependency, Dependency,
+  DependencyId, DependencyParents, Entry, EntryData, EntryOptions, Entrypoint, FactorizeQueue,
+  FactorizeTask, FactorizeTaskResult, Filename, LoaderImportDependency, Logger, Module,
+  ModuleGraph, ModuleIdentifier, ModuleProfile, ModuleType, PathData, ProcessAssetsArgs,
+  ProcessDependenciesQueue, ProcessDependenciesResult, ProcessDependenciesTask, RenderManifestArgs,
+  Resolve, ResolverFactory, RuntimeGlobals, RuntimeModule, RuntimeSpec, SharedPluginDriver,
+  SourceType, Stats, TaskResult, WorkerTask,
 };
 use crate::{tree_shaking::visitor::OptimizeAnalyzeResult, Context};
@@ -54,6 +57,8 @@ pub type BuildDependency = (
   Option<ModuleIdentifier>, /* parent module */
 );
 
+pub type TaskSender = UnboundedSender<Result<TaskResult>>;
+
 #[derive(Debug)]
 pub struct Compilation {
   // Mark compilation status, because the hash of `[hash].hot-update.js/json` is previous compilation hash.
@@ -109,6 +114,10 @@ pub struct Compilation {
   pub build_dependencies: IndexSet<PathBuf, BuildHasherDefault<FxHasher>>,
   pub side_effects_free_modules: IdentifierSet,
   pub module_item_map: IdentifierMap<Vec<ModuleItem>>,
+
+  // webpack keeps its build queue on the compilation, so tasks can be pushed
+  // to it directly; we store result_tx here to send tasks to the build queue
+  build_queue_task_sender: Option<UnboundedSender<Result<TaskResult>>>,
 }
 
 impl Compilation {
@@ -169,6 +178,8 @@ impl Compilation {
       side_effects_free_modules: IdentifierSet::default(),
       module_item_map: IdentifierMap::default(),
       include_module_ids: IdentifierSet::default(),
+
+      build_queue_task_sender: None,
     }
   }
 
@@ -427,7 +438,10 @@ impl Compilation {
     let mut active_task_count = 0usize;
     let is_expected_shutdown = Arc::new(AtomicBool::new(false));
 
+    let (result_tx, mut result_rx) = tokio::sync::mpsc::unbounded_channel::<Result<TaskResult>>();
+    self.build_queue_task_sender = Some(result_tx.clone());
+
     let mut factorize_queue = FactorizeQueue::new();
     let mut add_queue = AddQueue::new();
     let mut build_queue = BuildQueue::new();
@@ -485,13 +499,16 @@ impl Compilation {
       factorize_cache_counter = Some(logger.cache("module factorize cache"));
     }
 
+    let mut loader_import_modules: HashMap<DependencyId, UnboundedSender<Result<String>>> =
+      Default::default();
+
     tokio::task::block_in_place(|| loop {
       let start = factorize_time.start();
 
       while let Some(task) = factorize_queue.get_task() {
+        active_task_count += 1;
         tokio::spawn({
           let result_tx = result_tx.clone();
           let is_expected_shutdown = is_expected_shutdown.clone();
-          active_task_count += 1;
 
           async move {
             if is_expected_shutdown.load(Ordering::SeqCst) {
@@ -520,10 +537,10 @@ impl Compilation {
       let start = build_time.start();
 
       while let Some(task) = build_queue.get_task() {
+        active_task_count += 1;
         tokio::spawn({
           let result_tx = result_tx.clone();
           let is_expected_shutdown = is_expected_shutdown.clone();
-          active_task_count += 1;
 
           async move {
             if is_expected_shutdown.load(Ordering::SeqCst) {
@@ -621,6 +638,7 @@ impl Compilation {
               .send(Ok(TaskResult::ProcessDependencies(Box::new(
                 ProcessDependenciesResult {
                   module_identifier: task.original_module_identifier,
+                  original_dependency: task.original_dependency,
                 },
               ))))
               .expect("Failed to send process dependencies result");
@@ -641,6 +659,7 @@ impl Compilation {
             current_profile,
             exports_info_related,
             from_cache,
+            original_dependency,
           } = task_result;
 
           if let Some(counter) = &mut factorize_cache_counter {
@@ -691,12 +710,14 @@ impl Compilation {
             dependencies,
             is_entry,
             current_profile,
+            original_dependency,
           });
         }
         Ok(TaskResult::Add(box task_result)) => match task_result {
           AddTaskResult::ModuleAdded {
             module,
             current_profile,
+            original_dependency,
           } => {
             tracing::trace!("Module added: {}", module.identifier());
             build_queue.add_task(BuildTask {
@@ -706,12 +727,15 @@ impl Compilation {
               plugin_driver: self.plugin_driver.clone(),
               cache: self.cache.clone(),
               current_profile,
+              original_dependency,
+              task_sender: result_tx.clone(),
             });
           }
           AddTaskResult::ModuleReused { module, .. } => {
            tracing::trace!("Module reused: {}, skipping build", module.identifier());
          }
        },
+
        Ok(TaskResult::Build(box task_result)) => {
          let BuildTaskResult {
            mut module,
@@ -719,6 +743,7 @@ impl Compilation {
            diagnostics,
            current_profile,
            from_cache,
+           original_dependency,
          } = task_result;
 
          if let Some(counter) = &mut build_cache_counter {
@@ -826,6 +851,7 @@ impl Compilation {
            dependencies: all_dependencies,
            original_module_identifier: module.identifier(),
            resolve_options: module.get_resolve_options(),
+           original_dependency,
          });
          self.module_graph.set_module_build_info_and_meta(
            &module.identifier(),
@@ -835,11 +861,52 @@ impl Compilation {
          self.module_graph.add_module(module);
        }
        Ok(TaskResult::ProcessDependencies(task_result)) => {
+         if let Some(dep) = task_result.original_dependency
+           && loader_import_modules.contains_key(&dep)
+         {
+           let result_rx = loader_import_modules
+             .remove(&dep)
+             .expect("loader import module must exist");
+
+           let module = self
+             .module_graph
+             .module_identifier_by_dependency_id(&dep)
+             .expect("should have module");
+
+           if self.execute_module(*module, result_rx).is_err() {
+             make_failed_module.insert(task_result.module_identifier);
+           }
+         }
+
          tracing::trace!(
            "Processing dependencies of {} finished",
            task_result.module_identifier
          );
        }
+       Ok(TaskResult::ImportModule(import_module_task)) => {
+         active_task_count += 1;
+         let request = import_module_task.request;
+
+         let dep = LoaderImportDependency::new(request.clone());
+         let dep_id = *dep.id();
+
+         loader_import_modules.insert(dep_id, import_module_task.sender);
+
+         self.module_graph.add_dependency(Box::new(dep));
+
+         self.handle_module_creation(
+           &mut factorize_queue,
+           import_module_task.original_module,
+           import_module_task.original_module_context,
+           vec![dep_id],
+           false,
+           None,
+           None,
+           Some(Box::new(self.options.resolve.clone())),
+           Default::default(),
+           None,
+         );
+       }
        Err(err) => {
          // Severe internal error encountered, we should end the compiling here.
          errored = Some(err);
@@ -933,8 +1000,6 @@ impl Compilation {
      })
    };
 
-    // dbg!(&self.module_graph.module_identifier_to_module_graph_module);
-
    // add context module and context element module to bailout_module_identifiers
    if self.options.builtins.tree_shaking.enable() {
      self.bailout_module_identifiers = self
@@ -990,6 +1055,7 @@ impl Compilation {
    issuer: Option<Box<str>>,
  ) {
    let current_profile = self.options.profile.then(Box::<ModuleProfile>::default);
+
    queue.add_task(FactorizeTask {
      original_module_identifier,
      issuer,
@@ -1011,8 +1077,10 @@ impl Compilation {
  }
 
  #[instrument(name = "compilation:code_generation", skip(self))]
-  async fn code_generation(&mut self) -> Result<()> {
+  fn code_generation(&mut self) -> Result<()> {
    let logger = self.get_logger("rspack.Compilation");
+    let mut code_generation_results = std::mem::take(&mut self.code_generation_results);
+
    let mut codegen_cache_counter = match self.options.cache {
      CacheOptions::Disabled => None,
      _ => Some(logger.cache("module code generation cache")),
@@ -1020,6 +1088,7 @@ impl Compilation {
 
    fn run_iteration(
      compilation: &mut Compilation,
+     code_generation_results: &mut CodeGenerationResults,
      codegen_cache_counter: &mut Option<CacheCount>,
      filter_op: impl Fn(&(&ModuleIdentifier, &Box<dyn Module>)) -> bool + Sync + Send,
    ) -> Result<()> {
      // If the runtime optimization is not opt out, a module codegen should be executed for each runtime.
      // Else, share same codegen result for all runtimes.
      let used_exports_optimization = compilation.options.is_new_tree_shaking()
        && compilation.options.optimization.used_exports.is_true();
-      let results = compilation
-        .module_graph
-        .modules()
-        .par_iter()
-        .filter(filter_op)
-        .filter_map(|(module_identifier, module)| {
-          let runtimes = compilation
-            .chunk_graph
-            .get_module_runtimes(*module_identifier, &compilation.chunk_by_ukey);
-          if runtimes.is_empty() {
-            return None;
-          }
-          let res = compilation
-            .cache
-            .code_generate_occasion
-            .use_cache(module, runtimes, compilation, |module, runtimes| {
-              let take_length = if used_exports_optimization {
-                runtimes.len()
-              } else {
-                // Only codegen once
-                1
-              };
-              let mut codegen_list = vec![];
-              for runtime in runtimes.into_values().take(take_length) {
-                codegen_list.push((
-                  module.code_generation(compilation, Some(&runtime))?,
-                  runtime,
-                ));
-              }
-              Ok(codegen_list)
-            })
-            .map(|(result, from_cache)| (*module_identifier, result, from_cache));
-          Some(res)
-        })
-        .collect::<Result<
-          Vec<(
-            ModuleIdentifier,
-            Vec<(CodeGenerationResult, RuntimeSpec)>,
-            bool,
-          )>,
-        >>()?;
-      results
-        .into_iter()
-        .for_each(|(module_identifier, item, from_cache)| {
-          item.into_iter().for_each(|(result, runtime)| {
-            if let Some(counter) = codegen_cache_counter {
-              if from_cache {
-                counter.hit();
-              } else {
-                counter.miss();
-              }
-            }
-            compilation.code_generated_modules.insert(module_identifier);
+      let results = compilation.code_generation_modules(
+        &compilation.chunk_graph,
+        codegen_cache_counter,
+        used_exports_optimization,
+        code_generation_results,
+        compilation
+          .module_graph
+          .modules()
+          .iter()
+          .filter(filter_op)
+          .map(|(id, _)| *id)
+          .par_bridge(),
+      )?;
+
+      for module in results {
+        compilation.code_generated_modules.insert(module);
+      }
 
-            let runtimes = compilation
-              .chunk_graph
-              .get_module_runtimes(module_identifier, &compilation.chunk_by_ukey);
-            let result_id = result.id;
-            compilation
-              .code_generation_results
-              .module_generation_result_map
-              .insert(result.id, result);
-            if used_exports_optimization {
-              compilation
-                .code_generation_results
-                .add(module_identifier, runtime, result_id);
-            } else {
-              for runtime in runtimes.into_values() {
-                compilation
-                  .code_generation_results
-                  .add(module_identifier, runtime, result_id);
-              }
-            }
-          })
-        });
      Ok(())
    }
 
-    run_iteration(self, &mut codegen_cache_counter, |(_, module)| {
-      module.get_code_generation_dependencies().is_none()
-    })?;
+    run_iteration(
+      self,
+      &mut code_generation_results,
+      &mut codegen_cache_counter,
+      |(_, module)| module.get_code_generation_dependencies().is_none(),
+    )?;
+
+    run_iteration(
+      self,
+      &mut code_generation_results,
+      &mut codegen_cache_counter,
+      |(_, module)| module.get_code_generation_dependencies().is_some(),
+    )?;
 
-    run_iteration(self, &mut codegen_cache_counter, |(_, module)| {
-      module.get_code_generation_dependencies().is_some()
-    })?;
+    self.code_generation_results = code_generation_results;
 
    if let Some(counter) = codegen_cache_counter {
      logger.cache_end(counter);
@@ -1121,6 +1141,90 @@ impl Compilation {
    Ok(())
  }
 
+  fn code_generation_modules(
+    &self,
+    chunk_graph: &ChunkGraph,
+    codegen_cache_counter: &mut Option<CacheCount>,
+    used_exports_optimization: bool,
+    code_generation_results: &mut CodeGenerationResults,
+    modules: impl ParallelIterator<Item = ModuleIdentifier>,
+  ) -> Result<Vec<ModuleIdentifier>> {
+    #[allow(clippy::type_complexity)]
+    let results: Vec<(
+      rspack_identifier::Identifier,
+      Vec<(
+        CodeGenerationResult,
+        std::collections::HashSet<Arc<str>, BuildHasherDefault<FxHasher>>,
+      )>,
+      bool,
+    )> = modules
+      .filter_map(|module_identifier| {
+        let runtimes = chunk_graph.get_module_runtimes(module_identifier, &self.chunk_by_ukey);
+        if runtimes.is_empty() {
+          return None;
+        }
+
+        let module = self
+          .module_graph
+          .module_by_identifier(&module_identifier)
+          .expect("module should exist");
+        let res = self
+          .cache
+          .code_generate_occasion
+          .use_cache(module, runtimes, self, |module, runtimes| {
+            let take_length = if used_exports_optimization {
+              runtimes.len()
+            } else {
+              // Only codegen once
+              1
+            };
+            let mut codegen_list = vec![];
+            for runtime in runtimes.into_values().take(take_length) {
+              codegen_list.push((module.code_generation(self, Some(&runtime))?, runtime));
+            }
+            Ok(codegen_list)
+          })
+          .map(|(result, from_cache)| (module_identifier, result, from_cache));
+        Some(res)
+      })
+      .collect::<Result<
+        Vec<(
+          ModuleIdentifier,
+          Vec<(CodeGenerationResult, RuntimeSpec)>,
+          bool,
+        )>,
+      >>()?;
+    let results = results
+      .into_iter()
+      .map(|(module_identifier, item, from_cache)| {
+        item.into_iter().for_each(|(result, runtime)| {
+          if let Some(counter) = codegen_cache_counter {
+            if from_cache {
+              counter.hit();
+            } else {
+              counter.miss();
+            }
+          }
+
+          let runtimes = chunk_graph.get_module_runtimes(module_identifier, &self.chunk_by_ukey);
+          let result_id = result.id;
+          code_generation_results
+            .module_generation_result_map
+            .insert(result.id, result);
+          if used_exports_optimization {
+            code_generation_results.add(module_identifier, runtime, result_id);
+          } else {
+            for runtime in runtimes.into_values() {
+              code_generation_results.add(module_identifier, runtime, result_id);
+            }
+          }
+        });
+        module_identifier
+      });
+
+    Ok(results.collect())
+  }
+
   #[instrument(skip_all)]
   async fn create_chunk_assets(&mut self, plugin_driver: SharedPluginDriver) {
     let results = self
@@ -1281,28 +1385,26 @@ impl Compilation {
     logger.time_end(start);
 
     let start = logger.time("code generation");
-    self.code_generation().await?;
+    self.code_generation()?;
     logger.time_end(start);
 
     let start = logger.time("runtime requirements");
-    self
-      .process_runtime_requirements(
-        self
-          .module_graph
-          .modules()
-          .keys()
-          .copied()
-          .collect::<Vec<_>>(),
-        self
-          .chunk_by_ukey
-          .keys()
-          .copied()
-          .collect::<Vec<_>>()
-          .into_iter(),
-        self.get_chunk_graph_entries().into_iter(),
-        plugin_driver.clone(),
-      )
-      .await?;
+    self.process_runtime_requirements(
+      self
+        .module_graph
+        .modules()
+        .keys()
+        .copied()
+        .collect::<Vec<_>>(),
+      self
+        .chunk_by_ukey
+        .keys()
+        .copied()
+        .collect::<Vec<_>>()
+        .into_iter(),
+      self.get_chunk_graph_entries().into_iter(),
+      plugin_driver.clone(),
+    )?;
     logger.time_end(start);
 
     let start = logger.time("hashing");
@@ -1338,8 +1440,8 @@ impl Compilation {
     HashSet::from_iter(entries.chain(async_entries))
   }
 
-  #[instrument(name = "compilation:process_runtime_requirements", skip_all)]
-  pub async fn process_runtime_requirements(
+  #[allow(clippy::unwrap_in_result)]
+  pub fn process_runtime_requirements(
     &mut self,
     modules: impl IntoParallelIterator<Item = ModuleIdentifier>,
     chunks: impl Iterator<Item = ChunkUkey>,
@@ -1697,6 +1799,158 @@ impl Compilation {
   pub fn get_logger(&self, name: impl Into<String>) -> CompilationLogger {
     CompilationLogger::new(name.into(), self.logging.clone())
   }
+
+  #[allow(clippy::unwrap_in_result)]
+  pub fn execute_module(
+    &mut self,
+    module: ModuleIdentifier,
+    result_rx: UnboundedSender<Result<String>>,
+  ) -> Result<()> {
+    let mut modules: std::collections::HashSet<
+      rspack_identifier::Identifier,
+      BuildHasherDefault<FxHasher>,
+    > = HashSet::default();
+    let mut queue = vec![module];
+
+    while let Some(m) = queue.pop() {
+      modules.insert(m);
+      let m = self
+        .module_graph
+        .module_by_identifier(&m)
+        .expect("should have module");
+      for m in self.module_graph.get_outgoing_connections(m) {
+        queue.push(m.module_identifier);
+      }
+    }
+
+    let mut chunk_graph = ChunkGraph::default();
+
+    let mut chunk = Chunk::new(Some("build time chunk".into()), ChunkKind::Normal);
+
+    chunk.id = chunk.name.clone();
+    chunk.ids = vec![chunk.id.clone().expect("id is set")];
+    let mut runtime = HashSet::default();
+    runtime.insert("build time".into());
+    chunk.runtime = runtime.clone();
+
+    let mut entrypoint = Entrypoint::new(
+      crate::ChunkGroupKind::Entrypoint {
+        initial: true,
+        options: Box::new(EntryOptions {
+          name: Some("build time".into()),
+          runtime: Some("runtime".into()),
+          chunk_loading: Some(crate::ChunkLoading::Disable),
+          async_chunks: None,
+          public_path: None,
+          base_uri: None,
+          filename: None,
+          library: None,
+        }),
+      },
+      crate::ChunkGroupInfo {
+        chunk_loading: false,
+        async_chunks: false,
+        runtime: runtime.clone(),
+      },
+    );
+
+    // add chunk to this compilation
+    let chunk_by_ukey = ChunkByUkey::default();
+    let old_chunk_by_ukey = std::mem::replace(&mut self.chunk_by_ukey, chunk_by_ukey);
+    let chunk = Compilation::add_chunk(&mut self.chunk_by_ukey);
+    let chunk_ukey = chunk.ukey;
+
+    chunk_graph.connect_chunk_and_entry_module(chunk.ukey, module, entrypoint.ukey);
+    entrypoint.connect_chunk(chunk);
+    entrypoint.set_runtime_chunk(chunk.ukey);
+    entrypoint.set_entry_point_chunk(chunk.ukey);
+
+    let entry_ukey = entrypoint.ukey;
+    self.chunk_group_by_ukey.add(entrypoint);
+
+    // Assign ids to modules and modules to the chunk
+    for m in &modules {
+      let module = self
+        .module_graph
+        .module_by_identifier(m)
+        .expect("should have module");
+
+      let id = module.identifier();
+      chunk_graph.set_module_id(*m, id.to_string());
+      chunk_graph.connect_chunk_and_module(chunk_ukey, *m);
+    }
+
+    let mut codegen_results = Default::default();
+
+    self.code_generation_modules(
+      &chunk_graph,
+      &mut None,
+      false,
+      &mut codegen_results,
+      modules.par_iter().copied(),
+    )?;
+
+    // @jserfeng: webpack uses this trick to make sure process_runtime_requirements
+    // accesses the new chunk_graph.
+    // In rspack, if we decoupled compilation and chunk_graph, we couldn't get an
+    // exclusive ref to the chunk_graph in an API that receives both the compilation
+    // and the chunk_graph.
+    //
+    // code_generation_results is replaced for the same reason.
+    let old_chunk_graph = std::mem::replace(&mut self.chunk_graph, chunk_graph);
+    let old_codegen_results =
+      std::mem::replace(&mut self.code_generation_results, codegen_results);
+
+    self.process_runtime_requirements(
+      modules.clone(),
+      once(chunk_ukey),
+      once(chunk_ukey),
+      self.plugin_driver.clone(),
+    )?;
+
+    let runtime_modules = self
+      .chunk_graph
+      .get_chunk_runtime_modules_iterable(&chunk_ukey)
+      .copied()
+      .collect::<Vec<_>>();
+
+    // restore code_generation_results and chunk_graph
+    let mut codegen_results =
+      std::mem::replace(&mut self.code_generation_results, old_codegen_results);
+    self.chunk_graph = old_chunk_graph;
+
+    for runtime_id in runtime_modules {
+      let runtime_module = self
+        .runtime_modules
+        .get(&runtime_id)
+        .expect("runtime module exist");
+
+      let result = CodeGenerationResult::default().with_javascript(runtime_module.generate(self));
+      let result_id = result.id;
+
+      codegen_results
+        .module_generation_result_map
+        .insert(result.id, result);
+      codegen_results.add(runtime_id, runtime.clone(), result_id);
+    }
+
+    // we added a new fake chunk in the steps above; remove it
+    self.chunk_by_ukey = old_chunk_by_ukey;
+    self.chunk_group_by_ukey.remove(&entry_ukey);
+
+    let mut result = String::new();
+
+    for codegen_result in codegen_results.module_generation_result_map.values() {
+      if let Some(source) = codegen_result.get(&SourceType::JavaScript) {
+        result += &format!("{}\n\n", source.source());
+      }
+    }
+
+    result_rx.send(Ok(result)).map_err(|err| {
+      Error::InternalError(InternalError::new(
+        format!("Failed to invoke importModule: {}", err),
+        Severity::Error,
+      ))
+    })
+  }
 }
 
 pub type CompilationAssets = HashMap<String, CompilationAsset>;
diff --git a/crates/rspack_core/src/compiler/queue.rs b/crates/rspack_core/src/compiler/queue.rs
index ab48ea72f545..2103bfae77bc 100644
--- a/crates/rspack_core/src/compiler/queue.rs
+++ b/crates/rspack_core/src/compiler/queue.rs
@@ -1,6 +1,7 @@
 use std::sync::Arc;
 
 use rspack_error::{Diagnostic, Result};
+use tokio::sync::mpsc::UnboundedSender;
 
 use crate::{
   cache::Cache, BoxDependency, BuildContext, BuildResult, Compilation, CompilerContext,
@@ -17,6 +18,7 @@ pub enum TaskResult {
   Add(Box<AddTaskResult>),
   Build(Box<BuildTaskResult>),
   ProcessDependencies(Box<ProcessDependenciesResult>),
+  ImportModule(Box<ImportModuleResult>),
 }
 
 #[async_trait::async_trait]
@@ -24,6 +26,7 @@ pub trait WorkerTask {
   async fn run(self) -> Result<TaskResult>;
 }
 
+#[derive(Debug)]
 pub struct FactorizeTask {
   pub original_module_identifier: Option<ModuleIdentifier>,
   pub original_module_context: Option<Box<Context>>,
@@ -61,6 +64,7 @@ pub struct FactorizeTaskResult {
   pub current_profile: Option<Box<ModuleProfile>>,
   pub exports_info_related: ExportsInfoRelated,
   pub from_cache: bool,
+  pub original_dependency: Option<DependencyId>,
 }
 
 #[async_trait::async_trait]
@@ -69,7 +73,9 @@ impl WorkerTask for FactorizeTask {
     if let Some(current_profile) = &self.current_profile {
       current_profile.mark_factory_start();
     }
+
     let dependency = self.dependency;
+    let dependency_id = *dependency.id();
 
     let context = if let Some(context) = dependency.get_context() {
       context
@@ -153,12 +159,14 @@ impl WorkerTask for FactorizeTask {
         other_exports_info,
         side_effects_info: side_effects_only_info,
       },
+      original_dependency: Some(dependency_id),
     })))
   }
 }
 
 pub type FactorizeQueue = WorkerQueue<FactorizeTask>;
 
+#[derive(Debug)]
 pub struct AddTask {
   pub original_module_identifier: Option<ModuleIdentifier>,
   pub module: Box<dyn Module>,
@@ -166,16 +174,19 @@ pub struct AddTask {
   pub dependencies: Vec<DependencyId>,
   pub is_entry: bool,
   pub current_profile: Option<Box<ModuleProfile>>,
+  pub original_dependency: Option<DependencyId>,
 }
 
 #[derive(Debug)]
 pub enum AddTaskResult {
   ModuleReused {
     module: Box<dyn Module>,
+    original_dependency: Option<DependencyId>,
   },
   ModuleAdded {
     module: Box<dyn Module>,
    current_profile: Option<Box<ModuleProfile>>,
+    original_dependency: Option<DependencyId>,
  },
 }
 
@@ -200,6 +211,7 @@ impl AddTask {
 
       return Ok(TaskResult::Add(Box::new(AddTaskResult::ModuleReused {
         module: self.module,
+        original_dependency: self.original_dependency,
       })));
     }
 
@@ -227,6 +239,7 @@ impl AddTask {
     Ok(TaskResult::Add(Box::new(AddTaskResult::ModuleAdded {
       module: self.module,
       current_profile: self.current_profile,
+      original_dependency: self.original_dependency,
     })))
   }
 }
@@ -245,6 +258,7 @@ fn set_resolved_module(
 
 pub type AddQueue = WorkerQueue<AddTask>;
 
+#[derive(Debug)]
 pub struct BuildTask {
   pub module: Box<dyn Module>,
   pub resolver_factory: Arc<ResolverFactory>,
@@ -252,6 +266,8 @@ pub struct BuildTask {
   pub plugin_driver: SharedPluginDriver,
   pub cache: Arc<Cache>,
   pub current_profile: Option<Box<ModuleProfile>>,
+  pub original_dependency: Option<DependencyId>,
+  pub task_sender: UnboundedSender<Result<TaskResult>>,
 }
 
 #[derive(Debug)]
@@ -261,6 +277,7 @@ pub struct BuildTaskResult {
   pub diagnostics: Vec<Diagnostic>,
   pub current_profile: Option<Box<ModuleProfile>>,
   pub from_cache: bool,
+  pub original_dependency: Option<DependencyId>,
 }
 
 #[async_trait::async_trait]
@@ -291,6 +308,7 @@ impl WorkerTask for BuildTask {
           resolver_factory: resolver_factory.clone(),
           module: Some(module.identifier()),
           module_context: module.as_normal_module().and_then(|m| m.get_context()),
+          task_sender: Some(self.task_sender.clone()),
         },
         plugin_driver: plugin_driver.clone(),
         compiler_options: &compiler_options,
@@ -327,6 +345,7 @@ impl WorkerTask for BuildTask {
         diagnostics,
         current_profile: self.current_profile,
         from_cache: is_cache_valid,
+        original_dependency: self.original_dependency,
       }))
     })
   }
 }
 
 pub type BuildQueue = WorkerQueue<BuildTask>;
 
+#[derive(Debug)]
 pub struct ProcessDependenciesTask {
   pub original_module_identifier: ModuleIdentifier,
   pub dependencies: Vec<DependencyId>,
   pub resolve_options: Option<Box<Resolve>>,
+  pub original_dependency: Option<DependencyId>,
 }
 
 #[derive(Debug)]
 pub struct ProcessDependenciesResult {
   pub module_identifier: ModuleIdentifier,
+  pub original_dependency: Option<DependencyId>,
 }
 
 pub type ProcessDependenciesQueue = WorkerQueue<ProcessDependenciesTask>;
 
+#[derive(Debug)]
 pub struct CleanTask {
   pub module_identifier: ModuleIdentifier,
 }
@@ -398,3 +421,17 @@ impl CleanTask {
 }
 
 pub type CleanQueue = WorkerQueue<CleanTask>;
+
+#[derive(Debug)]
+pub struct ImportModuleResult {
+  pub request: String,
+  pub sender: UnboundedSender<Result<String>>,
+  pub original_module: Option<ModuleIdentifier>,
+  pub original_module_context: Option<Box<Context>>,
+  pub options: ImportModuleOption,
+}
+
+#[derive(Debug)]
+pub struct ImportModuleOption {
+  pub public_path: String,
+}
diff --git a/crates/rspack_core/src/dependency/loader_import_dependency.rs b/crates/rspack_core/src/dependency/loader_import_dependency.rs
new file mode 100644
index 000000000000..a0e80bb29377
--- /dev/null
+++ b/crates/rspack_core/src/dependency/loader_import_dependency.rs
@@ -0,0 +1,45 @@
+use crate::{
+  AsContextDependency, AsDependencyTemplate, Dependency, DependencyId, ModuleDependency,
+};
+
+#[derive(Debug, Hash, PartialEq, Eq, Clone)]
+pub struct LoaderImportDependency {
+  request: String,
+  id: DependencyId,
+}
+
+impl LoaderImportDependency {
+  pub fn new(request: String) -> Self {
+    Self {
+      request,
+      id: DependencyId::new(),
+    }
+  }
+}
+
+impl AsDependencyTemplate for LoaderImportDependency {}
+impl AsContextDependency for LoaderImportDependency {}
+
+impl Dependency for LoaderImportDependency {
+  fn dependency_debug_name(&self) -> &'static str {
+    "LoaderImportDependency"
+  }
+
+  fn id(&self) -> &crate::DependencyId {
+    &self.id
+  }
+}
+
+impl ModuleDependency for LoaderImportDependency {
+  fn request(&self) -> &str {
+    &self.request
+  }
+
+  fn user_request(&self) -> &str {
+    &self.request
+  }
+
+  fn set_request(&mut self, request: String) {
+    self.request = request
+  }
+}
diff --git a/crates/rspack_core/src/dependency/mod.rs b/crates/rspack_core/src/dependency/mod.rs
index 0107dcbe9c21..3f132b9a2f29 100644
--- a/crates/rspack_core/src/dependency/mod.rs
+++ b/crates/rspack_core/src/dependency/mod.rs
@@ -9,6 +9,7 @@ mod dependency_trait;
 mod dependency_type;
 mod entry;
 mod import_dependency_trait;
+mod loader_import_dependency;
 mod module_dependency;
 mod runtime_requirements_dependency;
 mod runtime_template;
@@ -24,6 +25,7 @@ pub use dependency_trait::*;
 pub use dependency_type::DependencyType;
 pub use entry::*;
 pub use import_dependency_trait::ImportDependencyTrait;
+pub use loader_import_dependency::LoaderImportDependency;
 pub use module_dependency::*;
 pub use runtime_requirements_dependency::RuntimeRequirementsDependency;
 pub use runtime_template::*;
diff --git a/crates/rspack_core/src/loader/loader_runner.rs b/crates/rspack_core/src/loader/loader_runner.rs
index 9c19306137d6..a0d2fb407db1 100644
--- a/crates/rspack_core/src/loader/loader_runner.rs
+++ b/crates/rspack_core/src/loader/loader_runner.rs
@@ -1,8 +1,10 @@
 use std::sync::Arc;
 
+use rspack_error::{Error, Result};
 pub use rspack_loader_runner::{run_loaders, Content, Loader, LoaderContext};
+use tokio::sync::mpsc::unbounded_channel;
 
-use crate::{CompilerOptions, Context, ModuleIdentifier, ResolverFactory};
+use crate::{CompilerOptions, Context, ModuleIdentifier, ResolverFactory, TaskResult, TaskSender};
 
 #[derive(Debug, Clone)]
 pub struct CompilerContext {
@@ -10,6 +12,44 @@ pub struct CompilerContext {
   pub resolver_factory: Arc<ResolverFactory>,
   pub module: Option<ModuleIdentifier>, // current module
   pub module_context: Option<Box<Context>>, // current module context
+  pub task_sender: Option<TaskSender>,
 }
+
+impl CompilerContext {
+  pub async fn import_module(
+    &self,
+    request: String,
+    public_path: String,
+    original_module: Option<ModuleIdentifier>,
+    original_module_context: Option<Box<Context>>,
+  ) -> Result<String> {
+    let (tx, mut rx) = unbounded_channel();
+    self
+      .task_sender
+      .as_ref()
+      .expect("Could not get compilation.task_sender")
+      .send(Ok(TaskResult::ImportModule(Box::new(
+        crate::ImportModuleResult {
+          request,
+          sender: tx,
+          original_module,
+          original_module_context,
+          options: crate::ImportModuleOption { public_path },
+        },
+      ))))
+      .expect("Should start import_module");
+
+    let res = rx.recv().await;
+
+    match res {
+      Some(Ok(res)) => Ok(res),
+      Some(Err(e)) => Err(e),
+      None => Err(Error::InternalError(rspack_error::InternalError::new(
+        "Failed to call importModule".into(),
+        rspack_error::Severity::Error,
+      ))),
+    }
+  }
 }
 
 pub type LoaderRunnerContext = CompilerContext;
diff --git a/crates/rspack_core/src/logger.rs b/crates/rspack_core/src/logger.rs
index b89354f2ce77..0ceeeff5c7fc 100644
--- a/crates/rspack_core/src/logger.rs
+++ b/crates/rspack_core/src/logger.rs
@@ -270,6 +270,7 @@ impl StartTimeAggregate {
   }
 }
 
+#[derive(Debug)]
 pub struct CacheCount {
   label: &'static str,
   hit: u32,
diff --git a/crates/rspack_core/src/utils/queue.rs b/crates/rspack_core/src/utils/queue.rs
index ce18ff81ca3a..6cdcd283453e 100644
--- a/crates/rspack_core/src/utils/queue.rs
+++ b/crates/rspack_core/src/utils/queue.rs
@@ -1,6 +1,6 @@
 use std::collections::VecDeque;
 
-#[derive(Default)]
+#[derive(Default, Debug)]
 pub struct WorkerQueue<T> {
   inner: VecDeque<T>,
 }
diff --git a/crates/rspack_loader_sass/tests/fixtures.rs b/crates/rspack_loader_sass/tests/fixtures.rs
index f456279f67bd..ff559691fbbb 100644
--- a/crates/rspack_loader_sass/tests/fixtures.rs
+++ b/crates/rspack_loader_sass/tests/fixtures.rs
@@ -97,6 +97,7 @@ async fn loader_test(actual: impl AsRef<Path>, expected: impl AsRef<Path>) {
       resolver_factory: Default::default(),
       module: None,
       module_context: None,
+      task_sender: None,
     },
   )
   .await
diff --git a/crates/rspack_loader_swc/tests/fixtures.rs b/crates/rspack_loader_swc/tests/fixtures.rs
index b8b305a2c3df..d2c0e9e45a76 100644
--- a/crates/rspack_loader_swc/tests/fixtures.rs
+++ b/crates/rspack_loader_swc/tests/fixtures.rs
@@ -97,6 +97,7 @@ async fn loader_test(actual: impl AsRef<Path>, expected: impl AsRef<Path>) {
       resolver_factory: Default::default(),
      module: None,
       module_context: None,
+      task_sender: None,
     },
  )
  .await
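
Review notes (not part of the patch):

1. How a loader reaches the new API: `CompilerContext::import_module` sends an
   `ImportModuleResult` task through `task_sender` and awaits the reply. Below is
   a minimal sketch of a loader body driving it; only `import_module`'s signature
   and the `module`/`module_context` fields come from the patch, while the helper
   function itself and the "./config.js" request are hypothetical.

    // Sketch only: a helper a loader body might call, given the compiler
    // context (available as `ctx.context` inside a real loader).
    async fn eval_at_build_time(
      compiler_ctx: &rspack_core::CompilerContext,
    ) -> rspack_error::Result<String> {
      // Factorize, build, and execute "./config.js" at build time; this future
      // resolves once execute_module sends the rendered JavaScript back.
      compiler_ctx
        .import_module(
          "./config.js".to_string(), // request to build and execute (hypothetical)
          "/".to_string(),           // public_path, carried in ImportModuleOption
          compiler_ctx.module,       // issuer module identifier, if any
          compiler_ctx.module_context.clone(),
        )
        .await
    }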
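
2. The round trip behind importModule is a plain request/response exchange over
   unbounded mpsc channels: the loader enqueues a task that carries its own reply
   sender, and the compilation loop answers once the module graph has been built
   and executed. A self-contained sketch of just that pattern (all names here are
   made up, not rspack APIs):

    use tokio::sync::mpsc::{unbounded_channel, UnboundedSender};

    #[derive(Debug)]
    enum Task {
      ImportModule {
        request: String,
        sender: UnboundedSender<String>, // reply channel owned by the requester
      },
    }

    #[tokio::main]
    async fn main() {
      let (task_tx, mut task_rx) = unbounded_channel::<Task>();

      // "Loader" side: enqueue the task, then await the reply,
      // as CompilerContext::import_module does.
      let loader = tokio::spawn(async move {
        let (tx, mut rx) = unbounded_channel::<String>();
        task_tx
          .send(Task::ImportModule {
            request: "./config.js".into(),
            sender: tx,
          })
          .expect("queue closed");
        rx.recv().await.expect("no result")
      });

      // "Compilation" side: drain the queue and answer; the real loop would
      // factorize, build, and execute_module before replying.
      if let Some(Task::ImportModule { request, sender }) = task_rx.recv().await {
        sender.send(format!("/* built {request} */")).ok();
      }

      assert_eq!(loader.await.expect("join"), "/* built ./config.js */");
    }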
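
3. execute_module borrows webpack's swap trick: because process_runtime_requirements
   takes &mut self and reads self.chunk_graph, the temporary build-time graph is
   moved into the compilation with std::mem::replace and the original is restored
   afterwards. A reduced illustration of that pattern (types are stand-ins, not
   the real rspack structs):

    #[derive(Debug, Default, PartialEq)]
    struct ChunkGraph(Vec<u32>); // stand-in for the real ChunkGraph

    struct Compilation {
      chunk_graph: ChunkGraph,
    }

    impl Compilation {
      // Needs &mut self and reads self.chunk_graph,
      // like process_runtime_requirements.
      fn process(&mut self) {
        self.chunk_graph.0.push(1);
      }

      fn execute(&mut self, fresh: ChunkGraph) -> ChunkGraph {
        // Swap the temporary graph in so `process` sees it through `self`...
        let old = std::mem::replace(&mut self.chunk_graph, fresh);
        self.process();
        // ...then swap the original back, keeping the temporary result.
        std::mem::replace(&mut self.chunk_graph, old)
      }
    }

    fn main() {
      let mut c = Compilation {
        chunk_graph: ChunkGraph(vec![0]),
      };
      let temp = c.execute(ChunkGraph::default());
      assert_eq!(temp, ChunkGraph(vec![1]));
      assert_eq!(c.chunk_graph, ChunkGraph(vec![0]));
    }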