diff --git a/.github/workflows/benchmark.yml b/.github/workflows/benchmark.yml index 3b004780..85f0f72f 100644 --- a/.github/workflows/benchmark.yml +++ b/.github/workflows/benchmark.yml @@ -34,6 +34,9 @@ jobs: - name: Setup benchmark data run: cd benches && pnpm install --ignore-workspace + - name: Cargo Bench + run: cargo bench + - uses: Boshen/setup-rust@main with: cache-key: benchmark @@ -43,8 +46,8 @@ jobs: - uses: ./.github/actions/pnpm - name: Build Benchmark env: - RUSTFLAGS: "-C debuginfo=1 -C strip=none -g" - run: cargo codspeed build + RUSTFLAGS: "-C debuginfo=1 -C strip=none -g --cfg codspeed" + run: cargo codspeed build --features codspeed - name: Run benchmark uses: CodSpeedHQ/action@v3 diff --git a/Cargo.lock b/Cargo.lock index 6d7fdc07..419dab82 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -2,6 +2,15 @@ # It is not intended for manual editing. version = 3 +[[package]] +name = "addr2line" +version = "0.22.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6e4503c46a5c0c7844e948c9a4d6acd9f50cccb4de1c48eb9e291ea17470c678" +dependencies = [ + "gimli", +] + [[package]] name = "adler" version = "1.0.2" @@ -34,15 +43,9 @@ dependencies = [ [[package]] name = "anes" -version = "0.1.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4b46cbb362ab8752921c97e041f5e366ee6297bd428a31275b9fcf1e380f7299" - -[[package]] -name = "anstyle" -version = "1.0.10" +version = "0.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "55cc3b69f167a1ef2e161439aa98aed94e6028e5f9a59be9a6ffb47aef1651f9" +checksum = "735d4f398ca57cfa2880225c2bf81c3b9af3be5bb22e44ae70118dad38713e84" [[package]] name = "arca" @@ -55,12 +58,38 @@ dependencies = [ "radix_trie", ] +[[package]] +name = "async-trait" +version = "0.1.88" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e539d3fca749fcee5236ab05e93a52867dd549cc157c8cb7f99595f3cedffdb5" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + [[package]] name = "autocfg" version = "1.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0c4b4d0bd25bd0b74681c0ad21497610ce1b7c91b1022cd21c80c6fbdd9476b0" +[[package]] +name = "backtrace" +version = "0.3.73" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5cc23269a4f8976d0a4d2e7109211a419fe30e8d88d677cd60b6bc79c5732e0a" +dependencies = [ + "addr2line", + "cc", + "cfg-if", + "libc", + "miniz_oxide", + "object", + "rustc-demangle", +] + [[package]] name = "base64" version = "0.22.1" @@ -94,6 +123,12 @@ version = "2.6.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b048fb63fd8b5923fc5aa7b340d8e156aec7ec02f0c78fa8a6ddc2613f6f71de" +[[package]] +name = "bpaf" +version = "0.9.19" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4848ed5727d39a7573551c205bcb1ccd88c8cad4ed2c80f62e2316f208196b8d" + [[package]] name = "bumpalo" version = "3.16.0" @@ -164,31 +199,6 @@ dependencies = [ "half", ] -[[package]] -name = "clap" -version = "4.5.32" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6088f3ae8c3608d19260cd7445411865a485688711b78b5be70d78cd96136f83" -dependencies = [ - "clap_builder", -] - -[[package]] -name = "clap_builder" -version = "4.5.32" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "22a7ef7f676155edfb82daa97f99441f3ebf4a58d5e32f295a56259f1b6facc8" -dependencies = [ - "anstyle", - "clap_lex", -] - -[[package]] -name = 
"clap_lex" -version = "0.7.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f46ad14479a25103f283c0f10005961cf086d8dc42205bb44c46ac563475dca6" - [[package]] name = "clean-path" version = "0.2.1" @@ -208,42 +218,6 @@ dependencies = [ "uuid", ] -[[package]] -name = "codspeed-criterion-compat" -version = "2.9.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d5926ca63222a35b9a2299adcaafecf596efe20a9a2048e4a81cb2fc3463b4a8" -dependencies = [ - "codspeed", - "codspeed-criterion-compat-walltime", - "colored", -] - -[[package]] -name = "codspeed-criterion-compat-walltime" -version = "2.9.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "dbae4da05076cbc673e242400ac8f4353bdb686e48020edc6e36a5c36ae0878e" -dependencies = [ - "anes", - "cast", - "ciborium", - "clap", - "codspeed", - "criterion-plot", - "is-terminal", - "itertools", - "num-traits", - "once_cell", - "oorandom", - "regex", - "serde", - "serde_derive", - "serde_json", - "tinytemplate", - "walkdir", -] - [[package]] name = "colored" version = "2.1.0" @@ -279,13 +253,23 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "06ea2b9bc92be3c2baa9334a323ebca2d6f074ff852cd1d7b11064035cd3868f" [[package]] -name = "criterion-plot" -version = "0.5.0" +name = "criterion2" +version = "2.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6b50826342786a51a89e2da3a28f1c32b06e387201bc2d19791f622c673706b1" +checksum = "09db22066fd79bd628faf416dac96e44054deb00531601bcc20c6d12506b3701" dependencies = [ + "anes", + "bpaf", "cast", - "itertools", + "ciborium", + "codspeed", + "colored", + "num-traits", + "oorandom", + "serde", + "serde_json", + "tokio", + "walkdir", ] [[package]] @@ -450,6 +434,95 @@ version = "1.0.7" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "3f9eec918d3f24069decb9af1554cad7c880e2da24a9afd88aca000531ab82c1" +[[package]] +name = "futures" +version = "0.3.31" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "65bc07b1a8bc7c85c5f2e110c476c7389b4554ba72af57d8445ea63a576b0876" +dependencies = [ + "futures-channel", + "futures-core", + "futures-executor", + "futures-io", + "futures-sink", + "futures-task", + "futures-util", +] + +[[package]] +name = "futures-channel" +version = "0.3.31" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2dff15bf788c671c1934e366d07e30c1814a8ef514e1af724a602e8a2fbe1b10" +dependencies = [ + "futures-core", + "futures-sink", +] + +[[package]] +name = "futures-core" +version = "0.3.31" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "05f29059c0c2090612e8d742178b0580d2dc940c837851ad723096f87af6663e" + +[[package]] +name = "futures-executor" +version = "0.3.31" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1e28d1d997f585e54aebc3f97d39e72338912123a67330d723fdbb564d646c9f" +dependencies = [ + "futures-core", + "futures-task", + "futures-util", +] + +[[package]] +name = "futures-io" +version = "0.3.31" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9e5c1b78ca4aae1ac06c48a526a655760685149f0d465d21f37abfe57ce075c6" + +[[package]] +name = "futures-macro" +version = "0.3.31" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "162ee34ebcb7c64a8abebc059ce0fee27c2262618d7b60ed8faf72fef13c3650" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name 
= "futures-sink" +version = "0.3.31" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e575fab7d1e0dcb8d0c7bcf9a63ee213816ab51902e6d244a95819acacf1d4f7" + +[[package]] +name = "futures-task" +version = "0.3.31" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f90f7dce0722e95104fcb095585910c0977252f286e354b5e3bd38902cd99988" + +[[package]] +name = "futures-util" +version = "0.3.31" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9fa08315bb612088cc391249efdc3bc77536f16c91f6cf495e6fbe85b20a4a81" +dependencies = [ + "futures-channel", + "futures-core", + "futures-io", + "futures-macro", + "futures-sink", + "futures-task", + "memchr", + "pin-project-lite", + "pin-utils", + "slab", +] + [[package]] name = "getrandom" version = "0.3.2" @@ -462,6 +535,12 @@ dependencies = [ "wasi", ] +[[package]] +name = "gimli" +version = "0.29.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "40ecd4077b5ae9fd2e9e169b102c6c330d0605168eb0e8bf79952b256dbefffd" + [[package]] name = "half" version = "2.4.1" @@ -490,12 +569,6 @@ version = "0.15.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "bf151400ff0baff5465007dd2f3e717f3fe502074ca563069ce3a6629d07b289" -[[package]] -name = "hermit-abi" -version = "0.5.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fbd780fe5cc30f81464441920d82ac8740e2e46b29a6fad543ddd075229ce37e" - [[package]] name = "hex" version = "0.4.3" @@ -553,26 +626,6 @@ dependencies = [ "serde", ] -[[package]] -name = "is-terminal" -version = "0.4.16" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e04d7f318608d35d4b61ddd75cbdaee86b023ebe2bd5a66ee0915f0bf93095a9" -dependencies = [ - "hermit-abi", - "libc", - "windows-sys 0.52.0", -] - -[[package]] -name = "itertools" -version = "0.10.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b0fd2260e829bddf4cb6ea802289de2f86d6a7a690192fbe91b3f46e0f2c8473" -dependencies = [ - "either", -] - [[package]] name = "itoa" version = "1.0.11" @@ -669,6 +722,7 @@ dependencies = [ "once_cell", "serde", "serde_json", + "tokio", ] [[package]] @@ -745,6 +799,15 @@ dependencies = [ "autocfg", ] +[[package]] +name = "object" +version = "0.36.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "62948e14d923ea95ea2c7c86c71013138b66525b86bdc08d2dcc262bdb497b87" +dependencies = [ + "memchr", +] + [[package]] name = "once_cell" version = "1.19.0" @@ -788,6 +851,12 @@ version = "0.2.14" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "bda66fc9667c18cb2758a2ac84d1167245054bcf85d5d1aaa6923f45801bdd02" +[[package]] +name = "pin-utils" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8b870d8c151b6f2fb93e84a13146138f05d02ed11c7e7c54f8826aaaf7c9f184" + [[package]] name = "pnp" version = "0.9.1" @@ -931,27 +1000,34 @@ dependencies = [ name = "rspack_resolver" version = "0.5.3" dependencies = [ + "async-trait", "cfg-if", - "codspeed-criterion-compat", + "criterion2", "dashmap", "document-features", "dunce", + "futures", "indexmap 2.7.1", "json-strip-comments", "normalize-path", - "once_cell", "pnp", "rayon", "regex", "rustc-hash", "serde", "serde_json", - "simdutf8", "thiserror", + "tokio", "tracing", "vfs", ] +[[package]] +name = "rustc-demangle" +version = "0.1.24" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"719b953e2095829ee67db738b3bfa9fa368c94900df327b3f07fe6e794d2fe1f" + [[package]] name = "rustc-hash" version = "2.0.0" @@ -1058,10 +1134,13 @@ dependencies = [ ] [[package]] -name = "simdutf8" -version = "0.1.5" +name = "slab" +version = "0.4.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e3a9fe34e3e7a50316060351f37187a3f546bce95496156754b601a5fa71b76e" +checksum = "8f92a496fb766b417c996b9c5e57daf2f7ad3b0bebe1ccfca4856390e3d3bb67" +dependencies = [ + "autocfg", +] [[package]] name = "smallvec" @@ -1148,13 +1227,25 @@ dependencies = [ ] [[package]] -name = "tinytemplate" -version = "1.2.1" +name = "tokio" +version = "1.44.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "be4d6b5f19ff7664e8c98d03e2139cb510db9b0a60b55f8e8709b689d939b6bc" +checksum = "f382da615b842244d4b8738c82ed1275e6c5dd90c459a30941cd07080b06c91a" dependencies = [ - "serde", - "serde_json", + "backtrace", + "pin-project-lite", + "tokio-macros", +] + +[[package]] +name = "tokio-macros" +version = "2.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6e06d43f1345a3bcd39f6a56dbb7dcab2ba47e68e8ac134855e7e2bdbaf8cab8" +dependencies = [ + "proc-macro2", + "quote", + "syn", ] [[package]] diff --git a/Cargo.toml b/Cargo.toml index 38486dbe..cb843e94 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -83,22 +83,27 @@ serde_json = { version = "1.0.117", features = [ ] } # preserve_order: package_json.exports requires order such as `["require", "import", "default"]` rustc-hash = { version = "2.0.0", default-features = false, features = ["std"] } dunce = "1.0.4" # Normalize Windows paths to the most compatible format, avoiding UNC where possible -once_cell = "1.19.0" # Use `std::sync::OnceLock::get_or_try_init` when it is stable. 
thiserror = "1.0.61" json-strip-comments = "1.0.2" indexmap = { version = "2.2.6", features = ["serde"] } cfg-if = "1.0" -simdutf8 = { version = "0.1.4", features = ["aarch64_neon"] } pnp = { version = "0.9.1", optional = true } document-features = { version = "0.2.8", optional = true } +futures = "0.3.31" +async-trait = "0.1.84" + +[target.'cfg(not(target_arch = "wasm32"))'.dependencies] +tokio = { version = "1.42.0", default-features = false, features = ["sync", "rt-multi-thread", "macros", "fs"] } +[target.'cfg(target_arch = "wasm32")'.dependencies] +tokio = { version = "1.42.0", default-features = false, features = ["sync", "rt", "macros" ]} [dev-dependencies] vfs = "0.12.0" # for testing with in memory file system regex = "1.11.1" rayon = { version = "1.10.0" } -criterion2 = { version = "2.0.0", default-features = false, package = "codspeed-criterion-compat" } +criterion2 = { version = "2.0.0", default-features = false, features = ["async_tokio"]} normalize-path = { version = "0.2.1" } [features] @@ -108,6 +113,8 @@ default = ["yarn_pnp"] package_json_raw_json_api = [] ## [Yarn Plug'n'Play](https://yarnpkg.com/features/pnp) yarn_pnp = ["pnp"] +# For codspeed benchmark +codspeed = ["criterion2/codspeed"] [package.metadata.docs.rs] all-features = true diff --git a/benches/resolver.rs b/benches/resolver.rs index 7b1bcbea..a6925e9a 100644 --- a/benches/resolver.rs +++ b/benches/resolver.rs @@ -1,12 +1,15 @@ -use criterion2::{criterion_group, criterion_main, BenchmarkId, Criterion}; -use rayon::prelude::*; +use criterion::{criterion_group, criterion_main, BenchmarkId, Criterion}; use serde_json::Value; use std::fs::read_to_string; +use std::future::Future; use std::{ env, fs, io::{self, Write}, path::{Path, PathBuf}, + sync::Arc, }; +use tokio::runtime; +use tokio::task::JoinSet; fn symlink, Q: AsRef>(original: P, link: Q) -> io::Result<()> { #[cfg(target_family = "unix")] @@ -82,6 +85,16 @@ fn oxc_resolver() -> rspack_resolver::Resolver { }) } +fn create_async_resolve_task( + oxc_resolver: Arc, + path: PathBuf, + request: String, +) -> impl Future { + async move { + let _ = oxc_resolver.resolve(path, &request).await; + } +} + fn bench_resolver(c: &mut Criterion) { let cwd = env::current_dir().unwrap().join("benches"); @@ -96,23 +109,28 @@ fn bench_resolver(c: &mut Criterion) { .collect::>(); // check validity - for (path, request) in &data { - let r = oxc_resolver().resolve(path, request); - if !r.is_ok() { - panic!("resolve failed {path:?} {request},\n\nplease run npm install in `/benches` before running the benchmarks"); + runtime::Builder::new_current_thread().enable_all().build().unwrap().block_on(async { + for (path, request) in &data { + let r = oxc_resolver().resolve(path, request).await; + if !r.is_ok() { + panic!("resolve failed {path:?} {request},\n\nplease run `pnpm install --ignore-workspace` in `/benches` before running the benchmarks"); + } } - } + }); let symlink_test_dir = create_symlinks().expect("Create symlink fixtures failed"); let symlinks_range = 0u32..10000; - for i in symlinks_range.clone() { - assert!( - oxc_resolver().resolve(&symlink_test_dir, &format!("./file{i}")).is_ok(), - "file{i}.js" - ); - } + // check validity + runtime::Builder::new_current_thread().enable_all().build().unwrap().block_on(async { + for i in symlinks_range.clone() { + assert!( + oxc_resolver().resolve(&symlink_test_dir, &format!("./file{i}")).await.is_ok(), + "file{i}.js" + ); + } + }); let mut group = c.benchmark_group("resolver"); @@ -123,28 +141,42 @@ fn bench_resolver(c: &mut 
Criterion) { .expect("Failed to build global thread pool"); group.bench_with_input(BenchmarkId::from_parameter("single-thread"), &data, |b, data| { + let runner = + runtime::Builder::new_current_thread().build().expect("failed to create tokio runtime"); let oxc_resolver = oxc_resolver(); - b.iter_with_setup( + + b.to_async(runner).iter_with_setup( || { oxc_resolver.clear_cache(); }, - |_| { + |_| async { for (path, request) in data { - _ = oxc_resolver.resolve(path, request); + _ = oxc_resolver.resolve(path, request).await; } }, ); }); + #[cfg(not(feature = "codspeed"))] group.bench_with_input(BenchmarkId::from_parameter("multi-thread"), &data, |b, data| { - let oxc_resolver = oxc_resolver(); + let runner = runtime::Runtime::new().expect("failed to create tokio runtime"); + let oxc_resolver = Arc::new(oxc_resolver()); + b.iter_with_setup( || { oxc_resolver.clear_cache(); }, |_| { - data.par_iter().for_each(|(path, request)| { - _ = oxc_resolver.resolve(path, request); + runner.block_on(async { + let mut join_set = JoinSet::new(); + data.iter().for_each(|(path, request)| { + join_set.spawn(create_async_resolve_task( + oxc_resolver.clone(), + path.to_path_buf(), + request.to_string(), + )); + }); + let _ = join_set.join_all().await; }); }, ); @@ -154,14 +186,49 @@ fn bench_resolver(c: &mut Criterion) { BenchmarkId::from_parameter("resolve from symlinks"), &symlinks_range, |b, data| { + let runner = runtime::Runtime::new().expect("failed to create tokio runtime"); let oxc_resolver = oxc_resolver(); - b.iter(|| { - for i in data.clone() { - assert!( - oxc_resolver.resolve(&symlink_test_dir, &format!("./file{i}")).is_ok(), - "file{i}.js" - ); - } + + b.to_async(runner).iter_with_setup( + || { + oxc_resolver.clear_cache(); + }, + |_| async { + for i in data.clone() { + assert!( + oxc_resolver + .resolve(&symlink_test_dir, &format!("./file{i}")) + .await + .is_ok(), + "file{i}.js" + ); + } + }, + ); + }, + ); + + #[cfg(not(feature = "codspeed"))] + group.bench_with_input( + BenchmarkId::from_parameter("resolve from symlinks multi thread"), + &symlinks_range, + |b, data| { + let runner = runtime::Runtime::new().expect("failed to create tokio runtime"); + let oxc_resolver = Arc::new(oxc_resolver()); + + let symlink_test_dir = symlink_test_dir.clone(); + + b.to_async(runner).iter(|| async { + let mut join_set = JoinSet::new(); + + data.clone().for_each(|i| { + join_set.spawn(create_async_resolve_task( + oxc_resolver.clone(), + symlink_test_dir.clone(), + format!("./file{i}").to_string(), + )); + }); + join_set.join_all().await; }); }, ); diff --git a/examples/resolver.rs b/examples/resolver.rs index b6ae015d..43a22f1f 100644 --- a/examples/resolver.rs +++ b/examples/resolver.rs @@ -3,7 +3,8 @@ use std::{env, path::PathBuf}; use rspack_resolver::{AliasValue, ResolveOptions, Resolver}; -fn main() { +#[tokio::main] +async fn main() { let path = PathBuf::from(env::args().nth(1).expect("path")); assert!(path.is_dir(), "{path:?} must be a directory that will be resolved against."); @@ -26,7 +27,7 @@ fn main() { ..ResolveOptions::default() }; - match Resolver::new(options).resolve(path, &specifier) { + match Resolver::new(options).resolve(path, &specifier).await { Err(error) => println!("Error: {error}"), Ok(resolution) => println!("Resolved: {:?}", resolution.full_path()), } diff --git a/napi/Cargo.toml b/napi/Cargo.toml index 4392504a..8c1f339f 100644 --- a/napi/Cargo.toml +++ b/napi/Cargo.toml @@ -12,7 +12,7 @@ doctest = false [dependencies] oxc_resolver = { path = "..", package = "rspack_resolver" } 
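The benchmark hunks above replace rayon's par_iter with criterion's async executor driving a Tokio runtime. A minimal standalone sketch of that b.to_async(...) shape, assuming the same criterion2 + async_tokio setup declared in Cargo.toml above; my_async_work is a hypothetical stand-in for resolver.resolve(path, request).await:

```rust
use criterion::{criterion_group, criterion_main, Criterion};
use tokio::runtime::Runtime;

async fn my_async_work(n: u64) -> u64 {
    // Placeholder for an .await-ing call such as resolver.resolve(path, request).await.
    tokio::task::yield_now().await;
    n * 2
}

fn bench(c: &mut Criterion) {
    // to_async polls each iteration's future to completion on this runtime.
    let rt = Runtime::new().expect("failed to create tokio runtime");
    c.bench_function("async-work", |b| {
        b.to_async(&rt).iter(|| my_async_work(21));
    });
}

criterion_group!(benches, bench);
criterion_main!(benches);
```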
-napi = { version = "3.0.0-alpha", default-features = false, features = ["napi3", "serde-json"] } +napi = { version = "3.0.0-alpha", default-features = false, features = ["napi3", "serde-json", "async"] } napi-derive = { version = "3.0.0-alpha" } tracing-subscriber = { version = "0.3.18", default-features = false, features = [ "std", diff --git a/napi/src/lib.rs b/napi/src/lib.rs index f59e705c..8098d97f 100644 --- a/napi/src/lib.rs +++ b/napi/src/lib.rs @@ -2,15 +2,14 @@ extern crate napi; extern crate napi_derive; extern crate oxc_resolver; +use napi::tokio::runtime; +use napi_derive::napi; +use oxc_resolver::{ResolveOptions, Resolver}; use std::{ path::{Path, PathBuf}, sync::Arc, }; -use napi::{bindgen_prelude::AsyncTask, Task}; -use napi_derive::napi; -use oxc_resolver::{ResolveOptions, Resolver}; - use self::{ options::{NapiResolveOptions, StrOrStrList}, tracing::init_tracing, @@ -27,8 +26,8 @@ pub struct ResolveResult { pub module_type: Option, } -fn resolve(resolver: &Resolver, path: &Path, request: &str) -> ResolveResult { - match resolver.resolve(path, request) { +async fn resolve(resolver: &Resolver, path: &Path, request: &str) -> ResolveResult { + match resolver.resolve(path, request).await { Ok(resolution) => ResolveResult { path: Some(resolution.full_path().to_string_lossy().to_string()), error: None, @@ -47,27 +46,7 @@ fn resolve(resolver: &Resolver, path: &Path, request: &str) -> ResolveResult { pub fn sync(path: String, request: String) -> ResolveResult { let path = PathBuf::from(path); let resolver = Resolver::new(ResolveOptions::default()); - resolve(&resolver, &path, &request) -} - -pub struct ResolveTask { - resolver: Arc, - directory: PathBuf, - request: String, -} - -#[napi] -impl Task for ResolveTask { - type Output = ResolveResult; - type JsValue = ResolveResult; - - fn compute(&mut self) -> napi::Result { - Ok(resolve(&self.resolver, &self.directory, &self.request)) - } - - fn resolve(&mut self, _: napi::Env, result: Self::Output) -> napi::Result { - Ok(result) - } + runtime::Handle::current().block_on(resolve(&resolver, &path, &request)) } #[napi] @@ -109,16 +88,16 @@ impl ResolverFactory { #[napi] pub fn sync(&self, directory: String, request: String) -> ResolveResult { let path = PathBuf::from(directory); - resolve(&self.resolver, &path, &request) + runtime::Handle::current().block_on(resolve(&self.resolver, &path, &request)) } /// Asynchronously resolve `specifier` at an absolute path to a `directory`. 
#[allow(clippy::needless_pass_by_value)] #[napi(js_name = "async")] - pub fn resolve_async(&self, directory: String, request: String) -> AsyncTask { + pub async fn resolve_async(&self, directory: String, request: String) -> ResolveResult { let path = PathBuf::from(directory); let resolver = self.resolver.clone(); - AsyncTask::new(ResolveTask { resolver, directory: path, request }) + resolve(&resolver, &path, &request).await } fn normalize_options(op: NapiResolveOptions) -> ResolveOptions { diff --git a/src/cache.rs b/src/cache.rs index 2aef978a..c1c6615a 100644 --- a/src/cache.rs +++ b/src/cache.rs @@ -1,7 +1,10 @@ -use once_cell::sync::OnceCell as OnceLock; +use futures::future::BoxFuture; +use tokio::sync::OnceCell as OnceLock; + use std::{ borrow::{Borrow, Cow}, convert::AsRef, + future::Future, hash::{BuildHasherDefault, Hash, Hasher}, io, ops::Deref, @@ -24,7 +27,7 @@ pub struct Cache { tsconfigs: DashMap, BuildHasherDefault>, } -impl Cache { +impl Cache { pub fn new(fs: Fs) -> Self { Self { fs, paths: DashSet::default(), tsconfigs: DashMap::default() } } @@ -53,16 +56,20 @@ impl Cache { data } - pub fn tsconfig Result<(), ResolveError>>( + pub async fn tsconfig( &self, root: bool, path: &Path, callback: F, // callback for modifying tsconfig with `extends` - ) -> Result, ResolveError> { + ) -> Result, ResolveError> + where + F: FnOnce(TsConfig) -> Fut + Send, + Fut: Send + Future>, + { if let Some(tsconfig_ref) = self.tsconfigs.get(path) { return Ok(Arc::clone(tsconfig_ref.value())); } - let meta = self.fs.metadata(path).ok(); + let meta = self.fs.metadata(path).await.ok(); let tsconfig_path = if meta.is_some_and(|m| m.is_file) { Cow::Borrowed(path) } else if meta.is_some_and(|m| m.is_dir) { @@ -75,6 +82,7 @@ impl Cache { let mut tsconfig_string = self .fs .read_to_string(&tsconfig_path) + .await .map_err(|_| ResolveError::TsconfigNotFound(path.to_path_buf()))?; let mut tsconfig = TsConfig::parse(root, &tsconfig_path, &mut tsconfig_string).map_err(|error| { @@ -84,7 +92,7 @@ impl Cache { Some(tsconfig_string), ) })?; - callback(&mut tsconfig)?; + tsconfig = callback(tsconfig).await?; let tsconfig = Arc::new(tsconfig.build()); self.tsconfigs.insert(path.to_path_buf(), Arc::clone(&tsconfig)); Ok(tsconfig) @@ -168,12 +176,12 @@ impl CachedPathImpl { self.parent.as_ref() } - fn meta(&self, fs: &Fs) -> Option { - *self.meta.get_or_init(|| fs.metadata(&self.path).ok()) + async fn meta(&self, fs: &Fs) -> Option { + *self.meta.get_or_init(|| async { fs.metadata(&self.path).await.ok() }).await } - pub fn is_file(&self, fs: &Fs, ctx: &mut Ctx) -> bool { - if let Some(meta) = self.meta(fs) { + pub async fn is_file(&self, fs: &Fs, ctx: &mut Ctx) -> bool { + if let Some(meta) = self.meta(fs).await { ctx.add_file_dependency(self.path()); meta.is_file } else { @@ -182,8 +190,8 @@ impl CachedPathImpl { } } - pub fn is_dir(&self, fs: &Fs, ctx: &mut Ctx) -> bool { - self.meta(fs).map_or_else( + pub async fn is_dir(&self, fs: &Fs, ctx: &mut Ctx) -> bool { + self.meta(fs).await.map_or_else( || { ctx.add_missing_dependency(self.path()); false @@ -192,40 +200,51 @@ impl CachedPathImpl { ) } - pub fn realpath(&self, fs: &Fs) -> io::Result { - self.canonicalized - .get_or_try_init(|| { - if fs.symlink_metadata(&self.path).is_ok_and(|m| m.is_symlink) { - return fs.canonicalize(&self.path).map(Some); - } - if let Some(parent) = self.parent() { - let parent_path = parent.realpath(fs)?; - return Ok(Some( - parent_path.normalize_with(self.path.strip_prefix(&parent.path).unwrap()), - )); - }; - Ok(None) - }) - 
.cloned() - .map(|r| r.unwrap_or_else(|| self.path.clone().to_path_buf())) + pub fn realpath<'a, Fs: FileSystem + Send + Sync>( + &'a self, + fs: &'a Fs, + ) -> BoxFuture<'a, io::Result> { + let fut = async move { + self.canonicalized + .get_or_try_init(|| async move { + if fs.symlink_metadata(&self.path).await.is_ok_and(|m| m.is_symlink) { + return fs.canonicalize(&self.path).await.map(Some); + } + if let Some(parent) = self.parent() { + let parent_path = parent.realpath(fs).await?; + return Ok(Some( + parent_path + .normalize_with(self.path.strip_prefix(&parent.path).unwrap()), + )); + }; + Ok(None) + }) + .await + .cloned() + .map(|r| r.unwrap_or_else(|| self.path.clone().to_path_buf())) + }; + Box::pin(fut) } - pub fn module_directory( + pub async fn module_directory( &self, module_name: &str, cache: &Cache, ctx: &mut Ctx, ) -> Option { let cached_path = cache.value(&self.path.join(module_name)); - cached_path.is_dir(&cache.fs, ctx).then_some(cached_path) + cached_path.is_dir(&cache.fs, ctx).await.then_some(cached_path) } - pub fn cached_node_modules( + pub async fn cached_node_modules( &self, cache: &Cache, ctx: &mut Ctx, ) -> Option { - self.node_modules.get_or_init(|| self.module_directory("node_modules", cache, ctx)).clone() + self.node_modules + .get_or_init(|| self.module_directory("node_modules", cache, ctx)) + .await + .clone() } /// Find package.json of a path by traversing parent directories. @@ -233,7 +252,7 @@ impl CachedPathImpl { /// # Errors /// /// * [ResolveError::JSON] - pub fn find_package_json( + pub async fn find_package_json( &self, fs: &Fs, options: &ResolveOptions, @@ -241,7 +260,7 @@ impl CachedPathImpl { ) -> Result>, ResolveError> { let mut cache_value = self; // Go up directories when the querying path is not a directory - while !cache_value.is_dir(fs, ctx) { + while !cache_value.is_dir(fs, ctx).await { if let Some(cv) = &cache_value.parent { cache_value = cv.as_ref(); } else { @@ -250,7 +269,7 @@ impl CachedPathImpl { } let mut cache_value = Some(cache_value); while let Some(cv) = cache_value { - if let Some(package_json) = cv.package_json(fs, options, ctx)? { + if let Some(package_json) = cv.package_json(fs, options, ctx).await? { return Ok(Some(Arc::clone(&package_json))); } cache_value = cv.parent.as_deref(); @@ -263,7 +282,7 @@ impl CachedPathImpl { /// # Errors /// /// * [ResolveError::JSON] - pub fn package_json( + pub async fn package_json( &self, fs: &Fs, options: &ResolveOptions, @@ -272,13 +291,13 @@ impl CachedPathImpl { // Change to `std::sync::OnceLock::get_or_try_init` when it is stable. 
let result = self .package_json - .get_or_try_init(|| { + .get_or_try_init(|| async { let package_json_path = self.path.join("package.json"); - let Ok(package_json_string) = fs.read_to_string(&package_json_path) else { + let Ok(package_json_string) = fs.read_to_string(&package_json_path).await else { return Ok(None); }; let real_path = if options.symlinks { - self.realpath(fs)?.join("package.json") + self.realpath(fs).await?.join("package.json") } else { package_json_path.clone() }; @@ -293,6 +312,7 @@ impl CachedPathImpl { ) }) }) + .await .cloned(); // https://github.com/webpack/enhanced-resolve/blob/58464fc7cb56673c9aa849e68e6300239601e615/lib/DescriptionFileUtils.js#L68-L82 match &result { diff --git a/src/context.rs b/src/context.rs index 9349de30..2685ae5f 100644 --- a/src/context.rs +++ b/src/context.rs @@ -81,7 +81,7 @@ impl ResolveContext { pub fn test_for_infinite_recursion(&mut self) -> Result<(), ResolveError> { self.depth += 1; // 64 should be more than enough for detecting infinite recursion. - if self.depth > 64 { + if self.depth > 32 { return Err(ResolveError::Recursion); } Ok(()) diff --git a/src/error.rs b/src/error.rs index 948ea8d2..5cc4916b 100644 --- a/src/error.rs +++ b/src/error.rs @@ -154,8 +154,8 @@ impl From for ResolveError { } } -#[test] -fn test_into_io_error() { +#[tokio::test] +async fn test_into_io_error() { use std::io::{self, ErrorKind}; let error_string = "IOError occurred"; let string_error = io::Error::new(ErrorKind::Interrupted, error_string.to_string()); @@ -179,8 +179,8 @@ fn test_into_io_error() { ); } -#[test] -fn test_coverage() { +#[tokio::test] +async fn test_coverage() { let error = ResolveError::NotFound("x".into()); assert_eq!(format!("{error:?}"), r#"NotFound("x")"#); assert_eq!(error.clone(), error); diff --git a/src/file_system.rs b/src/file_system.rs index 5112ad3d..12903ca0 100644 --- a/src/file_system.rs +++ b/src/file_system.rs @@ -8,13 +8,14 @@ use std::{ use pnp::fs::{LruZipCache, VPath, VPathInfo, ZipCache}; /// File System abstraction used for `ResolverGeneric` +#[async_trait::async_trait] pub trait FileSystem { /// See [std::fs::read] /// /// # Errors /// /// See [std::fs::read] - fn read(&self, path: &Path) -> io::Result>; + async fn read(&self, path: &Path) -> io::Result>; /// See [std::fs::read_to_string] /// /// # Errors @@ -25,7 +26,7 @@ pub trait FileSystem { /// because object safety requirements, it is especially useful, when /// you want to store multiple `dyn FileSystem` in a `Vec` or use a `ResolverGeneric` in /// napi env. - fn read_to_string(&self, path: &Path) -> io::Result; + async fn read_to_string(&self, path: &Path) -> io::Result; /// See [std::fs::metadata] /// @@ -36,7 +37,7 @@ pub trait FileSystem { /// because object safety requirements, it is especially useful, when /// you want to store multiple `dyn FileSystem` in a `Vec` or use a `ResolverGeneric` in /// napi env. - fn metadata(&self, path: &Path) -> io::Result; + async fn metadata(&self, path: &Path) -> io::Result; /// See [std::fs::symlink_metadata] /// @@ -48,7 +49,7 @@ pub trait FileSystem { /// because object safety requirements, it is especially useful, when /// you want to store multiple `dyn FileSystem` in a `Vec` or use a `ResolverGeneric` in /// napi env. 
- fn symlink_metadata(&self, path: &Path) -> io::Result; + async fn symlink_metadata(&self, path: &Path) -> io::Result; /// See [std::fs::canonicalize] /// @@ -60,7 +61,7 @@ pub trait FileSystem { /// because object safety requirements, it is especially useful, when /// you want to store multiple `dyn FileSystem` in a `Vec` or use a `ResolverGeneric` in /// napi env. - fn canonicalize(&self, path: &Path) -> io::Result; + async fn canonicalize(&self, path: &Path) -> io::Result; } /// Metadata information about a file @@ -121,40 +122,40 @@ impl Default for FileSystemOs { } } -fn buffer_to_string(bytes: Vec) -> io::Result { - // `simdutf8` is faster than `std::str::from_utf8` which `fs::read_to_string` uses internally - if simdutf8::basic::from_utf8(&bytes).is_err() { - // Same error as `fs::read_to_string` produces (`io::Error::INVALID_UTF8`) - return Err(io::Error::new( - io::ErrorKind::InvalidData, - "stream did not contain valid UTF-8", - )); - } - // SAFETY: `simdutf8` has ensured it's a valid UTF-8 string - Ok(unsafe { String::from_utf8_unchecked(bytes) }) -} - +#[cfg(not(target_arch = "wasm32"))] +#[async_trait::async_trait] impl FileSystem for FileSystemOs { - fn read(&self, path: &Path) -> io::Result> { + async fn read(&self, path: &Path) -> io::Result> { cfg_if! { if #[cfg(feature = "yarn_pnp")] { if self.options.enable_pnp { return match VPath::from(path)? { VPath::Zip(info) => self.pnp_lru.read(info.physical_base_path(), info.zip_path), - VPath::Virtual(info) => std::fs::read(info.physical_base_path()), - VPath::Native(path) => std::fs::read(&path), + VPath::Virtual(info) => tokio::fs::read(info.physical_base_path()).await, + VPath::Native(path) => tokio::fs::read(&path).await, } } }} - std::fs::read(path) + tokio::fs::read(path).await } - fn read_to_string(&self, path: &Path) -> io::Result { - let buffer = self.read(path)?; - buffer_to_string(buffer) + + async fn read_to_string(&self, path: &Path) -> io::Result { + cfg_if! { + if #[cfg(feature = "yarn_pnp")] { + if self.options.enable_pnp { + return match VPath::from(path)? { + VPath::Zip(info) => self.pnp_lru.read_to_string(info.physical_base_path(), info.zip_path), + VPath::Virtual(info) => tokio::fs::read_to_string(info.physical_base_path()).await, + VPath::Native(path) => tokio::fs::read_to_string(&path).await, + } + } + } + } + tokio::fs::read_to_string(path).await } - fn metadata(&self, path: &Path) -> io::Result { + async fn metadata(&self, path: &Path) -> io::Result { cfg_if! { if #[cfg(feature = "yarn_pnp")] { if self.options.enable_pnp { @@ -164,22 +165,24 @@ impl FileSystem for FileSystemOs { .file_type(info.physical_base_path(), info.zip_path) .map(FileMetadata::from), VPath::Virtual(info) => { - fs::metadata(info.physical_base_path()).map(FileMetadata::from) + tokio::fs::metadata(info.physical_base_path()) + .await + .map(FileMetadata::from) } - VPath::Native(path) => fs::metadata(path).map(FileMetadata::from), + VPath::Native(path) => tokio::fs::metadata(path).await.map(FileMetadata::from), } } } } - fs::metadata(path).map(FileMetadata::from) + tokio::fs::metadata(path).await.map(FileMetadata::from) } - fn symlink_metadata(&self, path: &Path) -> io::Result { - fs::symlink_metadata(path).map(FileMetadata::from) + async fn symlink_metadata(&self, path: &Path) -> io::Result { + tokio::fs::symlink_metadata(path).await.map(FileMetadata::from) } - fn canonicalize(&self, path: &Path) -> io::Result { + async fn canonicalize(&self, path: &Path) -> io::Result { cfg_if! 
{ if #[cfg(feature = "yarn_pnp")] { if self.options.enable_pnp { @@ -195,48 +198,47 @@ impl FileSystem for FileSystemOs { } cfg_if! { - if #[cfg(not(target_os = "wasi"))]{ - dunce::canonicalize(path) - } else { - use std::path::Component; - let mut path_buf = path.to_path_buf(); - loop { - let link = fs::read_link(&path_buf)?; - path_buf.pop(); - for component in link.components() { - match component { - Component::ParentDir => { - path_buf.pop(); - } - Component::Normal(seg) => { - #[cfg(target_family = "wasm")] - // Need to trim the extra \0 introduces by https://github.com/nodejs/uvwasi/issues/262 - { - path_buf.push(seg.to_string_lossy().trim_end_matches('\0')); - } - #[cfg(not(target_family = "wasm"))] - { - path_buf.push(seg); - } + if #[cfg(not(target_os = "wasi"))]{ + dunce::canonicalize(path) + } else { + use std::path::Component; + let mut path_buf = path.to_path_buf(); + loop { + let link = fs::read_link(&path_buf)?; + path_buf.pop(); + for component in link.components() { + match component { + Component::ParentDir => { + path_buf.pop(); + } + Component::Normal(seg) => { + #[cfg(target_family = "wasm")] + // Need to trim the extra \0 introduces by https://github.com/nodejs/uvwasi/issues/262 + { + path_buf.push(seg.to_string_lossy().trim_end_matches('\0')); } - Component::RootDir => { - path_buf = PathBuf::from("/"); + #[cfg(not(target_family = "wasm"))] + { + path_buf.push(seg); } - Component::CurDir | Component::Prefix(_) => {} } + Component::RootDir => { + path_buf = PathBuf::from("/"); + } + Component::CurDir | Component::Prefix(_) => {} } - if !fs::symlink_metadata(&path_buf)?.is_symlink() { - break; - } } - Ok(path_buf) + if !fs::symlink_metadata(&path_buf)?.is_symlink() { + break; + } + } + Ok(path_buf) } } } } - -#[test] -fn metadata() { +#[tokio::test] +async fn metadata() { let meta = FileMetadata { is_file: true, is_dir: true, is_symlink: true }; assert_eq!( format!("{meta:?}"), diff --git a/src/lib.rs b/src/lib.rs index 15d5a3cd..893317cd 100644 --- a/src/lib.rs +++ b/src/lib.rs @@ -94,6 +94,7 @@ use crate::{ tsconfig::ExtendsField, tsconfig::{ProjectReference, TsConfig}, }; +use futures::future::{try_join_all, BoxFuture}; type ResolveResult = Result, ResolveError>; @@ -124,13 +125,13 @@ impl fmt::Debug for ResolverGeneric { } } -impl Default for ResolverGeneric { +impl Default for ResolverGeneric { fn default() -> Self { Self::new(ResolveOptions::default()) } } -impl ResolverGeneric { +impl ResolverGeneric { pub fn new(options: ResolveOptions) -> Self { Self { options: options.sanitize(), @@ -141,7 +142,7 @@ impl ResolverGeneric { } } -impl ResolverGeneric { +impl ResolverGeneric { pub fn new_with_file_system(file_system: Fs, options: ResolveOptions) -> Self { Self { options: options.sanitize(), @@ -183,13 +184,13 @@ impl ResolverGeneric { /// # Errors /// /// * See [ResolveError] - pub fn resolve>( + pub async fn resolve>( &self, directory: P, specifier: &str, ) -> Result { let mut ctx = Ctx::default(); - self.resolve_tracing(directory.as_ref(), specifier, &mut ctx) + self.resolve_tracing(directory.as_ref(), specifier, &mut ctx).await } /// Resolve `specifier` at absolute `path` with [ResolveContext] @@ -197,7 +198,7 @@ impl ResolverGeneric { /// # Errors /// /// * See [ResolveError] - pub fn resolve_with_context>( + pub async fn resolve_with_context>( &self, directory: P, specifier: &str, @@ -205,7 +206,7 @@ impl ResolverGeneric { ) -> Result { let mut ctx = Ctx::default(); ctx.init_file_dependencies(); - let result = self.resolve_tracing(directory.as_ref(), 
specifier, &mut ctx); + let result = self.resolve_tracing(directory.as_ref(), specifier, &mut ctx).await; if let Some(deps) = &mut ctx.file_dependencies { resolve_context.file_dependencies.extend(deps.drain(..)); } @@ -216,7 +217,7 @@ impl ResolverGeneric { } /// Wrap `resolve_impl` with `tracing` information - fn resolve_tracing( + async fn resolve_tracing( &self, directory: &Path, specifier: &str, @@ -224,7 +225,7 @@ impl ResolverGeneric { ) -> Result { let span = tracing::debug_span!("resolve", path = ?directory, specifier = specifier); let _enter = span.enter(); - let r = self.resolve_impl(directory, specifier, ctx); + let r = self.resolve_impl(directory, specifier, ctx).await; match &r { Ok(r) => { tracing::debug!(options = ?self.options, path = ?directory, specifier = specifier, ret = ?r.path); @@ -236,7 +237,7 @@ impl ResolverGeneric { r } - fn resolve_impl( + async fn resolve_impl( &self, path: &Path, specifier: &str, @@ -244,10 +245,11 @@ impl ResolverGeneric { ) -> Result { ctx.with_fully_specified(self.options.fully_specified); let cached_path = self.cache.value(path); - let cached_path = self.require(&cached_path, specifier, ctx)?; - let path = self.load_realpath(&cached_path)?; + let cached_path = self.require(&cached_path, specifier, ctx).await?; + let path = self.load_realpath(&cached_path).await?; - let package_json = cached_path.find_package_json(&self.cache.fs, &self.options, ctx)?; + let package_json = + cached_path.find_package_json(&self.cache.fs, &self.options, ctx).await?; if let Some(package_json) = &package_json { // path must be inside the package. debug_assert!(path.starts_with(package_json.directory())); @@ -266,51 +268,59 @@ impl ResolverGeneric { /// Y: path /// /// - fn require( - &self, - cached_path: &CachedPath, - specifier: &str, - ctx: &mut Ctx, - ) -> Result { - ctx.test_for_infinite_recursion()?; - - // enhanced-resolve: parse - let (parsed, try_fragment_as_path) = self.load_parse(cached_path, specifier, ctx)?; - if let Some(path) = try_fragment_as_path { - return Ok(path); - } + fn require<'a>( + &'a self, + cached_path: &'a CachedPath, + specifier: &'a str, + ctx: &'a mut Ctx, + ) -> BoxFuture<'a, Result> { + let fut = async move { + ctx.test_for_infinite_recursion()?; + + // enhanced-resolve: parse + let (parsed, try_fragment_as_path) = + self.load_parse(cached_path, specifier, ctx).await?; + if let Some(path) = try_fragment_as_path { + return Ok(path); + } - self.require_without_parse(cached_path, parsed.path(), ctx) + self.require_without_parse(cached_path, parsed.path(), ctx).await + }; + Box::pin(fut) } - fn require_without_parse( + async fn require_without_parse( &self, cached_path: &CachedPath, specifier: &str, ctx: &mut Ctx, ) -> Result { // tsconfig-paths - if let Some(path) = self.load_tsconfig_paths(cached_path, specifier, &mut Ctx::default())? { + if let Some(path) = + self.load_tsconfig_paths(cached_path, specifier, &mut Ctx::default()).await? + { return Ok(path); } // enhanced-resolve: try alias - if let Some(path) = self.load_alias(cached_path, specifier, &self.options.alias, ctx)? { + if let Some(path) = + self.load_alias(cached_path, specifier, &self.options.alias, ctx).await? + { return Ok(path); } let result = match Path::new(specifier).components().next() { // 2. If X begins with '/' Some(Component::RootDir | Component::Prefix(_)) => { - self.require_absolute(cached_path, specifier, ctx) + self.require_absolute(cached_path, specifier, ctx).await } // 3. 
If X begins with './' or '/' or '../' Some(Component::CurDir | Component::ParentDir) => { - self.require_relative(cached_path, specifier, ctx) + self.require_relative(cached_path, specifier, ctx).await } // 4. If X begins with '#' Some(Component::Normal(_)) if specifier.as_bytes()[0] == b'#' => { - self.require_hash(cached_path, specifier, ctx) + self.require_hash(cached_path, specifier, ctx).await } _ => { // 1. If X is a core module, @@ -321,18 +331,22 @@ impl ResolverGeneric { // (ESM) 5. Otherwise, // Note: specifier is now a bare specifier. // Set resolved the result of PACKAGE_RESOLVE(specifier, parentURL). - self.require_bare(cached_path, specifier, ctx) + self.require_bare(cached_path, specifier, ctx).await } }; - result.or_else(|err| { - if err.is_ignore() { - return Err(err); + match result { + Ok(_) => result, + Err(err) => { + if err.is_ignore() { + return Err(err); + } + // enhanced-resolve: try fallback + self.load_alias(cached_path, specifier, &self.options.fallback, ctx) + .await + .and_then(|value| value.ok_or(err)) } - // enhanced-resolve: try fallback - self.load_alias(cached_path, specifier, &self.options.fallback, ctx) - .and_then(|value| value.ok_or(err)) - }) + } } // PACKAGE_RESOLVE(packageSpecifier, parentURL) @@ -352,7 +366,7 @@ impl ResolverGeneric { Ok(()) } - fn require_absolute( + async fn require_absolute( &self, cached_path: &CachedPath, specifier: &str, @@ -364,24 +378,26 @@ impl ResolverGeneric { .next() .is_some_and(|c| matches!(c, Component::RootDir | Component::Prefix(_)))); if !self.options.prefer_relative && self.options.prefer_absolute { - if let Ok(path) = self.load_package_self_or_node_modules(cached_path, specifier, ctx) { + if let Ok(path) = + self.load_package_self_or_node_modules(cached_path, specifier, ctx).await + { return Ok(path); } } - if let Some(path) = self.load_roots(specifier, ctx) { + if let Some(path) = self.load_roots(specifier, ctx).await { return Ok(path); } // 2. If X begins with '/' // a. set Y to be the file system root let path = self.cache.value(Path::new(specifier)); - if let Some(path) = self.load_as_file_or_directory(&path, specifier, ctx)? { + if let Some(path) = self.load_as_file_or_directory(&path, specifier, ctx).await? { return Ok(path); } Err(ResolveError::NotFound(specifier.to_string())) } // 3. If X begins with './' or '/' or '../' - fn require_relative( + async fn require_relative( &self, cached_path: &CachedPath, specifier: &str, @@ -396,14 +412,14 @@ impl ResolverGeneric { let cached_path = self.cache.value(&path); // a. LOAD_AS_FILE(Y + X) // b. LOAD_AS_DIRECTORY(Y + X) - if let Some(path) = self.load_as_file_or_directory(&cached_path, specifier, ctx)? { + if let Some(path) = self.load_as_file_or_directory(&cached_path, specifier, ctx).await? { return Ok(path); } // c. THROW "not found" Err(ResolveError::NotFound(specifier.to_string())) } - fn require_hash( + async fn require_hash( &self, cached_path: &CachedPath, specifier: &str, @@ -411,13 +427,13 @@ impl ResolverGeneric { ) -> Result { debug_assert_eq!(specifier.chars().next(), Some('#')); // a. LOAD_PACKAGE_IMPORTS(X, dirname(Y)) - if let Some(path) = self.load_package_imports(cached_path, specifier, ctx)? { + if let Some(path) = self.load_package_imports(cached_path, specifier, ctx).await? 
{ return Ok(path); } - self.load_package_self_or_node_modules(cached_path, specifier, ctx) + self.load_package_self_or_node_modules(cached_path, specifier, ctx).await } - fn require_bare( + async fn require_bare( &self, cached_path: &CachedPath, specifier: &str, @@ -429,11 +445,11 @@ impl ResolverGeneric { .next() .is_some_and(|c| matches!(c, Component::Normal(_)))); if self.options.prefer_relative { - if let Ok(path) = self.require_relative(cached_path, specifier, ctx) { + if let Ok(path) = self.require_relative(cached_path, specifier, ctx).await { return Ok(path); } } - self.load_package_self_or_node_modules(cached_path, specifier, ctx) + self.load_package_self_or_node_modules(cached_path, specifier, ctx).await } /// enhanced-resolve: ParsePlugin. @@ -444,7 +460,7 @@ impl ResolverGeneric { /// When a # is resolved as path it will be escaped in the result. Here: `.../some\0#thing.js`. /// /// - fn load_parse<'s>( + async fn load_parse<'s>( &self, cached_path: &CachedPath, specifier: &'s str, @@ -458,7 +474,7 @@ impl ResolverGeneric { let specifier = parsed.path(); let fragment = ctx.fragment.take().unwrap(); let path = format!("{specifier}{fragment}"); - if let Ok(path) = self.require_without_parse(cached_path, &path, ctx) { + if let Ok(path) = self.require_without_parse(cached_path, &path, ctx).await { return Ok((parsed, Some(path))); } ctx.fragment.replace(fragment); @@ -466,7 +482,7 @@ impl ResolverGeneric { Ok((parsed, None)) } - fn load_package_self_or_node_modules( + async fn load_package_self_or_node_modules( &self, cached_path: &CachedPath, specifier: &str, @@ -477,11 +493,11 @@ impl ResolverGeneric { ctx.with_fully_specified(false); } // 5. LOAD_PACKAGE_SELF(X, dirname(Y)) - if let Some(path) = self.load_package_self(cached_path, specifier, ctx)? { + if let Some(path) = self.load_package_self(cached_path, specifier, ctx).await? { return Ok(path); } // 6. LOAD_NODE_MODULES(X, dirname(Y)) - if let Some(path) = self.load_node_modules(cached_path, specifier, ctx)? { + if let Some(path) = self.load_node_modules(cached_path, specifier, ctx).await? { return Ok(path); } // 7. THROW "not found" @@ -489,7 +505,7 @@ impl ResolverGeneric { } /// LOAD_PACKAGE_IMPORTS(X, DIR) - fn load_package_imports( + async fn load_package_imports( &self, cached_path: &CachedPath, specifier: &str, @@ -498,47 +514,48 @@ impl ResolverGeneric { // 1. Find the closest package scope SCOPE to DIR. // 2. If no scope was found, return. let Some(package_json) = - cached_path.find_package_json(&self.cache.fs, &self.options, ctx)? + cached_path.find_package_json(&self.cache.fs, &self.options, ctx).await? else { return Ok(None); }; // 3. If the SCOPE/package.json "imports" is null or undefined, return. // 4. let MATCH = PACKAGE_IMPORTS_RESOLVE(X, pathToFileURL(SCOPE), ["node", "require"]) defined in the ESM resolver. - if let Some(path) = self.package_imports_resolve(specifier, &package_json, ctx)? { + if let Some(path) = self.package_imports_resolve(specifier, &package_json, ctx).await? { // 5. RESOLVE_ESM_MATCH(MATCH). - return self.resolve_esm_match(specifier, &path, ctx); + return self.resolve_esm_match(specifier, &path, ctx).await; } Ok(None) } - fn load_as_file(&self, cached_path: &CachedPath, ctx: &mut Ctx) -> ResolveResult { + async fn load_as_file(&self, cached_path: &CachedPath, ctx: &mut Ctx) -> ResolveResult { // enhanced-resolve feature: extension_alias - if let Some(path) = self.load_extension_alias(cached_path, ctx)? { + if let Some(path) = self.load_extension_alias(cached_path, ctx).await? 
{ return Ok(Some(path)); } if self.options.enforce_extension.is_disabled() { // 1. If X is a file, load X as its file extension format. STOP - if let Some(path) = self.load_alias_or_file(cached_path, ctx)? { + if let Some(path) = self.load_alias_or_file(cached_path, ctx).await? { return Ok(Some(path)); } } // 2. If X.js is a file, load X.js as JavaScript text. STOP // 3. If X.json is a file, parse X.json to a JavaScript Object. STOP // 4. If X.node is a file, load X.node as binary addon. STOP - if let Some(path) = self.load_extensions(cached_path, &self.options.extensions, ctx)? { + if let Some(path) = self.load_extensions(cached_path, &self.options.extensions, ctx).await? + { return Ok(Some(path)); } Ok(None) } - fn load_as_directory(&self, cached_path: &CachedPath, ctx: &mut Ctx) -> ResolveResult { + async fn load_as_directory(&self, cached_path: &CachedPath, ctx: &mut Ctx) -> ResolveResult { // TODO: Only package.json is supported, so warn about having other values // Checking for empty files is needed for omitting checks on package.json // 1. If X/package.json is a file, if !self.options.description_files.is_empty() { // a. Parse X/package.json, and look for "main" field. if let Some(package_json) = - cached_path.package_json(&self.cache.fs, &self.options, ctx)? + cached_path.package_json(&self.cache.fs, &self.options, ctx).await? { // b. If "main" is a falsy value, GOTO 2. for main_field in package_json.main_fields(&self.options.main_fields) { @@ -554,11 +571,11 @@ impl ResolverGeneric { let main_field_path = cached_path.path().normalize_with(main_field.as_ref()); // d. LOAD_AS_FILE(M) let cached_path = self.cache.value(&main_field_path); - if let Ok(Some(path)) = self.load_as_file(&cached_path, ctx) { + if let Ok(Some(path)) = self.load_as_file(&cached_path, ctx).await { return Ok(Some(path)); } // e. LOAD_INDEX(M) - if let Some(path) = self.load_index(&cached_path, ctx)? { + if let Some(path) = self.load_index(&cached_path, ctx).await? { return Ok(Some(path)); } } @@ -567,32 +584,32 @@ impl ResolverGeneric { } } // 2. LOAD_INDEX(X) - self.load_index(cached_path, ctx) + self.load_index(cached_path, ctx).await } - fn load_as_file_or_directory( + async fn load_as_file_or_directory( &self, cached_path: &CachedPath, specifier: &str, ctx: &mut Ctx, ) -> ResolveResult { if self.options.resolve_to_context { - return Ok(cached_path.is_dir(&self.cache.fs, ctx).then(|| cached_path.clone())); + return Ok(cached_path.is_dir(&self.cache.fs, ctx).await.then(|| cached_path.clone())); } if !specifier.ends_with('/') { - if let Some(path) = self.load_as_file(cached_path, ctx)? { + if let Some(path) = self.load_as_file(cached_path, ctx).await? { return Ok(Some(path)); } } - if cached_path.is_dir(&self.cache.fs, ctx) { - if let Some(path) = self.load_as_directory(cached_path, ctx)? { + if cached_path.is_dir(&self.cache.fs, ctx).await { + if let Some(path) = self.load_as_directory(cached_path, ctx).await? { return Ok(Some(path)); } } Ok(None) } - fn load_extensions( + async fn load_extensions( &self, path: &CachedPath, extensions: &[String], @@ -607,16 +624,16 @@ impl ResolverGeneric { path_with_extension.reserve_exact(extension.len()); path_with_extension.push(extension); let cached_path = self.cache.value(Path::new(&path_with_extension)); - if let Some(path) = self.load_alias_or_file(&cached_path, ctx)? { + if let Some(path) = self.load_alias_or_file(&cached_path, ctx).await? 
{ return Ok(Some(path)); } } Ok(None) } - fn load_realpath(&self, cached_path: &CachedPath) -> Result { + async fn load_realpath(&self, cached_path: &CachedPath) -> Result { if self.options.symlinks { - cached_path.realpath(&self.cache.fs).map_err(ResolveError::from) + cached_path.realpath(&self.cache.fs).await.map_err(ResolveError::from) } else { Ok(cached_path.to_path_buf()) } @@ -650,12 +667,12 @@ impl ResolverGeneric { true } - fn load_index(&self, cached_path: &CachedPath, ctx: &mut Ctx) -> ResolveResult { + async fn load_index(&self, cached_path: &CachedPath, ctx: &mut Ctx) -> ResolveResult { for main_file in &self.options.main_files { let main_path = cached_path.path().normalize_with(main_file); let cached_path = self.cache.value(&main_path); if self.options.enforce_extension.is_disabled() { - if let Some(path) = self.load_alias_or_file(&cached_path, ctx)? { + if let Some(path) = self.load_alias_or_file(&cached_path, ctx).await? { if self.check_restrictions(path.path()) { return Ok(Some(path)); } @@ -664,20 +681,22 @@ impl ResolverGeneric { // 1. If X/index.js is a file, load X/index.js as JavaScript text. STOP // 2. If X/index.json is a file, parse X/index.json to a JavaScript object. STOP // 3. If X/index.node is a file, load X/index.node as binary addon. STOP - if let Some(path) = self.load_extensions(&cached_path, &self.options.extensions, ctx)? { + if let Some(path) = + self.load_extensions(&cached_path, &self.options.extensions, ctx).await? + { return Ok(Some(path)); } } Ok(None) } - fn load_alias_or_file(&self, cached_path: &CachedPath, ctx: &mut Ctx) -> ResolveResult { + async fn load_alias_or_file(&self, cached_path: &CachedPath, ctx: &mut Ctx) -> ResolveResult { if !self.options.alias_fields.is_empty() { if let Some(package_json) = - cached_path.find_package_json(&self.cache.fs, &self.options, ctx)? + cached_path.find_package_json(&self.cache.fs, &self.options, ctx).await? { if let Some(path) = - self.load_browser_field(cached_path, None, &package_json, ctx)? + self.load_browser_field(cached_path, None, &package_json, ctx).await? { return Ok(Some(path)); } @@ -686,17 +705,19 @@ impl ResolverGeneric { // enhanced-resolve: try file as alias let alias_specifier = cached_path.path().to_string_lossy(); if let Some(path) = - self.load_alias(cached_path, &alias_specifier, &self.options.alias, ctx)? + self.load_alias(cached_path, &alias_specifier, &self.options.alias, ctx).await? { return Ok(Some(path)); } - if cached_path.is_file(&self.cache.fs, ctx) && self.check_restrictions(cached_path.path()) { + if cached_path.is_file(&self.cache.fs, ctx).await + && self.check_restrictions(cached_path.path()) + { return Ok(Some(cached_path.clone())); } Ok(None) } - fn load_node_modules( + async fn load_node_modules( &self, cached_path: &CachedPath, specifier: &str, @@ -705,7 +726,7 @@ impl ResolverGeneric { #[cfg(feature = "yarn_pnp")] { if self.options.enable_pnp { - if let Some(resolved_path) = self.load_pnp(cached_path, specifier, ctx)? { + if let Some(resolved_path) = self.load_pnp(cached_path, specifier, ctx).await? 
{ return Ok(Some(resolved_path)); } } @@ -717,11 +738,12 @@ impl ResolverGeneric { for module_name in &self.options.modules { for cached_path in std::iter::successors(Some(cached_path), |p| p.parent()) { // Skip if /path/to/node_modules does not exist - if !cached_path.is_dir(&self.cache.fs, ctx) { + if !cached_path.is_dir(&self.cache.fs, ctx).await { continue; } - let Some(cached_path) = self.get_module_directory(cached_path, module_name, ctx) + let Some(cached_path) = + self.get_module_directory(cached_path, module_name, ctx).await else { continue; }; @@ -733,10 +755,10 @@ impl ResolverGeneric { let package_path = cached_path.path().normalize_with(package_name); let cached_path = self.cache.value(&package_path); // Try foo/node_modules/package_name - if cached_path.is_dir(&self.cache.fs, ctx) { + if cached_path.is_dir(&self.cache.fs, ctx).await { // a. LOAD_PACKAGE_EXPORTS(X, DIR) if let Some(path) = - self.load_package_exports(specifier, subpath, &cached_path, ctx)? + self.load_package_exports(specifier, subpath, &cached_path, ctx).await? { return Ok(Some(path)); } @@ -749,7 +771,7 @@ impl ResolverGeneric { // i.e. `foo/node_modules/@scope` is not a directory for `foo/node_modules/@scope/package` if package_name.starts_with('@') { if let Some(path) = cached_path.parent() { - if !path.is_dir(&self.cache.fs, ctx) { + if !path.is_dir(&self.cache.fs, ctx).await { continue; } } @@ -762,7 +784,9 @@ impl ResolverGeneric { // c. LOAD_AS_DIRECTORY(DIR/X) let node_module_file = cached_path.path().normalize_with(specifier); let cached_path = self.cache.value(&node_module_file); - if let Some(path) = self.load_as_file_or_directory(&cached_path, specifier, ctx)? { + if let Some(path) = + self.load_as_file_or_directory(&cached_path, specifier, ctx).await? + { return Ok(Some(path)); } } @@ -784,7 +808,7 @@ impl ResolverGeneric { } #[cfg(feature = "yarn_pnp")] - fn load_pnp( + async fn load_pnp( &self, cached_path: &CachedPath, specifier: &str, @@ -804,7 +828,8 @@ impl ResolverGeneric { Ok(pnp::Resolution::Resolved(path, subpath)) => { let cached_path = self.cache.value(&path); - let export_resolution = self.load_package_self(&cached_path, specifier, ctx)?; + let export_resolution = + self.load_package_self(&cached_path, specifier, ctx).await?; // can be found in pnp cached folder if export_resolution.is_some() { return Ok(export_resolution); @@ -820,7 +845,8 @@ impl ResolverGeneric { let inner_resolver = self.clone_with_options(self.options().clone()); // try as file or directory `path` in the pnp folder - let Ok(inner_resolution) = inner_resolver.resolve(&path, &inner_request) else { + let Ok(inner_resolution) = inner_resolver.resolve(&path, &inner_request).await + else { return Err(ResolveError::NotFound(specifier.to_string())); }; @@ -835,24 +861,24 @@ impl ResolverGeneric { } } - fn get_module_directory( + async fn get_module_directory( &self, cached_path: &CachedPath, module_name: &str, ctx: &mut Ctx, ) -> Option { if module_name == "node_modules" { - cached_path.cached_node_modules(&self.cache, ctx) + cached_path.cached_node_modules(&self.cache, ctx).await } else if cached_path.path().components().next_back() == Some(Component::Normal(OsStr::new(module_name))) { Some(cached_path.clone()) } else { - cached_path.module_directory(module_name, &self.cache, ctx) + cached_path.module_directory(module_name, &self.cache, ctx).await } } - fn load_package_exports( + async fn load_package_exports( &self, specifier: &str, subpath: &str, @@ -861,7 +887,8 @@ impl ResolverGeneric { ) -> ResolveResult { // 2. 
If X does not match this pattern or DIR/NAME/package.json is not a file, // return. - let Some(package_json) = cached_path.package_json(&self.cache.fs, &self.options, ctx)? + let Some(package_json) = + cached_path.package_json(&self.cache.fs, &self.options, ctx).await? else { return Ok(None); }; @@ -871,20 +898,18 @@ impl ResolverGeneric { // `package.json` "exports", ["node", "require"]) defined in the ESM resolver. // Note: The subpath is not prepended with a dot on purpose for exports in package_json.exports_fields(&self.options.exports_fields) { - if let Some(path) = self.package_exports_resolve( - cached_path.path(), - &format!(".{subpath}"), - exports, - ctx, - )? { + if let Some(path) = self + .package_exports_resolve(cached_path.path(), &format!(".{subpath}"), exports, ctx) + .await? + { // 6. RESOLVE_ESM_MATCH(MATCH) - return self.resolve_esm_match(specifier, &path, ctx); + return self.resolve_esm_match(specifier, &path, ctx).await; }; } Ok(None) } - fn load_package_self( + async fn load_package_self( &self, cached_path: &CachedPath, specifier: &str, @@ -893,7 +918,7 @@ impl ResolverGeneric { // 1. Find the closest package scope SCOPE to DIR. // 2. If no scope was found, return. let Some(package_json) = - cached_path.find_package_json(&self.cache.fs, &self.options, ctx)? + cached_path.find_package_json(&self.cache.fs, &self.options, ctx).await? else { return Ok(None); }; @@ -911,19 +936,20 @@ impl ResolverGeneric { // Note: The subpath is not prepended with a dot on purpose // because `package_exports_resolve` matches subpath without the leading dot. for exports in package_json.exports_fields(&self.options.exports_fields) { - if let Some(cached_path) = - self.package_exports_resolve(package_url, &format!(".{subpath}"), exports, ctx)? + if let Some(cached_path) = self + .package_exports_resolve(package_url, &format!(".{subpath}"), exports, ctx) + .await? { // 6. RESOLVE_ESM_MATCH(MATCH) - return self.resolve_esm_match(specifier, &cached_path, ctx); + return self.resolve_esm_match(specifier, &cached_path, ctx).await; } } } - self.load_browser_field(cached_path, Some(specifier), &package_json, ctx) + self.load_browser_field(cached_path, Some(specifier), &package_json, ctx).await } /// RESOLVE_ESM_MATCH(MATCH) - fn resolve_esm_match( + async fn resolve_esm_match( &self, specifier: &str, cached_path: &CachedPath, @@ -933,7 +959,7 @@ impl ResolverGeneric { // 2. If the file at RESOLVED_PATH exists, load RESOLVED_PATH as its extension format. STOP // // Non-compliant ESM can result in a directory, so directory is tried as well. - if let Some(path) = self.load_as_file_or_directory(cached_path, "", ctx)? { + if let Some(path) = self.load_as_file_or_directory(cached_path, "", ctx).await? { return Ok(Some(path)); } @@ -944,12 +970,10 @@ impl ResolverGeneric { // but also `?` is a valid character in a path, so we should try from right to left. while let Some(s) = path_str { if let Some((before, _)) = s.rsplit_once('?') { - if (self.load_as_file_or_directory( - &self.cache.value(Path::new(before)), - "", - ctx, - )?) - .is_some() + if (self + .load_as_file_or_directory(&self.cache.value(Path::new(before)), "", ctx) + .await?) 
+ .is_some() { return Ok(Some(cached_path.clone())); } @@ -964,7 +988,7 @@ impl ResolverGeneric { } /// enhanced-resolve: AliasFieldPlugin for [ResolveOptions::alias_fields] - fn load_browser_field( + async fn load_browser_field( &self, cached_path: &CachedPath, module_specifier: Option<&str>, @@ -987,7 +1011,7 @@ impl ResolverGeneric { if ctx.resolving_alias.as_ref().is_some_and(|s| s == new_specifier) { // Complete when resolving to self `{"./a.js": "./a.js"}` if new_specifier.strip_prefix("./").filter(|s| path.ends_with(Path::new(s))).is_some() { - return if cached_path.is_file(&self.cache.fs, ctx) { + return if cached_path.is_file(&self.cache.fs, ctx).await { if self.check_restrictions(cached_path.path()) { Ok(Some(cached_path.clone())) } else { @@ -1002,11 +1026,11 @@ impl ResolverGeneric { ctx.with_resolving_alias(new_specifier.to_string()); ctx.with_fully_specified(false); let cached_path = self.cache.value(package_json.directory()); - self.require(&cached_path, new_specifier, ctx).map(Some) + self.require(&cached_path, new_specifier, ctx).await.map(Some) } /// enhanced-resolve: AliasPlugin for [ResolveOptions::alias] and [ResolveOptions::fallback]. - fn load_alias( + async fn load_alias( &self, cached_path: &CachedPath, specifier: &str, @@ -1033,14 +1057,17 @@ impl ResolverGeneric { for r in specifiers { match r { AliasValue::Path(alias_value) => { - if let Some(path) = self.load_alias_value( - cached_path, - alias_key, - alias_value, - specifier, - ctx, - &mut should_stop, - )? { + if let Some(path) = self + .load_alias_value( + cached_path, + alias_key, + alias_value, + specifier, + ctx, + &mut should_stop, + ) + .await? + { return Ok(Some(path)); } } @@ -1060,7 +1087,7 @@ impl ResolverGeneric { Ok(None) } - fn load_alias_value( + async fn load_alias_value( &self, cached_path: &CachedPath, alias_key: &str, @@ -1080,7 +1107,7 @@ impl ResolverGeneric { let alias_path = Path::new(alias_value).normalize(); // Must not append anything to alias_value if it is a file. let alias_value_cached_path = self.cache.value(&alias_path); - if alias_value_cached_path.is_file(&self.cache.fs, ctx) { + if alias_value_cached_path.is_file(&self.cache.fs, ctx).await { return Ok(None); } @@ -1096,7 +1123,7 @@ impl ResolverGeneric { *should_stop = true; ctx.with_fully_specified(false); - return match self.require(cached_path, new_specifier.as_ref(), ctx) { + return match self.require(cached_path, new_specifier.as_ref(), ctx).await { Err(ResolveError::NotFound(_) | ResolveError::MatchedAliasNotFound(_, _)) => { Ok(None) } @@ -1115,7 +1142,7 @@ impl ResolverGeneric { /// # Errors /// /// * [ResolveError::ExtensionAlias]: When all of the aliased extensions are not found - fn load_extension_alias(&self, cached_path: &CachedPath, ctx: &mut Ctx) -> ResolveResult { + async fn load_extension_alias(&self, cached_path: &CachedPath, ctx: &mut Ctx) -> ResolveResult { if self.options.extension_alias.is_empty() { return Ok(None); } @@ -1140,13 +1167,13 @@ impl ResolverGeneric { path_with_extension.reserve_exact(extension.len()); path_with_extension.push(extension); let cached_path = self.cache.value(Path::new(&path_with_extension)); - if let Some(path) = self.load_alias_or_file(&cached_path, ctx)? { + if let Some(path) = self.load_alias_or_file(&cached_path, ctx).await? 
{ ctx.with_fully_specified(false); return Ok(Some(path)); } } // Bail if path is module directory such as `ipaddr.js` - if !cached_path.is_file(&self.cache.fs, ctx) { + if !cached_path.is_file(&self.cache.fs, ctx).await { ctx.with_fully_specified(false); return Ok(None); } else if !self.check_restrictions(cached_path.path()) { @@ -1170,14 +1197,14 @@ impl ResolverGeneric { /// defaults to context configuration option. /// /// On non-Windows systems these requests are resolved as an absolute path first. - fn load_roots(&self, specifier: &str, ctx: &mut Ctx) -> Option { + async fn load_roots(&self, specifier: &str, ctx: &mut Ctx) -> Option { if self.options.roots.is_empty() { return None; } if let Some(specifier) = specifier.strip_prefix(SLASH_START) { for root in &self.options.roots { let cached_path = self.cache.value(root); - if let Ok(path) = self.require_relative(&cached_path, specifier, ctx) { + if let Ok(path) = self.require_relative(&cached_path, specifier, ctx).await { return Some(path); } } @@ -1185,7 +1212,7 @@ impl ResolverGeneric { None } - fn load_tsconfig_paths( + async fn load_tsconfig_paths( &self, cached_path: &CachedPath, specifier: &str, @@ -1194,89 +1221,107 @@ impl ResolverGeneric { let Some(tsconfig_options) = &self.options.tsconfig else { return Ok(None); }; - let tsconfig = self.load_tsconfig( - /* root */ true, - &tsconfig_options.config_file, - &tsconfig_options.references, - )?; + let tsconfig = self + .load_tsconfig( + /* root */ true, + &tsconfig_options.config_file, + &tsconfig_options.references, + ) + .await?; let paths = tsconfig.resolve(cached_path.path(), specifier); for path in paths { let cached_path = self.cache.value(&path); - if let Ok(path) = self.require_relative(&cached_path, ".", ctx) { + if let Ok(path) = self.require_relative(&cached_path, ".", ctx).await { return Ok(Some(path)); } } Ok(None) } - fn load_tsconfig( - &self, + fn load_tsconfig<'a>( + &'a self, root: bool, - path: &Path, - references: &TsconfigReferences, - ) -> Result, ResolveError> { - self.cache.tsconfig(root, path, |tsconfig| { - let directory = self.cache.value(tsconfig.directory()); - tracing::trace!(tsconfig = ?tsconfig, "load_tsconfig"); - - // Extend tsconfig - if let Some(extends) = &tsconfig.extends { - let extended_tsconfig_paths = match extends { - ExtendsField::Single(s) => { - vec![self.get_extended_tsconfig_path(&directory, tsconfig, s)?] + path: &'a Path, + references: &'a TsconfigReferences, + ) -> BoxFuture<'a, Result, ResolveError>> { + let fut = async move { + self.cache + .tsconfig(root, path, |mut tsconfig| async move { + let directory = self.cache.value(tsconfig.directory()); + tracing::trace!(tsconfig = ?tsconfig, "load_tsconfig"); + + // Extend tsconfig + if let Some(extends) = &tsconfig.extends { + let extended_tsconfig_paths = match extends { + ExtendsField::Single(s) => { + vec![ + self.get_extended_tsconfig_path(&directory, &tsconfig, s) + .await?, + ] + } + ExtendsField::Multiple(specifiers) => { + try_join_all(specifiers.iter().map(|s| { + self.get_extended_tsconfig_path(&directory, &tsconfig, s) + })) + .await? 
+ } + }; + for extended_tsconfig_path in extended_tsconfig_paths { + let extended_tsconfig = self + .load_tsconfig( + /* root */ false, + &extended_tsconfig_path, + &TsconfigReferences::Disabled, + ) + .await?; + tsconfig.extend_tsconfig(&extended_tsconfig); + } } - ExtendsField::Multiple(specifiers) => specifiers - .iter() - .map(|s| self.get_extended_tsconfig_path(&directory, tsconfig, s)) - .collect::, ResolveError>>()?, - }; - for extended_tsconfig_path in extended_tsconfig_paths { - let extended_tsconfig = self.load_tsconfig( - /* root */ false, - &extended_tsconfig_path, - &TsconfigReferences::Disabled, - )?; - tsconfig.extend_tsconfig(&extended_tsconfig); - } - } - // Load project references - match references { - TsconfigReferences::Disabled => { - tsconfig.references.drain(..); - } - TsconfigReferences::Auto => {} - TsconfigReferences::Paths(paths) => { - tsconfig.references = paths - .iter() - .map(|path| ProjectReference { path: path.clone(), tsconfig: None }) - .collect(); - } - } - if !tsconfig.references.is_empty() { - let directory = tsconfig.directory().to_path_buf(); - for reference in &mut tsconfig.references { - let reference_tsconfig_path = directory.normalize_with(&reference.path); - let tsconfig = self.cache.tsconfig( - /* root */ true, - &reference_tsconfig_path, - |reference_tsconfig| { - if reference_tsconfig.path == tsconfig.path { - return Err(ResolveError::TsconfigSelfReference( - reference_tsconfig.path.clone(), - )); - } - Ok(()) - }, - )?; - reference.tsconfig.replace(tsconfig); - } - } - Ok(()) - }) + // Load project references + match references { + TsconfigReferences::Disabled => { + tsconfig.references.drain(..); + } + TsconfigReferences::Auto => {} + TsconfigReferences::Paths(paths) => { + tsconfig.references = paths + .iter() + .map(|path| ProjectReference { path: path.clone(), tsconfig: None }) + .collect(); + } + } + if !tsconfig.references.is_empty() { + let directory = tsconfig.directory().to_path_buf(); + for reference in &mut tsconfig.references { + let reference_tsconfig_path = directory.normalize_with(&reference.path); + let tsconfig = self + .cache + .tsconfig( + /* root */ true, + &reference_tsconfig_path, + |reference_tsconfig| async { + if reference_tsconfig.path == tsconfig.path { + return Err(ResolveError::TsconfigSelfReference( + reference_tsconfig.path.clone(), + )); + } + Ok(reference_tsconfig) + }, + ) + .await?; + reference.tsconfig.replace(tsconfig); + } + } + + Ok(tsconfig) + }) + .await + }; + Box::pin(fut) } - fn get_extended_tsconfig_path( + async fn get_extended_tsconfig_path( &self, directory: &CachedPath, tsconfig: &TsConfig, @@ -1294,6 +1339,7 @@ impl ResolverGeneric { ..ResolveOptions::default() }) .load_package_self_or_node_modules(directory, specifier, &mut Ctx::default()) + .await .map(|p| p.to_path_buf()) .map_err(|err| match err { ResolveError::NotFound(_) => { @@ -1305,7 +1351,7 @@ impl ResolverGeneric { } /// PACKAGE_RESOLVE(packageSpecifier, parentURL) - fn package_resolve( + async fn package_resolve( &self, cached_path: &CachedPath, specifier: &str, @@ -1321,7 +1367,8 @@ impl ResolverGeneric { for module_name in &self.options.modules { for cached_path in std::iter::successors(Some(cached_path), |p| p.parent()) { // 1. Let packageURL be the URL resolution of "node_modules/" concatenated with packageSpecifier, relative to parentURL. 
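Aside (illustrative sketch, not part of this patch): `load_tsconfig` above, like `package_exports_resolve` and `package_target_resolve` further down, is recursive, so it cannot simply become an `async fn`; a self-awaiting `async fn` has an infinitely sized future (rustc E0733). The patch instead keeps a plain `fn` that returns `futures::future::BoxFuture` and wraps the body in `Box::pin(async move { ... })`. A minimal, self-contained sketch of that pattern, using an illustrative function rather than the crate's types:

use futures::future::BoxFuture;

// Recursion goes through a boxed, heap-allocated `dyn Future`, which breaks
// the otherwise infinitely sized async state machine.
fn depth(n: u32) -> BoxFuture<'static, u32> {
    Box::pin(async move {
        if n == 0 {
            return 0;
        }
        depth(n - 1).await + 1
    })
}

The `ExtendsField::Multiple` branch additionally awaits all the extended tsconfig paths together via `futures::future::try_join_all`, keeping the first error, as shown in the hunk above.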
- let Some(cached_path) = self.get_module_directory(cached_path, module_name, ctx) + let Some(cached_path) = + self.get_module_directory(cached_path, module_name, ctx).await else { continue; }; @@ -1330,20 +1377,23 @@ impl ResolverGeneric { let cached_path = self.cache.value(&package_path); // 3. If the folder at packageURL does not exist, then // 1. Continue the next loop iteration. - if cached_path.is_dir(&self.cache.fs, ctx) { + if cached_path.is_dir(&self.cache.fs, ctx).await { // 4. Let pjson be the result of READ_PACKAGE_JSON(packageURL). if let Some(package_json) = - cached_path.package_json(&self.cache.fs, &self.options, ctx)? + cached_path.package_json(&self.cache.fs, &self.options, ctx).await? { // 5. If pjson is not null and pjson.exports is not null or undefined, then // 1. Return the result of PACKAGE_EXPORTS_RESOLVE(packageURL, packageSubpath, pjson.exports, defaultConditions). for exports in package_json.exports_fields(&self.options.exports_fields) { - if let Some(path) = self.package_exports_resolve( - cached_path.path(), - &format!(".{subpath}"), - exports, - ctx, - )? { + if let Some(path) = self + .package_exports_resolve( + cached_path.path(), + &format!(".{subpath}"), + exports, + ctx, + ) + .await? + { return Ok(Some(path)); } } @@ -1354,7 +1404,7 @@ impl ResolverGeneric { // 1. Return the URL resolution of main in packageURL. let path = cached_path.path().normalize_with(main_field); let cached_path = self.cache.value(&path); - if cached_path.is_file(&self.cache.fs, ctx) + if cached_path.is_file(&self.cache.fs, ctx).await && self.check_restrictions(cached_path.path()) { return Ok(Some(cached_path.clone())); @@ -1364,7 +1414,7 @@ impl ResolverGeneric { } let subpath = format!(".{subpath}"); ctx.with_fully_specified(false); - return self.require(&cached_path, &subpath, ctx).map(Some); + return self.require(&cached_path, &subpath, ctx).await.map(Some); } } } @@ -1373,111 +1423,122 @@ impl ResolverGeneric { } /// PACKAGE_EXPORTS_RESOLVE(packageURL, subpath, exports, conditions) - fn package_exports_resolve( - &self, - package_url: &Path, - subpath: &str, - exports: &JSONValue, - ctx: &mut Ctx, - ) -> ResolveResult { - let conditions = &self.options.condition_names; - // 1. If exports is an Object with both a key starting with "." and a key not starting with ".", throw an Invalid Package Configuration error. - if let JSONValue::Object(map) = exports { - let mut has_dot = false; - let mut without_dot = false; - for key in map.keys() { - let starts_with_dot_or_hash = key.starts_with(['.', '#']); - has_dot = has_dot || starts_with_dot_or_hash; - without_dot = without_dot || !starts_with_dot_or_hash; - if has_dot && without_dot { - return Err(ResolveError::InvalidPackageConfig( + fn package_exports_resolve<'a>( + &'a self, + package_url: &'a Path, + subpath: &'a str, + exports: &'a JSONValue, + ctx: &'a mut Ctx, + ) -> BoxFuture<'a, ResolveResult> { + let fut = async move { + let conditions = &self.options.condition_names; + // 1. If exports is an Object with both a key starting with "." and a key not starting with ".", throw an Invalid Package Configuration error. 
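Aside (illustrative sketch, not part of this patch): probes that used to be synchronous, such as `cached_path.is_dir(&self.cache.fs, ctx)` and `cached_path.package_json(&self.cache.fs, &self.options, ctx)`, are now awaited, so the cache's file-system layer has to expose async operations. That trait is not shown in these hunks; assuming a tokio-based backend (tokio with the `fs` feature enabled), an async directory probe can look like this:

use std::path::Path;

// Treats any metadata error (e.g. NotFound) as "not a directory", matching
// the boolean style of the `is_dir` checks in the hunks above.
async fn path_is_dir(path: &Path) -> bool {
    tokio::fs::metadata(path).await.map(|m| m.is_dir()).unwrap_or(false)
}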
+ if let JSONValue::Object(map) = exports { + let mut has_dot = false; + let mut without_dot = false; + for key in map.keys() { + let starts_with_dot_or_hash = key.starts_with(['.', '#']); + has_dot = has_dot || starts_with_dot_or_hash; + without_dot = without_dot || !starts_with_dot_or_hash; + if has_dot && without_dot { + return Err(ResolveError::InvalidPackageConfig( + package_url.join("package.json"), + )); + } + } + } + // 2. If subpath is equal to ".", then + // Note: subpath is not prepended with a dot when passed in. + if subpath == "." { + // enhanced-resolve appends query and fragment when resolving exports field + // https://github.com/webpack/enhanced-resolve/blob/a998c7d218b7a9ec2461fc4fddd1ad5dd7687485/lib/ExportsFieldPlugin.js#L57-L62 + // This is only need when querying the main export, otherwise ctx is passed through. + if ctx.query.is_some() || ctx.fragment.is_some() { + let query = ctx.query.clone().unwrap_or_default(); + let fragment = ctx.fragment.clone().unwrap_or_default(); + return Err(ResolveError::PackagePathNotExported( + format!("./{}{query}{fragment}", subpath.trim_start_matches('.')), package_url.join("package.json"), )); } - } - } - // 2. If subpath is equal to ".", then - // Note: subpath is not prepended with a dot when passed in. - if subpath == "." { - // enhanced-resolve appends query and fragment when resolving exports field - // https://github.com/webpack/enhanced-resolve/blob/a998c7d218b7a9ec2461fc4fddd1ad5dd7687485/lib/ExportsFieldPlugin.js#L57-L62 - // This is only need when querying the main export, otherwise ctx is passed through. - if ctx.query.is_some() || ctx.fragment.is_some() { - let query = ctx.query.clone().unwrap_or_default(); - let fragment = ctx.fragment.clone().unwrap_or_default(); - return Err(ResolveError::PackagePathNotExported( - format!("./{}{query}{fragment}", subpath.trim_start_matches('.')), - package_url.join("package.json"), - )); - } - // 1. Let mainExport be undefined. - let main_export = match exports { - // 2. If exports is a String or Array, or an Object containing no keys starting with ".", then - JSONValue::String(_) | JSONValue::Array(_) => { - // 1. Set mainExport to exports. - Some(exports) + // 1. Let mainExport be undefined. + let main_export = match exports { + // 2. If exports is a String or Array, or an Object containing no keys starting with ".", then + JSONValue::String(_) | JSONValue::Array(_) => { + // 1. Set mainExport to exports. + Some(exports) + } + // 3. Otherwise if exports is an Object containing a "." property, then + JSONValue::Object(map) => { + // 1. Set mainExport to exports["."]. + map.get(".").map_or_else( + || { + if map + .keys() + .any(|key| key.starts_with("./") || key.starts_with('#')) + { + None + } else { + Some(exports) + } + }, + Some, + ) + } + _ => None, + }; + // 4. If mainExport is not undefined, then + if let Some(main_export) = main_export { + // 1. Let resolved be the result of PACKAGE_TARGET_RESOLVE( packageURL, mainExport, null, false, conditions). + let resolved = self + .package_target_resolve( + package_url, + ".", + main_export, + None, + /* is_imports */ false, + conditions, + ctx, + ) + .await?; + // 2. If resolved is not null or undefined, return resolved. + if let Some(path) = resolved { + return Ok(Some(path)); + } } - // 3. Otherwise if exports is an Object containing a "." property, then - JSONValue::Object(map) => { - // 1. Set mainExport to exports["."]. 
- map.get(".").map_or_else( - || { - if map.keys().any(|key| key.starts_with("./") || key.starts_with('#')) { - None - } else { - Some(exports) - } - }, - Some, + } + // 3. Otherwise, if exports is an Object and all keys of exports start with ".", then + if let JSONValue::Object(exports) = exports { + // 1. Let matchKey be the string "./" concatenated with subpath. + // Note: `package_imports_exports_resolve` does not require the leading dot. + let match_key = &subpath; + // 2. Let resolved be the result of PACKAGE_IMPORTS_EXPORTS_RESOLVE( matchKey, exports, packageURL, false, conditions). + if let Some(path) = self + .package_imports_exports_resolve( + match_key, + exports, + package_url, + /* is_imports */ false, + conditions, + ctx, ) - } - _ => None, - }; - // 4. If mainExport is not undefined, then - if let Some(main_export) = main_export { - // 1. Let resolved be the result of PACKAGE_TARGET_RESOLVE( packageURL, mainExport, null, false, conditions). - let resolved = self.package_target_resolve( - package_url, - ".", - main_export, - None, - /* is_imports */ false, - conditions, - ctx, - )?; - // 2. If resolved is not null or undefined, return resolved. - if let Some(path) = resolved { + .await? + { + // 3. If resolved is not null or undefined, return resolved. return Ok(Some(path)); } } - } - // 3. Otherwise, if exports is an Object and all keys of exports start with ".", then - if let JSONValue::Object(exports) = exports { - // 1. Let matchKey be the string "./" concatenated with subpath. - // Note: `package_imports_exports_resolve` does not require the leading dot. - let match_key = &subpath; - // 2. Let resolved be the result of PACKAGE_IMPORTS_EXPORTS_RESOLVE( matchKey, exports, packageURL, false, conditions). - if let Some(path) = self.package_imports_exports_resolve( - match_key, - exports, - package_url, - /* is_imports */ false, - conditions, - ctx, - )? { - // 3. If resolved is not null or undefined, return resolved. - return Ok(Some(path)); - } - } - // 4. Throw a Package Path Not Exported error. - Err(ResolveError::PackagePathNotExported( - subpath.to_string(), - package_url.join("package.json"), - )) + // 4. Throw a Package Path Not Exported error. + Err(ResolveError::PackagePathNotExported( + subpath.to_string(), + package_url.join("package.json"), + )) + }; + Box::pin(fut) } /// PACKAGE_IMPORTS_RESOLVE(specifier, parentURL, conditions) - fn package_imports_resolve( + async fn package_imports_resolve( &self, specifier: &str, package_json: &PackageJson, @@ -1506,14 +1567,17 @@ impl ResolverGeneric { )); } } - if let Some(path) = self.package_imports_exports_resolve( - specifier, - imports, - package_json.directory(), - /* is_imports */ true, - &self.options.condition_names, - ctx, - )? { + if let Some(path) = self + .package_imports_exports_resolve( + specifier, + imports, + package_json.directory(), + /* is_imports */ true, + &self.options.condition_names, + ctx, + ) + .await? + { // 2. If resolved is not null or undefined, return resolved. return Ok(Some(path)); } @@ -1531,7 +1595,7 @@ impl ResolverGeneric { } /// PACKAGE_IMPORTS_EXPORTS_RESOLVE(matchKey, matchObj, packageURL, isImports, conditions) - fn package_imports_exports_resolve( + async fn package_imports_exports_resolve( &self, match_key: &str, match_obj: &JSONMap, @@ -1550,15 +1614,17 @@ impl ResolverGeneric { // 1. Let target be the value of matchObj[matchKey]. if let Some(target) = match_obj.get(match_key) { // 2. Return the result of PACKAGE_TARGET_RESOLVE(packageURL, target, null, isImports, conditions). 
- return self.package_target_resolve( - package_url, - match_key, - target, - None, - is_imports, - conditions, - ctx, - ); + return self + .package_target_resolve( + package_url, + match_key, + target, + None, + is_imports, + conditions, + ctx, + ) + .await; } } @@ -1601,15 +1667,17 @@ impl ResolverGeneric { } if let Some(best_target) = best_target { // 3. Return the result of PACKAGE_TARGET_RESOLVE(packageURL, target, patternMatch, isImports, conditions). - return self.package_target_resolve( - package_url, - best_key, - best_target, - Some(best_match), - is_imports, - conditions, - ctx, - ); + return self + .package_target_resolve( + package_url, + best_key, + best_target, + Some(best_match), + is_imports, + conditions, + ctx, + ) + .await; } // 4. Return null. Ok(None) @@ -1617,153 +1685,164 @@ impl ResolverGeneric { /// PACKAGE_TARGET_RESOLVE(packageURL, target, patternMatch, isImports, conditions) #[allow(clippy::too_many_arguments)] - fn package_target_resolve( - &self, - package_url: &Path, - target_key: &str, - target: &JSONValue, - pattern_match: Option<&str>, + fn package_target_resolve<'a>( + &'a self, + package_url: &'a Path, + target_key: &'a str, + target: &'a JSONValue, + pattern_match: Option<&'a str>, is_imports: bool, - conditions: &[String], - ctx: &mut Ctx, - ) -> ResolveResult { - fn normalize_string_target<'a>( - target_key: &'a str, - target: &'a str, - pattern_match: Option<&'a str>, - package_url: &Path, - ) -> Result, ResolveError> { - let target = if let Some(pattern_match) = pattern_match { - if !target_key.contains('*') && !target.contains('*') { - // enhanced-resolve behaviour - // TODO: [DEP0148] DeprecationWarning: Use of deprecated folder mapping "./dist/" in the "exports" field module resolution of the package at xxx/package.json. - if target_key.ends_with('/') && target.ends_with('/') { - Cow::Owned(format!("{target}{pattern_match}")) + conditions: &'a [String], + ctx: &'a mut Ctx, + ) -> BoxFuture<'a, ResolveResult> { + let fut = async move { + fn normalize_string_target<'a>( + target_key: &'a str, + target: &'a str, + pattern_match: Option<&'a str>, + package_url: &Path, + ) -> Result, ResolveError> { + let target = if let Some(pattern_match) = pattern_match { + if !target_key.contains('*') && !target.contains('*') { + // enhanced-resolve behaviour + // TODO: [DEP0148] DeprecationWarning: Use of deprecated folder mapping "./dist/" in the "exports" field module resolution of the package at xxx/package.json. + if target_key.ends_with('/') && target.ends_with('/') { + Cow::Owned(format!("{target}{pattern_match}")) + } else { + return Err(ResolveError::InvalidPackageConfigDirectory( + package_url.join("package.json"), + )); + } } else { - return Err(ResolveError::InvalidPackageConfigDirectory( - package_url.join("package.json"), - )); + Cow::Owned(target.replace('*', pattern_match)) } } else { - Cow::Owned(target.replace('*', pattern_match)) - } - } else { - Cow::Borrowed(target) - }; - Ok(target) - } + Cow::Borrowed(target) + }; + Ok(target) + } - match target { - // 1. If target is a String, then - JSONValue::String(target) => { - // 1. If target does not start with "./", then - if !target.starts_with("./") { - // 1. If isImports is false, or if target starts with "../" or "/", or if target is a valid URL, then - if !is_imports || target.starts_with("../") || target.starts_with('/') { - // 1. Throw an Invalid Package Target error. + match target { + // 1. If target is a String, then + JSONValue::String(target) => { + // 1. 
If target does not start with "./", then + if !target.starts_with("./") { + // 1. If isImports is false, or if target starts with "../" or "/", or if target is a valid URL, then + if !is_imports || target.starts_with("../") || target.starts_with('/') { + // 1. Throw an Invalid Package Target error. + return Err(ResolveError::InvalidPackageTarget( + target.to_string(), + target_key.to_string(), + package_url.join("package.json"), + )); + } + // 2. If patternMatch is a String, then + // 1. Return PACKAGE_RESOLVE(target with every instance of "*" replaced by patternMatch, packageURL + "/"). + let target = normalize_string_target( + target_key, + target, + pattern_match, + package_url, + )?; + let package_url = self.cache.value(package_url); + // // 3. Return PACKAGE_RESOLVE(target, packageURL + "/"). + return self.package_resolve(&package_url, &target, ctx).await; + } + + // 2. If target split on "/" or "\" contains any "", ".", "..", or "node_modules" segments after the first "." segment, case insensitive and including percent encoded variants, throw an Invalid Package Target error. + // 3. Let resolvedTarget be the URL resolution of the concatenation of packageURL and target. + // 4. Assert: resolvedTarget is contained in packageURL. + // 5. If patternMatch is null, then + let target = + normalize_string_target(target_key, target, pattern_match, package_url)?; + if Path::new(target.as_ref()).is_invalid_exports_target() { return Err(ResolveError::InvalidPackageTarget( target.to_string(), target_key.to_string(), package_url.join("package.json"), )); } - // 2. If patternMatch is a String, then - // 1. Return PACKAGE_RESOLVE(target with every instance of "*" replaced by patternMatch, packageURL + "/"). - let target = - normalize_string_target(target_key, target, pattern_match, package_url)?; - let package_url = self.cache.value(package_url); - // // 3. Return PACKAGE_RESOLVE(target, packageURL + "/"). - return self.package_resolve(&package_url, &target, ctx); - } - - // 2. If target split on "/" or "\" contains any "", ".", "..", or "node_modules" segments after the first "." segment, case insensitive and including percent encoded variants, throw an Invalid Package Target error. - // 3. Let resolvedTarget be the URL resolution of the concatenation of packageURL and target. - // 4. Assert: resolvedTarget is contained in packageURL. - // 5. If patternMatch is null, then - let target = - normalize_string_target(target_key, target, pattern_match, package_url)?; - if Path::new(target.as_ref()).is_invalid_exports_target() { - return Err(ResolveError::InvalidPackageTarget( - target.to_string(), - target_key.to_string(), - package_url.join("package.json"), - )); + let resolved_target = package_url.normalize_with(target.as_ref()); + // 6. If patternMatch split on "/" or "\" contains any "", ".", "..", or "node_modules" segments, case insensitive and including percent encoded variants, throw an Invalid Module Specifier error. + // 7. Return the URL resolution of resolvedTarget with every instance of "*" replaced with patternMatch. + let value = self.cache.value(&resolved_target); + return Ok(Some(value)); } - let resolved_target = package_url.normalize_with(target.as_ref()); - // 6. If patternMatch split on "/" or "\" contains any "", ".", "..", or "node_modules" segments, case insensitive and including percent encoded variants, throw an Invalid Module Specifier error. - // 7. Return the URL resolution of resolvedTarget with every instance of "*" replaced with patternMatch. 
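Aside (illustrative worked example, not part of this patch): the `*` handling in `normalize_string_target` above reduces to two cases, shown here with made-up values:

#[test]
fn pattern_substitution_examples() {
    // Wildcard keys/targets: every '*' in the target is replaced with the
    // matched pattern, i.e. `target.replace('*', pattern_match)`.
    assert_eq!("./lib/*.js".replace('*', "main"), "./lib/main.js");
    // Deprecated trailing-slash folder mapping (key and target both end with
    // '/'): the pattern match is simply appended to the target.
    assert_eq!(format!("{}{}", "./lib/", "main"), "./lib/main");
}

When a pattern match is present but neither the key nor the target contains '*' and the two do not both end with '/', the function returns `InvalidPackageConfigDirectory` instead.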
- let value = self.cache.value(&resolved_target); - return Ok(Some(value)); - } - // 2. Otherwise, if target is a non-null Object, then - JSONValue::Object(target) => { - // 1. If exports contains any index property keys, as defined in ECMA-262 6.1.7 Array Index, throw an Invalid Package Configuration error. - // 2. For each property p of target, in object insertion order as, - for (key, target_value) in target { - // 1. If p equals "default" or conditions contains an entry for p, then - if key == "default" || conditions.contains(key) { - // 1. Let targetValue be the value of the p property in target. - // 2. Let resolved be the result of PACKAGE_TARGET_RESOLVE( packageURL, targetValue, patternMatch, isImports, conditions). - let resolved = self.package_target_resolve( - package_url, - target_key, - target_value, - pattern_match, - is_imports, - conditions, - ctx, - ); - // 3. If resolved is equal to undefined, continue the loop. - if let Some(path) = resolved? { - // 4. Return resolved. - return Ok(Some(path)); + // 2. Otherwise, if target is a non-null Object, then + JSONValue::Object(target) => { + // 1. If exports contains any index property keys, as defined in ECMA-262 6.1.7 Array Index, throw an Invalid Package Configuration error. + // 2. For each property p of target, in object insertion order as, + for (key, target_value) in target { + // 1. If p equals "default" or conditions contains an entry for p, then + if key == "default" || conditions.contains(key) { + // 1. Let targetValue be the value of the p property in target. + // 2. Let resolved be the result of PACKAGE_TARGET_RESOLVE( packageURL, targetValue, patternMatch, isImports, conditions). + let resolved = self + .package_target_resolve( + package_url, + target_key, + target_value, + pattern_match, + is_imports, + conditions, + ctx, + ) + .await; + // 3. If resolved is equal to undefined, continue the loop. + if let Some(path) = resolved? { + // 4. Return resolved. + return Ok(Some(path)); + } } } + // 3. Return undefined. + return Ok(None); } - // 3. Return undefined. - return Ok(None); - } - // 3. Otherwise, if target is an Array, then - JSONValue::Array(targets) => { - // 1. If _target.length is zero, return null. - if targets.is_empty() { - // Note: return PackagePathNotExported has the same effect as return because there are no matches. - return Err(ResolveError::PackagePathNotExported( - pattern_match.unwrap_or(".").to_string(), - package_url.join("package.json"), - )); - } - // 2. For each item targetValue in target, do - for (i, target_value) in targets.iter().enumerate() { - // 1. Let resolved be the result of PACKAGE_TARGET_RESOLVE( packageURL, targetValue, patternMatch, isImports, conditions), continuing the loop on any Invalid Package Target error. - let resolved = self.package_target_resolve( - package_url, - target_key, - target_value, - pattern_match, - is_imports, - conditions, - ctx, - ); - - if resolved.is_err() && i == targets.len() { - return resolved; + // 3. Otherwise, if target is an Array, then + JSONValue::Array(targets) => { + // 1. If _target.length is zero, return null. + if targets.is_empty() { + // Note: return PackagePathNotExported has the same effect as return because there are no matches. + return Err(ResolveError::PackagePathNotExported( + pattern_match.unwrap_or(".").to_string(), + package_url.join("package.json"), + )); } + // 2. For each item targetValue in target, do + for (i, target_value) in targets.iter().enumerate() { + // 1. 
Let resolved be the result of PACKAGE_TARGET_RESOLVE( packageURL, targetValue, patternMatch, isImports, conditions), continuing the loop on any Invalid Package Target error. + let resolved = self + .package_target_resolve( + package_url, + target_key, + target_value, + pattern_match, + is_imports, + conditions, + ctx, + ) + .await; - // 2. If resolved is undefined, continue the loop. - if let Ok(Some(path)) = resolved { - // 3. Return resolved. - return Ok(Some(path)); + if resolved.is_err() && i == targets.len() { + return resolved; + } + + // 2. If resolved is undefined, continue the loop. + if let Ok(Some(path)) = resolved { + // 3. Return resolved. + return Ok(Some(path)); + } } + // 3. Return or throw the last fallback resolution null return or error. + // Note: see `resolved.is_err() && i == targets.len()` } - // 3. Return or throw the last fallback resolution null return or error. - // Note: see `resolved.is_err() && i == targets.len()` + _ => {} } - _ => {} - } - // 4. Otherwise, if target is null, return null. - Ok(None) - // 5. Otherwise throw an Invalid Package Target error. + // 4. Otherwise, if target is null, return null. + Ok(None) + // 5. Otherwise throw an Invalid Package Target error. + }; + Box::pin(fut) } // Returns (module, subpath) diff --git a/src/package_json.rs b/src/package_json.rs index 7e05d202..0ba12a1b 100644 --- a/src/package_json.rs +++ b/src/package_json.rs @@ -126,7 +126,7 @@ impl PackageJson { pub(crate) fn main_fields<'a>( &'a self, main_fields: &'a [String], - ) -> impl Iterator + '_ { + ) -> impl Iterator + 'a { main_fields .iter() .filter_map(|main_field| self.raw_json.get(main_field)) @@ -139,7 +139,7 @@ impl PackageJson { pub(crate) fn exports_fields<'a>( &'a self, exports_fields: &'a [Vec], - ) -> impl Iterator + '_ { + ) -> impl Iterator + 'a { exports_fields.iter().filter_map(|object_path| { self.raw_json .as_object() @@ -153,7 +153,7 @@ impl PackageJson { pub(crate) fn imports_fields<'a>( &'a self, imports_fields: &'a [Vec], - ) -> impl Iterator + '_ { + ) -> impl Iterator + 'a { imports_fields.iter().filter_map(|object_path| { self.raw_json .as_object() @@ -169,7 +169,7 @@ impl PackageJson { fn browser_fields<'a>( &'a self, alias_fields: &'a [Vec], - ) -> impl Iterator + '_ { + ) -> impl Iterator + 'a { alias_fields.iter().filter_map(|object_path| { self.raw_json .as_object() @@ -190,7 +190,7 @@ impl PackageJson { path: &Path, request: Option<&str>, alias_fields: &'a [Vec], - ) -> Result, ResolveError> { + ) -> Result, ResolveError> { for object in self.browser_fields(alias_fields) { if let Some(request) = request { if let Some(value) = object.get(request) { diff --git a/src/path.rs b/src/path.rs index f66fdd76..38b980cd 100644 --- a/src/path.rs +++ b/src/path.rs @@ -101,8 +101,8 @@ impl PathUtil for Path { } // https://github.com/webpack/enhanced-resolve/blob/main/test/path.test.js -#[test] -fn is_invalid_exports_target() { +#[tokio::test] +async fn is_invalid_exports_target() { let test_cases = [ "../a.js", "../", @@ -123,8 +123,8 @@ fn is_invalid_exports_target() { assert!(!Path::new("/").is_invalid_exports_target()); } -#[test] -fn normalize() { +#[tokio::test] +async fn normalize() { assert_eq!(Path::new("/foo/.././foo/").normalize(), Path::new("/foo")); assert_eq!(Path::new("C://").normalize(), Path::new("C://")); assert_eq!(Path::new("C:").normalize(), Path::new("C:")); diff --git a/src/resolution.rs b/src/resolution.rs index a3aaa2e7..e0f7dc89 100644 --- a/src/resolution.rs +++ b/src/resolution.rs @@ -76,8 +76,8 @@ impl Resolution { } } 
-#[test] -fn test() { +#[tokio::test] +async fn test() { let resolution = Resolution { path: PathBuf::from("foo"), query: Some("?query".to_string()), diff --git a/src/tests/alias.rs b/src/tests/alias.rs index 2fa8f903..e3b53b0d 100644 --- a/src/tests/alias.rs +++ b/src/tests/alias.rs @@ -5,9 +5,9 @@ use std::path::Path; use crate::{AliasValue, Resolution, ResolveContext, ResolveError, ResolveOptions, Resolver}; -#[test] +#[tokio::test] #[cfg(not(target_os = "windows"))] // MemoryFS's path separator is always `/` so the test will not pass in windows. -fn alias() { +async fn alias() { use super::memory_fs::MemoryFS; use crate::ResolverGeneric; use std::path::{Path, PathBuf}; @@ -106,7 +106,7 @@ fn alias() { ]; for (comment, request, expected) in pass { - let resolved_path = resolver.resolve(f, request).map(|r| r.full_path()); + let resolved_path = resolver.resolve(f, request).await.map(|r| r.full_path()); assert_eq!(resolved_path, Ok(PathBuf::from(expected)), "{comment} {request}"); } @@ -116,14 +116,14 @@ fn alias() { ]; for (comment, request, expected) in ignore { - let resolution = resolver.resolve(f, request); + let resolution = resolver.resolve(f, request).await; assert_eq!(resolution, Err(expected), "{comment} {request}"); } } // Not part of enhanced-resolve -#[test] -fn infinite_recursion() { +#[tokio::test] +async fn infinite_recursion() { let f = super::fixture(); let resolver = Resolver::new(ResolveOptions { alias: vec![ @@ -132,7 +132,7 @@ fn infinite_recursion() { ], ..ResolveOptions::default() }); - let resolution = resolver.resolve(f, "./a"); + let resolution = resolver.resolve(f, "./a").await; assert_eq!(resolution, Err(ResolveError::Recursion)); } @@ -150,20 +150,20 @@ fn check_slash(path: &Path) { } } -#[test] -fn absolute_path() { +#[tokio::test] +async fn absolute_path() { let f = super::fixture(); let resolver = Resolver::new(ResolveOptions { alias: vec![(f.join("foo").to_str().unwrap().to_string(), vec![AliasValue::Ignore])], modules: vec![f.clone().to_str().unwrap().to_string()], ..ResolveOptions::default() }); - let resolution = resolver.resolve(&f, "foo/index"); + let resolution = resolver.resolve(&f, "foo/index").await; assert_eq!(resolution, Err(ResolveError::Ignored(f.join("foo")))); } -#[test] -fn system_path() { +#[tokio::test] +async fn system_path() { let f = super::fixture(); let resolver = Resolver::new(ResolveOptions { alias: vec![("@app".into(), vec![AliasValue::from(f.join("alias").to_string_lossy())])], @@ -173,14 +173,14 @@ fn system_path() { let specifiers = ["@app/files/a", "@app/files/a.js"]; for specifier in specifiers { - let path = resolver.resolve(&f, specifier).map(Resolution::into_path_buf).unwrap(); + let path = resolver.resolve(&f, specifier).await.map(Resolution::into_path_buf).unwrap(); assert_eq!(path, f.join("alias/files/a.js")); check_slash(&path); } } -#[test] -fn alias_is_full_path() { +#[tokio::test] +async fn alias_is_full_path() { let f = super::fixture(); let dir = f.join("foo"); let dir_str = dir.to_string_lossy().to_string(); @@ -203,7 +203,7 @@ fn alias_is_full_path() { for specifier in specifiers { let resolution = resolver.resolve_with_context(&f, &specifier, &mut ctx); - assert_eq!(resolution.map(|r| r.full_path()), Ok(dir.join("index.js"))); + assert_eq!(resolution.await.map(|r| r.full_path()), Ok(dir.join("index.js"))); } for path in ctx.file_dependencies { @@ -221,8 +221,8 @@ fn alias_is_full_path() { } // For the `should_stop` variable in `load_alias` -#[test] -fn all_alias_values_are_not_found() { +#[tokio::test] +async 
fn all_alias_values_are_not_found() { let f = super::fixture(); let resolver = Resolver::new(ResolveOptions { alias: vec![( @@ -231,15 +231,15 @@ fn all_alias_values_are_not_found() { )], ..ResolveOptions::default() }); - let resolution = resolver.resolve(&f, "m1/a.js"); + let resolution = resolver.resolve(&f, "m1/a.js").await; assert_eq!( resolution, Err(ResolveError::MatchedAliasNotFound("m1/a.js".to_string(), "m1".to_string(),)) ); } -#[test] -fn alias_fragment() { +#[tokio::test] +async fn alias_fragment() { let f = super::fixture(); let data = [ @@ -264,13 +264,13 @@ fn alias_fragment() { alias: vec![("foo".to_string(), vec![AliasValue::Path(request.to_string())])], ..ResolveOptions::default() }); - let resolved_path = resolver.resolve(&f, "foo").map(|r| r.full_path()); + let resolved_path = resolver.resolve(&f, "foo").await.map(|r| r.full_path()); assert_eq!(resolved_path, Ok(expected), "{comment} {request}"); } } -#[test] -fn alias_try_fragment_as_path() { +#[tokio::test] +async fn alias_try_fragment_as_path() { let f = super::fixture(); let resolver = Resolver::new(ResolveOptions { alias: vec![( @@ -279,6 +279,6 @@ fn alias_try_fragment_as_path() { )], ..ResolveOptions::default() }); - let resolution = resolver.resolve(&f, "#/a").map(|r| r.full_path()); + let resolution = resolver.resolve(&f, "#/a").await.map(|r| r.full_path()); assert_eq!(resolution, Ok(f.join("#").join("a.js"))); } diff --git a/src/tests/browser_field.rs b/src/tests/browser_field.rs index 171ee099..7f547609 100644 --- a/src/tests/browser_field.rs +++ b/src/tests/browser_field.rs @@ -2,8 +2,8 @@ use crate::{AliasValue, ResolveError, ResolveOptions, Resolver}; -#[test] -fn ignore() { +#[tokio::test] +async fn ignore() { let f = super::fixture().join("browser-module"); let resolver = Resolver::new(ResolveOptions { @@ -25,33 +25,33 @@ fn ignore() { ]; for (path, request, expected) in data { - let resolution = resolver.resolve(&path, request); + let resolution = resolver.resolve(&path, request).await; let expected = ResolveError::Ignored(expected); assert_eq!(resolution, Err(expected), "{path:?} {request}"); } } -#[test] -fn shared_resolvers() { +#[tokio::test] +async fn shared_resolvers() { let f = super::fixture().join("browser-module"); let resolver1 = Resolver::new(ResolveOptions { alias_fields: vec![vec!["innerBrowser1".into(), "field".into(), "browser".into()]], ..ResolveOptions::default() }); - let resolved_path = resolver1.resolve(&f, "./lib/main1.js").map(|r| r.full_path()); + let resolved_path = resolver1.resolve(&f, "./lib/main1.js").await.map(|r| r.full_path()); assert_eq!(resolved_path, Ok(f.join("lib/main.js"))); let resolver2 = resolver1.clone_with_options(ResolveOptions { alias_fields: vec![vec!["innerBrowser2".into(), "browser".into()]], ..ResolveOptions::default() }); - let resolved_path = resolver2.resolve(&f, "./lib/main2.js").map(|r| r.full_path()); + let resolved_path = resolver2.resolve(&f, "./lib/main2.js").await.map(|r| r.full_path()); assert_eq!(resolved_path, Ok(f.join("./lib/replaced.js"))); } -#[test] -fn replace_file() { +#[tokio::test] +async fn replace_file() { let f = super::fixture().join("browser-module"); let resolver = Resolver::new(ResolveOptions { @@ -86,13 +86,13 @@ fn replace_file() { ]; for (comment, path, request, expected) in data { - let resolved_path = resolver.resolve(&path, request).map(|r| r.full_path()); + let resolved_path = resolver.resolve(&path, request).await.map(|r| r.full_path()); assert_eq!(resolved_path, Ok(expected), "{comment} {path:?} {request}"); } } 
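Aside (illustrative sketch, not part of this patch): the test-suite changes all follow one mechanical pattern: `#[test] fn ...` becomes `#[tokio::test] async fn ...`, and every `resolve(...)` / `resolve_with_context(...)` call gains `.await`. A converted test has this shape (the specifier is made up; the expected error mirrors the `builtins_off` and `fail` tests further down):

use std::path::Path;
use crate::{ResolveError, Resolver};

#[tokio::test]
async fn bare_specifier_not_found() {
    let resolver = Resolver::default();
    // The resolve call is now a future and must be awaited.
    let resolution = resolver.resolve(Path::new("/"), "does-not-exist").await;
    assert_eq!(resolution, Err(ResolveError::NotFound("does-not-exist".to_string())));
}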
-#[test] -fn recurse_fail() { +#[tokio::test] +async fn recurse_fail() { let f = super::fixture(); let resolver = Resolver::new(ResolveOptions { @@ -108,13 +108,13 @@ fn recurse_fail() { ]; for (comment, path, request, expected) in data { - let resolved_path = resolver.resolve(&path, request).map(|r| r.full_path()); + let resolved_path = resolver.resolve(&path, request).await.map(|r| r.full_path()); assert_eq!(resolved_path, Err(expected), "{comment} {path:?} {request}"); } } -#[test] -fn broken() { +#[tokio::test] +async fn broken() { let f = super::fixture(); let resolver = Resolver::new(ResolveOptions { @@ -130,13 +130,13 @@ fn broken() { ]; for (path, request, expected) in data { - let resolved_path = resolver.resolve(&path, request).map(|r| r.full_path()); + let resolved_path = resolver.resolve(&path, request).await.map(|r| r.full_path()); assert_eq!(resolved_path, expected, "{path:?} {request}"); } } -#[test] -fn crypto_js() { +#[tokio::test] +async fn crypto_js() { let f = super::fixture(); let resolver = Resolver::new(ResolveOptions { @@ -148,13 +148,14 @@ fn crypto_js() { ..ResolveOptions::default() }); - let resolved_path = resolver.resolve(f.join("crypto-js"), "crypto").map(|r| r.full_path()); + let resolved_path = + resolver.resolve(f.join("crypto-js"), "crypto").await.map(|r| r.full_path()); assert_eq!(resolved_path, Err(ResolveError::Ignored(f.join("crypto-js")))); } // https://github.com/webpack/webpack/blob/87660921808566ef3b8796f8df61bd79fc026108/test/cases/resolving/browser-field/index.js#L40-L43 -#[test] -fn recursive() { +#[tokio::test] +async fn recursive() { let f = super::fixture().join("browser-module"); let resolver = Resolver::new(ResolveOptions { @@ -170,13 +171,13 @@ fn recursive() { ]; for (comment, path, request) in data { - let resolved_path = resolver.resolve(&path, request); + let resolved_path = resolver.resolve(&path, request).await; assert_eq!(resolved_path, Err(ResolveError::Recursion), "{comment} {path:?} {request}"); } } -#[test] -fn with_query() { +#[tokio::test] +async fn with_query() { let f = super::fixture().join("browser-module"); let resolver = Resolver::new(ResolveOptions { @@ -184,6 +185,6 @@ fn with_query() { ..ResolveOptions::default() }); - let resolved_path = resolver.resolve(&f, "./foo").map(|r| r.full_path()); + let resolved_path = resolver.resolve(&f, "./foo").await.map(|r| r.full_path()); assert_eq!(resolved_path, Ok(f.join("lib").join("browser.js?query"))); } diff --git a/src/tests/builtins.rs b/src/tests/builtins.rs index 5d6d14d9..7fe24c38 100644 --- a/src/tests/builtins.rs +++ b/src/tests/builtins.rs @@ -2,16 +2,16 @@ use std::path::Path; use crate::{ResolveError, ResolveOptions, Resolver}; -#[test] -fn builtins_off() { +#[tokio::test] +async fn builtins_off() { let f = Path::new("/"); let resolver = Resolver::default(); - let resolved_path = resolver.resolve(f, "zlib").map(|r| r.full_path()); + let resolved_path = resolver.resolve(f, "zlib").await.map(|r| r.full_path()); assert_eq!(resolved_path, Err(ResolveError::NotFound("zlib".into()))); } -#[test] -fn builtins() { +#[tokio::test] +async fn builtins() { let f = Path::new("/"); let resolver = Resolver::new(ResolveOptions::default().with_builtin_modules(true)); @@ -87,25 +87,25 @@ fn builtins() { for request in pass { let prefixed_request = format!("node:{request}"); for request in [prefixed_request.clone(), request.to_string()] { - let resolved_path = resolver.resolve(f, &request).map(|r| r.full_path()); + let resolved_path = resolver.resolve(f, &request).await.map(|r| 
r.full_path()); let err = ResolveError::Builtin(prefixed_request.clone()); assert_eq!(resolved_path, Err(err), "{request}"); } } } -#[test] -fn fail() { +#[tokio::test] +async fn fail() { let f = Path::new("/"); let resolver = Resolver::new(ResolveOptions::default().with_builtin_modules(true)); let request = "xxx"; - let resolved_path = resolver.resolve(f, request); + let resolved_path = resolver.resolve(f, request).await; let err = ResolveError::NotFound(request.to_string()); assert_eq!(resolved_path, Err(err), "{request}"); } -#[test] -fn imports() { +#[tokio::test] +async fn imports() { let f = super::fixture().join("builtins"); let resolver = Resolver::new(ResolveOptions { builtin_modules: true, @@ -114,7 +114,7 @@ fn imports() { }); for request in ["#fs", "#http"] { - let resolved_path = resolver.resolve(f.clone(), request).map(|r| r.full_path()); + let resolved_path = resolver.resolve(f.clone(), request).await.map(|r| r.full_path()); let err = ResolveError::Builtin(format!("node:{}", request.trim_start_matches('#'))); assert_eq!(resolved_path, Err(err)); } diff --git a/src/tests/dependencies.rs b/src/tests/dependencies.rs index bcc2355b..b92fad12 100644 --- a/src/tests/dependencies.rs +++ b/src/tests/dependencies.rs @@ -18,8 +18,8 @@ mod windows { ]) } - #[test] - fn test() { + #[tokio::test] + async fn test() { let file_system = file_system(); let resolver = ResolverGeneric::::new_with_file_system( @@ -97,7 +97,7 @@ mod windows { let mut ctx = ResolveContext::default(); let path = PathBuf::from(context); let resolved = - resolver.resolve_with_context(path, request, &mut ctx).map(|r| r.full_path()); + resolver.resolve_with_context(path, request, &mut ctx).await.map(|r| r.full_path()); assert_eq!(resolved, Ok(PathBuf::from(result))); let file_dependencies = FxHashSet::from_iter(file_dependencies.iter().map(PathBuf::from)); diff --git a/src/tests/exports_field.rs b/src/tests/exports_field.rs index 78e770c4..253b351d 100644 --- a/src/tests/exports_field.rs +++ b/src/tests/exports_field.rs @@ -6,8 +6,8 @@ use crate::{Ctx, PathUtil, ResolveError, ResolveOptions, Resolver}; use serde_json::json; use std::path::Path; -#[test] -fn test_simple() { +#[tokio::test] +async fn test_simple() { let f = super::fixture().join("exports-field"); let f2 = super::fixture().join("exports-field2"); let f4 = super::fixture().join("exports-field-error"); @@ -54,7 +54,7 @@ fn test_simple() { // * should log the correct info for (comment, path, request, expected) in pass { - let resolved_path = resolver.resolve(&path, request).map(|r| r.full_path()); + let resolved_path = resolver.resolve(&path, request).await.map(|r| r.full_path()); assert_eq!(resolved_path, Ok(expected), "{comment} {path:?} {request}"); } @@ -81,14 +81,14 @@ fn test_simple() { ]; for (comment, path, request, error) in fail { - let resolution = resolver.resolve(&path, request); + let resolution = resolver.resolve(&path, request).await; assert_eq!(resolution, Err(error), "{comment} {path:?} {request}"); } } // resolve using exports field, not a browser field #1 -#[test] -fn exports_not_browser_field1() { +#[tokio::test] +async fn exports_not_browser_field1() { let f = super::fixture().join("exports-field"); let resolver = Resolver::new(ResolveOptions { @@ -98,13 +98,14 @@ fn exports_not_browser_field1() { ..ResolveOptions::default() }); - let resolved_path = resolver.resolve(&f, "exports-field/dist/main.js").map(|r| r.full_path()); + let resolved_path = + resolver.resolve(&f, "exports-field/dist/main.js").await.map(|r| r.full_path()); 
assert_eq!(resolved_path, Ok(f.join("node_modules/exports-field/lib/lib2/main.js"))); } // resolve using exports field and a browser alias field #2 -#[test] -fn exports_not_browser_field2() { +#[tokio::test] +async fn exports_not_browser_field2() { let f2 = super::fixture().join("exports-field2"); let resolver = Resolver::new(ResolveOptions { @@ -114,13 +115,14 @@ fn exports_not_browser_field2() { ..ResolveOptions::default() }); - let resolved_path = resolver.resolve(&f2, "exports-field/dist/main.js").map(|r| r.full_path()); + let resolved_path = + resolver.resolve(&f2, "exports-field/dist/main.js").await.map(|r| r.full_path()); assert_eq!(resolved_path, Ok(f2.join("node_modules/exports-field/lib/browser.js"))); } // should resolve extension without fullySpecified -#[test] -fn extension_without_fully_specified() { +#[tokio::test] +async fn extension_without_fully_specified() { let f2 = super::fixture().join("exports-field2"); let commonjs_resolver = Resolver::new(ResolveOptions { @@ -130,12 +132,12 @@ fn extension_without_fully_specified() { }); let resolved_path = - commonjs_resolver.resolve(&f2, "exports-field/dist/main").map(|r| r.full_path()); + commonjs_resolver.resolve(&f2, "exports-field/dist/main").await.map(|r| r.full_path()); assert_eq!(resolved_path, Ok(f2.join("node_modules/exports-field/lib/lib2/main.js"))); } -#[test] -fn field_name_path() { +#[tokio::test] +async fn field_name_path() { let f2 = super::fixture().join("exports-field2"); let f3 = super::fixture().join("exports-field3"); @@ -153,7 +155,7 @@ fn field_name_path() { extensions: vec![".js".into()], ..ResolveOptions::default() }); - let resolved_path = resolver.resolve(&f3, "exports-field").map(|r| r.full_path()); + let resolved_path = resolver.resolve(&f3, "exports-field").await.map(|r| r.full_path()); assert_eq!(resolved_path, Ok(f3.join("node_modules/exports-field/main.js"))); } @@ -164,7 +166,7 @@ fn field_name_path() { extensions: vec![".js".into()], ..ResolveOptions::default() }); - let resolved_path = resolver.resolve(&f2, "exports-field").map(|r| r.full_path()); + let resolved_path = resolver.resolve(&f2, "exports-field").await.map(|r| r.full_path()); assert_eq!(resolved_path, Ok(f2.join("node_modules/exports-field/index.js"))); // field name path #5 @@ -174,7 +176,7 @@ fn field_name_path() { extensions: vec![".js".into()], ..ResolveOptions::default() }); - let resolved_path = resolver.resolve(&f3, "exports-field").map(|r| r.full_path()); + let resolved_path = resolver.resolve(&f3, "exports-field").await.map(|r| r.full_path()); assert_eq!(resolved_path, Ok(f3.join("node_modules/exports-field/index"))); // non-compliant export targeting a directory @@ -183,12 +185,12 @@ fn field_name_path() { extensions: vec![".js".into()], ..ResolveOptions::default() }); - let resolved_path = resolver.resolve(&f3, "exports-field").map(|r| r.full_path()); + let resolved_path = resolver.resolve(&f3, "exports-field").await.map(|r| r.full_path()); assert_eq!(resolved_path, Ok(f3.join("node_modules/exports-field/src/index.js"))); } -#[test] -fn shared_resolvers() { +#[tokio::test] +async fn shared_resolvers() { let f3 = super::fixture().join("exports-field3"); let resolver1 = Resolver::new(ResolveOptions { @@ -196,7 +198,7 @@ fn shared_resolvers() { extensions: vec![".js".into()], ..ResolveOptions::default() }); - let resolved_path = resolver1.resolve(&f3, "exports-field").map(|r| r.full_path()); + let resolved_path = resolver1.resolve(&f3, "exports-field").await.map(|r| r.full_path()); assert_eq!(resolved_path, 
Ok(f3.join("node_modules/exports-field/main.js"))); let resolver2 = resolver1.clone_with_options(ResolveOptions { @@ -204,12 +206,12 @@ fn shared_resolvers() { extensions: vec![".js".into()], ..ResolveOptions::default() }); - let resolved_path = resolver2.resolve(&f3, "exports-field").map(|r| r.full_path()); + let resolved_path = resolver2.resolve(&f3, "exports-field").await.map(|r| r.full_path()); assert_eq!(resolved_path, Ok(f3.join("node_modules/exports-field/index"))); } -#[test] -fn extension_alias_1_2() { +#[tokio::test] +async fn extension_alias_1_2() { let f = super::fixture().join("exports-field-and-extension-alias"); let resolver = Resolver::new(ResolveOptions { @@ -227,13 +229,13 @@ fn extension_alias_1_2() { ]; for (comment, path, request, expected) in pass { - let resolved_path = resolver.resolve(&path, request).map(|r| r.full_path()); + let resolved_path = resolver.resolve(&path, request).await.map(|r| r.full_path()); assert_eq!(resolved_path, Ok(expected), "{comment} {path:?} {request}"); } } -#[test] -fn extension_alias_3() { +#[tokio::test] +async fn extension_alias_3() { let f = super::fixture().join("exports-field-and-extension-alias"); let resolver = Resolver::new(ResolveOptions { @@ -253,13 +255,13 @@ fn extension_alias_3() { ]; for (comment, path, request, expected) in pass { - let resolved_path = resolver.resolve(&path, request).map(|r| r.full_path()); + let resolved_path = resolver.resolve(&path, request).await.map(|r| r.full_path()); assert_eq!(resolved_path, Ok(expected), "{comment} {path:?} {request}"); } } -#[test] -fn extension_alias_throw_error() { +#[tokio::test] +async fn extension_alias_throw_error() { let f = super::fixture().join("exports-field-and-extension-alias"); let resolver = Resolver::new(ResolveOptions { @@ -280,7 +282,7 @@ fn extension_alias_throw_error() { ]; for (comment, path, request, error) in fail { - let resolution = resolver.resolve(&path, request); + let resolution = resolver.resolve(&path, request).await; assert_eq!(resolution, Err(error), "{comment} {path:?} {request}"); } } @@ -312,8 +314,8 @@ fn exports_field(value: serde_json::Value) -> serde_json::Value { value } -#[test] -fn test_cases() { +#[tokio::test] +async fn test_cases() { let test_cases = [ TestCase { name: "sample #1", @@ -2530,6 +2532,7 @@ fn test_cases() { &case.exports_field, &mut Ctx::default(), ) + .await .map(|p| p.map(|p| p.to_path_buf())); if let Some(expect) = case.expect { if expect.is_empty() { diff --git a/src/tests/extension_alias.rs b/src/tests/extension_alias.rs index cf4fd362..926a8df5 100644 --- a/src/tests/extension_alias.rs +++ b/src/tests/extension_alias.rs @@ -2,8 +2,8 @@ use crate::{ResolveError, ResolveOptions, Resolver}; -#[test] -fn extension_alias() { +#[tokio::test] +async fn extension_alias() { let f = super::fixture().join("extension-alias"); let resolver = Resolver::new(ResolveOptions { @@ -25,19 +25,19 @@ fn extension_alias() { ]; for (comment, path, request, expected) in pass { - let resolved_path = resolver.resolve(&path, request).map(|r| r.full_path()); + let resolved_path = resolver.resolve(&path, request).await.map(|r| r.full_path()); assert_eq!(resolved_path, Ok(expected), "{comment} {path:?} {request}"); } // should not allow to fallback to the original extension or add extensions - let resolution = resolver.resolve(&f, "./index.mjs").unwrap_err(); + let resolution = resolver.resolve(&f, "./index.mjs").await.unwrap_err(); let expected = ResolveError::ExtensionAlias("index.mjs".into(), "index.mts".into(), f); assert_eq!(resolution, 
expected); } // should not apply extension alias to extensions or mainFiles field -#[test] -fn not_apply_to_extension_nor_main_files() { +#[tokio::test] +async fn not_apply_to_extension_nor_main_files() { let f = super::fixture().join("extension-alias"); let resolver = Resolver::new(ResolveOptions { @@ -54,7 +54,7 @@ fn not_apply_to_extension_nor_main_files() { ]; for (comment, path, request, expected) in pass { - let resolved_path = resolver.resolve(&path, request).map(|r| r.full_path()); + let resolved_path = resolver.resolve(&path, request).await.map(|r| r.full_path()); let expected = f.join(expected); assert_eq!(resolved_path, Ok(expected), "{comment} {path:?} {request}"); } diff --git a/src/tests/extensions.rs b/src/tests/extensions.rs index 1ebd86f4..dacf17e8 100644 --- a/src/tests/extensions.rs +++ b/src/tests/extensions.rs @@ -3,8 +3,8 @@ use crate::{EnforceExtension, Resolution, ResolveContext, ResolveError, ResolveOptions, Resolver}; use rustc_hash::FxHashSet; -#[test] -fn extensions() { +#[tokio::test] +async fn extensions() { let f = super::fixture().join("extensions"); let resolver = Resolver::new(ResolveOptions { @@ -23,7 +23,7 @@ fn extensions() { ]; for (comment, request, expected_path) in pass { - let resolved_path = resolver.resolve(&f, request).map(|r| r.full_path()); + let resolved_path = resolver.resolve(&f, request).await.map(|r| r.full_path()); let expected = f.join(expected_path); assert_eq!(resolved_path, Ok(expected), "{comment} {request} {expected_path}"); } @@ -34,15 +34,15 @@ fn extensions() { ]; for (comment, request, expected_error) in fail { - let resolution = resolver.resolve(&f, request); + let resolution = resolver.resolve(&f, request).await; let error = ResolveError::NotFound(expected_error); assert_eq!(resolution, Err(error), "{comment} {request} {resolution:?}"); } } // should default enforceExtension to true when extensions includes an empty string -#[test] -fn default_enforce_extension() { +#[tokio::test] +async fn default_enforce_extension() { let f = super::fixture().join("extensions"); let mut ctx = ResolveContext::default(); @@ -50,7 +50,8 @@ fn default_enforce_extension() { extensions: vec![".ts".into(), String::new(), ".js".into()], ..ResolveOptions::default() }) - .resolve_with_context(&f, "./foo", &mut ctx); + .resolve_with_context(&f, "./foo", &mut ctx) + .await; assert_eq!(resolved.map(Resolution::into_path_buf), Ok(f.join("foo.ts"))); assert_eq!( @@ -61,8 +62,8 @@ fn default_enforce_extension() { } // should respect enforceExtension when extensions includes an empty string -#[test] -fn respect_enforce_extension() { +#[tokio::test] +async fn respect_enforce_extension() { let f = super::fixture().join("extensions"); let mut ctx = ResolveContext::default(); @@ -71,7 +72,8 @@ fn respect_enforce_extension() { extensions: vec![".ts".into(), String::new(), ".js".into()], ..ResolveOptions::default() }) - .resolve_with_context(&f, "./foo", &mut ctx); + .resolve_with_context(&f, "./foo", &mut ctx) + .await; assert_eq!(resolved.map(Resolution::into_path_buf), Ok(f.join("foo.ts"))); assert_eq!( @@ -81,8 +83,8 @@ fn respect_enforce_extension() { assert_eq!(ctx.missing_dependencies, FxHashSet::from_iter([f.join("foo")])); } -#[test] -fn multi_dot_extension() { +#[tokio::test] +async fn multi_dot_extension() { let f = super::fixture().join("extensions"); let resolver = Resolver::new(ResolveOptions { @@ -98,7 +100,7 @@ fn multi_dot_extension() { ]; for (comment, request, expected_path) in pass { - let resolved_path = resolver.resolve(&f, request).map(|r| 
r.full_path());
+        let resolved_path = resolver.resolve(&f, request).await.map(|r| r.full_path());
         let expected = f.join(expected_path);
         assert_eq!(resolved_path, Ok(expected), "{comment} {request} {expected_path}");
     }
@@ -109,7 +111,7 @@ fn multi_dot_extension() {
     ];
 
     for (comment, request, expected_error) in fail {
-        let resolution = resolver.resolve(&f, request);
+        let resolution = resolver.resolve(&f, request).await;
         let error = ResolveError::NotFound(expected_error);
         assert_eq!(resolution, Err(error), "{comment} {request} {resolution:?}");
     }
diff --git a/src/tests/fallback.rs b/src/tests/fallback.rs
index 3da2d767..fce04888 100644
--- a/src/tests/fallback.rs
+++ b/src/tests/fallback.rs
@@ -1,8 +1,8 @@
 //! https://github.com/webpack/enhanced-resolve/blob/main/test/fallback.test.js
 
-#[test]
+#[tokio::test]
 #[cfg(not(target_os = "windows"))] // MemoryFS's path separator is always `/` so the test will not pass in windows.
-fn fallback() {
+async fn fallback() {
     use super::memory_fs::MemoryFS;
     use crate::{AliasValue, ResolveError, ResolveOptions, ResolverGeneric};
     use std::path::{Path, PathBuf};
@@ -83,7 +83,7 @@ fn fallback() {
     ];
 
     for (comment, request, expected) in pass {
-        let resolved_path = resolver.resolve(f, request).map(|r| r.full_path());
+        let resolved_path = resolver.resolve(f, request).await.map(|r| r.full_path());
         assert_eq!(resolved_path, Ok(PathBuf::from(expected)), "{comment} {request}");
     }
 
@@ -94,7 +94,7 @@ fn fallback() {
     ];
 
     for (comment, request, expected) in ignore {
-        let resolution = resolver.resolve(f, request);
+        let resolution = resolver.resolve(f, request).await;
         assert_eq!(resolution, Err(expected), "{comment} {request}");
     }
 }
diff --git a/src/tests/full_specified.rs b/src/tests/full_specified.rs
index a70c0191..3838bd5c 100644
--- a/src/tests/full_specified.rs
+++ b/src/tests/full_specified.rs
@@ -25,8 +25,8 @@ mod windows {
         ])
     }
 
-    #[test]
-    fn test() {
+    #[tokio::test]
+    async fn test() {
         let file_system = file_system();
 
         let resolver = ResolverGeneric::<MemoryFS>::new_with_file_system(
@@ -53,7 +53,7 @@ mod windows {
         ];
 
         for (comment, request) in failing_resolves {
-            let resolution = resolver.resolve("/a", request);
+            let resolution = resolver.resolve("/a", request).await;
             assert!(resolution.is_err(), "{comment} {request}");
         }
 
@@ -70,14 +70,14 @@ mod windows {
         ];
 
         for (comment, request, expected) in successful_resolves {
-            let resolution = resolver.resolve("/a", request).map(|r| r.full_path());
+            let resolution = resolver.resolve("/a", request).await.map(|r| r.full_path());
             assert_eq!(resolution, Ok(PathBuf::from(expected)), "{comment} {request}");
         }
     }
 
-    #[test]
+    #[tokio::test]
     #[cfg(not(target_os = "windows"))] // MemoryFS's path separator is always `/` so the test will not pass in windows.
-    fn resolve_to_context() {
+    async fn resolve_to_context() {
         let file_system = file_system();
 
         let resolver = ResolverGeneric::<MemoryFS>::new_with_file_system(
@@ -106,7 +106,7 @@ mod windows {
         ];
 
         for (comment, request, expected) in successful_resolves {
-            let resolution = resolver.resolve("/a", request).map(|r| r.full_path());
+            let resolution = resolver.resolve("/a", request).await.map(|r| r.full_path());
             assert_eq!(resolution, Ok(PathBuf::from(expected)), "{comment} {request}");
         }
     }
 }
diff --git a/src/tests/imports_field.rs b/src/tests/imports_field.rs
index c928627a..6cacd203 100644
--- a/src/tests/imports_field.rs
+++ b/src/tests/imports_field.rs
@@ -7,8 +7,8 @@ use serde_json::json;
 use crate::{Ctx, JSONMap, PathUtil, ResolveError, ResolveOptions, Resolver};
 use std::path::Path;
 
-#[test]
-fn test_simple() {
+#[tokio::test]
+async fn test_simple() {
     let f = super::fixture().join("imports-field");
     let f2 = super::fixture().join("imports-exports-wildcard/node_modules/m/");
 
@@ -30,7 +30,7 @@ fn test_simple() {
     ];
 
     for (comment, path, request, expected) in pass {
-        let resolved_path = resolver.resolve(&path, request).map(|r| r.full_path());
+        let resolved_path = resolver.resolve(&path, request).await.map(|r| r.full_path());
         assert_eq!(resolved_path, Ok(expected), "{comment} {path:?} {request}");
     }
 
@@ -45,13 +45,13 @@ fn test_simple() {
     ];
 
     for (comment, path, request, error) in fail {
-        let resolution = resolver.resolve(&path, request);
+        let resolution = resolver.resolve(&path, request).await;
         assert_eq!(resolution, Err(error), "{comment} {path:?} {request}");
     }
 }
 
-#[test]
-fn shared_resolvers() {
+#[tokio::test]
+async fn shared_resolvers() {
     let f = super::fixture().join("imports-field");
 
     // field name #1
@@ -63,7 +63,7 @@ fn shared_resolvers() {
         ..ResolveOptions::default()
     });
 
-    let resolved_path = resolver1.resolve(&f, "#imports-field").map(|r| r.full_path());
+    let resolved_path = resolver1.resolve(&f, "#imports-field").await.map(|r| r.full_path());
     assert_eq!(resolved_path, Ok(f.join("b.js")));
 
     // field name #2
@@ -72,7 +72,7 @@ fn shared_resolvers() {
         ..ResolveOptions::default()
    });
 
-    let resolved_path = resolver2.resolve(&f, "#b").map(|r| r.full_path());
+    let resolved_path = resolver2.resolve(&f, "#b").await.map(|r| r.full_path());
     assert_eq!(resolved_path, Ok(f.join("a.js")));
 }
 
@@ -105,8 +105,8 @@ fn imports_field(value: serde_json::Value) -> JSONMap {
     serde_json::from_str(&s).unwrap()
 }
 
-#[test]
-fn test_cases() {
+#[tokio::test]
+async fn test_cases() {
     let test_cases = [
         TestCase {
             name: "sample #1",
@@ -1304,6 +1304,7 @@ fn test_cases() {
                 &case.condition_names.iter().map(ToString::to_string).collect::<Vec<String>>(),
                 &mut Ctx::default(),
             )
+            .await
             .map(|p| p.map(|p| p.to_path_buf()));
         if let Some(expect) = case.expect {
             if expect.is_empty() {
diff --git a/src/tests/incorrect_description_file.rs b/src/tests/incorrect_description_file.rs
index af8678c3..9f0a50d6 100644
--- a/src/tests/incorrect_description_file.rs
+++ b/src/tests/incorrect_description_file.rs
@@ -5,11 +5,11 @@ use rustc_hash::FxHashSet;
 use crate::{JSONError, Resolution, ResolveContext, ResolveError, ResolveOptions, Resolver};
 
 // should not resolve main in incorrect description file #1
-#[test]
-fn incorrect_description_file_1() {
+#[tokio::test]
+async fn incorrect_description_file_1() {
     let f = super::fixture().join("incorrect-package");
     let mut ctx = ResolveContext::default();
-    let resolution =
Resolver::default().resolve_with_context(f.join("pack1"), ".", &mut ctx).await; let _error = ResolveError::JSON(JSONError { path: f.join("pack1/package.json"), message: String::from("EOF while parsing a value at line 3 column 0"), @@ -27,10 +27,10 @@ fn incorrect_description_file_1() { } // should not resolve main in incorrect description file #2 -#[test] -fn incorrect_description_file_2() { +#[tokio::test] +async fn incorrect_description_file_2() { let f = super::fixture().join("incorrect-package"); - let resolution = Resolver::default().resolve(f.join("pack2"), "."); + let resolution = Resolver::default().resolve(f.join("pack2"), ".").await; let _error = ResolveError::JSON(JSONError { path: f.join("pack2/package.json"), message: String::from("EOF while parsing a value at line 1 column 0"), @@ -42,27 +42,27 @@ fn incorrect_description_file_2() { } // should not resolve main in incorrect description file #3 -#[test] -fn incorrect_description_file_3() { +#[tokio::test] +async fn incorrect_description_file_3() { let f = super::fixture().join("incorrect-package"); - let resolution = Resolver::default().resolve(f.join("pack2"), "."); + let resolution = Resolver::default().resolve(f.join("pack2"), ".").await; assert!(resolution.is_err()); } // `enhanced_resolve` does not have this test case -#[test] -fn no_description_file() { +#[tokio::test] +async fn no_description_file() { let f = super::fixture_root().join("enhanced_resolve"); // has description file let resolver = Resolver::default(); assert_eq!( - resolver.resolve(&f, ".").map(Resolution::into_path_buf), + resolver.resolve(&f, ".").await.map(Resolution::into_path_buf), Ok(f.join("lib/index.js")) ); // without description file let resolver = Resolver::new(ResolveOptions { description_files: vec![], ..ResolveOptions::default() }); - assert_eq!(resolver.resolve(&f, "."), Err(ResolveError::NotFound(".".into()))); + assert_eq!(resolver.resolve(&f, ".").await, Err(ResolveError::NotFound(".".into()))); } diff --git a/src/tests/main_field.rs b/src/tests/main_field.rs index 4afed89e..e8f73908 100644 --- a/src/tests/main_field.rs +++ b/src/tests/main_field.rs @@ -2,8 +2,8 @@ use crate::{ResolveOptions, Resolver}; -#[test] -fn test() { +#[tokio::test] +async fn test() { let f = super::fixture().join("restrictions"); let resolver1 = Resolver::new(ResolveOptions { @@ -11,7 +11,7 @@ fn test() { ..ResolveOptions::default() }); - let resolution = resolver1.resolve(&f, "pck2").map(|r| r.full_path()); + let resolution = resolver1.resolve(&f, "pck2").await.map(|r| r.full_path()); assert_eq!(resolution, Ok(f.join("node_modules/pck2/index.css"))); let resolver2 = resolver1.clone_with_options(ResolveOptions { @@ -19,12 +19,12 @@ fn test() { ..ResolveOptions::default() }); - let resolution = resolver2.resolve(&f, "pck2").map(|r| r.full_path()); + let resolution = resolver2.resolve(&f, "pck2").await.map(|r| r.full_path()); assert_eq!(resolution, Ok(f.join("node_modules/pck2/module.js"))); } -#[test] -fn test_fallback() { +#[tokio::test] +async fn test_fallback() { let f = super::fixture_root().join("invalid"); let resolver1 = Resolver::new(ResolveOptions { @@ -33,6 +33,6 @@ fn test_fallback() { ..ResolveOptions::default() }); - let resolution = resolver1.resolve(&f, "main_field_fallback").map(|r| r.full_path()); + let resolution = resolver1.resolve(&f, "main_field_fallback").await.map(|r| r.full_path()); assert_eq!(resolution, Ok(f.join("node_modules/main_field_fallback/exist.js"))); } diff --git a/src/tests/memory_fs.rs b/src/tests/memory_fs.rs index 
492fde97..8868f453 100644
--- a/src/tests/memory_fs.rs
+++ b/src/tests/memory_fs.rs
@@ -40,9 +40,9 @@ impl MemoryFS {
             file.write_all(content.as_bytes()).unwrap();
         }
     }
-
+#[async_trait::async_trait]
 impl FileSystem for MemoryFS {
-    fn read_to_string(&self, path: &Path) -> io::Result<String> {
+    async fn read_to_string(&self, path: &Path) -> io::Result<String> {
         use vfs::FileSystem;
         let mut file = self
             .fs
@@ -52,12 +52,12 @@ impl FileSystem for MemoryFS {
         file.read_to_string(&mut buffer).unwrap();
         Ok(buffer)
     }
-    fn read(&self, path: &Path) -> io::Result<Vec<u8>> {
-        let buf = self.read_to_string(path)?;
+    async fn read(&self, path: &Path) -> io::Result<Vec<u8>> {
+        let buf = self.read_to_string(path).await?;
         Ok(buf.into_bytes())
     }
 
-    fn metadata(&self, path: &Path) -> io::Result<FileMetadata> {
+    async fn metadata(&self, path: &Path) -> io::Result<FileMetadata> {
         use vfs::FileSystem;
         let metadata = self
             .fs
@@ -68,11 +68,11 @@ impl FileSystem for MemoryFS {
         Ok(FileMetadata::new(is_file, is_dir, false))
     }
 
-    fn symlink_metadata(&self, path: &Path) -> io::Result<FileMetadata> {
-        self.metadata(path)
+    async fn symlink_metadata(&self, path: &Path) -> io::Result<FileMetadata> {
+        self.metadata(path).await
     }
 
-    fn canonicalize(&self, _path: &Path) -> io::Result<PathBuf> {
+    async fn canonicalize(&self, _path: &Path) -> io::Result<PathBuf> {
         Err(io::Error::new(io::ErrorKind::NotFound, "not a symlink"))
     }
 }
diff --git a/src/tests/missing.rs b/src/tests/missing.rs
index 4bf611ef..f0e40c4f 100644
--- a/src/tests/missing.rs
+++ b/src/tests/missing.rs
@@ -4,8 +4,8 @@ use normalize_path::NormalizePath;
 
 use crate::{AliasValue, ResolveContext, ResolveOptions, Resolver};
 
-#[test]
-fn test() {
+#[tokio::test]
+async fn test() {
     let f = super::fixture();
 
     let data = [
@@ -52,7 +52,7 @@ fn test() {
 
     for (specifier, missing_dependencies) in data {
         let mut ctx = ResolveContext::default();
-        let _ = resolver.resolve_with_context(&f, specifier, &mut ctx);
+        let _ = resolver.resolve_with_context(&f, specifier, &mut ctx).await;
 
         for path in ctx.file_dependencies {
             assert_eq!(path, path.normalize(), "{path:?}");
@@ -69,8 +69,8 @@ fn test() {
     }
 }
 
-#[test]
-fn alias_and_extensions() {
+#[tokio::test]
+async fn alias_and_extensions() {
     let f = super::fixture();
 
     let resolver = Resolver::new(ResolveOptions {
diff --git a/src/tests/mod.rs b/src/tests/mod.rs
index e347dd1f..6fefe803 100644
--- a/src/tests/mod.rs
+++ b/src/tests/mod.rs
@@ -34,8 +34,8 @@ pub fn fixture() -> PathBuf {
     fixture_root().join("enhanced_resolve").join("test").join("fixtures")
 }
 
-#[test]
-fn threaded_environment() {
+#[tokio::test]
+async fn threaded_environment() {
     let cwd = env::current_dir().unwrap();
     let resolver = Arc::new(Resolver::default());
     for _ in 0..2 {
diff --git a/src/tests/pnp.rs b/src/tests/pnp.rs
index 4c8448a7..e1874544 100644
--- a/src/tests/pnp.rs
+++ b/src/tests/pnp.rs
@@ -6,8 +6,8 @@ use crate::ResolveError::NotFound;
 
 use crate::{ResolveOptions, Resolver};
 
-#[test]
-fn pnp1() {
+#[tokio::test]
+async fn pnp1() {
     let fixture = super::fixture_root().join("pnp");
 
     let resolver = Resolver::new(ResolveOptions {
@@ -17,14 +17,14 @@ fn pnp1() {
     });
 
     assert_eq!(
-        resolver.resolve(&fixture, "is-even").map(|r| r.full_path()),
+        resolver.resolve(&fixture, "is-even").await.map(|r| r.full_path()),
         Ok(fixture.join(
             ".yarn/cache/is-even-npm-1.0.0-9f726520dc-2728cc2f39.zip/node_modules/is-even/index.js"
         ))
     );
 
     assert_eq!(
-        resolver.resolve(&fixture, "lodash.zip").map(|r| r.full_path()),
+        resolver.resolve(&fixture, "lodash.zip").await.map(|r| r.full_path()),
         Ok(fixture.join(
".yarn/cache/lodash.zip-npm-4.2.0-5299417ec8-e596da80a6.zip/node_modules/lodash.zip/index.js" )) @@ -38,6 +38,7 @@ fn pnp1() { ), "is-odd" ) + .await .map(|r| r.full_path()), Ok(fixture.join( ".yarn/cache/is-odd-npm-0.1.2-9d980a9da8-7dc6c6fd00.zip/node_modules/is-odd/index.js" @@ -45,29 +46,28 @@ fn pnp1() { ); assert_eq!( - resolver.resolve(&fixture, "is-odd").map(|r| r.full_path()), + resolver.resolve(&fixture, "is-odd").await.map(|r| r.full_path()), Ok(fixture.join( ".yarn/cache/is-odd-npm-3.0.1-93c3c3f41b-89ee2e353c.zip/node_modules/is-odd/index.js" )), ); assert_eq!( - resolver.resolve(&fixture, "preact").map(|r| r.full_path()), + resolver.resolve(&fixture, "preact").await.map(|r| r.full_path()), Ok(fixture.join( ".yarn/cache/preact-npm-10.25.4-2dd2c0aa44-33a009d614.zip/node_modules/preact/dist/preact.mjs" )), ); assert_eq!( - resolver.resolve(&fixture, "preact/devtools").map(|r| r.full_path()), + resolver.resolve(&fixture, "preact/devtools").await.map(|r| r.full_path()), Ok(fixture.join( ".yarn/cache/preact-npm-10.25.4-2dd2c0aa44-33a009d614.zip/node_modules/preact/devtools/dist/devtools.mjs" )), ); } - -#[test] -fn resolve_in_pnp_linked_folder() { +#[tokio::test] +async fn resolve_in_pnp_linked_folder() { let fixture = super::fixture_root().join("pnp"); let resolver = Resolver::new(ResolveOptions { @@ -77,19 +77,19 @@ fn resolve_in_pnp_linked_folder() { }); assert_eq!( - resolver.resolve(&fixture, "lib/lib.js").map(|r| r.full_path()), + resolver.resolve(&fixture, "lib/lib.js").await.map(|r| r.full_path()), Ok(fixture.join("shared/lib.js")) ); } -#[test] -fn resolve_pnp_pkg_should_failed_while_disable_pnp_mode() { +#[tokio::test] +async fn resolve_pnp_pkg_should_failed_while_disable_pnp_mode() { let fixture = super::fixture_root().join("pnp"); let resolver = Resolver::new(ResolveOptions { enable_pnp: false, ..ResolveOptions::default() }); assert_eq!( - resolver.resolve(&fixture, "is-even").map(|r| r.full_path()), + resolver.resolve(&fixture, "is-even").await.map(|r| r.full_path()), Err(NotFound("is-even".to_string())) ); } diff --git a/src/tests/resolve.rs b/src/tests/resolve.rs index f2dbb88e..b7e2ce61 100644 --- a/src/tests/resolve.rs +++ b/src/tests/resolve.rs @@ -2,8 +2,8 @@ use crate::{ResolveError, ResolveOptions, Resolver}; -#[test] -fn resolve() { +#[tokio::test] +async fn resolve() { let f = super::fixture(); let resolver = Resolver::default(); @@ -53,13 +53,13 @@ fn resolve() { ]; for (comment, path, request, expected) in pass { - let resolved_path = resolver.resolve(&path, request).map(|r| r.full_path()); + let resolved_path = resolver.resolve(&path, request).await.map(|r| r.full_path()); assert_eq!(resolved_path, Ok(expected), "{comment} {path:?} {request}"); } } -#[test] -fn issue238_resolve() { +#[tokio::test] +async fn issue238_resolve() { let f = super::fixture().join("issue-238"); let resolver = Resolver::new(ResolveOptions { extensions: vec![".js".into(), ".jsx".into(), ".ts".into(), ".tsx".into()], @@ -67,12 +67,12 @@ fn issue238_resolve() { ..ResolveOptions::default() }); let resolved_path = - resolver.resolve(f.join("src/common"), "config/myObjectFile").map(|r| r.full_path()); + resolver.resolve(f.join("src/common"), "config/myObjectFile").await.map(|r| r.full_path()); assert_eq!(resolved_path, Ok(f.join("src/common/config/myObjectFile.js")),); } -#[test] -fn prefer_relative() { +#[tokio::test] +async fn prefer_relative() { let f = super::fixture(); let resolver = @@ -85,13 +85,13 @@ fn prefer_relative() { ]; for (comment, request, expected) in pass { - let 
resolved_path = resolver.resolve(&f, request).map(|r| r.full_path()); + let resolved_path = resolver.resolve(&f, request).await.map(|r| r.full_path()); assert_eq!(resolved_path, Ok(expected), "{comment} {request}"); } } -#[test] -fn resolve_to_context() { +#[tokio::test] +async fn resolve_to_context() { let f = super::fixture(); let resolver = Resolver::new(ResolveOptions { resolve_to_context: true, ..ResolveOptions::default() }); @@ -105,15 +105,15 @@ fn resolve_to_context() { ]; for (comment, path, request, expected) in data { - let resolved_path = resolver.resolve(&path, request).map(|r| r.full_path()); + let resolved_path = resolver.resolve(&path, request).await.map(|r| r.full_path()); assert_eq!(resolved_path, Ok(expected), "{comment} {path:?} {request}"); } } -#[test] -fn resolve_hash_as_module() { +#[tokio::test] +async fn resolve_hash_as_module() { let f = super::fixture(); let resolver = Resolver::new(ResolveOptions::default()); - let resolution = resolver.resolve(f, "#a"); + let resolution = resolver.resolve(f, "#a").await; assert_eq!(resolution, Err(ResolveError::NotFound("#a".into()))); } diff --git a/src/tests/restrictions.rs b/src/tests/restrictions.rs index feb929ee..c95705f4 100644 --- a/src/tests/restrictions.rs +++ b/src/tests/restrictions.rs @@ -6,8 +6,8 @@ use regex::Regex; use crate::{ResolveError, ResolveOptions, Resolver, Restriction}; -#[test] -fn should_respect_regexp_restriction() { +#[tokio::test] +async fn should_respect_regexp_restriction() { let f = super::fixture().join("restrictions"); let re = Regex::new(r"\.(sass|scss|css)$").unwrap(); @@ -19,12 +19,12 @@ fn should_respect_regexp_restriction() { ..ResolveOptions::default() }); - let resolution = resolver1.resolve(&f, "pck1").map(|r| r.full_path()); + let resolution = resolver1.resolve(&f, "pck1").await.map(|r| r.full_path()); assert_eq!(resolution, Err(ResolveError::NotFound("pck1".to_string()))); } -#[test] -fn should_try_to_find_alternative_1() { +#[tokio::test] +async fn should_try_to_find_alternative_1() { let f = super::fixture().join("restrictions"); let re = Regex::new(r"\.(sass|scss|css)$").unwrap(); @@ -37,12 +37,12 @@ fn should_try_to_find_alternative_1() { ..ResolveOptions::default() }); - let resolution = resolver1.resolve(&f, "pck1").map(|r| r.full_path()); + let resolution = resolver1.resolve(&f, "pck1").await.map(|r| r.full_path()); assert_eq!(resolution, Ok(f.join("node_modules/pck1/index.css"))); } -#[test] -fn should_respect_string_restriction() { +#[tokio::test] +async fn should_respect_string_restriction() { let fixture = super::fixture(); let f = fixture.join("restrictions"); @@ -52,12 +52,12 @@ fn should_respect_string_restriction() { ..ResolveOptions::default() }); - let resolution = resolver.resolve(&f, "pck2"); + let resolution = resolver.resolve(&f, "pck2").await; assert_eq!(resolution, Err(ResolveError::NotFound("pck2".to_string()))); } -#[test] -fn should_try_to_find_alternative_2() { +#[tokio::test] +async fn should_try_to_find_alternative_2() { let f = super::fixture().join("restrictions"); let re = Regex::new(r"\.(sass|scss|css)$").unwrap(); @@ -70,12 +70,12 @@ fn should_try_to_find_alternative_2() { ..ResolveOptions::default() }); - let resolution = resolver1.resolve(&f, "pck2").map(|r| r.full_path()); + let resolution = resolver1.resolve(&f, "pck2").await.map(|r| r.full_path()); assert_eq!(resolution, Ok(f.join("node_modules/pck2/index.css"))); } -#[test] -fn should_try_to_find_alternative_3() { +#[tokio::test] +async fn should_try_to_find_alternative_3() { let f = 
super::fixture().join("restrictions"); let re = Regex::new(r"\.(sass|scss|css)$").unwrap(); @@ -88,6 +88,6 @@ fn should_try_to_find_alternative_3() { ..ResolveOptions::default() }); - let resolution = resolver1.resolve(&f, "pck2").map(|r| r.full_path()); + let resolution = resolver1.resolve(&f, "pck2").await.map(|r| r.full_path()); assert_eq!(resolution, Ok(f.join("node_modules/pck2/index.css"))); } diff --git a/src/tests/roots.rs b/src/tests/roots.rs index b50e2a07..bad78cb5 100644 --- a/src/tests/roots.rs +++ b/src/tests/roots.rs @@ -8,8 +8,8 @@ fn dirname() -> PathBuf { super::fixture_root().join("enhanced_resolve").join("test") } -#[test] -fn roots() { +#[tokio::test] +async fn roots() { let f = super::fixture(); let resolver = Resolver::new(ResolveOptions { @@ -29,7 +29,7 @@ fn roots() { ]; for (comment, request, expected) in pass { - let resolved_path = resolver.resolve(&f, request).map(|r| r.full_path()); + let resolved_path = resolver.resolve(&f, request).await.map(|r| r.full_path()); assert_eq!(resolved_path, Ok(expected), "{comment} {request}"); } @@ -39,26 +39,26 @@ fn roots() { ]; for (comment, request, expected) in fail { - let resolution = resolver.resolve(&f, request); + let resolution = resolver.resolve(&f, request).await; assert_eq!(resolution, Err(expected), "{comment} {request}"); } } -#[test] -fn resolve_to_context() { +#[tokio::test] +async fn resolve_to_context() { let f = super::fixture(); let resolver = Resolver::new(ResolveOptions { roots: vec![dirname(), f.clone()], resolve_to_context: true, ..ResolveOptions::default() }); - let resolved_path = resolver.resolve(&f, "/fixtures/lib").map(|r| r.full_path()); + let resolved_path = resolver.resolve(&f, "/fixtures/lib").await.map(|r| r.full_path()); let expected = f.join("lib"); assert_eq!(resolved_path, Ok(expected)); } -#[test] -fn prefer_absolute() { +#[tokio::test] +async fn prefer_absolute() { let f = super::fixture(); let resolver = Resolver::new(ResolveOptions { extensions: vec![".js".into()], @@ -74,17 +74,18 @@ fn prefer_absolute() { ]; for (comment, request, expected) in pass { - let resolved_path = resolver.resolve(&f, &request).map(|r| r.full_path()); + let resolved_path = resolver.resolve(&f, &request).await.map(|r| r.full_path()); assert_eq!(resolved_path, Ok(expected), "{comment} {request}"); } } -#[test] -fn roots_fall_through() { +#[tokio::test] +async fn roots_fall_through() { let f = super::fixture(); let absolute_path = f.join("roots_fall_through/index.js"); let specifier = absolute_path.to_string_lossy(); - let resolution = Resolver::new(ResolveOptions::default().with_root(&f)).resolve(&f, &specifier); + let resolution = + Resolver::new(ResolveOptions::default().with_root(&f)).resolve(&f, &specifier).await; assert_eq!( resolution.map(super::super::resolution::Resolution::into_path_buf), Ok(absolute_path) diff --git a/src/tests/scoped_packages.rs b/src/tests/scoped_packages.rs index 8258f00f..f35a5fe5 100644 --- a/src/tests/scoped_packages.rs +++ b/src/tests/scoped_packages.rs @@ -2,8 +2,8 @@ use crate::{ResolveOptions, Resolver}; -#[test] -fn scoped_packages() { +#[tokio::test] +async fn scoped_packages() { let f = super::fixture().join("scoped"); let resolver = Resolver::new(ResolveOptions { @@ -19,7 +19,7 @@ fn scoped_packages() { ]; for (comment, path, request, expected) in pass { - let resolved_path = resolver.resolve(&f, request).map(|r| r.full_path()); + let resolved_path = resolver.resolve(&f, request).await.map(|r| r.full_path()); assert_eq!(resolved_path, Ok(expected), "{comment} {path:?} 
{request}"); } } diff --git a/src/tests/simple.rs b/src/tests/simple.rs index 1dd0e26c..1816de7f 100644 --- a/src/tests/simple.rs +++ b/src/tests/simple.rs @@ -4,19 +4,19 @@ use std::env; use crate::Resolver; -#[test] -fn resolve_abs_main() { +#[tokio::test] +async fn resolve_abs_main() { let resolver = Resolver::default(); let dirname = env::current_dir().unwrap().join("fixtures"); let f = dirname.join("invalid/main.js"); // a's main field id `/dist/index.js` - let resolution = resolver.resolve(&f, "a").unwrap(); + let resolution = resolver.resolve(&f, "a").await.unwrap(); assert_eq!(resolution.path(), dirname.join("invalid/node_modules/a/dist/index.js")); } -#[test] -fn simple() { +#[tokio::test] +async fn simple() { // mimic `enhanced-resolve/test/simple.test.js` let dirname = env::current_dir().unwrap().join("fixtures"); let f = dirname.join("enhanced_resolve/test"); @@ -30,14 +30,14 @@ fn simple() { ]; for (comment, path, request) in data { - let resolved_path = resolver.resolve(&path, request).map(|f| f.full_path()); + let resolved_path = resolver.resolve(&path, request).await.map(|f| f.full_path()); let expected = dirname.join("enhanced_resolve/lib/index.js"); assert_eq!(resolved_path, Ok(expected), "{comment} {path:?} {request}"); } } -#[test] -fn dashed_name() { +#[tokio::test] +async fn dashed_name() { let f = super::fixture(); let resolver = Resolver::default(); @@ -52,7 +52,7 @@ fn dashed_name() { ]; for (path, request, expected) in data { - let resolved_path = resolver.resolve(&path, request).map(|f| f.full_path()); + let resolved_path = resolver.resolve(&path, request).await.map(|f| f.full_path()); assert_eq!(resolved_path, Ok(expected), "{path:?} {request}"); } } @@ -63,8 +63,8 @@ mod windows { use crate::ResolveOptions; - #[test] - fn no_package() { + #[tokio::test] + async fn no_package() { use crate::ResolverGeneric; use std::path::Path; let f = Path::new("/"); @@ -73,7 +73,7 @@ mod windows { file_system, ResolveOptions::default(), ); - let resolved_path = resolver.resolve(f, "package"); + let resolved_path = resolver.resolve(f, "package").await; assert!(resolved_path.is_err()); } } diff --git a/src/tests/symlink.rs b/src/tests/symlink.rs index 7589b560..bc8e9fa1 100644 --- a/src/tests/symlink.rs +++ b/src/tests/symlink.rs @@ -61,8 +61,8 @@ fn cleanup_symlinks(temp_path: &Path) { _ = fs::remove_dir_all(temp_path); } -#[test] -fn test() -> io::Result<()> { +#[tokio::test] +async fn test() -> io::Result<()> { let root = super::fixture_root().join("enhanced_resolve"); let dirname = root.join("test"); let temp_path = dirname.join("temp"); @@ -109,11 +109,11 @@ fn test() -> io::Result<()> { ]; for (comment, path, request) in pass { - let filename = resolver_with_symlinks.resolve(&path, request).map(|r| r.full_path()); + let filename = resolver_with_symlinks.resolve(&path, request).await.map(|r| r.full_path()); assert_eq!(filename, Ok(root.join("lib/index.js")), "{comment:?}"); let resolved_path = - resolver_without_symlinks.resolve(&path, request).map(|r| r.full_path()); + resolver_without_symlinks.resolve(&path, request).await.map(|r| r.full_path()); assert_eq!(resolved_path, Ok(path.join(request))); } diff --git a/src/tests/tsconfig_paths.rs b/src/tests/tsconfig_paths.rs index b6dd9999..e2a88fda 100644 --- a/src/tests/tsconfig_paths.rs +++ b/src/tests/tsconfig_paths.rs @@ -10,8 +10,8 @@ use crate::{ }; // -#[test] -fn tsconfig() { +#[tokio::test] +async fn tsconfig() { let f = super::fixture_root().join("tsconfig"); #[rustfmt::skip] @@ -37,7 +37,7 @@ fn tsconfig() { 
..ResolveOptions::default() }); let path = subdir.map_or(dir.clone(), |subdir| dir.join(subdir)); - let resolved_path = resolver.resolve(&path, request).map(|f| f.full_path()); + let resolved_path = resolver.resolve(&path, request).await.map(|f| f.full_path()); assert_eq!(resolved_path, Ok(expected), "{request} {path:?}"); } @@ -54,13 +54,13 @@ fn tsconfig() { ..ResolveOptions::default() }); for (path, request, expected) in data { - let resolution = resolver.resolve(&path, request).map(|f| f.full_path()); + let resolution = resolver.resolve(&path, request).await.map(|f| f.full_path()); assert_eq!(resolution, expected, "{path:?} {request}"); } } -#[test] -fn tsconfig_fallthrough() { +#[tokio::test] +async fn tsconfig_fallthrough() { let f = super::fixture_root().join("tsconfig"); let resolver = Resolver::new(ResolveOptions { @@ -71,12 +71,12 @@ fn tsconfig_fallthrough() { ..ResolveOptions::default() }); - let resolved_path = resolver.resolve(&f, "/"); + let resolved_path = resolver.resolve(&f, "/").await; assert_eq!(resolved_path, Err(ResolveError::NotFound("/".into()))); } -#[test] -fn json_with_comments() { +#[tokio::test] +async fn json_with_comments() { let f = super::fixture_root().join("tsconfig/cases/trailing-comma"); let resolver = Resolver::new(ResolveOptions { @@ -87,12 +87,12 @@ fn json_with_comments() { ..ResolveOptions::default() }); - let resolved_path = resolver.resolve(&f, "foo").map(|f| f.full_path()); + let resolved_path = resolver.resolve(&f, "foo").await.map(|f| f.full_path()); assert_eq!(resolved_path, Ok(f.join("bar.js"))); } -#[test] -fn broken() { +#[tokio::test] +async fn broken() { let f = super::fixture_root().join("tsconfig"); let resolver = Resolver::new(ResolveOptions { @@ -103,7 +103,7 @@ fn broken() { ..ResolveOptions::default() }); - let resolved_path = resolver.resolve(&f, "/"); + let resolved_path = resolver.resolve(&f, "/").await; let _error = ResolveError::JSON(JSONError { path: f.join("tsconfig_broken.json"), message: String::from("EOF while parsing an object at line 2 column 0"), @@ -114,8 +114,8 @@ fn broken() { assert!(matches!(resolved_path, Err(ResolveError::JSON(_)))); } -#[test] -fn empty() { +#[tokio::test] +async fn empty() { let f = super::fixture_root().join("tsconfig/cases/empty"); let resolver = Resolver::new(ResolveOptions { @@ -126,13 +126,13 @@ fn empty() { ..ResolveOptions::default() }); - let resolved_path = resolver.resolve(&f, "./index").map(|f| f.full_path()); + let resolved_path = resolver.resolve(&f, "./index").await.map(|f| f.full_path()); assert_eq!(resolved_path, Ok(f.join("index.js"))); } // -#[test] -fn test_paths() { +#[tokio::test] +async fn test_paths() { let path = Path::new("/foo/tsconfig.json"); let mut tsconfig_json = serde_json::json!({ "compilerOptions": { @@ -168,8 +168,8 @@ fn test_paths() { } // -#[test] -fn test_base_url() { +#[tokio::test] +async fn test_base_url() { let path = Path::new("/foo/tsconfig.json"); let mut tsconfig_json = serde_json::json!({ "compilerOptions": { @@ -193,8 +193,8 @@ fn test_base_url() { } // -#[test] -fn test_paths_and_base_url() { +#[tokio::test] +async fn test_paths_and_base_url() { let path = Path::new("/foo/tsconfig.json"); let mut tsconfig_json = serde_json::json!({ "compilerOptions": { @@ -228,8 +228,8 @@ fn test_paths_and_base_url() { // Template variable ${configDir} for substitution of config files directory path // https://github.com/microsoft/TypeScript/pull/58042 -#[test] -fn test_template_variable() { +#[tokio::test] +async fn test_template_variable() { let f = 
super::fixture_root().join("tsconfig"); let f2 = f.join("cases").join("paths_template_variable"); @@ -248,7 +248,7 @@ fn test_template_variable() { }), ..ResolveOptions::default() }); - let resolved_path = resolver.resolve(&dir, request).map(|f| f.full_path()); + let resolved_path = resolver.resolve(&dir, request).await.map(|f| f.full_path()); assert_eq!(resolved_path, Ok(expected), "{request} {tsconfig} {dir:?}"); } } @@ -333,8 +333,8 @@ mod windows_test { // Path matching tests from tsconfig-paths // * // * - #[test] - fn match_path() { + #[tokio::test] + async fn match_path() { let pass = [ OneTest { name: "should locate path that matches with star and exists", @@ -505,8 +505,11 @@ OneTest { let root = PathBuf::from("/root"); for test in pass { - let resolved_path = - test.resolver(&root).resolve(&root, test.requested_module).map(|f| f.full_path()); + let resolved_path = test + .resolver(&root) + .resolve(&root, test.requested_module) + .await + .map(|f| f.full_path()); assert_eq!(resolved_path, Ok(PathBuf::from(test.expected_path)), "{}", test.name); } @@ -534,8 +537,11 @@ OneTest { ]; for test in fail { - let resolved_path = - test.resolver(&root).resolve(&root, test.requested_module).map(|f| f.full_path()); + let resolved_path = test + .resolver(&root) + .resolve(&root, test.requested_module) + .await + .map(|f| f.full_path()); assert_eq!( resolved_path, Err(ResolveError::NotFound(test.requested_module.into())), diff --git a/src/tests/tsconfig_project_references.rs b/src/tests/tsconfig_project_references.rs index 0d019d27..ff59cc91 100644 --- a/src/tests/tsconfig_project_references.rs +++ b/src/tests/tsconfig_project_references.rs @@ -2,8 +2,8 @@ use crate::{ResolveError, ResolveOptions, Resolver, TsconfigOptions, TsconfigReferences}; -#[test] -fn auto() { +#[tokio::test] +async fn auto() { let f = super::fixture_root().join("tsconfig/cases/project_references"); let resolver = Resolver::new(ResolveOptions { @@ -33,13 +33,13 @@ fn auto() { ]; for (path, request, expected) in pass { - let resolved_path = resolver.resolve(&path, request).map(|f| f.full_path()); + let resolved_path = resolver.resolve(&path, request).await.map(|f| f.full_path()); assert_eq!(resolved_path, Ok(expected), "{request} {path:?}"); } } -#[test] -fn disabled() { +#[tokio::test] +async fn disabled() { let f = super::fixture_root().join("tsconfig/cases/project_references"); let resolver = Resolver::new(ResolveOptions { @@ -64,13 +64,13 @@ fn disabled() { ]; for (path, request, expected) in pass { - let resolved_path = resolver.resolve(&path, request).map(|f| f.full_path()); + let resolved_path = resolver.resolve(&path, request).await.map(|f| f.full_path()); assert_eq!(resolved_path, expected, "{request} {path:?}"); } } -#[test] -fn manual() { +#[tokio::test] +async fn manual() { let f = super::fixture_root().join("tsconfig/cases/project_references"); let resolver = Resolver::new(ResolveOptions { @@ -95,13 +95,13 @@ fn manual() { ]; for (path, request, expected) in pass { - let resolved_path = resolver.resolve(&path, request).map(|f| f.full_path()); + let resolved_path = resolver.resolve(&path, request).await.map(|f| f.full_path()); assert_eq!(resolved_path, expected, "{request} {path:?}"); } } -#[test] -fn self_reference() { +#[tokio::test] +async fn self_reference() { let f = super::fixture_root().join("tsconfig/cases/project_references"); #[rustfmt::skip] @@ -122,7 +122,7 @@ fn self_reference() { ..ResolveOptions::default() }); let path = f.join("app"); - let resolved_path = resolver.resolve(&path, 
"@/index.ts").map(|f| f.full_path()); + let resolved_path = resolver.resolve(&path, "@/index.ts").await.map(|f| f.full_path()); assert_eq!( resolved_path, Err(ResolveError::TsconfigSelfReference(f.join("app/tsconfig.json"))), diff --git a/tests/integration_test.rs b/tests/integration_test.rs index 0309d447..80595317 100644 --- a/tests/integration_test.rs +++ b/tests/integration_test.rs @@ -8,33 +8,33 @@ fn dir() -> PathBuf { env::current_dir().unwrap() } -fn resolve(specifier: &str) -> Resolution { +async fn resolve(specifier: &str) -> Resolution { let path = dir(); - Resolver::new(ResolveOptions::default()).resolve(path, specifier).unwrap() + Resolver::new(ResolveOptions::default()).resolve(path, specifier).await.unwrap() } -#[test] -fn clone() { - let resolution = resolve("./tests/package.json"); +#[tokio::test] +async fn clone() { + let resolution = resolve("./tests/package.json").await; assert_eq!(resolution.clone(), resolution); } -#[test] -fn debug() { - let resolution = resolve("./tests/package.json"); +#[tokio::test] +async fn debug() { + let resolution = resolve("./tests/package.json").await; let s = format!("{resolution:?}"); assert!(!s.is_empty()); } -#[test] -fn eq() { - let resolution = resolve("./tests/package.json"); +#[tokio::test] +async fn eq() { + let resolution = resolve("./tests/package.json").await; assert_eq!(resolution, resolution); } -#[test] -fn package_json() { - let resolution = resolve("./tests/package.json"); +#[tokio::test] +async fn package_json() { + let resolution = resolve("./tests/package.json").await; let package_json = resolution.package_json().unwrap(); assert_eq!(package_json.name.as_ref().unwrap(), "name"); assert_eq!(package_json.r#type.as_ref().unwrap().as_str(), "module".into()); @@ -42,9 +42,9 @@ fn package_json() { } #[cfg(feature = "package_json_raw_json_api")] -#[test] -fn package_json_raw_json_api() { - let resolution = resolve("./tests/package.json"); +#[tokio::test] +async fn package_json_raw_json_api() { + let resolution = resolve("./tests/package.json").await; assert!(resolution .package_json() .unwrap() @@ -53,40 +53,38 @@ fn package_json_raw_json_api() { .is_some_and(|name| name == "name")); } -#[test] -fn clear_cache() { +#[tokio::test] +async fn clear_cache() { let resolver = Resolver::new(ResolveOptions::default()); resolver.clear_cache(); // exists } -#[test] -fn options() { +#[tokio::test] +async fn options() { let resolver = Resolver::new(ResolveOptions::default()); let options = resolver.options(); assert!(!format!("{options:?}").is_empty()); } -#[test] -fn debug_resolver() { +#[tokio::test] +async fn debug_resolver() { let resolver = Resolver::new(ResolveOptions::default()); assert!(!format!("{resolver:?}").is_empty()); } -#[test] -fn dependencies() { +#[tokio::test] +async fn dependencies() { let path = dir(); let mut ctx = ResolveContext::default(); - let _ = Resolver::new(ResolveOptions::default()).resolve_with_context( - path, - "./tests/package.json", - &mut ctx, - ); + let _ = Resolver::new(ResolveOptions::default()) + .resolve_with_context(path, "./tests/package.json", &mut ctx) + .await; assert!(!ctx.file_dependencies.is_empty()); assert!(ctx.missing_dependencies.is_empty()); } -#[test] -fn options_api() { +#[tokio::test] +async fn options_api() { _ = ResolveOptions::default() .with_builtin_modules(true) .with_condition_names(&[]) diff --git a/tests/resolve_test.rs b/tests/resolve_test.rs index 19772b60..84a37b92 100644 --- a/tests/resolve_test.rs +++ b/tests/resolve_test.rs @@ -6,19 +6,19 @@ fn dir() -> PathBuf { 
env::current_dir().unwrap() } -#[test] -fn chinese() { +#[tokio::test] +async fn chinese() { let dir = dir(); let specifier = "./fixtures/misc/中文/中文.js"; - let resolution = Resolver::new(ResolveOptions::default()).resolve(&dir, specifier); + let resolution = Resolver::new(ResolveOptions::default()).resolve(&dir, specifier).await; assert_eq!( resolution.map(rspack_resolver::Resolution::into_path_buf), Ok(dir.join("fixtures/misc/中文/中文.js")) ); } -#[test] -fn styled_components() { +#[tokio::test] +async fn styled_components() { let dir = dir(); let path = dir.join("fixtures/pnpm"); let module_path = dir.join("node_modules/.pnpm/styled-components@6.1.1_react-dom@18.3.1_react@18.3.1__react@18.3.1/node_modules/styled-components"); @@ -27,7 +27,7 @@ fn styled_components() { // cjs let options = ResolveOptions { alias_fields: vec![vec!["browser".into()]], ..ResolveOptions::default() }; - let resolution = Resolver::new(options).resolve(&path, specifier); + let resolution = Resolver::new(options).resolve(&path, specifier).await; assert_eq!( resolution.map(rspack_resolver::Resolution::into_path_buf), Ok(module_path.join("dist/styled-components.browser.cjs.js")) @@ -39,15 +39,15 @@ fn styled_components() { main_fields: vec!["module".into()], ..ResolveOptions::default() }; - let resolution = Resolver::new(options).resolve(&path, specifier); + let resolution = Resolver::new(options).resolve(&path, specifier).await; assert_eq!( resolution.map(rspack_resolver::Resolution::into_path_buf), Ok(module_path.join("dist/styled-components.browser.esm.js")) ); } -#[test] -fn axios() { +#[tokio::test] +async fn axios() { let dir = dir(); let path = dir.join("fixtures/pnpm"); let module_path = dir.join("node_modules/.pnpm/axios@1.6.2/node_modules/axios"); @@ -55,7 +55,7 @@ fn axios() { // default let options = ResolveOptions::default(); - let resolution = Resolver::new(options).resolve(&path, specifier); + let resolution = Resolver::new(options).resolve(&path, specifier).await; assert_eq!( resolution.map(rspack_resolver::Resolution::into_path_buf), Ok(module_path.join("index.js")) @@ -66,7 +66,7 @@ fn axios() { condition_names: vec!["browser".into(), "require".into()], ..ResolveOptions::default() }; - let resolution = Resolver::new(options).resolve(&path, specifier); + let resolution = Resolver::new(options).resolve(&path, specifier).await; assert_eq!( resolution.map(rspack_resolver::Resolution::into_path_buf), Ok(module_path.join("dist/browser/axios.cjs")) @@ -77,15 +77,15 @@ fn axios() { condition_names: vec!["node".into(), "require".into()], ..ResolveOptions::default() }; - let resolution = Resolver::new(options).resolve(&path, specifier); + let resolution = Resolver::new(options).resolve(&path, specifier).await; assert_eq!( resolution.map(rspack_resolver::Resolution::into_path_buf), Ok(module_path.join("dist/node/axios.cjs")) ); } -#[test] -fn postcss() { +#[tokio::test] +async fn postcss() { let dir = dir(); let path = dir.join("fixtures/pnpm"); let module_path = path.join("node_modules/postcss"); @@ -95,16 +95,16 @@ fn postcss() { }); // should ignore "path" - let resolution = resolver.resolve(&module_path, "path"); + let resolution = resolver.resolve(&module_path, "path").await; assert_eq!(resolution, Err(ResolveError::Ignored(module_path.clone()))); // should ignore "./lib/terminal-highlight" - let resolution = resolver.resolve(&module_path, "./lib/terminal-highlight"); + let resolution = resolver.resolve(&module_path, "./lib/terminal-highlight").await; assert_eq!(resolution, 
Err(ResolveError::Ignored(module_path.join("lib/terminal-highlight")))); } -#[test] -fn ipaddr_js() { +#[tokio::test] +async fn ipaddr_js() { let dir = dir(); let path = dir.join("fixtures/pnpm"); let module_path = @@ -126,13 +126,13 @@ fn ipaddr_js() { ]; for resolver in resolvers { - let resolution = resolver.resolve(&path, "ipaddr.js").map(|r| r.full_path()); + let resolution = resolver.resolve(&path, "ipaddr.js").await.map(|r| r.full_path()); assert_eq!(resolution, Ok(module_path.clone())); } } -#[test] -fn decimal_js() { +#[tokio::test] +async fn decimal_js() { let dir = dir(); let path = dir.join("fixtures/pnpm"); let module_path = @@ -153,13 +153,13 @@ fn decimal_js() { ]; for resolver in resolvers { - let resolution = resolver.resolve(&path, "decimal.js").map(|r| r.full_path()); + let resolution = resolver.resolve(&path, "decimal.js").await.map(|r| r.full_path()); assert_eq!(resolution, Ok(module_path.clone())); } } -#[test] -fn decimal_js_from_mathjs() { +#[tokio::test] +async fn decimal_js_from_mathjs() { let dir = dir(); let path = dir.join("node_modules/.pnpm/mathjs@13.2.0/node_modules/mathjs/lib/esm"); let module_path = @@ -180,7 +180,7 @@ fn decimal_js_from_mathjs() { ]; for resolver in resolvers { - let resolution = resolver.resolve(&path, "decimal.js").map(|r| r.full_path()); + let resolution = resolver.resolve(&path, "decimal.js").await.map(|r| r.full_path()); assert_eq!(resolution, Ok(module_path.clone())); } }
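
Taken together, the hunks above apply one mechanical migration to every test: `#[test]` becomes `#[tokio::test]`, the function becomes `async`, and each call to `resolve` / `resolve_with_context` (and to the exports/imports field helpers) gains `.await`. A minimal caller-side sketch of the migrated call shape follows. It is illustrative only: the test name, the `./package.json` specifier, and the tokio `rt` + `macros` features are assumptions, not part of this patch; the `Resolver`, `ResolveOptions`, `resolve`, and `full_path` APIs are the ones exercised above.

use rspack_resolver::{ResolveOptions, Resolver};

// Hypothetical downstream test following the same pattern as the migrated
// tests in this patch. Requires tokio with the `rt` and `macros` features.
#[tokio::test]
async fn resolves_package_json_async() {
    let dir = std::env::current_dir().unwrap();
    let resolver = Resolver::new(ResolveOptions::default());

    // `resolve` now returns a future; the Result<Resolution, ResolveError>
    // only becomes available after `.await`.
    let resolution = resolver.resolve(&dir, "./package.json").await.unwrap();
    assert!(resolution.full_path().ends_with("package.json"));
}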
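
The same migration reaches the `FileSystem` abstraction (see the `memory_fs.rs` hunks): its methods are now `async`, and implementors opt in through `#[async_trait::async_trait]`. Below is a rough sketch of a custom implementor, assuming the five methods shown in that hunk are the complete required surface and that `FileSystem` and `FileMetadata` (with its `new(is_file, is_dir, is_symlink)` constructor) are exported by the crate; `StaticFS` and its single hard-coded file are invented for illustration.

use std::io;
use std::path::{Path, PathBuf};

use rspack_resolver::{FileMetadata, FileSystem};

struct StaticFS;

#[async_trait::async_trait]
impl FileSystem for StaticFS {
    async fn read_to_string(&self, path: &Path) -> io::Result<String> {
        // Serve a single hard-coded manifest; everything else is "not found".
        if path == Path::new("/package.json") {
            Ok(r#"{ "name": "static" }"#.to_string())
        } else {
            Err(io::Error::new(io::ErrorKind::NotFound, "not found"))
        }
    }

    async fn read(&self, path: &Path) -> io::Result<Vec<u8>> {
        self.read_to_string(path).await.map(String::into_bytes)
    }

    async fn metadata(&self, path: &Path) -> io::Result<FileMetadata> {
        let is_file = path == Path::new("/package.json");
        Ok(FileMetadata::new(is_file, !is_file, false))
    }

    async fn symlink_metadata(&self, path: &Path) -> io::Result<FileMetadata> {
        self.metadata(path).await
    }

    async fn canonicalize(&self, _path: &Path) -> io::Result<PathBuf> {
        // No symlinks in this toy file system.
        Err(io::Error::new(io::ErrorKind::NotFound, "not a symlink"))
    }
}

Such an implementor would then be wired up the same way the tests wire up `MemoryFS`, i.e. via `ResolverGeneric::<StaticFS>::new_with_file_system(StaticFS, ResolveOptions::default())`.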