diff --git a/crates/turbo-tasks-memory/tests/debug.rs b/crates/turbo-tasks-memory/tests/debug.rs index f57950afba6d2..b62e7bf9a3166 100644 --- a/crates/turbo-tasks-memory/tests/debug.rs +++ b/crates/turbo-tasks-memory/tests/debug.rs @@ -3,7 +3,7 @@ use std::sync::Mutex; use anyhow::Result; -use turbo_tasks::{debug::ValueDebug, TaskInput}; +use turbo_tasks::debug::ValueDebug; use turbo_tasks_testing::{register, run}; register!(); diff --git a/crates/turbopack-core/src/issue/mod.rs b/crates/turbopack-core/src/issue/mod.rs index 45c07f588cc87..4eff9783370f7 100644 --- a/crates/turbopack-core/src/issue/mod.rs +++ b/crates/turbopack-core/src/issue/mod.rs @@ -1,6 +1,5 @@ pub mod analyze; pub mod code_gen; -pub mod package_json; pub mod resolve; pub mod unsupported_module; diff --git a/crates/turbopack-core/src/issue/package_json.rs b/crates/turbopack-core/src/issue/package_json.rs deleted file mode 100644 index c8b5ca943afe2..0000000000000 --- a/crates/turbopack-core/src/issue/package_json.rs +++ /dev/null @@ -1,34 +0,0 @@ -use anyhow::Result; -use turbo_tasks::primitives::StringVc; -use turbo_tasks_fs::FileSystemPathVc; - -use super::{Issue, IssueVc}; - -#[turbo_tasks::value(shared)] -pub struct PackageJsonIssue { - pub path: FileSystemPathVc, - pub error_message: String, -} - -#[turbo_tasks::value_impl] -impl Issue for PackageJsonIssue { - #[turbo_tasks::function] - fn title(&self) -> StringVc { - StringVc::cell("Error parsing package.json file".to_string()) - } - - #[turbo_tasks::function] - fn category(&self) -> StringVc { - StringVc::cell("parse".to_string()) - } - - #[turbo_tasks::function] - fn context(&self) -> FileSystemPathVc { - self.path - } - - #[turbo_tasks::function] - fn description(&self) -> StringVc { - StringVc::cell(self.error_message.clone()) - } -} diff --git a/crates/turbopack-core/src/lib.rs b/crates/turbopack-core/src/lib.rs index 280e10a93eef6..6fa94581689ad 100644 --- a/crates/turbopack-core/src/lib.rs +++ b/crates/turbopack-core/src/lib.rs @@ -16,6 +16,7 @@ pub mod error; pub mod ident; pub mod introspect; pub mod issue; +pub mod package_json; pub mod plugin; pub mod proxied_asset; pub mod reference; diff --git a/crates/turbopack-core/src/package_json.rs b/crates/turbopack-core/src/package_json.rs new file mode 100644 index 0000000000000..bc299002f2724 --- /dev/null +++ b/crates/turbopack-core/src/package_json.rs @@ -0,0 +1,85 @@ +use std::{fmt::Write, ops::Deref}; + +use anyhow::Result; +use serde_json::Value as JsonValue; +use turbo_tasks::{debug::ValueDebugFormat, primitives::StringVc, trace::TraceRawVcs}; +use turbo_tasks_fs::{FileContent, FileJsonContent, FileJsonContentReadRef, FileSystemPathVc}; + +use super::issue::{Issue, IssueVc}; + +/// PackageJson wraps the parsed JSON content of a `package.json` file. The +/// wrapper is necessary so that we can reference the [FileJsonContent]'s inner +/// [serde_json::Value] without cloning it. +#[derive(PartialEq, Eq, ValueDebugFormat, TraceRawVcs)] +pub struct PackageJson(FileJsonContentReadRef); + +impl Deref for PackageJson { + type Target = JsonValue; + fn deref(&self) -> &Self::Target { + match &*self.0 { + FileJsonContent::Content(json) => json, + _ => unreachable!("PackageJson is guaranteed to hold Content"), + } + } +} + +#[turbo_tasks::value(transparent, serialization = "none")] +pub struct OptionPackageJson(Option); + +/// Reads a package.json file (if it exists). If the file is unparseable, it +/// emits a useful [Issue] pointing to the invalid location. 
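Concretely, the triage this doc comment describes (file found, file missing, file unparseable) can be sketched outside turbo-tasks with plain std and serde_json. This is an illustrative analogue only; the real function below is driven by the turbo-tasks runtime and reports parse failures through a PackageJsonIssue instead of printing:

    use std::{fs, io::ErrorKind, path::Path};

    use anyhow::Result;
    use serde_json::Value;

    // None covers both "no file" and "invalid JSON"; the turbo-tasks version
    // additionally surfaces the parse error as a PackageJsonIssue.
    fn read_package_json(path: &Path) -> Result<Option<Value>> {
        let text = match fs::read_to_string(path) {
            Ok(text) => text,
            Err(e) if e.kind() == ErrorKind::NotFound => return Ok(None),
            Err(e) => return Err(e.into()),
        };
        match serde_json::from_str(&text) {
            Ok(json) => Ok(Some(json)),
            Err(e) => {
                // serde_json errors already carry line/column info; the patch
                // builds an even richer message via write_with_content.
                eprintln!("package.json is not parseable: invalid JSON: {e}");
                Ok(None)
            }
        }
    }

    fn main() -> Result<()> {
        if let Some(pkg) = read_package_json(Path::new("package.json"))? {
            println!("name = {:?}", pkg.get("name"));
        }
        Ok(())
    }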
+#[turbo_tasks::function] +pub async fn read_package_json(path: FileSystemPathVc) -> Result { + let read = path.read_json().await?; + match &*read { + FileJsonContent::Content(_) => Ok(OptionPackageJson(Some(PackageJson(read))).cell()), + FileJsonContent::NotFound => Ok(OptionPackageJson(None).cell()), + FileJsonContent::Unparseable(e) => { + let mut message = "package.json is not parseable: invalid JSON: ".to_string(); + if let FileContent::Content(content) = &*path.read().await? { + let text = content.content().to_str()?; + e.write_with_content(&mut message, &text)?; + } else { + write!(message, "{}", e)?; + } + PackageJsonIssue { + error_message: message, + path, + } + .cell() + .as_issue() + .emit(); + Ok(OptionPackageJson(None).cell()) + } + } +} + +/// Reusable Issue struct representing any problem with a `package.json` +#[turbo_tasks::value(shared)] +pub struct PackageJsonIssue { + pub path: FileSystemPathVc, + pub error_message: String, +} + +#[turbo_tasks::value_impl] +impl Issue for PackageJsonIssue { + #[turbo_tasks::function] + fn title(&self) -> StringVc { + StringVc::cell("Error parsing package.json file".to_string()) + } + + #[turbo_tasks::function] + fn category(&self) -> StringVc { + StringVc::cell("parse".to_string()) + } + + #[turbo_tasks::function] + fn context(&self) -> FileSystemPathVc { + self.path + } + + #[turbo_tasks::function] + fn description(&self) -> StringVc { + StringVc::cell(self.error_message.clone()) + } +} diff --git a/crates/turbopack-core/src/resolve/mod.rs b/crates/turbopack-core/src/resolve/mod.rs index a0666aa735c50..1562ed9121d6b 100644 --- a/crates/turbopack-core/src/resolve/mod.rs +++ b/crates/turbopack-core/src/resolve/mod.rs @@ -5,7 +5,7 @@ use std::{ pin::Pin, }; -use anyhow::{anyhow, Result}; +use anyhow::{anyhow, bail, Result}; use serde_json::Value as JsonValue; use turbo_tasks::{ primitives::{BoolVc, StringVc, StringsVc}, @@ -13,24 +13,22 @@ use turbo_tasks::{ }; use turbo_tasks_fs::{ util::{normalize_path, normalize_request}, - FileJsonContent, FileJsonContentVc, FileSystemEntryType, FileSystemPathVc, RealPathResult, + FileSystemEntryType, FileSystemPathVc, RealPathResult, }; use self::{ - exports::ExportsField, options::{ resolve_modules_options, ImportMapResult, ResolveInPackage, ResolveIntoPackage, ResolveModules, ResolveModulesOptionsVc, ResolveOptionsVc, }, parse::{Request, RequestVc}, pattern::QueryMapVc, + remap::{ExportsField, ImportsField}, }; use crate::{ asset::{Asset, AssetOptionVc, AssetVc, AssetsVc}, - issue::{ - package_json::{PackageJsonIssue, PackageJsonIssueVc}, - resolve::{ResolvingIssue, ResolvingIssueVc}, - }, + issue::resolve::{ResolvingIssue, ResolvingIssueVc}, + package_json::{read_package_json, PackageJsonIssue, PackageJsonIssueVc}, reference::{AssetReference, AssetReferenceVc}, reference_type::ReferenceType, resolve::{ @@ -42,18 +40,18 @@ use crate::{ }; mod alias_map; -pub(crate) mod exports; pub mod node; pub mod options; pub mod origin; pub mod parse; pub mod pattern; pub mod plugin; +pub(crate) mod remap; pub use alias_map::{ AliasMap, AliasMapIntoIter, AliasMapLookupIterator, AliasMatch, AliasPattern, AliasTemplate, }; -pub use exports::{ExportsValue, ResolveAliasMap, ResolveAliasMapVc}; +pub use remap::{ResolveAliasMap, ResolveAliasMapVc, SubpathValue}; use crate::issue::{IssueSeverity, IssueSeverityVc, OptionIssueSourceVc}; @@ -389,32 +387,71 @@ enum ExportsFieldResult { None, } +/// Extracts the "exports" field out of the nearest package.json, parsing it +/// into an appropriate [AliasMap] for lookups. 
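The extracted value can take four top-level shapes, and the `try_into` call in the function below (backed by ExportsField's TryFrom impl later in this diff) has to handle all of them. A standalone sketch of the shape dispatch, using serde_json only; note that a real exports object may also mix subpath and condition keys, which this simplification labels as a map of subpaths:

    use serde_json::{json, Value};

    // Top-level shapes an "exports" value can take.
    fn shape(v: &Value) -> &'static str {
        match v {
            Value::String(_) => "single result for the root subpath",
            Value::Object(o) if o.keys().any(|k| k.starts_with('.')) => "map of subpaths",
            Value::Object(_) => "conditions for the root subpath",
            Value::Array(_) => "alternatives",
            _ => "invalid",
        }
    }

    fn main() {
        assert_eq!(shape(&json!("./index.js")), "single result for the root subpath");
        assert_eq!(shape(&json!({ ".": "./index.js" })), "map of subpaths");
        assert_eq!(shape(&json!({ "import": "./a.mjs" })), "conditions for the root subpath");
        assert_eq!(shape(&json!([{ "node": "./n.js" }, "./i.js"])), "alternatives");
    }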
 #[turbo_tasks::function]
-async fn exports_field(
-    package_json_path: FileSystemPathVc,
-    package_json: FileJsonContentVc,
-    field: &str,
-) -> Result<ExportsFieldResultVc> {
-    if let FileJsonContent::Content(package_json) = &*package_json.await? {
-        let field_value = &package_json[field];
-        if let serde_json::Value::Null = field_value {
-            return Ok(ExportsFieldResult::None.into());
+async fn exports_field(package_json_path: FileSystemPathVc) -> Result<ExportsFieldResultVc> {
+    let read = read_package_json(package_json_path).await?;
+    let package_json = match &*read {
+        Some(json) => json,
+        None => return Ok(ExportsFieldResult::None.cell()),
+    };
+
+    let Some(exports) = package_json.get("exports") else {
+        return Ok(ExportsFieldResult::None.cell());
+    };
+    match exports.try_into() {
+        Ok(exports) => Ok(ExportsFieldResult::Some(exports).cell()),
+        Err(err) => {
+            let issue: PackageJsonIssueVc = PackageJsonIssue {
+                path: package_json_path,
+                error_message: err.to_string(),
+            }
+            .into();
+            issue.as_issue().emit();
+            Ok(ExportsFieldResult::None.cell())
         }
-        let exports_field: Result<ExportsField> = field_value.try_into();
-        match exports_field {
-            Ok(exports_field) => Ok(ExportsFieldResult::Some(exports_field).into()),
-            Err(err) => {
-                let issue: PackageJsonIssueVc = PackageJsonIssue {
-                    path: package_json_path,
-                    error_message: err.to_string(),
-                }
-                .into();
-                issue.as_issue().emit();
-                Ok(ExportsFieldResult::None.into())
+    }
+}
+
+#[turbo_tasks::value(shared)]
+enum ImportsFieldResult {
+    Some(
+        #[turbo_tasks(debug_ignore, trace_ignore)] ImportsField,
+        FileSystemPathVc,
+    ),
+    None,
+}
+
+/// Extracts the "imports" field out of the nearest package.json, parsing it
+/// into an appropriate [AliasMap] for lookups.
+#[turbo_tasks::function]
+async fn imports_field(context: FileSystemPathVc) -> Result<ImportsFieldResultVc> {
+    let package_json_context = find_context_file(context, package_json()).await?;
+    let FindContextFileResult::Found(package_json_path, _refs) = &*package_json_context else {
+        return Ok(ImportsFieldResult::None.cell());
+    };
+
+    let read = read_package_json(*package_json_path).await?;
+    let package_json = match &*read {
+        Some(json) => json,
+        None => return Ok(ImportsFieldResult::None.cell()),
+    };
+
+    let Some(imports) = package_json.get("imports") else {
+        return Ok(ImportsFieldResult::None.cell());
+    };
+    match imports.try_into() {
+        Ok(imports) => Ok(ImportsFieldResult::Some(imports, *package_json_path).cell()),
+        Err(err) => {
+            let issue: PackageJsonIssueVc = PackageJsonIssue {
+                path: *package_json_path,
+                error_message: err.to_string(),
             }
+            .into();
+            issue.as_issue().emit();
+            Ok(ImportsFieldResult::None.cell())
         }
-    } else {
-        Ok(ExportsFieldResult::None.into())
     }
 }
@@ -737,12 +774,7 @@ async fn resolve_internal(
                     .push(resolved(*path, context, request, options_value, options).await?);
             }
             PatternMatch::Directory(_, path) => {
-                let package_json_path = path.join("package.json");
-                let package_json = package_json_path.read_json();
-                results.push(
-                    resolve_into_folder(*path, package_json, package_json_path, options)
-                        .await?,
-                );
+                results.push(resolve_into_folder(*path, options).await?);
             }
         }
     }
@@ -817,19 +849,28 @@ async fn resolve_internal(
             ResolveResult::unresolveable().into()
         }
         Request::Empty => ResolveResult::unresolveable().into(),
-        Request::PackageInternal { path: _ } => {
-            let issue: ResolvingIssueVc = ResolvingIssue {
-                severity: IssueSeverity::Error.cell(),
-                request_type: "package internal import: not implemented yet".to_string(),
-                request,
+        Request::PackageInternal { path } => {
+            let options_value = options.await?;
+            let
(conditions, unspecified_conditions) = options_value + .in_package + .iter() + .find_map(|item| match item { + ResolveInPackage::ImportsField { + conditions, + unspecified_conditions, + } => Some((Cow::Borrowed(conditions), *unspecified_conditions)), + _ => None, + }) + .unwrap_or_else(|| (Default::default(), ConditionValue::Unset)); + resolve_package_internal_with_imports_field( context, - resolve_options: options, - error_message: Some("package internal imports are not implemented yet".to_string()), - source: OptionIssueSourceVc::none(), - } - .into(); - issue.as_issue().emit(); - ResolveResult::unresolveable().into() + request, + options, + path, + &conditions, + &unspecified_conditions, + ) + .await? } Request::Uri { protocol, @@ -872,10 +913,9 @@ async fn resolve_internal( async fn resolve_into_folder( package_path: FileSystemPathVc, - package_json: FileJsonContentVc, - package_json_path: FileSystemPathVc, options: ResolveOptionsVc, ) -> Result { + let package_json_path = package_path.join("package.json"); let options_value = options.await?; for resolve_into_package in options_value.into_package.iter() { match resolve_into_package { @@ -891,7 +931,7 @@ async fn resolve_into_folder( return Ok(resolve_internal(package_path, request, options)); } ResolveIntoPackage::MainField(name) => { - if let FileJsonContent::Content(package_json) = &*package_json.await? { + if let Some(package_json) = &*read_package_json(package_json_path).await? { if let Some(field_value) = package_json[name].as_str() { let request = RequestVc::parse(Value::new(normalize_request(field_value).into())); @@ -907,18 +947,17 @@ async fn resolve_into_folder( return Ok(result.into()); } } - } + }; } ResolveIntoPackage::ExportsField { - field, conditions, unspecified_conditions, } => { if let ExportsFieldResult::Some(exports_field) = - &*exports_field(package_json_path, package_json, field).await? + &*exports_field(package_json_path).await? { // other options do not apply anymore when an exports field exist - return handle_exports_field( + return handle_exports_imports_field( package_path, package_json_path, options, @@ -946,12 +985,13 @@ async fn resolve_module_request( for in_package in options_value.in_package.iter() { match in_package { ResolveInPackage::AliasField(field) => { - if let FindContextFileResult::Found(package_json, refs) = + if let FindContextFileResult::Found(package_json_path, refs) = &*find_context_file(context, package_json()).await? { - if let FileJsonContent::Content(package) = &*package_json.read_json().await? { - if let Some(field_value) = package[field].as_object() { - let package_path = package_json.parent(); + let read = read_package_json(*package_json_path).await?; + if let Some(package_json) = &*read { + if let Some(field_value) = package_json[field].as_object() { + let package_path = package_json_path.parent(); let full_pattern = Pattern::concat([module.to_string().into(), path.clone()]); if let Some(request) = full_pattern.into_string() { @@ -961,7 +1001,7 @@ async fn resolve_module_request( refs.clone(), package_path, options, - *package_json, + *package_json_path, &request, field, ) @@ -972,6 +1012,11 @@ async fn resolve_module_request( } } } + ResolveInPackage::ImportsField { .. } => { + // resolve_module_request is called when importing a node + // module, not a PackageInternal one, so the imports field + // doesn't apply. + } } } @@ -996,13 +1041,8 @@ async fn resolve_module_request( // "[baseUrl]/foo/bar" or "[baseUrl]/node_modules/foo/bar", and we'll need to // try both. 
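The new Request::PackageInternal arm above hands `#`-prefixed specifiers to resolve_package_internal_with_imports_field, which walks the imports map and takes the first result whose condition is active. A self-contained sketch of that walk, simplified to exact `#` keys with no `*` patterns or nested condition objects (all names are local to this example):

    use serde_json::{json, Value};

    // First-match-wins walk over a package.json "imports" map: exact "#" keys
    // only, with an object value treated as an ordered condition list.
    fn resolve_import(imports: &Value, specifier: &str, active: &[&str]) -> Option<String> {
        match imports.get(specifier)? {
            Value::String(target) => Some(target.clone()),
            Value::Object(conditions) => conditions.iter().find_map(|(cond, v)| {
                if cond == "default" || active.contains(&cond.as_str()) {
                    v.as_str().map(str::to_string)
                } else {
                    None
                }
            }),
            _ => None,
        }
    }

    fn main() {
        let imports = json!({
            "#foo": "./foo.js",
            "#conditional": { "import": "./import.mjs", "require": "./require.cjs" }
        });
        assert_eq!(resolve_import(&imports, "#foo", &["import"]).as_deref(), Some("./foo.js"));
        assert_eq!(
            resolve_import(&imports, "#conditional", &["require"]).as_deref(),
            Some("./require.cjs")
        );
    }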
for package_path in &result.packages { - let package_json_path = package_path.join("package.json"); - let package_json = package_json_path.read_json(); if is_match { - results.push( - resolve_into_folder(*package_path, package_json, package_json_path, options) - .await?, - ); + results.push(resolve_into_folder(*package_path, options).await?); } if could_match_others { for resolve_into_package in options_value.into_package.iter() { @@ -1010,27 +1050,19 @@ async fn resolve_module_request( ResolveIntoPackage::Default(_) | ResolveIntoPackage::MainField(_) => { // doesn't affect packages with subpath if path.is_match("/") { - results.push( - resolve_into_folder( - *package_path, - package_json, - package_json_path, - options, - ) - .await?, - ); + results.push(resolve_into_folder(*package_path, options).await?); } } ResolveIntoPackage::ExportsField { - field, conditions, unspecified_conditions, } => { + let package_json_path = package_path.join("package.json"); if let ExportsFieldResult::Some(exports_field) = - &*exports_field(package_json_path, package_json, field).await? + &*exports_field(package_json_path).await? { if let Some(path) = path.clone().into_string() { - results.push(handle_exports_field( + results.push(handle_exports_imports_field( *package_path, package_json_path, options, @@ -1166,15 +1198,16 @@ async fn resolved( options: ResolveOptionsVc, ) -> Result { let RealPathResult { path, symlinks } = &*fs_path.realpath_with_links().await?; - for resolve_in in in_package.iter() { - match resolve_in { + for in_package in in_package.iter() { + match in_package { ResolveInPackage::AliasField(field) => { - if let FindContextFileResult::Found(package_json, refs) = + if let FindContextFileResult::Found(package_json_path, refs) = &*find_context_file(fs_path.parent(), package_json()).await? { - if let FileJsonContent::Content(package) = &*package_json.read_json().await? { - if let Some(field_value) = package[field].as_object() { - let package_path = package_json.parent(); + let read = read_package_json(*package_json_path).await?; + if let Some(package_json) = &*read { + if let Some(field_value) = package_json[field].as_object() { + let package_path = package_json_path.parent(); if let Some(rel_path) = package_path.await?.get_relative_path_to(&*fs_path.await?) { @@ -1184,7 +1217,7 @@ async fn resolved( refs.clone(), package_path, options, - *package_json, + *package_json_path, &rel_path, field, ) @@ -1195,6 +1228,10 @@ async fn resolved( } } } + ResolveInPackage::ImportsField { .. } => { + // resolved is called when importing a relative path, not a + // PackageInternal one, so the imports field doesn't apply. 
+            }
         }
     }
@@ -1225,21 +1262,21 @@ async fn resolved(
         .into())
 }
 
-fn handle_exports_field(
+fn handle_exports_imports_field(
     package_path: FileSystemPathVc,
-    package_json: FileSystemPathVc,
+    package_json_path: FileSystemPathVc,
     options: ResolveOptionsVc,
-    exports_field: &ExportsField,
+    exports_imports_field: &AliasMap<SubpathValue>,
     path: &str,
     conditions: &BTreeMap<String, ConditionValue>,
     unspecified_conditions: &ConditionValue,
 ) -> Result<ResolveResultVc> {
     let mut results = Vec::new();
     let mut conditions_state = HashMap::new();
-    let values = exports_field
+    let values = exports_imports_field
         .lookup(path)
         .map(AliasMatch::try_into_self)
-        .collect::<Result<Vec<Cow<'_, ExportsValue>>>>()?;
+        .collect::<Result<Vec<_>>>()?;
     for value in values.iter() {
         if value.add_results(
             conditions,
@@ -1264,10 +1301,58 @@ fn handle_exports_field(
     // other options do not apply anymore when an exports field exist
     Ok(merge_results_with_references(
         resolved_results,
-        vec![AffectingResolvingAssetReferenceVc::new(package_json).into()],
+        vec![AffectingResolvingAssetReferenceVc::new(package_json_path).into()],
     ))
 }
 
+/// Resolves a `#dep` import using the containing package.json's `imports`
+/// field. The dep may be a constant string or a pattern, and the values can be
+/// static strings or conditions like `import` or `require` to handle ESM/CJS
+/// with differently compiled files.
+async fn resolve_package_internal_with_imports_field(
+    context: FileSystemPathVc,
+    request: RequestVc,
+    resolve_options: ResolveOptionsVc,
+    pattern: &Pattern,
+    conditions: &BTreeMap<String, ConditionValue>,
+    unspecified_conditions: &ConditionValue,
+) -> Result<ResolveResultVc> {
+    let Pattern::Constant(specifier) = pattern else {
+        bail!("PackageInternal requests can only be Constant strings");
+    };
+    // https://github.com/nodejs/node/blob/1b177932/lib/internal/modules/esm/resolve.js#L615-L619
+    if specifier == "#" || specifier.starts_with("#/") || specifier.ends_with('/') {
+        let issue: ResolvingIssueVc = ResolvingIssue {
+            severity: IssueSeverity::Error.cell(),
+            context,
+            request_type: format!("package imports request: `{specifier}`"),
+            request,
+            resolve_options,
+            error_message: None,
+            source: OptionIssueSourceVc::none(),
+        }
+        .into();
+        issue.as_issue().emit();
+        return Ok(ResolveResult::unresolveable().into());
+    }
+
+    let imports_result = imports_field(context).await?;
+    let (imports, package_json_path) = match &*imports_result {
+        ImportsFieldResult::Some(i, p) => (i, p),
+        ImportsFieldResult::None => return Ok(ResolveResult::unresolveable().into()),
+    };
+
+    handle_exports_imports_field(
+        context,
+        *package_json_path,
+        resolve_options,
+        imports,
+        &specifier,
+        conditions,
+        unspecified_conditions,
+    )
+}
+
 #[turbo_tasks::value]
 pub struct AffectingResolvingAssetReference {
     path: FileSystemPathVc,
diff --git a/crates/turbopack-core/src/resolve/node.rs b/crates/turbopack-core/src/resolve/node.rs
index c5308fa7cd36c..abcf3cc85b5e8 100644
--- a/crates/turbopack-core/src/resolve/node.rs
+++ b/crates/turbopack-core/src/resolve/node.rs
@@ -1,11 +1,17 @@
 use turbo_tasks_fs::FileSystemPathVc;
 
 use super::options::{
-    ConditionValue, ResolveIntoPackage, ResolveModules, ResolveOptions, ResolveOptionsVc,
+    ConditionValue, ResolutionConditions, ResolveInPackage, ResolveIntoPackage, ResolveModules,
+    ResolveOptions, ResolveOptionsVc,
 };
 
 #[turbo_tasks::function]
 pub fn node_cjs_resolve_options(root: FileSystemPathVc) -> ResolveOptionsVc {
+    let conditions: ResolutionConditions = [
+        ("node".to_string(), ConditionValue::Set),
+        ("require".to_string(), ConditionValue::Set),
+    ]
+    .into();
     ResolveOptions {
".json".to_string(), ".node".to_string()], modules: vec![ResolveModules::Nested( @@ -14,17 +20,16 @@ pub fn node_cjs_resolve_options(root: FileSystemPathVc) -> ResolveOptionsVc { )], into_package: vec![ ResolveIntoPackage::ExportsField { - field: "exports".to_string(), - conditions: [ - ("node".to_string(), ConditionValue::Set), - ("require".to_string(), ConditionValue::Set), - ] - .into(), + conditions: conditions.clone(), unspecified_conditions: ConditionValue::Unset, }, ResolveIntoPackage::MainField("main".to_string()), ResolveIntoPackage::Default("index".to_string()), ], + in_package: vec![ResolveInPackage::ImportsField { + conditions, + unspecified_conditions: ConditionValue::Unset, + }], ..Default::default() } .cell() diff --git a/crates/turbopack-core/src/resolve/options.rs b/crates/turbopack-core/src/resolve/options.rs index a9b10b45181d0..f817561681c53 100644 --- a/crates/turbopack-core/src/resolve/options.rs +++ b/crates/turbopack-core/src/resolve/options.rs @@ -34,7 +34,7 @@ pub enum ResolveModules { Registry(FileSystemPathVc, LockedVersionsVc), } -#[derive(TraceRawVcs, Hash, PartialEq, Eq, Clone, Debug, Serialize, Deserialize)] +#[derive(TraceRawVcs, Hash, PartialEq, Eq, Clone, Copy, Debug, Serialize, Deserialize)] pub enum ConditionValue { Set, Unset, @@ -51,6 +51,8 @@ impl From for ConditionValue { } } +pub type ResolutionConditions = BTreeMap; + /// The different ways to resolve a package, as described in package.json. #[derive(TraceRawVcs, Hash, PartialEq, Eq, Clone, Debug, Serialize, Deserialize)] pub enum ResolveIntoPackage { @@ -58,8 +60,7 @@ pub enum ResolveIntoPackage { /// /// [exports]: https://nodejs.org/api/packages.html#exports ExportsField { - field: String, - conditions: BTreeMap, + conditions: ResolutionConditions, unspecified_conditions: ConditionValue, }, /// Using a [main]-like field (e.g. [main], [module], [browser], etc.). @@ -77,6 +78,13 @@ pub enum ResolveIntoPackage { pub enum ResolveInPackage { /// Using a alias field which allows to map requests AliasField(String), + /// Using the [imports] field. + /// + /// [imports]: https://nodejs.org/api/packages.html#imports + ImportsField { + conditions: ResolutionConditions, + unspecified_conditions: ConditionValue, + }, } #[turbo_tasks::value(shared)] diff --git a/crates/turbopack-core/src/resolve/exports.rs b/crates/turbopack-core/src/resolve/remap.rs similarity index 64% rename from crates/turbopack-core/src/resolve/exports.rs rename to crates/turbopack-core/src/resolve/remap.rs index 2136ba8784dff..484dabebd1648 100644 --- a/crates/turbopack-core/src/resolve/exports.rs +++ b/crates/turbopack-core/src/resolve/remap.rs @@ -1,54 +1,91 @@ -use std::collections::{BTreeMap, HashMap}; +use std::{ + collections::{BTreeMap, HashMap}, + fmt::Display, + ops::Deref, +}; -use anyhow::{anyhow, bail, Result}; +use anyhow::{bail, Result}; use indexmap::IndexMap; use serde::{Deserialize, Serialize}; use serde_json::Value; use super::{ - alias_map::{AliasMap, AliasMapIter, AliasMapLookupIterator, AliasPattern, AliasTemplate}, + alias_map::{AliasMap, AliasMapIter, AliasPattern, AliasTemplate}, options::ConditionValue, }; -/// The result an "exports" field describes. Can represent multiple +/// A small helper type to differentiate parsing exports and imports fields. 
+#[derive(Copy, Clone)]
+enum ExportImport {
+    Export,
+    Import,
+}
+
+impl Display for ExportImport {
+    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+        match self {
+            Self::Export => f.write_str("export"),
+            Self::Import => f.write_str("import"),
+        }
+    }
+}
+
+/// The result an "exports"/"imports" field describes. Can represent multiple
 /// alternatives, conditional result, ignored result (null mapping) and a plain
 /// result.
 #[derive(Clone, PartialEq, Eq, Hash, Debug, Serialize, Deserialize)]
-pub enum ExportsValue {
-    Alternatives(Vec<ExportsValue>),
-    Conditional(Vec<(String, ExportsValue)>),
+pub enum SubpathValue {
+    /// Alternative subpaths, defined with `"path": ["other1", "other2"]`,
+    /// allow specifying multiple possible remappings to be tried in order,
+    /// e.g. when conditions didn't match or a particular path wasn't found.
+    Alternatives(Vec<SubpathValue>),
+
+    /// Conditional subpaths, defined with `"path": { "condition": "other" }`,
+    /// allow remapping based on certain predefined conditions. E.g., when using
+    /// ESM import syntax, the `import` condition allows you to remap to a
+    /// file that uses ESM syntax.
+    /// Node defines several conditions in https://nodejs.org/api/packages.html#conditional-exports
+    /// TODO: Should this use an enum of predefined keys?
+    Conditional(Vec<(String, SubpathValue)>),
+
+    /// A result subpath, defined with `"path": "other"`, remaps imports of
+    /// `path` to `other`.
     Result(String),
+
+    /// An excluded subpath, defined with `"path": null`, prevents importing
+    /// this subpath.
     Excluded,
 }
 
-impl AliasTemplate for ExportsValue {
+impl AliasTemplate for SubpathValue {
     type Output<'a> = Result<Self> where Self: 'a;
 
     fn replace(&self, capture: &str) -> Result<Self> {
         Ok(match self {
-            ExportsValue::Alternatives(list) => ExportsValue::Alternatives(
+            SubpathValue::Alternatives(list) => SubpathValue::Alternatives(
                 list.iter()
                     .map(|value| value.replace(capture))
                     .collect::<Result<Vec<_>>>()?,
             ),
-            ExportsValue::Conditional(list) => ExportsValue::Conditional(
+            SubpathValue::Conditional(list) => SubpathValue::Conditional(
                 list.iter()
                     .map(|(condition, value)| Ok((condition.clone(), value.replace(capture)?)))
                     .collect::<Result<Vec<_>>>()?,
             ),
-            ExportsValue::Result(value) => ExportsValue::Result(value.replace('*', capture)),
-            ExportsValue::Excluded => ExportsValue::Excluded,
+            SubpathValue::Result(value) => SubpathValue::Result(value.replace('*', capture)),
+            SubpathValue::Excluded => SubpathValue::Excluded,
         })
     }
 }
 
-impl ExportsValue {
+impl SubpathValue {
     /// Returns an iterator over all leaf results.
     fn results_mut(&mut self) -> ResultsIterMut<'_> {
         ResultsIterMut { stack: vec![self] }
     }
 
-    /// Walks the [ExportsValue] and adds results to the `target` vector. It
+    /// Walks the [SubpathValue] and adds results to the `target` vector. It
     /// uses the `conditions` to skip or enter conditional results.
     /// The state of conditions is stored within `condition_overrides`, which is
     /// also exposed to the consumer.
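The four variants correspond one-to-one to JSON value shapes; the actual parsing lives in SubpathValue::try_new in the next hunk. A standalone sketch of just that shape dispatch, without try_new's recursion and key validation:

    use serde_json::{json, Value};

    // Which variant a given JSON shape becomes (no recursion or key checks here).
    fn classify(value: &Value) -> &'static str {
        match value {
            Value::Null => "Excluded",
            Value::String(_) => "Result",
            Value::Array(_) => "Alternatives",
            Value::Object(_) => "Conditional",
            Value::Bool(_) | Value::Number(_) => "rejected with an error",
        }
    }

    fn main() {
        assert_eq!(classify(&json!(null)), "Excluded");
        assert_eq!(classify(&json!("./a.js")), "Result");
        assert_eq!(classify(&json!(["./a.js", "./b.js"])), "Alternatives");
        assert_eq!(classify(&json!({ "import": "./a.mjs" })), "Conditional");
    }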
@@ -60,7 +97,7 @@
         target: &mut Vec<&'a str>,
     ) -> bool {
         match self {
-            ExportsValue::Alternatives(list) => {
+            SubpathValue::Alternatives(list) => {
                 for value in list {
                     if value.add_results(
                         conditions,
@@ -73,7 +110,7 @@
                 }
                 false
             }
-            ExportsValue::Conditional(list) => {
+            SubpathValue::Conditional(list) => {
                 for (condition, value) in list {
                     let condition_value = if condition == "default" {
                         &ConditionValue::Set
@@ -112,17 +149,48 @@
                 }
                 false
             }
-            ExportsValue::Result(r) => {
+            SubpathValue::Result(r) => {
                 target.push(r);
                 true
             }
-            ExportsValue::Excluded => true,
+            SubpathValue::Excluded => true,
+        }
+    }
+
+    fn try_new(value: &Value, ty: ExportImport) -> Result<Self> {
+        match value {
+            Value::Null => Ok(SubpathValue::Excluded),
+            Value::String(s) => Ok(SubpathValue::Result(s.to_string())),
+            Value::Number(_) => bail!("numeric values are invalid in {ty}s field entries"),
+            Value::Bool(_) => bail!("boolean values are invalid in {ty}s field entries"),
+            Value::Object(object) => Ok(SubpathValue::Conditional(
+                object
+                    .iter()
+                    .map(|(key, value)| {
+                        if key.starts_with('.') {
+                            bail!(
+                                "invalid key \"{}\" in an {ty} field conditions object. Did you \
+                                 mean to place this request at a higher level?",
+                                key
+                            );
+                        }
+
+                        Ok((key.to_string(), SubpathValue::try_new(value, ty)?))
+                    })
+                    .collect::<Result<Vec<_>>>()?,
+            )),
+            Value::Array(array) => Ok(SubpathValue::Alternatives(
+                array
+                    .iter()
+                    .map(|value| SubpathValue::try_new(value, ty))
+                    .collect::<Result<Vec<_>>>()?,
+            )),
+        }
     }
 }
 
 struct ResultsIterMut<'a> {
-    stack: Vec<&'a mut ExportsValue>,
+    stack: Vec<&'a mut SubpathValue>,
 }
 
 impl<'a> Iterator for ResultsIterMut<'a> {
@@ -131,65 +199,27 @@
     fn next(&mut self) -> Option<Self::Item> {
         while let Some(value) = self.stack.pop() {
             match value {
-                ExportsValue::Alternatives(list) => {
+                SubpathValue::Alternatives(list) => {
                     for value in list {
                         self.stack.push(value);
                     }
                 }
-                ExportsValue::Conditional(list) => {
+                SubpathValue::Conditional(list) => {
                     for (_, value) in list {
                         self.stack.push(value);
                     }
                 }
-                ExportsValue::Result(r) => return Some(r),
-                ExportsValue::Excluded => {}
+                SubpathValue::Result(r) => return Some(r),
+                SubpathValue::Excluded => {}
             }
         }
         None
     }
 }
 
-impl TryFrom<&Value> for ExportsValue {
-    type Error = anyhow::Error;
-
-    fn try_from(value: &Value) -> Result<Self> {
-        match value {
-            Value::Null => Ok(ExportsValue::Excluded),
-            Value::String(s) => Ok(ExportsValue::Result(s.to_string())),
-            Value::Number(_) => Err(anyhow!(
-                "numeric values are invalid in exports field entries"
-            )),
-            Value::Bool(_) => Err(anyhow!(
-                "boolean values are invalid in exports field entries"
-            )),
-            Value::Object(object) => Ok(ExportsValue::Conditional(
-                object
-                    .iter()
-                    .map(|(key, value)| {
-                        if key.starts_with('.') {
-                            bail!(
-                                "invalid key \"{}\" in an export field conditions object. Did you \
-                                 mean to place this request at a higher level?",
-                                key
-                            );
-                        }
-                        Ok((key.to_string(), value.try_into()?))
-                    })
-                    .collect::<Result<Vec<_>>>()?,
-            )),
-            Value::Array(array) => Ok(ExportsValue::Alternatives(
-                array
-                    .iter()
-                    .map(|value| value.try_into())
-                    .collect::<Result<Vec<_>>>()?,
-            )),
-        }
-    }
-}
-
 /// Content of an "exports" field in a package.json
 #[derive(PartialEq, Eq, Serialize, Deserialize)]
-pub struct ExportsField(AliasMap<ExportsValue>);
+pub struct ExportsField(AliasMap<SubpathValue>);
 
 impl TryFrom<&Value> for ExportsField {
     type Error = anyhow::Error;
@@ -212,7 +242,7 @@ impl TryFrom<&Value> for ExportsField {
                         continue;
                     }
 
-                    let mut value: ExportsValue = value.try_into()?;
+                    let mut value = SubpathValue::try_new(value, ExportImport::Export)?;
 
                     let pattern = if is_folder_shorthand(key) {
                         expand_folder_shorthand(key, &mut value)?
@@ -226,10 +256,15 @@ impl TryFrom<&Value> for ExportsField {
                 if !conditions.is_empty() {
                     map.insert(
                         AliasPattern::Exact(".".to_string()),
-                        ExportsValue::Conditional(
+                        SubpathValue::Conditional(
                             conditions
                                 .into_iter()
-                                .map(|(key, value)| Ok((key.to_string(), value.try_into()?)))
+                                .map(|(key, value)| {
+                                    Ok((
+                                        key.to_string(),
+                                        SubpathValue::try_new(value, ExportImport::Export)?,
+                                    ))
+                                })
                                 .collect::<Result<Vec<_>>>()?,
                         ),
                     );
@@ -241,7 +276,7 @@ impl TryFrom<&Value> for ExportsField {
                 let mut map = AliasMap::new();
                 map.insert(
                     AliasPattern::exact("."),
-                    ExportsValue::Result(string.to_string()),
+                    SubpathValue::Result(string.to_string()),
                 );
                 map
             }
@@ -252,10 +287,10 @@ impl TryFrom<&Value> for ExportsField {
                     // This allows for more complex patterns than the spec allows, since we accept
                     // the following:
                     // [{ "node": "./node.js", "default": "./index.js" }, "./index.js"]
-                    ExportsValue::Alternatives(
+                    SubpathValue::Alternatives(
                         array
                             .iter()
-                            .map(|value| value.try_into())
+                            .map(|value| SubpathValue::try_new(value, ExportImport::Export))
                             .collect::<Result<Vec<_>>>()?,
                     ),
                 );
@@ -269,6 +304,50 @@ impl TryFrom<&Value> for ExportsField {
     }
 }
 
+impl Deref for ExportsField {
+    type Target = AliasMap<SubpathValue>;
+    fn deref(&self) -> &Self::Target {
+        &self.0
+    }
+}
+
+/// Content of an "imports" field in a package.json
+#[derive(PartialEq, Eq, Serialize, Deserialize)]
+pub struct ImportsField(AliasMap<SubpathValue>);
+
+impl TryFrom<&Value> for ImportsField {
+    type Error = anyhow::Error;
+
+    fn try_from(value: &Value) -> Result<Self> {
+        // The "imports" field must be an object.
+        // https://nodejs.org/api/packages.html#imports
+        let map = match value {
+            Value::Object(object) => {
+                let mut map = AliasMap::new();
+
+                for (key, value) in object.iter() {
+                    if !key.starts_with('#') {
+                        bail!("imports key \"{key}\" must begin with a '#'")
+                    }
+                    let value = SubpathValue::try_new(value, ExportImport::Import)?;
+                    map.insert(AliasPattern::parse(key), value);
+                }
+
+                map
+            }
+            _ => bail!("\"imports\" field must be an object"),
+        };
+        Ok(Self(map))
+    }
+}
+
+impl Deref for ImportsField {
+    type Target = AliasMap<SubpathValue>;
+    fn deref(&self) -> &Self::Target {
+        &self.0
+    }
+}
+
 /// Returns true if the given string is a folder path shorthand.
 fn is_folder_shorthand(key: &str) -> bool {
     key.ends_with('/') && key.find('*').is_none()
@@ -281,7 +360,7 @@ fn is_folder_shorthand(key: &str) -> bool {
 /// This is not implemented directly by [`AliasMap`] as it is not
 /// shared behavior with the tsconfig.json `paths` field. Instead,
 /// we do the expansion here.
-fn expand_folder_shorthand(key: &str, value: &mut ExportsValue) -> Result<AliasPattern> {
+fn expand_folder_shorthand(key: &str, value: &mut SubpathValue) -> Result<AliasPattern> {
     // Transform folder patterns into wildcard patterns.
let pattern = AliasPattern::wildcard(key, ""); @@ -311,19 +390,10 @@ fn expand_folder_shorthand(key: &str, value: &mut ExportsValue) -> Result(&'a self, request: &'a str) -> AliasMapLookupIterator<'a, ExportsValue> { - self.0.lookup(request) - } -} - /// Content of an "alias" configuration #[turbo_tasks::value(shared)] #[derive(Default)] -pub struct ResolveAliasMap(#[turbo_tasks(trace_ignore)] AliasMap); +pub struct ResolveAliasMap(#[turbo_tasks(trace_ignore)] AliasMap); impl TryFrom<&IndexMap> for ResolveAliasMap { type Error = anyhow::Error; @@ -332,7 +402,7 @@ impl TryFrom<&IndexMap> for ResolveAliasMap { let mut map = AliasMap::new(); for (key, value) in object.iter() { - let mut value: ExportsValue = value.try_into()?; + let mut value = SubpathValue::try_new(value, ExportImport::Export)?; let pattern = if is_folder_shorthand(key) { expand_folder_shorthand(key, &mut value)? @@ -347,8 +417,8 @@ impl TryFrom<&IndexMap> for ResolveAliasMap { } impl<'a> IntoIterator for &'a ResolveAliasMap { - type Item = (AliasPattern, &'a ExportsValue); - type IntoIter = AliasMapIter<'a, ExportsValue>; + type Item = (AliasPattern, &'a SubpathValue); + type IntoIter = AliasMapIter<'a, SubpathValue>; fn into_iter(self) -> Self::IntoIter { (&self.0).into_iter() diff --git a/crates/turbopack-ecmascript/src/resolve/mod.rs b/crates/turbopack-ecmascript/src/resolve/mod.rs index 142b4fe78b802..f7ec1b62da843 100644 --- a/crates/turbopack-ecmascript/src/resolve/mod.rs +++ b/crates/turbopack-ecmascript/src/resolve/mod.rs @@ -11,24 +11,41 @@ use turbopack_core::{ }, resolve::{ handle_resolve_error, - options::{ConditionValue, ResolveIntoPackage, ResolveOptions, ResolveOptionsVc}, + options::{ + ConditionValue, ResolutionConditions, ResolveInPackage, ResolveIntoPackage, + ResolveOptions, ResolveOptionsVc, + }, origin::{ResolveOrigin, ResolveOriginVc}, parse::RequestVc, resolve, ResolveResultVc, }, }; +/// Retrieves the [ResolutionConditions] of both the "into" package (allowing a +/// package to control how it can be imported) and the "in" package (controlling +/// how this package imports others) resolution options, so that they can be +/// manipulated together. +fn get_condition_maps(options: &mut ResolveOptions) -> Vec<&mut ResolutionConditions> { + let mut condition_maps = Vec::with_capacity(2); + for item in options.into_package.iter_mut() { + if let ResolveIntoPackage::ExportsField { conditions, .. } = item { + condition_maps.push(conditions); + } + } + for item in options.in_package.iter_mut() { + if let ResolveInPackage::ImportsField { conditions, .. } = item { + condition_maps.push(conditions); + } + } + condition_maps +} + #[turbo_tasks::function] pub async fn apply_esm_specific_options(options: ResolveOptionsVc) -> Result { let mut options: ResolveOptions = options.await?.clone_value(); - for item in options.into_package.iter_mut() { - match item { - ResolveIntoPackage::ExportsField { conditions, .. 
} => { - conditions.insert("import".to_string(), ConditionValue::Set); - conditions.insert("require".to_string(), ConditionValue::Unset); - } - ResolveIntoPackage::MainField(_) | ResolveIntoPackage::Default(_) => {} - } + for conditions in get_condition_maps(&mut options) { + conditions.insert("import".to_string(), ConditionValue::Set); + conditions.insert("require".to_string(), ConditionValue::Unset); } Ok(options.into()) } @@ -36,14 +53,9 @@ pub async fn apply_esm_specific_options(options: ResolveOptionsVc) -> Result Result { let mut options: ResolveOptions = options.await?.clone_value(); - for item in options.into_package.iter_mut() { - match item { - ResolveIntoPackage::ExportsField { conditions, .. } => { - conditions.insert("import".to_string(), ConditionValue::Unset); - conditions.insert("require".to_string(), ConditionValue::Set); - } - ResolveIntoPackage::MainField(_) | ResolveIntoPackage::Default(_) => {} - } + for conditions in get_condition_maps(&mut options) { + conditions.insert("import".to_string(), ConditionValue::Unset); + conditions.insert("require".to_string(), ConditionValue::Set); } Ok(options.into()) } diff --git a/crates/turbopack-ecmascript/src/typescript/resolve.rs b/crates/turbopack-ecmascript/src/typescript/resolve.rs index ec3ac3bd1dd9a..9731b76a5808a 100644 --- a/crates/turbopack-ecmascript/src/typescript/resolve.rs +++ b/crates/turbopack-ecmascript/src/typescript/resolve.rs @@ -20,8 +20,8 @@ use turbopack_core::{ handle_resolve_error, node::node_cjs_resolve_options, options::{ - ConditionValue, ImportMap, ImportMapVc, ImportMapping, ResolveIntoPackage, - ResolveModules, ResolveOptionsVc, + ConditionValue, ImportMap, ImportMapVc, ImportMapping, ResolveInPackage, + ResolveIntoPackage, ResolveModules, ResolveOptionsVc, }, origin::{ResolveOrigin, ResolveOriginVc}, parse::{Request, RequestVc}, @@ -417,14 +417,12 @@ async fn apply_typescript_types_options( .drain(..) .filter_map(|into| { if let ResolveIntoPackage::ExportsField { - field, mut conditions, unspecified_conditions, } = into { conditions.insert("types".to_string(), ConditionValue::Set); Some(ResolveIntoPackage::ExportsField { - field, conditions, unspecified_conditions, }) @@ -439,6 +437,14 @@ async fn apply_typescript_types_options( resolve_options .into_package .push(ResolveIntoPackage::Default("index".to_string())); + for item in resolve_options.in_package.iter_mut() { + match item { + ResolveInPackage::ImportsField { conditions, .. 
} => { + conditions.insert("types".to_string(), ConditionValue::Set); + } + _ => {} + } + } Ok(resolve_options.into()) } diff --git a/crates/turbopack-tests/tests/snapshot/imports/subpath-imports/input/dep/index.js b/crates/turbopack-tests/tests/snapshot/imports/subpath-imports/input/dep/index.js new file mode 100644 index 0000000000000..f1e020421cbfe --- /dev/null +++ b/crates/turbopack-tests/tests/snapshot/imports/subpath-imports/input/dep/index.js @@ -0,0 +1 @@ +export default "dep"; diff --git a/crates/turbopack-tests/tests/snapshot/imports/subpath-imports/input/dep/package.json b/crates/turbopack-tests/tests/snapshot/imports/subpath-imports/input/dep/package.json new file mode 100644 index 0000000000000..d814501e55981 --- /dev/null +++ b/crates/turbopack-tests/tests/snapshot/imports/subpath-imports/input/dep/package.json @@ -0,0 +1,3 @@ +{ + "name": "dep" +} diff --git a/crates/turbopack-tests/tests/snapshot/imports/subpath-imports/input/foo.js b/crates/turbopack-tests/tests/snapshot/imports/subpath-imports/input/foo.js new file mode 100644 index 0000000000000..60c6c8d8b04f9 --- /dev/null +++ b/crates/turbopack-tests/tests/snapshot/imports/subpath-imports/input/foo.js @@ -0,0 +1 @@ +export default "foo"; diff --git a/crates/turbopack-tests/tests/snapshot/imports/subpath-imports/input/import.mjs b/crates/turbopack-tests/tests/snapshot/imports/subpath-imports/input/import.mjs new file mode 100644 index 0000000000000..7d3341883b8e4 --- /dev/null +++ b/crates/turbopack-tests/tests/snapshot/imports/subpath-imports/input/import.mjs @@ -0,0 +1 @@ +export default "import"; diff --git a/crates/turbopack-tests/tests/snapshot/imports/subpath-imports/input/index.js b/crates/turbopack-tests/tests/snapshot/imports/subpath-imports/input/index.js new file mode 100644 index 0000000000000..accb6aeaa8182 --- /dev/null +++ b/crates/turbopack-tests/tests/snapshot/imports/subpath-imports/input/index.js @@ -0,0 +1,7 @@ +import foo from "#foo"; +import dep from "#dep"; +import pattern from "#pattern/pat.js"; +import conditionalImport from "#conditional"; +const conditionalRequire = require("#conditional"); + +console.log(foo, dep, pattern, conditionalImport, conditionalRequire); diff --git a/crates/turbopack-tests/tests/snapshot/imports/subpath-imports/input/package.json b/crates/turbopack-tests/tests/snapshot/imports/subpath-imports/input/package.json new file mode 100644 index 0000000000000..2790b239a509c --- /dev/null +++ b/crates/turbopack-tests/tests/snapshot/imports/subpath-imports/input/package.json @@ -0,0 +1,15 @@ +{ + "name": "subpath-imports", + "imports": { + "#foo": "./foo.js", + "#dep": "dep", + "#conditional": { + "import": "./import.mjs", + "require": "./require.cjs" + }, + "#pattern/*.js": "./*.js" + }, + "dependencies": { + "dep": "./dep" + } +} diff --git a/crates/turbopack-tests/tests/snapshot/imports/subpath-imports/input/pat.js b/crates/turbopack-tests/tests/snapshot/imports/subpath-imports/input/pat.js new file mode 100644 index 0000000000000..6ffed9800b7b9 --- /dev/null +++ b/crates/turbopack-tests/tests/snapshot/imports/subpath-imports/input/pat.js @@ -0,0 +1 @@ +export default "pat"; diff --git a/crates/turbopack-tests/tests/snapshot/imports/subpath-imports/input/require.cjs b/crates/turbopack-tests/tests/snapshot/imports/subpath-imports/input/require.cjs new file mode 100644 index 0000000000000..e7da5bd67c152 --- /dev/null +++ b/crates/turbopack-tests/tests/snapshot/imports/subpath-imports/input/require.cjs @@ -0,0 +1 @@ +module.exports = "require"; diff --git 
a/crates/turbopack-tests/tests/snapshot/imports/subpath-imports/output/crates_turbopack-tests_tests_snapshot_imports_subpath-imports_input_index_31b659.js b/crates/turbopack-tests/tests/snapshot/imports/subpath-imports/output/crates_turbopack-tests_tests_snapshot_imports_subpath-imports_input_index_31b659.js new file mode 100644 index 0000000000000..53fe46a525bae --- /dev/null +++ b/crates/turbopack-tests/tests/snapshot/imports/subpath-imports/output/crates_turbopack-tests_tests_snapshot_imports_subpath-imports_input_index_31b659.js @@ -0,0 +1,1738 @@ +(globalThis.TURBOPACK = globalThis.TURBOPACK || []).push([ + "output/crates_turbopack-tests_tests_snapshot_imports_subpath-imports_input_index_31b659.js", + {}, + {"otherChunks":[{"path":"output/crates_turbopack-tests_tests_snapshot_imports_subpath-imports_input_index_b53fce.js","included":["[project]/crates/turbopack-tests/tests/snapshot/imports/subpath-imports/input/index.js (ecmascript)"]}],"runtimeModuleIds":["[project]/crates/turbopack-tests/tests/snapshot/imports/subpath-imports/input/index.js (ecmascript)"]} +]); +(() => { +if (!Array.isArray(globalThis.TURBOPACK)) { + return; +} +/* eslint-disable @next/next/no-assign-module-variable */ + +/** @typedef {import('../types').ChunkRegistration} ChunkRegistration */ +/** @typedef {import('../types').ModuleFactory} ModuleFactory */ + +/** @typedef {import('../types').ChunkPath} ChunkPath */ +/** @typedef {import('../types').ModuleId} ModuleId */ +/** @typedef {import('../types').GetFirstModuleChunk} GetFirstModuleChunk */ +/** @typedef {import('../types').ChunkList} ChunkList */ + +/** @typedef {import('../types').Module} Module */ +/** @typedef {import('../types').ChunkData} ChunkData */ +/** @typedef {import('../types').SourceInfo} SourceInfo */ +/** @typedef {import('../types').SourceType} SourceType */ +/** @typedef {import('../types').SourceType.Runtime} SourceTypeRuntime */ +/** @typedef {import('../types').SourceType.Parent} SourceTypeParent */ +/** @typedef {import('../types').SourceType.Update} SourceTypeUpdate */ +/** @typedef {import('../types').Exports} Exports */ +/** @typedef {import('../types').EsmNamespaceObject} EsmNamespaceObject */ +/** @typedef {import('../types').RequireContext} RequireContext */ +/** @typedef {import('../types').RequireContextMap} RequireContextMap */ + +/** @typedef {import('../types').RefreshHelpers} RefreshHelpers */ +/** @typedef {import('../types').RefreshContext} RefreshContext */ +/** @typedef {import('../types/hot').Hot} Hot */ +/** @typedef {import('../types/hot').HotData} HotData */ +/** @typedef {import('../types/hot').AcceptCallback} AcceptCallback */ +/** @typedef {import('../types/hot').AcceptErrorHandler} AcceptErrorHandler */ +/** @typedef {import('../types/hot').HotState} HotState */ +/** @typedef {import('../types/protocol').PartialUpdate} PartialUpdate */ +/** @typedef {import('../types/protocol').ChunkListUpdate} ChunkListUpdate */ +/** @typedef {import('../types/protocol').EcmascriptMergedUpdate} EcmascriptMergedUpdate */ +/** @typedef {import('../types/protocol').EcmascriptMergedChunkUpdate} EcmascriptMergedChunkUpdate */ +/** @typedef {import('../types/protocol').EcmascriptModuleEntry} EcmascriptModuleEntry */ + +/** @typedef {import('../types/runtime').ModuleEffect} ModuleEffect */ + +/** @type {Object.} */ +const moduleFactories = { __proto__: null }; +/** @type {Object.} */ +const moduleCache = { __proto__: null }; +/** + * Maps module IDs to persisted data between executions of their hot module + * implementation 
(`hot.data`). + * + * @type {Map} + */ +const moduleHotData = new Map(); +/** + * Maps module instances to their hot module state. + * + * @type {Map} + */ +const moduleHotState = new Map(); +/** + * Module IDs that are instantiated as part of the runtime of a chunk. + * + * @type {Set} + */ +const runtimeModules = new Set(); +/** + * Map from module ID to the chunks that contain this module. + * + * In HMR, we need to keep track of which modules are contained in which so + * chunks. This is so we don't eagerly dispose of a module when it is removed + * from chunk A, but still exists in chunk B. + * + * @type {Map>} + */ +const moduleChunksMap = new Map(); +/** + * Map from chunk path to all modules it contains. + * @type {Map>} + */ +const chunkModulesMap = new Map(); +/** + * Chunk lists that contain a runtime. When these chunk lists receive an update + * that can't be reconciled with the current state of the page, we need to + * reload the runtime entirely. + * @type {Set} + */ +const runtimeChunkLists = new Set(); +/** + * Map from chunk list to the chunk paths it contains. + * @type {Map>} + */ +const chunkListChunksMap = new Map(); +/** + * Map from chunk path to the chunk lists it belongs to. + * @type {Map>} + */ +const chunkChunkListsMap = new Map(); + +const hOP = Object.prototype.hasOwnProperty; + +const toStringTag = typeof Symbol !== "undefined" && Symbol.toStringTag; + +/** + * @param {any} obj + * @param {PropertyKey} name + * @param {PropertyDescriptor & ThisType} options + */ +function defineProp(obj, name, options) { + if (!hOP.call(obj, name)) Object.defineProperty(obj, name, options); +} + +/** + * Adds the getters to the exports object + * + * @param {Exports} exports + * @param {Record any>} getters + */ +function esm(exports, getters) { + defineProp(exports, "__esModule", { value: true }); + if (toStringTag) defineProp(exports, toStringTag, { value: "Module" }); + for (const key in getters) { + defineProp(exports, key, { get: getters[key], enumerable: true }); + } +} + +/** + * Makes the module an ESM with exports + * + * @param {Module} module + * @param {Record any>} getters + */ +function makeEsm(module, getters) { + esm((module.namespaceObject = module.exports), getters); +} + +/** + * Adds the getters to the exports object + * + * @param {Exports} exports + * @param {Record} props + */ +function cjs(exports, props) { + for (const key in props) { + defineProp(exports, key, { get: () => props[key], enumerable: true }); + } +} + +/** + * @param {Module} module + * @param {any} value + */ +function exportValue(module, value) { + module.exports = value; +} + +/** + * @param {Module} module + * @param {any} namespace + */ +function exportNamespace(module, namespace) { + module.exports = module.namespaceObject = namespace; +} + +/** + * @param {Record} obj + * @param {string} key + */ +function createGetter(obj, key) { + return () => obj[key]; +} + +/** + * @param {any} obj + * @returns {any} prototype of the object + */ +const getProto = Object.getPrototypeOf + ? (obj) => Object.getPrototypeOf(obj) + : (obj) => obj.__proto__; + +/** Prototypes that are not expanded for exports */ +const LEAF_PROTOTYPES = [null, getProto({}), getProto([]), getProto(getProto)]; + +/** + * @param {Exports} raw + * @param {EsmNamespaceObject} ns + * @param {boolean} [allowExportDefault] false: will have the raw module as default export, true: will have the default property as default export + */ +function interopEsm(raw, ns, allowExportDefault) { + /** @type {Object. 
any>} */ + const getters = { __proto__: null }; + for ( + let current = raw; + (typeof current === "object" || typeof current === "function") && + !LEAF_PROTOTYPES.includes(current); + current = getProto(current) + ) { + for (const key of Object.getOwnPropertyNames(current)) { + getters[key] = createGetter(raw, key); + } + } + if (!(allowExportDefault && "default" in getters)) { + getters["default"] = () => raw; + } + esm(ns, getters); +} + +/** + * @param {Module} sourceModule + * @param {ModuleId} id + * @returns {EsmNamespaceObject} + */ +function esmImport(sourceModule, id) { + const module = getOrInstantiateModuleFromParent(id, sourceModule); + if (module.error) throw module.error; + if (module.namespaceObject) return module.namespaceObject; + const raw = module.exports; + const ns = (module.namespaceObject = {}); + interopEsm(raw, ns, raw.__esModule); + return ns; +} + +/** + * @param {Module} sourceModule + * @param {ModuleId} id + * @returns {Exports} + */ +function commonJsRequire(sourceModule, id) { + const module = getOrInstantiateModuleFromParent(id, sourceModule); + if (module.error) throw module.error; + return module.exports; +} + +/** + * @param {Module} sourceModule + * @param {RequireContextMap} map + * @returns {RequireContext} + */ +function requireContext(sourceModule, map) { + /** + * @param {ModuleId} id + * @returns {Exports} + */ + function requireContext(id) { + const entry = map[id]; + + if (!entry) { + throw new Error( + `module ${id} is required from a require.context, but is not in the context` + ); + } + + return entry.internal + ? commonJsRequire(sourceModule, entry.id()) + : externalRequire(entry.id(), false); + } + + /** + * @returns {ModuleId[]} + */ + requireContext.keys = () => { + return Object.keys(map); + }; + + /** + * @param {ModuleId} id + * @returns {ModuleId} + */ + requireContext.resolve = (id) => { + const entry = map[id]; + + if (!entry) { + throw new Error( + `module ${id} is resolved from a require.context, but is not in the context` + ); + } + + return entry.id(); + }; + + return requireContext; +} + +/** + * @param {ModuleId} id + * @param {boolean} esm + * @returns {Exports | EsmNamespaceObject} + */ +function externalRequire(id, esm) { + let raw; + try { + raw = require(id); + } catch (err) { + // TODO(alexkirsz) This can happen when a client-side module tries to load + // an external module we don't provide a shim for (e.g. querystring, url). + // For now, we fail semi-silently, but in the future this should be a + // compilation error. 
+ throw new Error(`Failed to load external module ${id}: ${err}`); + } + if (!esm) { + return raw; + } + const ns = {}; + interopEsm(raw, ns, raw.__esModule); + return ns; +} +externalRequire.resolve = (name, opt) => { + return require.resolve(name, opt); +}; + +/** @type {Map | true>} */ +const availableModules = new Map(); + +/** @type {Map | true>} */ +const availableModuleChunks = new Map(); + +/** + * @param {SourceInfo} source + * @param {ChunkData} chunkData + * @returns {Promise} + */ +async function loadChunk(source, chunkData) { + if (typeof chunkData === "string") { + return loadChunkPath(source, chunkData); + } + + const includedList = chunkData.included || []; + const modulesPromises = includedList.map((included) => { + if (moduleFactories[included]) return true; + return availableModules.get(included); + }); + if (modulesPromises.length > 0 && modulesPromises.every((p) => p)) { + // When all included items are already loaded or loading, we can skip loading ourselves + return Promise.all(modulesPromises); + } + + const includedModuleChunksList = chunkData.moduleChunks || []; + const moduleChunksPromises = includedModuleChunksList + .map((included) => { + // TODO(alexkirsz) Do we need this check? + // if (moduleFactories[included]) return true; + return availableModuleChunks.get(included); + }) + .filter((p) => p); + + let promise; + if (moduleChunksPromises.length > 0) { + // Some module chunks are already loaded or loading. + + if (moduleChunksPromises.length == includedModuleChunksList.length) { + // When all included module chunks are already loaded or loading, we can skip loading ourselves + return Promise.all(moduleChunksPromises); + } + + const moduleChunksToLoad = new Set(); + for (const moduleChunk of includedModuleChunksList) { + if (!availableModuleChunks.has(moduleChunk)) { + moduleChunksToLoad.add(moduleChunk); + } + } + + for (const moduleChunkToLoad of moduleChunksToLoad) { + const promise = loadChunkPath(source, moduleChunkToLoad); + + availableModuleChunks.set(moduleChunkToLoad, promise); + + moduleChunksPromises.push(promise); + } + + promise = Promise.all(moduleChunksPromises); + } else { + promise = loadChunkPath(source, chunkData.path); + + // Mark all included module chunks as loading if they are not already loaded or loading. + for (const includedModuleChunk of includedModuleChunksList) { + if (!availableModuleChunks.has(includedModuleChunk)) { + availableModuleChunks.set(includedModuleChunk, promise); + } + } + } + + for (const included of includedList) { + if (!availableModules.has(included)) { + // It might be better to race old and new promises, but it's rare that the new promise will be faster than a request started earlier. + // In production it's even more rare, because the chunk optimization tries to deduplicate modules anyway. + availableModules.set(included, promise); + } + } + + return promise; +} + +/** + * @param {SourceInfo} source + * @param {ChunkPath} chunkPath + * @returns {Promise} + */ +async function loadChunkPath(source, chunkPath) { + try { + await BACKEND.loadChunk(chunkPath, source); + } catch (error) { + let loadReason; + switch (source.type) { + case SourceTypeRuntime: + loadReason = `as a runtime dependency of chunk ${source.chunkPath}`; + break; + case SourceTypeParent: + loadReason = `from module ${source.parentId}`; + break; + case SourceTypeUpdate: + loadReason = "from an HMR update"; + break; + } + throw new Error( + `Failed to load chunk ${chunkPath} ${loadReason}${ + error ? 
`: ${error}` : "" + }` + ); + } +} + +/** @type {SourceTypeRuntime} */ +const SourceTypeRuntime = 0; +/** @type {SourceTypeParent} */ +const SourceTypeParent = 1; +/** @type {SourceTypeUpdate} */ +const SourceTypeUpdate = 2; + +/** + * + * @param {ModuleId} id + * @param {SourceInfo} source + * @returns {Module} + */ +function instantiateModule(id, source) { + /** @type {ModuleFactory} */ + const moduleFactory = moduleFactories[id]; + if (typeof moduleFactory !== "function") { + // This can happen if modules incorrectly handle HMR disposes/updates, + // e.g. when they keep a `setTimeout` around which still executes old code + // and contains e.g. a `require("something")` call. + let instantiationReason; + switch (source.type) { + case SourceTypeRuntime: + instantiationReason = `as a runtime entry of chunk ${source.chunkPath}`; + break; + case SourceTypeParent: + instantiationReason = `because it was required from module ${source.parentId}`; + break; + case SourceTypeUpdate: + instantiationReason = "because of an HMR update"; + break; + } + throw new Error( + `Module ${id} was instantiated ${instantiationReason}, but the module factory is not available. It might have been deleted in an HMR update.` + ); + } + + const hotData = moduleHotData.get(id); + const { hot, hotState } = createModuleHot(hotData); + + /** @type {Module} */ + const module = { + exports: {}, + error: undefined, + loaded: false, + id, + parents: undefined, + children: [], + namespaceObject: undefined, + hot, + }; + moduleCache[id] = module; + moduleHotState.set(module, hotState); + + switch (source.type) { + case SourceTypeRuntime: + runtimeModules.add(id); + module.parents = []; + break; + case SourceTypeParent: + // No need to add this module as a child of the parent module here, this + // has already been taken care of in `getOrInstantiateModuleFromParent`. + module.parents = [source.parentId]; + break; + case SourceTypeUpdate: + module.parents = source.parents || []; + break; + } + + // NOTE(alexkirsz) This can fail when the module encounters a runtime error. + try { + runModuleExecutionHooks(module, (refresh) => { + moduleFactory.call(module.exports, { + e: module.exports, + r: commonJsRequire.bind(null, module), + x: externalRequire, + f: requireContext.bind(null, module), + i: esmImport.bind(null, module), + s: makeEsm.bind(null, module), + j: cjs.bind(null, module.exports), + v: exportValue.bind(null, module), + n: exportNamespace.bind(null, module), + m: module, + c: moduleCache, + l: loadChunk.bind(null, { type: SourceTypeParent, parentId: id }), + g: globalThis, + k: refresh, + __dirname: module.id.replace(/(^|\/)[\/]+$/, ""), + }); + }); + } catch (error) { + module.error = error; + throw error; + } + + module.loaded = true; + if (module.namespaceObject && module.exports !== module.namespaceObject) { + // in case of a circular dependency: cjs1 -> esm2 -> cjs1 + interopEsm(module.exports, module.namespaceObject); + } + + return module; +} + +/** + * NOTE(alexkirsz) Webpack has an "module execution" interception hook that + * Next.js' React Refresh runtime hooks into to add module context to the + * refresh registry. + * + * @param {Module} module + * @param {(ctx: RefreshContext) => void} executeModule + */ +function runModuleExecutionHooks(module, executeModule) { + const cleanupReactRefreshIntercept = + typeof globalThis.$RefreshInterceptModuleExecution$ === "function" + ? 
+ ? globalThis.$RefreshInterceptModuleExecution$(module.id)
+ : () => {};
+
+ try {
+ executeModule({
+ register: globalThis.$RefreshReg$,
+ signature: globalThis.$RefreshSig$,
+ });
+
+ if ("$RefreshHelpers$" in globalThis) {
+ // This pattern can also be used to register the exports of
+ // a module with the React Refresh runtime.
+ registerExportsAndSetupBoundaryForReactRefresh(
+ module,
+ globalThis.$RefreshHelpers$
+ );
+ }
+ } catch (e) {
+ throw e;
+ } finally {
+ // Always cleanup the intercept, even if module execution failed.
+ cleanupReactRefreshIntercept();
+ }
+}
+
+/**
+ * Retrieves a module from the cache, or instantiates it if it is not cached.
+ *
+ * @param {ModuleId} id
+ * @param {Module} sourceModule
+ * @returns {Module}
+ */
+function getOrInstantiateModuleFromParent(id, sourceModule) {
+ if (!sourceModule.hot.active) {
+ console.warn(
+ `Unexpected import of module ${id} from module ${sourceModule.id}, which was deleted by an HMR update`
+ );
+ }
+
+ const module = moduleCache[id];
+
+ if (sourceModule.children.indexOf(id) === -1) {
+ sourceModule.children.push(id);
+ }
+
+ if (module) {
+ if (module.parents.indexOf(sourceModule.id) === -1) {
+ module.parents.push(sourceModule.id);
+ }
+
+ return module;
+ }
+
+ return instantiateModule(id, {
+ type: SourceTypeParent,
+ parentId: sourceModule.id,
+ });
+}
+
+/**
+ * This is adapted from https://github.com/vercel/next.js/blob/3466862d9dc9c8bb3131712134d38757b918d1c0/packages/react-refresh-utils/internal/ReactRefreshModule.runtime.ts
+ *
+ * @param {Module} module
+ * @param {RefreshHelpers} helpers
+ */
+function registerExportsAndSetupBoundaryForReactRefresh(module, helpers) {
+ const currentExports = module.exports;
+ const prevExports = module.hot.data.prevExports ?? null;
+
+ helpers.registerExportsForReactRefresh(currentExports, module.id);
+
+ // A module can be accepted automatically based on its exports, e.g. when
+ // it is a Refresh Boundary.
+ if (helpers.isReactRefreshBoundary(currentExports)) {
+ // Save the previous exports on update so we can compare the boundary
+ // signatures.
+ module.hot.dispose((data) => {
+ data.prevExports = currentExports;
+ });
+ // Unconditionally accept an update to this module, we'll check if it's
+ // still a Refresh Boundary later.
+ module.hot.accept();
+
+ // This field is set when the previous version of this module was a
+ // Refresh Boundary, letting us know we need to check for invalidation or
+ // enqueue an update.
+ if (prevExports !== null) {
+ // A boundary can become ineligible if its exports are incompatible
+ // with the previous exports.
+ //
+ // For example, if you add/remove/change exports, we'll want to
+ // re-execute the importing modules, and force those components to
+ // re-render. Similarly, if you convert a class component to a
+ // function, we want to invalidate the boundary.
+ if (
+ helpers.shouldInvalidateReactRefreshBoundary(
+ prevExports,
+ currentExports
+ )
+ ) {
+ module.hot.invalidate();
+ } else {
+ helpers.scheduleUpdate();
+ }
+ }
+ } else {
+ // Since we just executed the code for the module, it's possible that the
+ // new exports made it ineligible for being a boundary.
+ // We only care about the case when we were _previously_ a boundary,
+ // because we already accepted this update (accidental side effect).
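+ //
+ // Illustrative sketch (module contents assumed, not taken from this
+ // codebase): a file that used to export only React components
+ //
+ //   export default function Button() { /* ... */ }
+ //
+ // was a boundary; if an update rewrites it to export a plain value
+ //
+ //   export const theme = { color: "red" };
+ //
+ // it stops being one, and the previously-accepted update must be
+ // invalidated below.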
+ const isNoLongerABoundary = prevExports !== null;
+ if (isNoLongerABoundary) {
+ module.hot.invalidate();
+ }
+ }
+}
+
+/**
+ * @param {ModuleId[]} dependencyChain
+ * @returns {string}
+ */
+function formatDependencyChain(dependencyChain) {
+ return `Dependency chain: ${dependencyChain.join(" -> ")}`;
+}
+
+/**
+ * @param {EcmascriptModuleEntry} entry
+ * @returns {ModuleFactory}
+ * @private
+ */
+function _eval({ code, url, map }) {
+ code += `\n\n//# sourceURL=${location.origin}${url}`;
+ if (map) code += `\n//# sourceMappingURL=${map}`;
+ return eval(code);
+}
+
+/**
+ * @param {Map<ModuleId, EcmascriptModuleEntry | undefined>} added
+ * @param {Map<ModuleId, EcmascriptModuleEntry>} modified
+ * @returns {{outdatedModules: Set<ModuleId>, newModuleFactories: Map<ModuleId, ModuleFactory>}}
+ */
+function computeOutdatedModules(added, modified) {
+ const outdatedModules = new Set();
+ const newModuleFactories = new Map();
+
+ for (const [moduleId, entry] of added) {
+ if (entry != null) {
+ newModuleFactories.set(moduleId, _eval(entry));
+ }
+ }
+
+ for (const [moduleId, entry] of modified) {
+ const effect = getAffectedModuleEffects(moduleId);
+
+ switch (effect.type) {
+ case "unaccepted":
+ throw new Error(
+ `cannot apply update: unaccepted module. ${formatDependencyChain(
+ effect.dependencyChain
+ )}.`
+ );
+ case "self-declined":
+ throw new Error(
+ `cannot apply update: self-declined module. ${formatDependencyChain(
+ effect.dependencyChain
+ )}.`
+ );
+ case "accepted":
+ newModuleFactories.set(moduleId, _eval(entry));
+ for (const outdatedModuleId of effect.outdatedModules) {
+ outdatedModules.add(outdatedModuleId);
+ }
+ break;
+ // TODO(alexkirsz) Dependencies: handle dependencies effects.
+ }
+ }
+
+ return { outdatedModules, newModuleFactories };
+}
+
+/**
+ * @param {Iterable<ModuleId>} outdatedModules
+ * @returns {{ moduleId: ModuleId, errorHandler: true | Function }[]}
+ */
+function computeOutdatedSelfAcceptedModules(outdatedModules) {
+ const outdatedSelfAcceptedModules = [];
+ for (const moduleId of outdatedModules) {
+ const module = moduleCache[moduleId];
+ const hotState = moduleHotState.get(module);
+ if (module && hotState.selfAccepted && !hotState.selfInvalidated) {
+ outdatedSelfAcceptedModules.push({
+ moduleId,
+ errorHandler: hotState.selfAccepted,
+ });
+ }
+ }
+ return outdatedSelfAcceptedModules;
+}
+
+/**
+ * Adds, deletes, and moves modules between chunks. This must happen before the
+ * dispose phase as it needs to know which modules were removed from all chunks,
+ * which we can only compute *after* taking care of added and moved modules.
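+ *
+ * A sketch of the argument shapes, assumed from the parameter types below
+ * (ids and chunk paths are hypothetical):
+ *
+ *   updateChunksPhase(
+ *     new Map([["chunks/a.js", new Set(["module-1"])]]),
+ *     new Map([["chunks/b.js", new Set(["module-2"])]])
+ *   );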
+ *
+ * @param {Map<ChunkPath, Set<ModuleId>>} chunksAddedModules
+ * @param {Map<ChunkPath, Set<ModuleId>>} chunksDeletedModules
+ * @returns {{ disposedModules: Set<ModuleId> }}
+ */
+function updateChunksPhase(chunksAddedModules, chunksDeletedModules) {
+ for (const [chunkPath, addedModuleIds] of chunksAddedModules) {
+ for (const moduleId of addedModuleIds) {
+ addModuleToChunk(moduleId, chunkPath);
+ }
+ }
+
+ const disposedModules = new Set();
+ for (const [chunkPath, deletedModuleIds] of chunksDeletedModules) {
+ for (const moduleId of deletedModuleIds) {
+ if (removeModuleFromChunk(moduleId, chunkPath)) {
+ disposedModules.add(moduleId);
+ }
+ }
+ }
+
+ return { disposedModules };
+}
+
+/**
+ * @param {Iterable<ModuleId>} outdatedModules
+ * @param {Set<ModuleId>} disposedModules
+ * @return {{ outdatedModuleParents: Map<ModuleId, ModuleId[]> }}
+ */
+function disposePhase(outdatedModules, disposedModules) {
+ for (const moduleId of outdatedModules) {
+ disposeModule(moduleId, "replace");
+ }
+
+ for (const moduleId of disposedModules) {
+ disposeModule(moduleId, "clear");
+ }
+
+ // Removing modules from the module cache is a separate step.
+ // We also want to keep track of previous parents of the outdated modules.
+ const outdatedModuleParents = new Map();
+ for (const moduleId of outdatedModules) {
+ const oldModule = moduleCache[moduleId];
+ outdatedModuleParents.set(moduleId, oldModule?.parents);
+ delete moduleCache[moduleId];
+ }
+
+ // TODO(alexkirsz) Dependencies: remove outdated dependency from module
+ // children.
+
+ return { outdatedModuleParents };
+}
+
+/**
+ * Disposes of an instance of a module.
+ *
+ * Returns the persistent hot data that should be kept for the next module
+ * instance.
+ *
+ * NOTE: mode = "replace" will not remove modules from the moduleCache.
+ * This must be done in a separate step afterwards.
+ * This is important because all modules need to be disposed to update the
+ * parent/child relationships before they are actually removed from the moduleCache.
+ * If this were done in this method, subsequent disposeModule calls would not
+ * find the module by its id in the cache.
+ *
+ * @param {ModuleId} moduleId
+ * @param {"clear" | "replace"} mode
+ */
+function disposeModule(moduleId, mode) {
+ const module = moduleCache[moduleId];
+ if (!module) {
+ return;
+ }
+
+ const hotState = moduleHotState.get(module);
+ const data = {};
+
+ // Run the `hot.dispose` handler, if any, passing in the persistent
+ // `hot.data` object.
+ for (const disposeHandler of hotState.disposeHandlers) {
+ disposeHandler(data);
+ }
+
+ // This used to warn in `getOrInstantiateModuleFromParent` when a disposed
+ // module is still importing other modules.
+ module.hot.active = false;
+
+ moduleHotState.delete(module);
+
+ // TODO(alexkirsz) Dependencies: delete the module from outdated deps.
+
+ // Remove the disposed module from its children's parents list.
+ // It will be added back once the module re-instantiates and imports its
+ // children again.
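+ //
+ // Sketch with hypothetical ids: if module "a" imported module "b",
+ // moduleCache["b"].parents contains "a". Disposing "a" removes that
+ // entry here; it is re-added if "a" is re-instantiated and imports "b"
+ // again via `getOrInstantiateModuleFromParent`.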
+ for (const childId of module.children) {
+ const child = moduleCache[childId];
+ if (!child) {
+ continue;
+ }
+
+ const idx = child.parents.indexOf(module.id);
+ if (idx >= 0) {
+ child.parents.splice(idx, 1);
+ }
+ }
+
+ switch (mode) {
+ case "clear":
+ delete moduleCache[module.id];
+ moduleHotData.delete(module.id);
+ break;
+ case "replace":
+ moduleHotData.set(module.id, data);
+ break;
+ default:
+ invariant(mode, (mode) => `invalid mode: ${mode}`);
+ }
+}
+
+/**
+ *
+ * @param {{ moduleId: ModuleId, errorHandler: true | Function }[]} outdatedSelfAcceptedModules
+ * @param {Map<ModuleId, ModuleFactory>} newModuleFactories
+ * @param {Map<ModuleId, ModuleId[]>} outdatedModuleParents
+ */
+function applyPhase(
+ outdatedSelfAcceptedModules,
+ newModuleFactories,
+ outdatedModuleParents
+) {
+ // Update module factories.
+ for (const [moduleId, factory] of newModuleFactories.entries()) {
+ moduleFactories[moduleId] = factory;
+ }
+
+ // TODO(alexkirsz) Run new runtime entries here.
+
+ // TODO(alexkirsz) Dependencies: call accept handlers for outdated deps.
+
+ // Re-instantiate all outdated self-accepted modules.
+ for (const { moduleId, errorHandler } of outdatedSelfAcceptedModules) {
+ try {
+ instantiateModule(moduleId, {
+ type: SourceTypeUpdate,
+ parents: outdatedModuleParents.get(moduleId),
+ });
+ } catch (err) {
+ if (typeof errorHandler === "function") {
+ try {
+ errorHandler(err, { moduleId, module: moduleCache[moduleId] });
+ } catch (_) {
+ // Ignore error.
+ }
+ }
+ }
+ }
+}
+
+/**
+ * Utility function to ensure all variants of an enum are handled.
+ * @param {never} never
+ * @param {(arg: any) => string} computeMessage
+ * @returns {never}
+ */
+function invariant(never, computeMessage) {
+ throw new Error(`Invariant: ${computeMessage(never)}`);
+}
+
+/**
+ *
+ * @param {ChunkPath} chunkListPath
+ * @param {PartialUpdate} update
+ */
+function applyUpdate(chunkListPath, update) {
+ switch (update.type) {
+ case "ChunkListUpdate":
+ applyChunkListUpdate(chunkListPath, update);
+ break;
+ default:
+ invariant(update, (update) => `Unknown update type: ${update.type}`);
+ }
+}
+
+/**
+ *
+ * @param {ChunkPath} chunkListPath
+ * @param {ChunkListUpdate} update
+ */
+function applyChunkListUpdate(chunkListPath, update) {
+ if (update.merged != null) {
+ for (const merged of update.merged) {
+ switch (merged.type) {
+ case "EcmascriptMergedUpdate":
+ applyEcmascriptMergedUpdate(chunkListPath, merged);
+ break;
+ default:
+ invariant(merged, (merged) => `Unknown merged type: ${merged.type}`);
+ }
+ }
+ }
+
+ if (update.chunks != null) {
+ for (const [chunkPath, chunkUpdate] of Object.entries(update.chunks)) {
+ switch (chunkUpdate.type) {
+ case "added":
+ BACKEND.loadChunk(chunkPath, { type: SourceTypeUpdate });
+ break;
+ case "total":
+ BACKEND.reloadChunk?.(chunkPath);
+ break;
+ case "deleted":
+ BACKEND.unloadChunk?.(chunkPath);
+ break;
+ case "partial":
+ invariant(
+ chunkUpdate.instruction,
+ (instruction) =>
+ `Unknown partial instruction: ${JSON.stringify(instruction)}.`
+ );
+ default:
+ invariant(
+ chunkUpdate,
+ (chunkUpdate) => `Unknown chunk update type: ${chunkUpdate.type}`
+ );
+ }
+ }
+ }
+}
+
+/**
+ * @param {ChunkPath} chunkPath
+ * @param {EcmascriptMergedUpdate} update
+ */
+function applyEcmascriptMergedUpdate(chunkPath, update) {
+ const { entries = {}, chunks = {} } = update;
+ const { added, modified, deleted, chunksAdded, chunksDeleted } =
+ computeChangedModules(entries, chunks);
+ const { outdatedModules, newModuleFactories } = computeOutdatedModules(
+ added,
+ modified
+ );
+ const outdatedSelfAcceptedModules =
+ computeOutdatedSelfAcceptedModules(outdatedModules);
+ const { disposedModules } = updateChunksPhase(chunksAdded, chunksDeleted);
+ const { outdatedModuleParents } = disposePhase(
+ outdatedModules,
+ disposedModules
+ );
+ applyPhase(
+ outdatedSelfAcceptedModules,
+ newModuleFactories,
+ outdatedModuleParents
+ );
+}
+
+/**
+ * @param {Record<ModuleId, EcmascriptModuleEntry>} entries
+ * @param {Record<ChunkPath, EcmascriptMergedChunkUpdate>} updates
+ * @returns {{
+ *  added: Map<ModuleId, EcmascriptModuleEntry | undefined>,
+ *  modified: Map<ModuleId, EcmascriptModuleEntry>,
+ *  deleted: Set<ModuleId>,
+ *  chunksAdded: Map<ChunkPath, Set<ModuleId>>,
+ *  chunksDeleted: Map<ChunkPath, Set<ModuleId>>,
+ * }}
+ */
+function computeChangedModules(entries, updates) {
+ const chunksAdded = new Map();
+ const chunksDeleted = new Map();
+ const added = new Map();
+ const modified = new Map();
+ const deleted = new Set();
+
+ for (const [chunkPath, mergedChunkUpdate] of Object.entries(updates)) {
+ switch (mergedChunkUpdate.type) {
+ case "added": {
+ const updateAdded = new Set(mergedChunkUpdate.modules);
+ for (const moduleId of updateAdded) {
+ added.set(moduleId, entries[moduleId]);
+ }
+ chunksAdded.set(chunkPath, updateAdded);
+ break;
+ }
+ case "deleted": {
+ // We could also use `mergedChunkUpdate.modules` here.
+ const updateDeleted = new Set(chunkModulesMap.get(chunkPath));
+ for (const moduleId of updateDeleted) {
+ deleted.add(moduleId);
+ }
+ chunksDeleted.set(chunkPath, updateDeleted);
+ break;
+ }
+ case "partial": {
+ const updateAdded = new Set(mergedChunkUpdate.added);
+ const updateDeleted = new Set(mergedChunkUpdate.deleted);
+ for (const moduleId of updateAdded) {
+ added.set(moduleId, entries[moduleId]);
+ }
+ for (const moduleId of updateDeleted) {
+ deleted.add(moduleId);
+ }
+ chunksAdded.set(chunkPath, updateAdded);
+ chunksDeleted.set(chunkPath, updateDeleted);
+ break;
+ }
+ default:
+ invariant(
+ mergedChunkUpdate,
+ (mergedChunkUpdate) =>
+ `Unknown merged chunk update type: ${mergedChunkUpdate.type}`
+ );
+ }
+ }
+
+ // If a module was added from one chunk and deleted from another in the same update,
+ // consider it to be modified, as it means the module was moved from one chunk to another
+ // AND has new code in a single update.
+ for (const moduleId of added.keys()) {
+ if (deleted.has(moduleId)) {
+ added.delete(moduleId);
+ deleted.delete(moduleId);
+ }
+ }
+
+ for (const [moduleId, entry] of Object.entries(entries)) {
+ // Modules that haven't been added to any chunk but have new code are considered
+ // to be modified.
+ // This needs to run after the previous loop, as that loop weeds out modules
+ // that were added and deleted in the same update.
+ if (!added.has(moduleId)) {
+ modified.set(moduleId, entry);
+ }
+ }
+
+ return { added, deleted, modified, chunksAdded, chunksDeleted };
+}
+
+/**
+ *
+ * @param {ModuleId} moduleId
+ * @returns {ModuleEffect}
+ */
+function getAffectedModuleEffects(moduleId) {
+ const outdatedModules = new Set();
+
+ /** @typedef {{moduleId?: ModuleId, dependencyChain: ModuleId[]}} QueueItem */
+
+ /** @type {QueueItem[]} */
+ const queue = [
+ {
+ moduleId,
+ dependencyChain: [],
+ },
+ ];
+
+ while (queue.length > 0) {
+ const { moduleId, dependencyChain } =
+ /** @type {QueueItem} */ queue.shift();
+ outdatedModules.add(moduleId);
+
+ // We've arrived at the runtime of the chunk, which means that nothing
+ // else above can accept this update.
+ if (moduleId === undefined) {
+ return {
+ type: "unaccepted",
+ dependencyChain,
+ };
+ }
+
+ const module = moduleCache[moduleId];
+ const hotState = moduleHotState.get(module);
+
+ if (
+ // The module is not in the cache. Since this is a "modified" update,
+ // it means that the module was never instantiated before.
+ !module || // The module accepted itself without self-invalidating.
+ // TODO is that right?
+ (hotState.selfAccepted && !hotState.selfInvalidated)
+ ) {
+ continue;
+ }
+
+ if (hotState.selfDeclined) {
+ return {
+ type: "self-declined",
+ dependencyChain,
+ moduleId,
+ };
+ }
+
+ if (runtimeModules.has(moduleId)) {
+ queue.push({
+ moduleId: undefined,
+ dependencyChain: [...dependencyChain, moduleId],
+ });
+ continue;
+ }
+
+ for (const parentId of module.parents) {
+ const parent = moduleCache[parentId];
+
+ if (!parent) {
+ // TODO(alexkirsz) Is this even possible?
+ continue;
+ }
+
+ // TODO(alexkirsz) Dependencies: check accepted and declined
+ // dependencies here.
+
+ queue.push({
+ moduleId: parentId,
+ dependencyChain: [...dependencyChain, moduleId],
+ });
+ }
+ }
+
+ return {
+ type: "accepted",
+ moduleId,
+ outdatedModules,
+ };
+}
+
+/**
+ * @param {ChunkPath} chunkListPath
+ * @param {import('../types/protocol').ServerMessage} update
+ */
+function handleApply(chunkListPath, update) {
+ switch (update.type) {
+ case "partial": {
+ // This indicates that the update can be applied to the current state of the application.
+ applyUpdate(chunkListPath, update.instruction);
+ break;
+ }
+ case "restart": {
+ // This indicates that there is no way to apply the update to the
+ // current state of the application, and that the application must be
+ // restarted.
+ BACKEND.restart();
+ break;
+ }
+ case "notFound": {
+ // This indicates that the chunk list no longer exists: either the dynamic import which created it was removed,
+ // or the page itself was deleted.
+ // If it is a dynamic import, we simply discard all modules that the chunk has exclusive access to.
+ // If it is a runtime chunk list, we restart the application.
+ if (runtimeChunkLists.has(chunkListPath)) {
+ BACKEND.restart();
+ } else {
+ disposeChunkList(chunkListPath);
+ }
+ break;
+ }
+ default:
+ throw new Error(`Unknown update type: ${update.type}`);
+ }
+}
+
+/**
+ * @param {HotData} [hotData]
+ * @returns {{hotState: HotState, hot: Hot}}
+ */
+function createModuleHot(hotData) {
+ /** @type {HotState} */
+ const hotState = {
+ selfAccepted: false,
+ selfDeclined: false,
+ selfInvalidated: false,
+ disposeHandlers: [],
+ };
+
+ /**
+ * TODO(alexkirsz) Support full (dep, callback, errorHandler) form.
+ *
+ * @param {string | string[] | AcceptErrorHandler} [dep]
+ * @param {AcceptCallback} [_callback]
+ * @param {AcceptErrorHandler} [_errorHandler]
+ */
+ function accept(dep, _callback, _errorHandler) {
+ if (dep === undefined) {
+ hotState.selfAccepted = true;
+ } else if (typeof dep === "function") {
+ hotState.selfAccepted = dep;
+ } else {
+ throw new Error("unsupported `accept` signature");
+ }
+ }
+
+ /** @type {Hot} */
+ const hot = {
+ // TODO(alexkirsz) This is not defined in the HMR API. It was used to
+ // decide whether to warn whenever an HMR-disposed module required other
+ // modules. We might want to remove it.
+ active: true,
+
+ data: hotData ?? {},
+
+ accept: accept,
+
+ decline: (dep) => {
+ if (dep === undefined) {
+ hotState.selfDeclined = true;
+ } else {
+ throw new Error("unsupported `decline` signature");
+ }
+ },
+
+ dispose: (callback) => {
+ hotState.disposeHandlers.push(callback);
+ },
+
+ addDisposeHandler: (callback) => {
+ hotState.disposeHandlers.push(callback);
+ },
+
+ removeDisposeHandler: (callback) => {
+ const idx = hotState.disposeHandlers.indexOf(callback);
+ if (idx >= 0) {
+ hotState.disposeHandlers.splice(idx, 1);
+ }
+ },
+
+ invalidate: () => {
+ hotState.selfInvalidated = true;
+ // TODO(alexkirsz) The original HMR code had management-related code
+ // here.
+ },
+
+ // NOTE(alexkirsz) This is part of the management API, which we don't
+ // implement, but the Next.js React Refresh runtime uses this to decide
+ // whether to schedule an update.
+ status: () => "idle",
+
+ // NOTE(alexkirsz) Since we always return "idle" for now, these are no-ops.
+ addStatusHandler: (_handler) => {},
+ removeStatusHandler: (_handler) => {},
+ };
+
+ return { hot, hotState };
+}
+
+/**
+ * Adds a module to a chunk.
+ *
+ * @param {ModuleId} moduleId
+ * @param {ChunkPath} chunkPath
+ */
+function addModuleToChunk(moduleId, chunkPath) {
+ let moduleChunks = moduleChunksMap.get(moduleId);
+ if (!moduleChunks) {
+ moduleChunks = new Set([chunkPath]);
+ moduleChunksMap.set(moduleId, moduleChunks);
+ } else {
+ moduleChunks.add(chunkPath);
+ }
+
+ let chunkModules = chunkModulesMap.get(chunkPath);
+ if (!chunkModules) {
+ chunkModules = new Set([moduleId]);
+ chunkModulesMap.set(chunkPath, chunkModules);
+ } else {
+ chunkModules.add(moduleId);
+ }
+}
+
+/**
+ * Returns the first chunk that included a module.
+ * This is used by the Node.js backend, which is why it's marked as unused in
+ * this file.
+ *
+ * @type {GetFirstModuleChunk}
+ */
+function getFirstModuleChunk(moduleId) {
+ const moduleChunkPaths = moduleChunksMap.get(moduleId);
+ if (moduleChunkPaths == null) {
+ return null;
+ }
+
+ return moduleChunkPaths.values().next().value;
+}
+
+/**
+ * Removes a module from a chunk. Returns true if there are no remaining chunks
+ * including this module.
+ *
+ * @param {ModuleId} moduleId
+ * @param {ChunkPath} chunkPath
+ * @returns {boolean}
+ */
+function removeModuleFromChunk(moduleId, chunkPath) {
+ const moduleChunks = moduleChunksMap.get(moduleId);
+ moduleChunks.delete(chunkPath);
+
+ const chunkModules = chunkModulesMap.get(chunkPath);
+ chunkModules.delete(moduleId);
+
+ const noRemainingModules = chunkModules.size === 0;
+ if (noRemainingModules) {
+ chunkModulesMap.delete(chunkPath);
+ }
+
+ const noRemainingChunks = moduleChunks.size === 0;
+ if (noRemainingChunks) {
+ moduleChunksMap.delete(moduleId);
+ }
+
+ return noRemainingChunks;
+}
+
+/**
+ * Disposes of a chunk list and its corresponding exclusive chunks.
+ *
+ * @param {ChunkPath} chunkListPath
+ * @returns {boolean} Whether the chunk list was disposed of.
+ */
+function disposeChunkList(chunkListPath) {
+ const chunkPaths = chunkListChunksMap.get(chunkListPath);
+ if (chunkPaths == null) {
+ return false;
+ }
+ chunkListChunksMap.delete(chunkListPath);
+
+ for (const chunkPath of chunkPaths) {
+ const chunkChunkLists = chunkChunkListsMap.get(chunkPath);
+ chunkChunkLists.delete(chunkListPath);
+
+ if (chunkChunkLists.size === 0) {
+ chunkChunkListsMap.delete(chunkPath);
+ disposeChunk(chunkPath);
+ }
+ }
+
+ // We must also dispose of the chunk list's chunk itself to ensure it may
+ // be reloaded properly in the future.
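+ //
+ // (Hypothetical example: a chunk list served at "output/page.js" is
+ // unloaded here so that loading it again later re-fetches it instead of
+ // reusing a stale, already-settled resolver.)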
+ BACKEND.unloadChunk(chunkListPath);
+
+ return true;
+}
+
+/**
+ * Disposes of a chunk and its corresponding exclusive modules.
+ *
+ * @param {ChunkPath} chunkPath
+ * @returns {boolean} Whether the chunk was disposed of.
+ */
+function disposeChunk(chunkPath) {
+ // This should happen whether or not the chunk has any modules in it. For instance,
+ // CSS chunks have no modules in them, but they still need to be unloaded.
+ BACKEND.unloadChunk(chunkPath);
+
+ const chunkModules = chunkModulesMap.get(chunkPath);
+ if (chunkModules == null) {
+ return false;
+ }
+ chunkModulesMap.delete(chunkPath);
+
+ for (const moduleId of chunkModules) {
+ const moduleChunks = moduleChunksMap.get(moduleId);
+ moduleChunks.delete(chunkPath);
+
+ const noRemainingChunks = moduleChunks.size === 0;
+ if (noRemainingChunks) {
+ moduleChunksMap.delete(moduleId);
+ disposeModule(moduleId, "clear");
+ availableModules.delete(moduleId);
+ }
+ }
+
+ return true;
+}
+
+/**
+ * Instantiates a runtime module.
+ *
+ * @param {ModuleId} moduleId
+ * @param {ChunkPath} chunkPath
+ * @returns {Module}
+ */
+function instantiateRuntimeModule(moduleId, chunkPath) {
+ return instantiateModule(moduleId, { type: SourceTypeRuntime, chunkPath });
+}
+
+/**
+ * Gets or instantiates a runtime module.
+ *
+ * @param {ModuleId} moduleId
+ * @param {ChunkPath} chunkPath
+ * @returns {Module}
+ */
+function getOrInstantiateRuntimeModule(moduleId, chunkPath) {
+ const module = moduleCache[moduleId];
+ if (module) {
+ if (module.error) {
+ throw module.error;
+ }
+ return module;
+ }
+
+ return instantiateModule(moduleId, { type: SourceTypeRuntime, chunkPath });
+}
+
+/**
+ * Returns the path of a chunk defined by its data.
+ *
+ * @param {ChunkData} chunkData
+ * @returns {ChunkPath} the chunk path
+ */
+function getChunkPath(chunkData) {
+ return typeof chunkData === "string" ? chunkData : chunkData.path;
+}
+
+/**
+ * Subscribes to chunk list updates from the update server and applies them.
+ *
+ * @param {ChunkList} chunkList
+ */
+function registerChunkList(chunkList) {
+ globalThis.TURBOPACK_CHUNK_UPDATE_LISTENERS.push([
+ chunkList.path,
+ handleApply.bind(null, chunkList.path),
+ ]);
+
+ // Adding chunks to chunk lists and vice versa.
+ const chunks = new Set(chunkList.chunks.map(getChunkPath));
+ chunkListChunksMap.set(chunkList.path, chunks);
+ for (const chunkPath of chunks) {
+ let chunkChunkLists = chunkChunkListsMap.get(chunkPath);
+ if (!chunkChunkLists) {
+ chunkChunkLists = new Set([chunkList.path]);
+ chunkChunkListsMap.set(chunkPath, chunkChunkLists);
+ } else {
+ chunkChunkLists.add(chunkList.path);
+ }
+ }
+
+ if (chunkList.source === "entry") {
+ markChunkListAsRuntime(chunkList.path);
+ }
+}
+
+/**
+ * Marks a chunk list as a runtime chunk list. There can be more than one
+ * runtime chunk list. For instance, integration tests can have multiple chunk
+ * groups loaded at runtime, each with its own chunk list.
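+ *
+ * Illustrative call with hypothetical paths; after these, a "notFound"
+ * update for either chunk list restarts the application instead of just
+ * disposing it:
+ *
+ *   markChunkListAsRuntime("output/test-page-a.js");
+ *   markChunkListAsRuntime("output/test-page-b.js");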
+ * + * @param {ChunkPath} chunkListPath + */ +function markChunkListAsRuntime(chunkListPath) { + runtimeChunkLists.add(chunkListPath); +} + +/** + * @param {ChunkRegistration} chunkRegistration + */ +function registerChunk([chunkPath, chunkModules, runtimeParams]) { + for (const [moduleId, moduleFactory] of Object.entries(chunkModules)) { + if (!moduleFactories[moduleId]) { + moduleFactories[moduleId] = moduleFactory; + } + addModuleToChunk(moduleId, chunkPath); + } + + return BACKEND.registerChunk(chunkPath, runtimeParams); +} + +globalThis.TURBOPACK_CHUNK_UPDATE_LISTENERS = + globalThis.TURBOPACK_CHUNK_UPDATE_LISTENERS || []; + +const chunkListsToRegister = globalThis.TURBOPACK_CHUNK_LISTS || []; +for (const chunkList of chunkListsToRegister) { + registerChunkList(chunkList); +} +globalThis.TURBOPACK_CHUNK_LISTS = { + push: (chunkList) => { + registerChunkList(chunkList); + }, +}; + +globalThis.TURBOPACK_CHUNK_UPDATE_LISTENERS = + globalThis.TURBOPACK_CHUNK_UPDATE_LISTENERS || []; +/** @typedef {import('../types/backend').RuntimeBackend} RuntimeBackend */ +/** @typedef {import('../types/runtime.dom').ChunkResolver} ChunkResolver */ +/** @typedef {import('../types').ChunkPath} ChunkPath */ +/** @typedef {import('../types').SourceInfo} SourceInfo */ + +/** @type {RuntimeBackend} */ +let BACKEND; + +(() => { + BACKEND = { + async registerChunk(chunkPath, params) { + const resolver = getOrCreateResolver(chunkPath); + resolver.resolve(); + + if (params == null) { + return; + } + + for (const otherChunkData of params.otherChunks) { + const otherChunkPath = getChunkPath(otherChunkData); + // Chunk might have started loading, so we want to avoid triggering another load. + getOrCreateResolver(otherChunkPath); + } + + // This waits for chunks to be loaded, but also marks included items as available. + await Promise.all( + params.otherChunks.map((otherChunkData) => + loadChunk({ type: SourceTypeRuntime, chunkPath }, otherChunkData) + ) + ); + + if (params.runtimeModuleIds.length > 0) { + for (const moduleId of params.runtimeModuleIds) { + getOrInstantiateRuntimeModule(moduleId, chunkPath); + } + } + }, + + loadChunk(chunkPath, source) { + return doLoadChunk(chunkPath, source); + }, + + unloadChunk(chunkPath) { + deleteResolver(chunkPath); + + if (chunkPath.endsWith(".css")) { + const links = document.querySelectorAll(`link[href="/${chunkPath}"]`); + for (const link of Array.from(links)) { + link.remove(); + } + } else if (chunkPath.endsWith(".js")) { + // Unloading a JS chunk would have no effect, as it lives in the JS + // runtime once evaluated. + // However, we still want to remove the script tag from the DOM to keep + // the HTML somewhat consistent from the user's perspective. 
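+ //
+ // (Assumed DOM shape: a chunk at "chunks/foo.js" was added as
+ // <script src="/chunks/foo.js">, which is what the selector below
+ // matches.)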
+ const scripts = document.querySelectorAll(
+ `script[src="/${chunkPath}"]`
+ );
+ for (const script of Array.from(scripts)) {
+ script.remove();
+ }
+ } else {
+ throw new Error(`can't infer type of chunk from path ${chunkPath}`);
+ }
+ },
+
+ reloadChunk(chunkPath) {
+ return new Promise((resolve, reject) => {
+ if (!chunkPath.endsWith(".css")) {
+ reject(new Error("The DOM backend can only reload CSS chunks"));
+ return;
+ }
+
+ const encodedChunkPath = chunkPath
+ .split("/")
+ .map((p) => encodeURIComponent(p))
+ .join("/");
+
+ const previousLink = document.querySelector(
+ `link[rel=stylesheet][href^="/${encodedChunkPath}"]`
+ );
+
+ if (previousLink == null) {
+ reject(new Error(`No link element found for chunk ${chunkPath}`));
+ return;
+ }
+
+ const link = document.createElement("link");
+ link.rel = "stylesheet";
+ link.href = `/${encodedChunkPath}`;
+ link.onerror = () => {
+ reject();
+ };
+ link.onload = () => {
+ // First load the new CSS, then remove the old one. This prevents visible
+ // flickering that would happen in-between removing the previous CSS and
+ // loading the new one.
+ previousLink.remove();
+
+ // CSS chunks do not register themselves, and as such must be marked as
+ // loaded instantly.
+ resolve();
+ };
+
+ // Make sure to insert the new CSS right after the previous one, so that
+ // its precedence is higher.
+ previousLink.parentElement.insertBefore(link, previousLink.nextSibling);
+ });
+ },
+
+ restart: () => self.location.reload(),
+ };
+
+ /**
+ * Maps chunk paths to the corresponding resolver.
+ *
+ * @type {Map<ChunkPath, ChunkResolver>}
+ */
+ const chunkResolvers = new Map();
+
+ /**
+ * @param {ChunkPath} chunkPath
+ * @returns {ChunkResolver}
+ */
+ function getOrCreateResolver(chunkPath) {
+ let resolver = chunkResolvers.get(chunkPath);
+ if (!resolver) {
+ let resolve;
+ let reject;
+ const promise = new Promise((innerResolve, innerReject) => {
+ resolve = innerResolve;
+ reject = innerReject;
+ });
+ resolver = {
+ resolved: false,
+ promise,
+ resolve: () => {
+ resolver.resolved = true;
+ resolve();
+ },
+ reject,
+ };
+ chunkResolvers.set(chunkPath, resolver);
+ }
+ return resolver;
+ }
+
+ function deleteResolver(chunkPath) {
+ chunkResolvers.delete(chunkPath);
+ }
+
+ /**
+ * Loads the given chunk, and returns a promise that resolves once the chunk
+ * has been loaded.
+ *
+ * @param {ChunkPath} chunkPath
+ * @param {SourceInfo} source
+ */
+ async function doLoadChunk(chunkPath, source) {
+ const resolver = getOrCreateResolver(chunkPath);
+ if (resolver.resolved) {
+ return resolver.promise;
+ }
+
+ if (source.type === SourceTypeRuntime) {
+ // We don't need to load chunks referenced from runtime code, as they're already
+ // present in the DOM.
+
+ if (chunkPath.endsWith(".css")) {
+ // CSS chunks do not register themselves, and as such must be marked as
+ // loaded instantly.
+ resolver.resolve();
+ }
+
+ // We need to wait for JS chunks to register themselves within `registerChunk`
+ // before we can start instantiating runtime modules, hence the absence of
+ // `resolver.resolve()` in this branch.
+
+ return resolver.promise;
+ }
+
+ if (chunkPath.endsWith(".css")) {
+ const link = document.createElement("link");
+ link.rel = "stylesheet";
+ link.href = `/${chunkPath}`;
+ link.onerror = () => {
+ resolver.reject();
+ };
+ link.onload = () => {
+ // CSS chunks do not register themselves, and as such must be marked as
+ // loaded instantly.
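+ //
+ // (For contrast: JS chunks resolve later, when the evaluated script
+ // calls `registerChunk`, and `BACKEND.registerChunk` resolves this
+ // same resolver.)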
+ resolver.resolve(); + }; + document.body.appendChild(link); + } else if (chunkPath.endsWith(".js")) { + const script = document.createElement("script"); + script.src = `/${chunkPath}`; + // We'll only mark the chunk as loaded once the script has been executed, + // which happens in `registerChunk`. Hence the absence of `resolve()` in + // this branch. + script.onerror = () => { + resolver.reject(); + }; + document.body.appendChild(script); + } else { + throw new Error(`can't infer type of chunk from path ${chunkPath}`); + } + + return resolver.promise; + } +})(); +const chunksToRegister = globalThis.TURBOPACK; +globalThis.TURBOPACK = { push: registerChunk }; +chunksToRegister.forEach(registerChunk); +})(); \ No newline at end of file diff --git a/crates/turbopack-tests/tests/snapshot/imports/subpath-imports/output/crates_turbopack-tests_tests_snapshot_imports_subpath-imports_input_index_31b659.js.map b/crates/turbopack-tests/tests/snapshot/imports/subpath-imports/output/crates_turbopack-tests_tests_snapshot_imports_subpath-imports_input_index_31b659.js.map new file mode 100644 index 0000000000000..a12b83d3337ca --- /dev/null +++ b/crates/turbopack-tests/tests/snapshot/imports/subpath-imports/output/crates_turbopack-tests_tests_snapshot_imports_subpath-imports_input_index_31b659.js.map @@ -0,0 +1,4 @@ +{ + "version": 3, + "sections": [] +} \ No newline at end of file diff --git a/crates/turbopack-tests/tests/snapshot/imports/subpath-imports/output/crates_turbopack-tests_tests_snapshot_imports_subpath-imports_input_index_a5e0f3.js b/crates/turbopack-tests/tests/snapshot/imports/subpath-imports/output/crates_turbopack-tests_tests_snapshot_imports_subpath-imports_input_index_a5e0f3.js new file mode 100644 index 0000000000000..b307c63a818d2 --- /dev/null +++ b/crates/turbopack-tests/tests/snapshot/imports/subpath-imports/output/crates_turbopack-tests_tests_snapshot_imports_subpath-imports_input_index_a5e0f3.js @@ -0,0 +1,11 @@ +(globalThis.TURBOPACK = globalThis.TURBOPACK || []).push([ + "output/crates_turbopack-tests_tests_snapshot_imports_subpath-imports_input_index_a5e0f3.js", + {}, +]); +(globalThis.TURBOPACK_CHUNK_LISTS = globalThis.TURBOPACK_CHUNK_LISTS || []).push({ + "path": "output/crates_turbopack-tests_tests_snapshot_imports_subpath-imports_input_index_a5e0f3.js", + "chunks": [ + "output/crates_turbopack-tests_tests_snapshot_imports_subpath-imports_input_index_b53fce.js" + ], + "source": "entry" +}); \ No newline at end of file diff --git a/crates/turbopack-tests/tests/snapshot/imports/subpath-imports/output/crates_turbopack-tests_tests_snapshot_imports_subpath-imports_input_index_b53fce.js b/crates/turbopack-tests/tests/snapshot/imports/subpath-imports/output/crates_turbopack-tests_tests_snapshot_imports_subpath-imports_input_index_b53fce.js new file mode 100644 index 0000000000000..8d81e144627c8 --- /dev/null +++ b/crates/turbopack-tests/tests/snapshot/imports/subpath-imports/output/crates_turbopack-tests_tests_snapshot_imports_subpath-imports_input_index_b53fce.js @@ -0,0 +1,57 @@ +(globalThis.TURBOPACK = globalThis.TURBOPACK || []).push(["output/crates_turbopack-tests_tests_snapshot_imports_subpath-imports_input_index_b53fce.js", { + +"[project]/crates/turbopack-tests/tests/snapshot/imports/subpath-imports/input/require.cjs (ecmascript)": (function({ r: __turbopack_require__, x: __turbopack_external_require__, f: __turbopack_require_context__, i: __turbopack_import__, s: __turbopack_esm__, v: __turbopack_export_value__, n: __turbopack_export_namespace__, c: __turbopack_cache__, 
l: __turbopack_load__, j: __turbopack_cjs__, k: __turbopack_refresh__, g: global, __dirname, m: module, e: exports }) { !function() { + +module.exports = "require"; + +}.call(this) }), +"[project]/crates/turbopack-tests/tests/snapshot/imports/subpath-imports/input/import.mjs (ecmascript)": (({ r: __turbopack_require__, x: __turbopack_external_require__, f: __turbopack_require_context__, i: __turbopack_import__, s: __turbopack_esm__, v: __turbopack_export_value__, n: __turbopack_export_namespace__, c: __turbopack_cache__, l: __turbopack_load__, j: __turbopack_cjs__, k: __turbopack_refresh__, g: global, __dirname }) => (() => { + +__turbopack_esm__({ + "default": ()=>__TURBOPACK__default__export__ +}); +const __TURBOPACK__default__export__ = "import"; + +})()), +"[project]/crates/turbopack-tests/tests/snapshot/imports/subpath-imports/input/pat.js (ecmascript)": (({ r: __turbopack_require__, x: __turbopack_external_require__, f: __turbopack_require_context__, i: __turbopack_import__, s: __turbopack_esm__, v: __turbopack_export_value__, n: __turbopack_export_namespace__, c: __turbopack_cache__, l: __turbopack_load__, j: __turbopack_cjs__, k: __turbopack_refresh__, g: global, __dirname }) => (() => { + +__turbopack_esm__({ + "default": ()=>__TURBOPACK__default__export__ +}); +const __TURBOPACK__default__export__ = "pat"; + +})()), +"[project]/crates/turbopack-tests/tests/snapshot/imports/subpath-imports/input/dep/index.js (ecmascript)": (({ r: __turbopack_require__, x: __turbopack_external_require__, f: __turbopack_require_context__, i: __turbopack_import__, s: __turbopack_esm__, v: __turbopack_export_value__, n: __turbopack_export_namespace__, c: __turbopack_cache__, l: __turbopack_load__, j: __turbopack_cjs__, k: __turbopack_refresh__, g: global, __dirname }) => (() => { + +__turbopack_esm__({ + "default": ()=>__TURBOPACK__default__export__ +}); +const __TURBOPACK__default__export__ = "dep"; + +})()), +"[project]/crates/turbopack-tests/tests/snapshot/imports/subpath-imports/input/foo.js (ecmascript)": (({ r: __turbopack_require__, x: __turbopack_external_require__, f: __turbopack_require_context__, i: __turbopack_import__, s: __turbopack_esm__, v: __turbopack_export_value__, n: __turbopack_export_namespace__, c: __turbopack_cache__, l: __turbopack_load__, j: __turbopack_cjs__, k: __turbopack_refresh__, g: global, __dirname }) => (() => { + +__turbopack_esm__({ + "default": ()=>__TURBOPACK__default__export__ +}); +const __TURBOPACK__default__export__ = "foo"; + +})()), +"[project]/crates/turbopack-tests/tests/snapshot/imports/subpath-imports/input/index.js (ecmascript)": (({ r: __turbopack_require__, x: __turbopack_external_require__, f: __turbopack_require_context__, i: __turbopack_import__, s: __turbopack_esm__, v: __turbopack_export_value__, n: __turbopack_export_namespace__, c: __turbopack_cache__, l: __turbopack_load__, j: __turbopack_cjs__, k: __turbopack_refresh__, g: global, __dirname }) => (() => { + +var __TURBOPACK__imported__module__$5b$project$5d2f$crates$2f$turbopack$2d$tests$2f$tests$2f$snapshot$2f$imports$2f$subpath$2d$imports$2f$input$2f$foo$2e$js__$28$ecmascript$29$__ = __turbopack_import__("[project]/crates/turbopack-tests/tests/snapshot/imports/subpath-imports/input/foo.js (ecmascript)"); +var __TURBOPACK__imported__module__$5b$project$5d2f$crates$2f$turbopack$2d$tests$2f$tests$2f$snapshot$2f$imports$2f$subpath$2d$imports$2f$input$2f$dep$2f$index$2e$js__$28$ecmascript$29$__ = 
__turbopack_import__("[project]/crates/turbopack-tests/tests/snapshot/imports/subpath-imports/input/dep/index.js (ecmascript)"); +var __TURBOPACK__imported__module__$5b$project$5d2f$crates$2f$turbopack$2d$tests$2f$tests$2f$snapshot$2f$imports$2f$subpath$2d$imports$2f$input$2f$pat$2e$js__$28$ecmascript$29$__ = __turbopack_import__("[project]/crates/turbopack-tests/tests/snapshot/imports/subpath-imports/input/pat.js (ecmascript)"); +var __TURBOPACK__imported__module__$5b$project$5d2f$crates$2f$turbopack$2d$tests$2f$tests$2f$snapshot$2f$imports$2f$subpath$2d$imports$2f$input$2f$import$2e$mjs__$28$ecmascript$29$__ = __turbopack_import__("[project]/crates/turbopack-tests/tests/snapshot/imports/subpath-imports/input/import.mjs (ecmascript)"); +"__TURBOPACK__ecmascript__hoisting__location__"; +; +; +; +; +const conditionalRequire = __turbopack_require__("[project]/crates/turbopack-tests/tests/snapshot/imports/subpath-imports/input/require.cjs (ecmascript)"); +console.log(__TURBOPACK__imported__module__$5b$project$5d2f$crates$2f$turbopack$2d$tests$2f$tests$2f$snapshot$2f$imports$2f$subpath$2d$imports$2f$input$2f$foo$2e$js__$28$ecmascript$29$__["default"], __TURBOPACK__imported__module__$5b$project$5d2f$crates$2f$turbopack$2d$tests$2f$tests$2f$snapshot$2f$imports$2f$subpath$2d$imports$2f$input$2f$dep$2f$index$2e$js__$28$ecmascript$29$__["default"], __TURBOPACK__imported__module__$5b$project$5d2f$crates$2f$turbopack$2d$tests$2f$tests$2f$snapshot$2f$imports$2f$subpath$2d$imports$2f$input$2f$pat$2e$js__$28$ecmascript$29$__["default"], __TURBOPACK__imported__module__$5b$project$5d2f$crates$2f$turbopack$2d$tests$2f$tests$2f$snapshot$2f$imports$2f$subpath$2d$imports$2f$input$2f$import$2e$mjs__$28$ecmascript$29$__["default"], conditionalRequire); + +})()), +}]); + +//# sourceMappingURL=crates_turbopack-tests_tests_snapshot_imports_subpath-imports_input_index_b53fce.js.map \ No newline at end of file diff --git a/crates/turbopack-tests/tests/snapshot/imports/subpath-imports/output/crates_turbopack-tests_tests_snapshot_imports_subpath-imports_input_index_b53fce.js.map b/crates/turbopack-tests/tests/snapshot/imports/subpath-imports/output/crates_turbopack-tests_tests_snapshot_imports_subpath-imports_input_index_b53fce.js.map new file mode 100644 index 0000000000000..ffc4d4a074ef5 --- /dev/null +++ b/crates/turbopack-tests/tests/snapshot/imports/subpath-imports/output/crates_turbopack-tests_tests_snapshot_imports_subpath-imports_input_index_b53fce.js.map @@ -0,0 +1,16 @@ +{ + "version": 3, + "sections": [ + {"offset": {"line": 4, "column": 0}, "map": {"version":3,"sources":["/turbopack/[project]/crates/turbopack-tests/tests/snapshot/imports/subpath-imports/input/require.cjs"],"sourcesContent":["module.exports = \"require\";\n"],"names":[],"mappings":"AAAA,OAAO,OAAO,GAAG"}}, + {"offset": {"line": 5, "column": 0}, "map": {"version":3,"sources":[],"names":[],"mappings":"A"}}, + {"offset": {"line": 9, "column": 0}, "map": {"version":3,"sources":["/turbopack/[project]/crates/turbopack-tests/tests/snapshot/imports/subpath-imports/input/import.mjs"],"sourcesContent":["export default \"import\";\n"],"names":[],"mappings":";;;uCAAe"}}, + {"offset": {"line": 13, "column": 0}, "map": {"version":3,"sources":[],"names":[],"mappings":"A"}}, + {"offset": {"line": 17, "column": 0}, "map": {"version":3,"sources":["/turbopack/[project]/crates/turbopack-tests/tests/snapshot/imports/subpath-imports/input/pat.js"],"sourcesContent":["export default \"pat\";\n"],"names":[],"mappings":";;;uCAAe"}}, + {"offset": {"line": 21, 
"column": 0}, "map": {"version":3,"sources":[],"names":[],"mappings":"A"}}, + {"offset": {"line": 25, "column": 0}, "map": {"version":3,"sources":["/turbopack/[project]/crates/turbopack-tests/tests/snapshot/imports/subpath-imports/input/dep/index.js"],"sourcesContent":["export default \"dep\";\n"],"names":[],"mappings":";;;uCAAe"}}, + {"offset": {"line": 29, "column": 0}, "map": {"version":3,"sources":[],"names":[],"mappings":"A"}}, + {"offset": {"line": 33, "column": 0}, "map": {"version":3,"sources":["/turbopack/[project]/crates/turbopack-tests/tests/snapshot/imports/subpath-imports/input/foo.js"],"sourcesContent":["export default \"foo\";\n"],"names":[],"mappings":";;;uCAAe"}}, + {"offset": {"line": 37, "column": 0}, "map": {"version":3,"sources":[],"names":[],"mappings":"A"}}, + {"offset": {"line": 41, "column": 0}, "map": {"version":3,"sources":["/turbopack/[project]/crates/turbopack-tests/tests/snapshot/imports/subpath-imports/input/index.js"],"sourcesContent":["import foo from \"#foo\";\nimport dep from \"#dep\";\nimport pattern from \"#pattern/pat.js\";\nimport conditionalImport from \"#conditional\";\nconst conditionalRequire = require(\"#conditional\");\n\nconsole.log(foo, dep, pattern, conditionalImport, conditionalRequire);\n"],"names":[],"mappings":";;;;;;;;;AAIA,MAAM,qBAAqB;AAE3B,QAAQ,GAAG,0vBAAuC"}}, + {"offset": {"line": 52, "column": 0}, "map": {"version":3,"sources":[],"names":[],"mappings":"A"}}] +} \ No newline at end of file diff --git a/crates/turbopack/src/resolve.rs b/crates/turbopack/src/resolve.rs index dc6c7e4ed2ab7..b98809b4bb533 100644 --- a/crates/turbopack/src/resolve.rs +++ b/crates/turbopack/src/resolve.rs @@ -1,12 +1,10 @@ -use std::collections::BTreeMap; - use anyhow::Result; use turbo_tasks_fs::{FileSystem, FileSystemPathVc}; use turbopack_core::resolve::{ find_context_file, options::{ - ConditionValue, ImportMap, ImportMapping, ResolveInPackage, ResolveIntoPackage, - ResolveModules, ResolveOptions, ResolveOptionsVc, + ConditionValue, ImportMap, ImportMapping, ResolutionConditions, ResolveInPackage, + ResolveIntoPackage, ResolveModules, ResolveOptions, ResolveOptionsVc, }, AliasMap, AliasPattern, FindContextFileResult, }; @@ -111,6 +109,53 @@ async fn base_resolve_options( let plugins = opt.plugins.clone(); + let conditions = { + let mut conditions: ResolutionConditions = [ + ("import".to_string(), ConditionValue::Unknown), + ("require".to_string(), ConditionValue::Unknown), + ] + .into_iter() + .collect(); + if opt.browser { + conditions.insert("browser".to_string(), ConditionValue::Set); + } + if opt.module { + conditions.insert("module".to_string(), ConditionValue::Set); + } + if let Some(environment) = emulating { + for condition in environment.resolve_conditions().await?.iter() { + conditions.insert(condition.to_string(), ConditionValue::Set); + } + } + for condition in opt.custom_conditions.iter() { + conditions.insert(condition.to_string(), ConditionValue::Set); + } + // Infer some well-known conditions + let dev = conditions.get("development").cloned(); + let prod = conditions.get("production").cloned(); + if prod.is_none() { + conditions.insert( + "production".to_string(), + if matches!(dev, Some(ConditionValue::Set)) { + ConditionValue::Unset + } else { + ConditionValue::Unknown + }, + ); + } + if dev.is_none() { + conditions.insert( + "development".to_string(), + if matches!(prod, Some(ConditionValue::Set)) { + ConditionValue::Unset + } else { + ConditionValue::Unknown + }, + ); + } + conditions + }; + Ok(ResolveOptions { extensions: if let 
Some(environment) = emulating { environment.resolve_extensions().await?.clone_value() @@ -152,57 +197,10 @@ async fn base_resolve_options( mods }, into_package: { - let mut resolve_into = Vec::new(); - resolve_into.push(ResolveIntoPackage::ExportsField { - field: "exports".to_string(), - conditions: { - let mut conditions: BTreeMap = [ - ("import".to_string(), ConditionValue::Unknown), - ("require".to_string(), ConditionValue::Unknown), - ] - .into_iter() - .collect(); - if opt.browser { - conditions.insert("browser".to_string(), ConditionValue::Set); - } - if opt.module { - conditions.insert("module".to_string(), ConditionValue::Set); - } - if let Some(environment) = emulating { - for condition in environment.resolve_conditions().await?.iter() { - conditions.insert(condition.to_string(), ConditionValue::Set); - } - } - for condition in opt.custom_conditions.iter() { - conditions.insert(condition.to_string(), ConditionValue::Set); - } - // Infer some well-known conditions - let dev = conditions.get("development").cloned(); - let prod = conditions.get("production").cloned(); - if prod.is_none() { - conditions.insert( - "production".to_string(), - if matches!(dev, Some(ConditionValue::Set)) { - ConditionValue::Unset - } else { - ConditionValue::Unknown - }, - ); - } - if dev.is_none() { - conditions.insert( - "development".to_string(), - if matches!(prod, Some(ConditionValue::Set)) { - ConditionValue::Unset - } else { - ConditionValue::Unknown - }, - ); - } - conditions - }, + let mut resolve_into = vec![ResolveIntoPackage::ExportsField { + conditions: conditions.clone(), unspecified_conditions: ConditionValue::Unset, - }); + }]; if opt.browser { resolve_into.push(ResolveIntoPackage::MainField("browser".to_string())); } @@ -214,7 +212,10 @@ async fn base_resolve_options( resolve_into }, in_package: { - let mut resolve_in = Vec::new(); + let mut resolve_in = vec![ResolveInPackage::ImportsField { + conditions, + unspecified_conditions: ConditionValue::Unset, + }]; if opt.browser { resolve_in.push(ResolveInPackage::AliasField("browser".to_string())); }