diff --git a/Cargo.lock b/Cargo.lock index e5049c3e8d9..4e77999a743 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -3027,6 +3027,7 @@ dependencies = [ "moon_toolchain_plugin", "moon_typescript_lang", "moon_vcs", + "moon_workspace", "once_cell", "open", "petgraph", @@ -3734,15 +3735,12 @@ dependencies = [ "moon_cache", "moon_common", "moon_config", - "moon_hash", "moon_project", - "moon_project_builder", - "moon_project_constraints", "moon_project_expander", "moon_query", "moon_task", "moon_test_utils2", - "moon_vcs", + "moon_workspace", "petgraph", "rustc-hash 2.0.0", "scc", @@ -4009,6 +4007,7 @@ dependencies = [ "moon_rust_platform", "moon_system_platform", "moon_vcs", + "moon_workspace", "proto_core", "starbase_events", "starbase_sandbox", @@ -4162,6 +4161,29 @@ dependencies = [ "tracing", ] +[[package]] +name = "moon_workspace" +version = "0.0.1" +dependencies = [ + "miette", + "moon_cache", + "moon_common", + "moon_config", + "moon_hash", + "moon_project", + "moon_project_builder", + "moon_project_constraints", + "moon_project_graph", + "moon_vcs", + "petgraph", + "rustc-hash 2.0.0", + "serde", + "starbase_events", + "starbase_utils", + "thiserror", + "tracing", +] + [[package]] name = "native-tls" version = "0.2.12" diff --git a/crates/action-graph/tests/action_graph_test.rs b/crates/action-graph/tests/action_graph_test.rs index d7758dadbb0..4e5b60f5e4e 100644 --- a/crates/action-graph/tests/action_graph_test.rs +++ b/crates/action-graph/tests/action_graph_test.rs @@ -1472,7 +1472,9 @@ mod action_graph { } #[tokio::test] - #[should_panic(expected = "No project has been configured with the name or alias unknown.")] + #[should_panic( + expected = "No project has been configured with the identifier or alias unknown." + )] async fn errors_for_unknown_project() { let sandbox = create_sandbox("tasks"); let container = ActionGraphContainer::new(sandbox.path()).await; diff --git a/crates/action-graph/tests/snapshots/action_graph_test__action_graph__run_task_by_target__runs_all.snap b/crates/action-graph/tests/snapshots/action_graph_test__action_graph__run_task_by_target__runs_all.snap index 2e6f50c8634..36cd2a7a823 100644 --- a/crates/action-graph/tests/snapshots/action_graph_test__action_graph__run_task_by_target__runs_all.snap +++ b/crates/action-graph/tests/snapshots/action_graph_test__action_graph__run_task_by_target__runs_all.snap @@ -5,26 +5,26 @@ expression: graph.to_dot() digraph { 0 [ label="SyncWorkspace" ] 1 [ label="SetupToolchain(system)" ] - 2 [ label="SyncProject(system, server)" ] - 3 [ label="RunTask(server:build)" ] + 2 [ label="SyncProject(system, base)" ] + 3 [ label="RunTask(base:build)" ] 4 [ label="SyncProject(system, client)" ] - 5 [ label="SyncProject(system, common)" ] - 6 [ label="SyncProject(system, base)" ] + 5 [ label="SyncProject(system, server)" ] + 6 [ label="SyncProject(system, common)" ] 7 [ label="RunTask(client:build)" ] 8 [ label="RunTask(common:build)" ] - 9 [ label="RunTask(base:build)" ] + 9 [ label="RunTask(server:build)" ] 1 -> 0 [ ] 2 -> 1 [ ] 3 -> 2 [ ] - 6 -> 1 [ ] 5 -> 1 [ ] - 5 -> 6 [ ] + 6 -> 1 [ ] + 6 -> 2 [ ] 4 -> 1 [ ] - 4 -> 2 [ ] 4 -> 5 [ ] - 8 -> 5 [ ] + 4 -> 6 [ ] + 8 -> 6 [ ] + 9 -> 5 [ ] 7 -> 4 [ ] 7 -> 8 [ ] - 7 -> 3 [ ] - 9 -> 6 [ ] + 7 -> 9 [ ] } diff --git a/crates/app/Cargo.toml b/crates/app/Cargo.toml index 9f72f4bc000..e4095646c8e 100644 --- a/crates/app/Cargo.toml +++ b/crates/app/Cargo.toml @@ -30,6 +30,7 @@ moon_task = { path = "../task" } moon_toolchain = { path = "../toolchain" } moon_toolchain_plugin = { path = 
"../toolchain-plugin" } moon_vcs = { path = "../vcs" } +moon_workspace = { path = "../workspace" } async-recursion = { workspace = true } async-trait = { workspace = true } bytes = "1.7.2" diff --git a/crates/app/src/commands/docker/scaffold.rs b/crates/app/src/commands/docker/scaffold.rs index 470ae66d526..f174319704a 100644 --- a/crates/app/src/commands/docker/scaffold.rs +++ b/crates/app/src/commands/docker/scaffold.rs @@ -300,7 +300,7 @@ async fn scaffold_sources_project( } debug!( - id = project_id.as_str(), + project_id = project_id.as_str(), globs = ?include_globs, "Copying sources from project {}", color::id(project_id), @@ -318,7 +318,7 @@ async fn scaffold_sources_project( // they can be explicit in config or on the command line! if !dep_cfg.is_root_scope() { debug!( - id = project_id.as_str(), + project_id = project_id.as_str(), dep_id = dep_cfg.id.as_str(), "Including dependency project" ); diff --git a/crates/app/src/commands/node/run_script.rs b/crates/app/src/commands/node/run_script.rs index 570bc2f0404..191c994cd0f 100644 --- a/crates/app/src/commands/node/run_script.rs +++ b/crates/app/src/commands/node/run_script.rs @@ -44,10 +44,9 @@ pub async fn run_script(session: CliSession, args: RunScriptArgs) -> AppResult { // Otherwise try and find the project in the graph } else if let Some(project_id) = &args.project { - let mut project_graph = session.build_project_graph().await?; - project_graph.load(project_id).await?; + let project_graph = session.get_project_graph().await?; - command.cwd(&project_graph.build().await?.get(project_id)?.root); + command.cwd(&project_graph.get(project_id)?.root); // This should rarely happen... } else { diff --git a/crates/app/src/commands/project.rs b/crates/app/src/commands/project.rs index 049e07222f3..6f49b42de54 100644 --- a/crates/app/src/commands/project.rs +++ b/crates/app/src/commands/project.rs @@ -17,10 +17,10 @@ pub struct ProjectArgs { #[instrument(skip_all)] pub async fn project(session: CliSession, args: ProjectArgs) -> AppResult { - let mut project_graph_builder = session.build_project_graph().await?; - project_graph_builder.load(&args.id).await?; - - let project_graph = project_graph_builder.build().await?; + let project_graph = session + .get_project_graph() + .await? 
+ .into_focused(&args.id, false)?; let project = project_graph.get(&args.id)?; let config = &project.config; diff --git a/crates/app/src/commands/task.rs b/crates/app/src/commands/task.rs index 1fbc87cdcb0..bb0be3f8305 100644 --- a/crates/app/src/commands/task.rs +++ b/crates/app/src/commands/task.rs @@ -22,10 +22,7 @@ pub async fn task(session: CliSession, args: TaskArgs) -> AppResult { return Err(AppError::ProjectIdRequired.into()); }; - let mut project_graph_builder = session.build_project_graph().await?; - project_graph_builder.load(project_locator).await?; - - let project_graph = project_graph_builder.build().await?; + let project_graph = session.get_project_graph().await?; let project = project_graph.get(project_locator)?; let task = project.get_task(&args.target.task_id)?; diff --git a/crates/app/src/components.rs b/crates/app/src/components.rs index 22fb1169fdf..3b73ed34c31 100644 --- a/crates/app/src/components.rs +++ b/crates/app/src/components.rs @@ -5,9 +5,9 @@ use moon_action_context::ActionContext; use moon_action_graph::ActionGraph; use moon_action_pipeline::ActionPipeline; use moon_platform::PlatformManager; -use moon_project_graph::{ +use moon_workspace::{ ExtendProjectData, ExtendProjectEvent, ExtendProjectGraphData, ExtendProjectGraphEvent, - ProjectGraphBuilderContext, + WorkspaceBuilderContext, }; use starbase_events::{Emitter, EventState}; use std::sync::Arc; @@ -53,10 +53,10 @@ pub async fn run_action_pipeline( Ok(results) } -pub async fn create_project_graph_context( +pub async fn create_workspace_graph_context( session: &CliSession, -) -> miette::Result { - let context = ProjectGraphBuilderContext { +) -> miette::Result { + let context = WorkspaceBuilderContext { config_loader: &session.config_loader, extend_project: Emitter::::new(), extend_project_graph: Emitter::::new(), diff --git a/crates/app/src/session.rs b/crates/app/src/session.rs index bdb9d178841..7e91869ab24 100644 --- a/crates/app/src/session.rs +++ b/crates/app/src/session.rs @@ -14,9 +14,10 @@ use moon_console_reporter::DefaultReporter; use moon_env::MoonEnvironment; use moon_extension_plugin::*; use moon_plugin::{PluginHostData, PluginId}; -use moon_project_graph::{ProjectGraph, ProjectGraphBuilder}; +use moon_project_graph::ProjectGraph; use moon_toolchain_plugin::*; use moon_vcs::{BoxedVcs, Git}; +use moon_workspace::WorkspaceBuilder; use once_cell::sync::OnceCell; use proto_core::ProtoEnvironment; use semver::Version; @@ -89,10 +90,6 @@ impl CliSession { ActionGraphBuilder::new(project_graph) } - pub async fn build_project_graph(&self) -> AppResult { - ProjectGraphBuilder::new(create_project_graph_context(self).await?).await - } - pub fn get_app_context(&self) -> AppResult> { Ok(Arc::new(AppContext { cli_version: self.cli_version.clone(), @@ -140,18 +137,11 @@ impl CliSession { } pub async fn get_project_graph(&self) -> AppResult> { - if let Some(item) = self.project_graph.get() { - return Ok(Arc::clone(item)); + if self.project_graph.get().is_none() { + self.load_workspace_graph().await?; } - let cache_engine = self.get_cache_engine()?; - let context = create_project_graph_context(self).await?; - let builder = ProjectGraphBuilder::generate(context, &cache_engine).await?; - let graph = Arc::new(builder.build().await?); - - let _ = self.project_graph.set(Arc::clone(&graph)); - - Ok(graph) + Ok(self.project_graph.get().map(Arc::clone).unwrap()) } pub async fn get_toolchain_registry(&self) -> AppResult> { @@ -207,6 +197,17 @@ impl CliSession { Commands::Bin(_) | Commands::Docker { .. 
} | Commands::Node { .. } | Commands::Teardown ) } + + async fn load_workspace_graph(&self) -> AppResult<()> { + let cache_engine = self.get_cache_engine()?; + let context = create_workspace_graph_context(self).await?; + let builder = WorkspaceBuilder::new_with_cache(context, &cache_engine).await?; + let result = builder.build().await?; + + let _ = self.project_graph.set(Arc::new(result.project_graph)); + + Ok(()) + } } #[async_trait] diff --git a/crates/cache-item/src/lib.rs b/crates/cache-item/src/lib.rs index 1cb90bf7473..9a261abf4ab 100644 --- a/crates/cache-item/src/lib.rs +++ b/crates/cache-item/src/lib.rs @@ -7,7 +7,7 @@ pub use cache_mode::*; #[macro_export] macro_rules! cache_item { ($item:item) => { - #[derive(Debug, Default, Eq, PartialEq, serde::Deserialize, serde::Serialize)] + #[derive(Debug, Default, /* Eq, */ PartialEq, serde::Deserialize, serde::Serialize)] #[serde(default, rename_all = "camelCase")] $item }; diff --git a/crates/cli/tests/action_graph_test.rs b/crates/cli/tests/action_graph_test.rs index fb3a1f22cd2..ddaaa530445 100644 --- a/crates/cli/tests/action_graph_test.rs +++ b/crates/cli/tests/action_graph_test.rs @@ -24,7 +24,7 @@ mod action_graph { let dot = assert.output(); // Snapshot is not deterministic - assert_eq!(dot.split('\n').count(), 450); + assert_eq!(dot.split('\n').count(), 448); } #[test] diff --git a/crates/cli/tests/docker_file_test.rs b/crates/cli/tests/docker_file_test.rs index ac905739e91..50f58b5c6ad 100644 --- a/crates/cli/tests/docker_file_test.rs +++ b/crates/cli/tests/docker_file_test.rs @@ -14,7 +14,7 @@ mod dockerfile { }); assert.inner.stderr(predicate::str::contains( - "No project has been configured with the name or alias missing.", + "No project has been configured with the identifier or alias missing.", )); } diff --git a/crates/cli/tests/snapshots/project_test__unknown_project.snap b/crates/cli/tests/snapshots/project_test__unknown_project.snap index f75447f9249..5c5d05668da 100644 --- a/crates/cli/tests/snapshots/project_test__unknown_project.snap +++ b/crates/cli/tests/snapshots/project_test__unknown_project.snap @@ -2,9 +2,9 @@ source: crates/cli/tests/project_test.rs expression: get_assert_stderr_output(&assert.inner) --- -Error: project_graph::unknown_project +Error: project_graph::unknown_id - × No project has been configured with the name or alias unknown. + × No project has been configured with the identifier or alias unknown. diff --git a/crates/cli/tests/snapshots/run_test__errors_for_unknown_project.snap b/crates/cli/tests/snapshots/run_test__errors_for_unknown_project.snap index d206f9886f4..2a65ad938f1 100644 --- a/crates/cli/tests/snapshots/run_test__errors_for_unknown_project.snap +++ b/crates/cli/tests/snapshots/run_test__errors_for_unknown_project.snap @@ -2,9 +2,9 @@ source: crates/cli/tests/run_test.rs expression: assert.output() --- -Error: project_graph::unknown_project +Error: project_graph::unknown_id - × No project has been configured with the name or alias unknown. + × No project has been configured with the identifier or alias unknown. 
+ × No project has been configured with the identifier or alias unknown.
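For context, a minimal consumer-side sketch (not part of the patch) of the flow the hunks above introduce: the session now builds and caches the graph once through the new moon_workspace crate, and commands narrow it with into_focused() instead of loading single projects through the removed ProjectGraphBuilder. Names mirror the session.rs and project.rs hunks; the AppResult alias, imports, and error handling are simplified assumptions, not authoritative code.

// Sketch only: assumes CliSession::get_project_graph() returns Arc<ProjectGraph>
// (as set in load_workspace_graph) and that AppResult is the miette-based alias
// used throughout crates/app.
use moon_common::Id;

async fn show_project(session: &CliSession, id: &Id) -> AppResult<()> {
    // Lazily builds the graph once via WorkspaceBuilder::new_with_cache()
    // inside load_workspace_graph(), then reuses the cached Arc.
    let project_graph = session.get_project_graph().await?;

    // Commands that previously called ProjectGraphBuilder::load(id) + build()
    // now narrow the already-built graph to one project and its dependencies.
    let focused = project_graph.into_focused(id, false)?;
    let project = focused.get(id.as_str())?;

    println!("{}", project.root.display());
    Ok(())
}

This mirrors the shift visible across the rest of the diff: the builder, cache state, and locator logic move into moon_workspace, while moon_project_graph keeps only the read-side ProjectGraph type.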
diff --git a/crates/project-builder/src/project_builder.rs b/crates/project-builder/src/project_builder.rs index e94d5b6b59f..1e701df095b 100644 --- a/crates/project-builder/src/project_builder.rs +++ b/crates/project-builder/src/project_builder.rs @@ -48,7 +48,7 @@ impl<'app> ProjectBuilder<'app> { context: ProjectBuilderContext<'app>, ) -> miette::Result { trace!( - id = id.as_str(), + project_id = id.as_str(), source = source.as_str(), "Building project {} from source", color::id(id) @@ -87,7 +87,7 @@ impl<'app> ProjectBuilder<'app> { )?; trace!( - id = self.id.as_str(), + project_id = self.id.as_str(), lookup = ?global_config.order, "Inheriting global file groups and tasks", ); @@ -99,7 +99,7 @@ impl<'app> ProjectBuilder<'app> { /// Inherit the local config and then detect applicable language and platform fields. #[instrument(skip_all)] - pub async fn inherit_local_config(&mut self, config: ProjectConfig) -> miette::Result<()> { + pub async fn inherit_local_config(&mut self, config: &ProjectConfig) -> miette::Result<()> { // Use configured language or detect from environment self.language = if config.language == LanguageType::Unknown { let mut language = detect_project_language(&self.project_root); @@ -109,7 +109,7 @@ impl<'app> ProjectBuilder<'app> { } trace!( - id = self.id.as_str(), + project_id = self.id.as_str(), language = ?language, "Unknown project language, detecting from environment", ); @@ -128,7 +128,7 @@ impl<'app> ProjectBuilder<'app> { ); trace!( - id = self.id.as_str(), + project_id = self.id.as_str(), language = ?self.language, platform = ?self.platform, "Unknown tasks platform, inferring from language and toolchain", @@ -152,7 +152,7 @@ impl<'app> ProjectBuilder<'app> { } } - self.local_config = Some(config); + self.local_config = Some(config.to_owned()); Ok(()) } @@ -161,7 +161,7 @@ impl<'app> ProjectBuilder<'app> { #[instrument(skip_all)] pub async fn load_local_config(&mut self) -> miette::Result<()> { debug!( - id = self.id.as_str(), + project_id = self.id.as_str(), "Attempting to load {} (optional)", color::file( self.source @@ -174,7 +174,7 @@ impl<'app> ProjectBuilder<'app> { .config_loader .load_project_config(&self.project_root)?; - self.inherit_local_config(config).await?; + self.inherit_local_config(&config).await?; Ok(()) } @@ -278,7 +278,7 @@ impl<'app> ProjectBuilder<'app> { } trace!( - id = self.id.as_str(), + project_id = self.id.as_str(), dep = dep_id.as_str(), task = task_config.target.as_str(), "Marking arbitrary project as an implicit dependency because of a task dependency" @@ -303,7 +303,7 @@ impl<'app> ProjectBuilder<'app> { if !deps.is_empty() { trace!( - id = self.id.as_str(), + project_id = self.id.as_str(), deps = ?deps.keys().map(|k| k.as_str()).collect::>(), "Depends on {} projects", deps.len(), @@ -323,7 +323,7 @@ impl<'app> ProjectBuilder<'app> { // Inherit global first if let Some(global) = &self.global_config { trace!( - id = self.id.as_str(), + project_id = self.id.as_str(), groups = ?global.config.file_groups.keys().map(|k| k.as_str()).collect::>(), "Inheriting global file groups", ); @@ -336,7 +336,7 @@ impl<'app> ProjectBuilder<'app> { // Override with local second if let Some(local) = &self.local_config { trace!( - id = self.id.as_str(), + project_id = self.id.as_str(), groups = ?local.file_groups.keys().map(|k| k.as_str()).collect::>(), "Using local file groups", ); diff --git a/crates/project-expander/src/project_expander.rs b/crates/project-expander/src/project_expander.rs index 9edc3d2c245..4e7e17ce10d 100644 --- 
a/crates/project-expander/src/project_expander.rs +++ b/crates/project-expander/src/project_expander.rs @@ -23,7 +23,7 @@ impl<'graph, 'query> ProjectExpander<'graph, 'query> { let mut project = self.context.project.to_owned(); debug!( - id = project.id.as_str(), + project_id = project.id.as_str(), "Expanding project {}", color::id(&project.id) ); diff --git a/crates/project-graph/Cargo.toml b/crates/project-graph/Cargo.toml index 8d94fd295b8..4bad9b7b3ed 100644 --- a/crates/project-graph/Cargo.toml +++ b/crates/project-graph/Cargo.toml @@ -9,29 +9,26 @@ repository = "https://github.com/moonrepo/moon" publish = false [dependencies] -moon_cache = { path = "../cache" } moon_common = { path = "../common" } moon_config = { path = "../config" } -moon_hash = { path = "../hash" } moon_project = { path = "../project" } -moon_project_builder = { path = "../project-builder" } -moon_project_constraints = { path = "../project-constraints" } moon_project_expander = { path = "../project-expander" } moon_query = { path = "../query" } moon_task = { path = "../task" } -moon_vcs = { path = "../vcs" } miette = { workspace = true } petgraph = { workspace = true } rustc-hash = { workspace = true } scc = { workspace = true } serde = { workspace = true, features = ["rc"] } -starbase_events = { workspace = true } -starbase_utils = { workspace = true, features = ["glob", "json"] } +starbase_utils = { workspace = true, features = ["json"] } thiserror = { workspace = true } tracing = { workspace = true } [dev-dependencies] +moon_cache = { path = "../cache" } moon_test_utils2 = { path = "../test-utils" } +moon_workspace = { path = "../workspace" } +starbase_events = { workspace = true } starbase_sandbox = { workspace = true } tokio = { workspace = true } diff --git a/crates/project-graph/src/lib.rs b/crates/project-graph/src/lib.rs index a7d6047246c..62ca884fba2 100644 --- a/crates/project-graph/src/lib.rs +++ b/crates/project-graph/src/lib.rs @@ -1,16 +1,7 @@ -mod project_events; mod project_graph; -mod project_graph_builder; -mod project_graph_cache; mod project_graph_error; -mod project_graph_hash; mod project_matcher; -mod projects_locator; -pub use project_events::*; pub use project_graph::*; -pub use project_graph_builder::*; -pub use project_graph_cache::*; pub use project_graph_error::*; pub use project_matcher::*; -pub use projects_locator::*; diff --git a/crates/project-graph/src/project_graph.rs b/crates/project-graph/src/project_graph.rs index bd150fc8bf6..a00515106cd 100644 --- a/crates/project-graph/src/project_graph.rs +++ b/crates/project-graph/src/project_graph.rs @@ -19,12 +19,12 @@ use std::path::{Path, PathBuf}; use std::sync::{Arc, RwLock, RwLockReadGuard, RwLockWriteGuard}; use tracing::{debug, instrument}; -pub type GraphType = DiGraph; +pub type ProjectGraphType = DiGraph; pub type ProjectsCache = FxHashMap>; #[derive(Serialize)] pub struct ProjectGraphCache<'graph> { - graph: &'graph GraphType, + graph: &'graph ProjectGraphType, projects: &'graph ProjectsCache, } @@ -51,7 +51,7 @@ pub struct ProjectGraph { fs_cache: HashMap>, /// Directed-acyclic graph (DAG) of non-expanded projects and their dependencies. - graph: GraphType, + graph: ProjectGraphType, /// Graph node information, mapped by project ID. 
nodes: FxHashMap, @@ -70,7 +70,11 @@ pub struct ProjectGraph { } impl ProjectGraph { - pub fn new(graph: GraphType, nodes: FxHashMap, workspace_root: &Path) -> Self { + pub fn new( + graph: ProjectGraphType, + nodes: FxHashMap, + workspace_root: &Path, + ) -> Self { debug!("Creating project graph"); Self { @@ -123,13 +127,13 @@ impl ProjectGraph { /// Return a project with the provided name or alias from the graph. /// If the project does not exist or has been misconfigured, return an error. #[instrument(name = "get_project", skip(self))] - pub fn get(&self, project_locator: &str) -> miette::Result> { - self.internal_get(project_locator) + pub fn get(&self, id_or_alias: &str) -> miette::Result> { + self.internal_get(id_or_alias) } /// Return an unexpanded project with the provided name or alias from the graph. - pub fn get_unexpanded(&self, project_locator: &str) -> miette::Result<&Project> { - let id = self.resolve_id(project_locator); + pub fn get_unexpanded(&self, id_or_alias: &str) -> miette::Result<&Project> { + let id = self.resolve_id(id_or_alias); let node = self .nodes @@ -216,12 +220,8 @@ impl ProjectGraph { .collect() } - pub fn into_focused( - &self, - project_locator: &Id, - with_dependents: bool, - ) -> miette::Result { - let project = self.get(project_locator)?; + pub fn into_focused(&self, id_or_alias: &Id, with_dependents: bool) -> miette::Result { + let project = self.get(id_or_alias)?; let upstream = self.dependencies_of(&project)?; let downstream = self.dependents_of(&project)?; let mut nodes = FxHashMap::default(); @@ -438,23 +438,23 @@ impl ProjectGraph { Ok(id) } - fn resolve_id(&self, alias_or_id: &str) -> Id { - Id::raw(if self.nodes.contains_key(alias_or_id) { - alias_or_id + fn resolve_id(&self, id_or_alias: &str) -> Id { + Id::raw(if self.nodes.contains_key(id_or_alias) { + id_or_alias } else { self.nodes .iter() .find(|(_, node)| { node.alias .as_ref() - .is_some_and(|alias| alias == alias_or_id) + .is_some_and(|alias| alias == id_or_alias) || node .original_id .as_ref() - .is_some_and(|id| id == alias_or_id) + .is_some_and(|id| id == id_or_alias) }) .map(|(id, _)| id.as_str()) - .unwrap_or(alias_or_id) + .unwrap_or(id_or_alias) }) } diff --git a/crates/project-graph/src/project_graph_builder.rs b/crates/project-graph/src/project_graph_builder.rs deleted file mode 100644 index e61123d63f8..00000000000 --- a/crates/project-graph/src/project_graph_builder.rs +++ /dev/null @@ -1,691 +0,0 @@ -use crate::project_events::{ExtendProjectEvent, ExtendProjectGraphEvent}; -use crate::project_graph::{GraphType, ProjectGraph, ProjectNode}; -use crate::project_graph_cache::ProjectsCacheState; -use crate::project_graph_error::ProjectGraphError; -use crate::project_graph_hash::ProjectGraphHash; -use crate::projects_locator::locate_projects_with_globs; -use moon_cache::CacheEngine; -use moon_common::path::{is_root_level_source, to_virtual_string, WorkspaceRelativePathBuf}; -use moon_common::{color, consts, Id}; -use moon_config::{ - ConfigLoader, DependencyScope, InheritedTasksManager, ProjectConfig, ProjectsSourcesList, - ToolchainConfig, WorkspaceConfig, WorkspaceProjects, -}; -use moon_project::Project; -use moon_project_builder::{ProjectBuilder, ProjectBuilderContext}; -use moon_project_constraints::{enforce_project_type_relationships, enforce_tag_relationships}; -use moon_vcs::BoxedVcs; -use petgraph::graph::DiGraph; -use petgraph::prelude::NodeIndex; -use petgraph::visit::IntoNodeReferences; -use petgraph::Direction; -use rustc_hash::{FxHashMap, FxHashSet}; -use 
serde::{Deserialize, Serialize}; -use starbase_events::Emitter; -use starbase_utils::{glob, json}; -use std::collections::BTreeMap; -use std::path::Path; -use std::sync::Arc; -use tracing::{debug, instrument, trace}; - -pub struct ProjectGraphBuilderContext<'app> { - pub config_loader: &'app ConfigLoader, - pub extend_project: Emitter, - pub extend_project_graph: Emitter, - pub inherited_tasks: &'app InheritedTasksManager, - pub strict_project_ids: bool, - pub toolchain_config: &'app ToolchainConfig, - pub vcs: Option>, - pub working_dir: &'app Path, - pub workspace_config: &'app WorkspaceConfig, - pub workspace_root: &'app Path, -} - -#[derive(Deserialize, Serialize)] -pub struct ProjectGraphBuilder<'app> { - #[serde(skip)] - context: Option>>, - - /// Mapping of project IDs to project aliases. - aliases: FxHashMap, - - /// Loaded project configuration files. - #[serde(skip)] - configs: FxHashMap, - - /// The DAG instance. - graph: GraphType, - - /// Is this a monorepo or polyrepo. - monorepo: bool, - - /// Nodes (projects) inserted into the graph. - nodes: FxHashMap, - - /// Projects that have explicitly renamed themselves. - /// Maps original ID to renamed ID. - renamed_ids: FxHashMap, - - /// The root project ID (only if a monorepo). - root_id: Option, - - /// Mapping of project IDs to file system sources, - /// derived from the `workspace.projects` setting. - sources: FxHashMap, -} - -impl<'app> ProjectGraphBuilder<'app> { - /// Create a new project graph instance without reading from the - /// cache, and preloading all project sources and aliases. - pub async fn new( - context: ProjectGraphBuilderContext<'app>, - ) -> miette::Result> { - debug!("Building project graph"); - - let mut graph = ProjectGraphBuilder { - context: Some(Arc::new(context)), - configs: FxHashMap::default(), - aliases: FxHashMap::default(), - graph: DiGraph::new(), - monorepo: false, - nodes: FxHashMap::default(), - renamed_ids: FxHashMap::default(), - root_id: None, - sources: FxHashMap::default(), - }; - - graph.preload().await?; - - Ok(graph) - } - - /// Create a project graph with all projects inserted as nodes, - /// and read from the file system cache when applicable. 
- #[instrument(name = "generate_project_graph", skip_all)] - pub async fn generate( - context: ProjectGraphBuilderContext<'app>, - cache_engine: &CacheEngine, - ) -> miette::Result> { - let is_vcs_enabled = context - .vcs - .as_ref() - .expect("VCS is required for project graph caching!") - .is_enabled(); - let mut graph = Self::new(context).await?; - - // No VCS to hash with, so abort caching - if !is_vcs_enabled { - graph.load_all().await?; - - return Ok(graph); - } - - // Hash the project graph based on the preloaded state - let mut graph_contents = ProjectGraphHash::new(); - graph_contents.add_sources(&graph.sources); - graph_contents.add_aliases(&graph.aliases); - graph_contents.add_configs(graph.hash_required_configs().await?); - graph_contents.gather_env(); - - let hash = cache_engine - .hash - .save_manifest_without_hasher("Project graph", &graph_contents)?; - - debug!(hash, "Generated hash for project graph"); - - // Check the current state and cache - let mut state = cache_engine - .state - .load_state::("projects.json")?; - let cache_path = cache_engine.state.resolve_path("partialProjectGraph.json"); - - if hash == state.data.last_hash && cache_path.exists() { - debug!( - cache = ?cache_path, - "Loading project graph with {} projects from cache", - graph.sources.len(), - ); - - let mut cache: ProjectGraphBuilder = json::read_file(cache_path)?; - cache.configs = graph.configs; - cache.context = graph.context; - - return Ok(cache); - } - - // Build the graph, update the state, and save the cache - debug!( - "Generating project graph with {} projects", - graph.sources.len(), - ); - - graph.load_all().await?; - - state.data.last_hash = hash; - state.data.projects = graph.sources.clone(); - state.save()?; - - json::write_file(cache_path, &graph, false)?; - - Ok(graph) - } - - /// Build the project graph and return a new structure. - #[instrument(name = "build_project_graph", skip_all)] - pub async fn build(mut self) -> miette::Result { - self.enforce_constraints()?; - - let context = self.context.take().unwrap(); - let mut nodes = FxHashMap::default(); - - for (id, index) in self.nodes { - let Some(source) = self.sources.remove(&id) else { - continue; - }; - - nodes.insert( - id, - ProjectNode { - index, - source, - ..ProjectNode::default() - }, - ); - } - - for (id, alias) in self.aliases { - nodes.entry(id).and_modify(|node| { - node.alias = Some(alias); - }); - } - - for (original_id, id) in self.renamed_ids { - nodes.entry(id).and_modify(|node| { - node.original_id = Some(original_id); - }); - } - - let mut graph = ProjectGraph::new(self.graph, nodes, context.workspace_root); - graph.working_dir = context.working_dir.to_owned(); - - Ok(graph) - } - - /// Load a single project by name or alias into the graph. - pub async fn load(&mut self, project_locator: &str) -> miette::Result<()> { - self.internal_load(project_locator, &mut FxHashSet::default()) - .await?; - - Ok(()) - } - - /// Load all projects into the graph, as configured in the workspace. 
- pub async fn load_all(&mut self) -> miette::Result<()> { - let ids = self.sources.keys().cloned().collect::>(); - - for id in ids { - self.internal_load(&id, &mut FxHashSet::default()).await?; - } - - Ok(()) - } - - #[instrument(name = "load", skip(self, cycle))] - async fn internal_load( - &mut self, - project_locator: &str, - cycle: &mut FxHashSet, - ) -> miette::Result<(Id, NodeIndex)> { - let id = self.resolve_id(project_locator); - - // Already loaded, exit early with existing index - if let Some(index) = self.nodes.get(&id) { - trace!( - id = id.as_str(), - "Project already exists in the project graph, skipping load", - ); - - return Ok((id, *index)); - } - - // Check that the project ID is configured - trace!( - id = id.as_str(), - "Project does not exist in the project graph, attempting to load", - ); - - let Some(source) = self.sources.get(&id).map(|s| s.to_owned()) else { - return Err(ProjectGraphError::UnconfiguredID(id).into()); - }; - - // Create the project - let mut project = self.build_project(id, source).await?; - let id = project.id.clone(); - - cycle.insert(id.clone()); - - // Create dependent projects - let mut edges = vec![]; - - for dep_config in &mut project.dependencies { - let loaded_dep_id = if cycle.contains(&dep_config.id) { - debug!( - id = id.as_str(), - dependency_id = dep_config.id.as_str(), - "Encountered a dependency cycle (from project); will disconnect nodes to avoid recursion", - ); - - continue; - - // Don't link the root project to any project, but still load it - } else if dep_config.is_root_scope() { - Box::pin(self.internal_load(&dep_config.id, cycle)).await?.0 - - // Otherwise link projects - } else { - let dep = Box::pin(self.internal_load(&dep_config.id, cycle)).await?; - edges.push((dep.1, dep_config.scope)); - dep.0 - }; - - if loaded_dep_id != dep_config.id { - dep_config.id = loaded_dep_id; - } - } - - // Add to the graph - let index = self.graph.add_node(project); - - self.nodes.insert(id.clone(), index); - - for edge in edges { - self.graph.add_edge(index, edge.0, edge.1); - } - - cycle.clear(); - - Ok((id, index)) - } - - /// Create and build the project with the provided ID and source. 
- #[instrument(skip(self))] - async fn build_project( - &mut self, - id: Id, - source: WorkspaceRelativePathBuf, - ) -> miette::Result { - debug!(id = id.as_str(), "Building project {}", color::id(&id)); - - let context = self.context(); - - if !source.to_path(context.workspace_root).exists() { - return Err(ProjectGraphError::MissingAtSource(source.to_string()).into()); - } - - let mut builder = ProjectBuilder::new( - &id, - &source, - ProjectBuilderContext { - config_loader: context.config_loader, - monorepo: self.monorepo, - root_project_id: self.root_id.as_ref(), - toolchain_config: context.toolchain_config, - workspace_root: context.workspace_root, - }, - )?; - - if let Some(config) = self.configs.remove(&id) { - builder.inherit_local_config(config).await?; - } else { - builder.load_local_config().await?; - } - - builder.inherit_global_config(context.inherited_tasks)?; - - let extended_data = context - .extend_project - .emit(ExtendProjectEvent { - project_id: id.to_owned(), - project_source: source.to_owned(), - workspace_root: context.workspace_root.to_owned(), - }) - .await?; - - // Inherit implicit dependencies - for dep_config in extended_data.dependencies { - builder.extend_with_dependency(dep_config); - } - - // Inherit inferred tasks - for (task_id, task_config) in extended_data.tasks { - builder.extend_with_task(task_id, task_config); - } - - // Inherit alias before building in case the project - // references itself in tasks or dependencies - if let Some(alias) = self.aliases.get(&id) { - builder.set_alias(alias); - } - - let project = builder.build().await?; - - Ok(project) - } - - /// Enforce project constraints and boundaries after all nodes have been inserted. - #[instrument(skip_all)] - fn enforce_constraints(&self) -> miette::Result<()> { - debug!("Enforcing project constraints"); - - let context = self.context(); - let type_relationships = context - .workspace_config - .constraints - .enforce_project_type_relationships; - let tag_relationships = &context.workspace_config.constraints.tag_relationships; - - if !type_relationships && tag_relationships.is_empty() { - return Ok(()); - } - - let default_scope = DependencyScope::Build; - - for (project_index, project) in self.graph.node_references() { - let deps: Vec<_> = self - .graph - .neighbors_directed(project_index, Direction::Outgoing) - .flat_map(|dep_index| { - self.graph.node_weight(dep_index).map(|dep| { - ( - dep, - // Is this safe? - self.graph - .find_edge(project_index, dep_index) - .and_then(|ei| self.graph.edge_weight(ei)) - .unwrap_or(&default_scope), - ) - }) - }) - .collect(); - - for (dep, dep_scope) in deps { - if type_relationships { - enforce_project_type_relationships(project, dep, dep_scope)?; - } - - for (source_tag, required_tags) in tag_relationships { - enforce_tag_relationships(project, source_tag, dep, required_tags)?; - } - } - } - - Ok(()) - } - - /// When caching the project graph, we must hash all project and workspace - /// config files that are required to invalidate the cache. 
- async fn hash_required_configs( - &self, - ) -> miette::Result> { - let context = self.context(); - let config_names = context.config_loader.get_project_file_names(); - let mut configs = vec![]; - - // Hash all project-level config files - for source in self.sources.values() { - for name in &config_names { - configs.push(source.join(name).as_str().to_owned()); - } - } - - // Hash all workspace-level config files - for file in glob::walk( - context.workspace_root.join(consts::CONFIG_DIRNAME), - ["*.pkl", "tasks/**/*.pkl", "*.yml", "tasks/**/*.yml"], - )? { - configs.push(to_virtual_string( - file.strip_prefix(context.workspace_root).unwrap(), - )?); - } - - context - .vcs - .as_ref() - .unwrap() - .get_file_hashes(&configs, true, 500) - .await - } - - /// Preload the graph with project sources from the workspace configuration. - /// If globs are provided, walk the file system and gather sources. - /// Then extend the graph with aliases, derived from all event subscribers. - async fn preload(&mut self) -> miette::Result<()> { - let context = self.context(); - let mut globs = vec![]; - let mut sources = vec![]; - - // Locate all project sources - let mut add_sources = |map: &FxHashMap| { - for (id, source) in map { - sources.push((id.to_owned(), WorkspaceRelativePathBuf::from(source))); - } - }; - - match &context.workspace_config.projects { - WorkspaceProjects::Sources(map) => { - add_sources(map); - } - WorkspaceProjects::Globs(list) => { - globs.extend(list); - } - WorkspaceProjects::Both(cfg) => { - globs.extend(&cfg.globs); - add_sources(&cfg.sources); - } - }; - - if !sources.is_empty() { - debug!( - sources = ?sources, - "Using configured project sources", - ); - } - - if !globs.is_empty() { - debug!( - globs = ?globs, - "Locating projects with globs", - ); - - locate_projects_with_globs(&context, &globs, &mut sources)?; - } - - // Load all config files first so that ID renaming occurs - self.preload_configs(&mut sources)?; - - // Extend graph from subscribers - debug!("Extending project graph from subscribers"); - - let aliases = context - .extend_project_graph - .emit(ExtendProjectGraphEvent { - sources: sources.clone(), - workspace_root: context.workspace_root.to_owned(), - }) - .await? 
- .aliases; - - // Determine if a polyrepo or monorepo - let polyrepo = sources.len() == 1 - && sources - .first() - .map(|(_, source)| is_root_level_source(source)) - .unwrap_or_default(); - - self.monorepo = !polyrepo; - - // Find the root project - self.root_id = if self.monorepo { - sources.iter().find_map(|(id, source)| { - if is_root_level_source(source) { - Some(id.to_owned()) - } else { - None - } - }) - } else { - None - }; - - // Set our data and warn/error against problems - for (id, source) in sources { - if let Some(existing_source) = self.sources.get(&id) { - if existing_source == &source { - continue; - } - - return Err(ProjectGraphError::DuplicateId { - id: id.clone(), - old_source: existing_source.to_string(), - new_source: source.to_string(), - } - .into()); - } else { - self.sources.insert(id, source); - } - } - - let mut dupe_aliases = FxHashMap::::default(); - - for (id, alias) in aliases { - let id = match self.renamed_ids.get(&id) { - Some(new_id) => new_id, - None => &id, - }; - - // Skip aliases that match its own ID - if id == &alias { - continue; - } - - // Skip aliases that would override an ID - if self.sources.contains_key(alias.as_str()) { - debug!( - "Skipping alias {} (for project {}) as it conflicts with the project {}", - color::label(&alias), - color::id(id), - color::id(&alias), - ); - - continue; - } - - if let Some(existing_id) = dupe_aliases.get(&alias) { - // Skip if the existing ID is already for this ID. - // This scenario is possible when multiple platforms - // extract the same aliases (Bun vs Node, etc). - if existing_id == id { - continue; - } - - return Err(ProjectGraphError::DuplicateAlias { - alias: alias.clone(), - old_id: existing_id.to_owned(), - new_id: id.clone(), - } - .into()); - } - - dupe_aliases.insert(alias.clone(), id.to_owned()); - self.aliases.insert(id.to_owned(), alias); - } - - Ok(()) - } - - fn preload_configs(&mut self, sources: &mut ProjectsSourcesList) -> miette::Result<()> { - let context = self.context(); - let mut configs = FxHashMap::default(); - let mut renamed_ids = FxHashMap::default(); - let mut dupe_original_ids = FxHashSet::default(); - - for (id, source) in sources { - debug!( - id = id.as_str(), - "Attempting to load {} (optional)", - color::file(source.join(context.config_loader.get_debug_label("moon", false))) - ); - - let config = context - .config_loader - .load_project_config_from_source(context.workspace_root, source)?; - - // Track ID renames - if let Some(new_id) = &config.id { - if new_id != id { - if renamed_ids.contains_key(id) { - dupe_original_ids.insert(id.to_owned()); - } else { - renamed_ids.insert(id.to_owned(), new_id.to_owned()); - } - - *id = new_id.to_owned(); - } - } - - configs.insert(config.id.clone().unwrap_or(id.to_owned()), config); - } - - if !dupe_original_ids.is_empty() { - trace!( - original_ids = ?dupe_original_ids.iter().collect::>(), - "Found multiple projects with the same original ID before being renamed to a custom ID; will ignore these IDs within lookups" - ); - - for dupe_id in dupe_original_ids { - renamed_ids.remove(&dupe_id); - } - } - - debug!("Loaded {} project configs", configs.len()); - - self.configs.extend(configs); - self.renamed_ids.extend(renamed_ids); - - Ok(()) - } - - fn context(&self) -> Arc> { - Arc::clone(self.context.as_ref().unwrap()) - } - - fn resolve_id(&self, project_locator: &str) -> Id { - let id = if self.sources.contains_key(project_locator) { - Id::raw(project_locator) - } else { - match self.aliases.iter().find_map(|(id, alias)| { - if 
alias == project_locator { - Some(id) - } else { - None - } - }) { - Some(project_id) => project_id.to_owned(), - None => Id::raw(project_locator), - } - }; - - if self - .context - .as_ref() - .is_some_and(|ctx| ctx.strict_project_ids) - { - return id; - } - - match self.renamed_ids.get(&id) { - Some(new_id) => new_id.to_owned(), - None => id, - } - } -} diff --git a/crates/project-graph/src/project_graph_cache.rs b/crates/project-graph/src/project_graph_cache.rs deleted file mode 100644 index f1eba0572c9..00000000000 --- a/crates/project-graph/src/project_graph_cache.rs +++ /dev/null @@ -1,11 +0,0 @@ -use moon_cache::cache_item; -use moon_common::path::WorkspaceRelativePathBuf; -use moon_common::Id; -use rustc_hash::FxHashMap; - -cache_item!( - pub struct ProjectsCacheState { - pub last_hash: String, - pub projects: FxHashMap, - } -); diff --git a/crates/project-graph/src/project_graph_error.rs b/crates/project-graph/src/project_graph_error.rs index 7c274951460..0af71fea63e 100644 --- a/crates/project-graph/src/project_graph_error.rs +++ b/crates/project-graph/src/project_graph_error.rs @@ -5,43 +5,11 @@ use thiserror::Error; #[derive(Error, Debug, Diagnostic)] pub enum ProjectGraphError { - #[diagnostic(code(project_graph::duplicate_alias))] - #[error( - "Project {} is already using the alias {}, unable to set the alias for project {}.\nTry changing the alias to something unique to move forward.", - .old_id.style(Style::Id), - .alias.style(Style::Label), - .new_id.style(Style::Id), - )] - DuplicateAlias { - alias: String, - old_id: Id, - new_id: Id, - }, - - #[diagnostic(code(project_graph::duplicate_id))] - #[error( - "A project already exists with the name {} (existing source {}, new source {}).\nTry renaming the project folder to make it unique, or configure the {} setting in {}.", - .id.style(Style::Id), - .old_source.style(Style::File), - .new_source.style(Style::File), - "id".style(Style::Property), - "moon.yml".style(Style::File) - )] - DuplicateId { - id: Id, - old_source: String, - new_source: String, - }, - - #[diagnostic(code(project_graph::missing_source))] - #[error("No project exists at source path {}.", .0.style(Style::File))] - MissingAtSource(String), - #[diagnostic(code(project_graph::missing_from_path))] #[error("No project could be located starting from path {}.", .0.style(Style::Path))] MissingFromPath(PathBuf), - #[diagnostic(code(project_graph::unknown_project))] - #[error("No project has been configured with the name or alias {}.", .0.style(Style::Id))] + #[diagnostic(code(project_graph::unknown_id))] + #[error("No project has been configured with the identifier or alias {}.", .0.style(Style::Id))] UnconfiguredID(Id), } diff --git a/crates/project-graph/tests/project_graph_test.rs b/crates/project-graph/tests/project_graph_test.rs index 7e4f1ec3c7f..5c0a0ee16ae 100644 --- a/crates/project-graph/tests/project_graph_test.rs +++ b/crates/project-graph/tests/project_graph_test.rs @@ -4,13 +4,15 @@ use moon_config::{ WorkspaceProjectsConfig, }; use moon_project::{FileGroup, Project}; -use moon_project_graph::{ - ExtendProjectData, ExtendProjectEvent, ExtendProjectGraphData, ExtendProjectGraphEvent, - ProjectGraph, ProjectGraphBuilder, -}; +use moon_project_graph::ProjectGraph; use moon_query::build_query; use moon_task::Target; use moon_test_utils2::*; +use moon_workspace::{ + ExtendProjectData, ExtendProjectEvent, ExtendProjectGraphData, ExtendProjectGraphEvent, + WorkspaceProjectsCacheState, +}; +use petgraph::prelude::*; use rustc_hash::{FxHashMap, FxHashSet}; use 
starbase_events::EventState; use starbase_sandbox::{assert_snapshot, create_sandbox, Sandbox}; @@ -51,7 +53,7 @@ mod project_graph { } #[tokio::test] - #[should_panic(expected = "No project has been configured with the name or alias z")] + #[should_panic(expected = "No project has been configured with the identifier or alias z")] async fn errors_unknown_id() { let graph = generate_project_graph("dependencies").await; @@ -84,7 +86,7 @@ mod project_graph { } #[tokio::test] - #[should_panic(expected = "A project already exists with the name id")] + #[should_panic(expected = "A project already exists with the identifier id")] async fn errors_duplicate_ids() { generate_project_graph("dupe-folder-conflict").await; } @@ -110,12 +112,11 @@ mod project_graph { // Move files so that we can infer a compatible root project name fs::copy_dir_all(sandbox.path(), sandbox.path(), &root).unwrap(); - let mut container = ProjectGraphContainer::new(&root); + let mut mock = create_project_graph_mocker(&root); - container.workspace_config.projects = WorkspaceProjects::Globs(string_vec!["*", "."]); + mock.workspace_config.projects = WorkspaceProjects::Globs(string_vec!["*", "."]); - let context = container.create_context(); - let graph = container.build_graph(context).await; + let graph = mock.build_project_graph().await; assert_eq!( get_ids_from_projects(graph.get_all().unwrap()), @@ -126,13 +127,11 @@ mod project_graph { #[tokio::test] async fn globs_with_config() { let sandbox = create_sandbox("locate-configs"); - let mut container = ProjectGraphContainer::new(sandbox.path()); + let mut mock = create_project_graph_mocker(sandbox.path()); - container.workspace_config.projects = - WorkspaceProjects::Globs(string_vec!["*/moon.yml"]); + mock.workspace_config.projects = WorkspaceProjects::Globs(string_vec!["*/moon.yml"]); - let context = container.create_context(); - let graph = container.build_graph(context).await; + let graph = mock.build_project_graph().await; assert_eq!(get_ids_from_projects(graph.get_all().unwrap()), ["a", "c"]); } @@ -140,16 +139,14 @@ mod project_graph { #[tokio::test] async fn paths() { let sandbox = create_sandbox("dependencies"); - let mut container = ProjectGraphContainer::new(sandbox.path()); + let mut mock = create_project_graph_mocker(sandbox.path()); - container.workspace_config.projects = - WorkspaceProjects::Sources(FxHashMap::from_iter([ - (Id::raw("c"), "c".into()), - (Id::raw("b"), "b".into()), - ])); + mock.workspace_config.projects = WorkspaceProjects::Sources(FxHashMap::from_iter([ + (Id::raw("c"), "c".into()), + (Id::raw("b"), "b".into()), + ])); - let context = container.create_context(); - let graph = container.build_graph(context).await; + let graph = mock.build_project_graph().await; assert_eq!(get_ids_from_projects(graph.get_all().unwrap()), ["b", "c"]); } @@ -157,19 +154,17 @@ mod project_graph { #[tokio::test] async fn paths_and_globs() { let sandbox = create_sandbox("dependencies"); - let mut container = ProjectGraphContainer::new(sandbox.path()); + let mut mock = create_project_graph_mocker(sandbox.path()); - container.workspace_config.projects = - WorkspaceProjects::Both(WorkspaceProjectsConfig { - globs: string_vec!["{a,c}"], - sources: FxHashMap::from_iter([ - (Id::raw("b"), "b".into()), - (Id::raw("root"), ".".into()), - ]), - }); + mock.workspace_config.projects = WorkspaceProjects::Both(WorkspaceProjectsConfig { + globs: string_vec!["{a,c}"], + sources: FxHashMap::from_iter([ + (Id::raw("b"), "b".into()), + (Id::raw("root"), ".".into()), + ]), + }); - let 
context = container.create_context(); - let graph = container.build_graph(context).await; + let graph = mock.build_project_graph().await; assert_eq!( get_ids_from_projects(graph.get_all().unwrap()), @@ -212,9 +207,10 @@ mod project_graph { sandbox.enable_git(); sandbox.create_file(".gitignore", "*-other"); - let container = ProjectGraphContainer::with_vcs(sandbox.path()); - let context = container.create_context(); - let graph = container.build_graph(context).await; + let mut mock = create_project_graph_mocker(sandbox.path()); + mock.with_vcs(); + + let graph = mock.build_project_graph().await; assert_eq!( get_ids_from_projects(graph.get_all().unwrap()), @@ -243,24 +239,22 @@ mod project_graph { mod cache { use super::*; use moon_cache::CacheEngine; - use moon_project_graph::ProjectsCacheState; + use moon_workspace::ProjectBuildData; - const CACHE_PATH: &str = ".moon/cache/states/partialProjectGraph.json"; - const STATE_PATH: &str = ".moon/cache/states/projects.json"; + const CACHE_PATH: &str = ".moon/cache/states/workspaceGraph.json"; + const STATE_PATH: &str = ".moon/cache/states/projectsBuildData.json"; async fn do_generate(root: &Path) -> ProjectGraph { let cache_engine = CacheEngine::new(root).unwrap(); - let container = ProjectGraphContainer::with_vcs(root); - let context = container.create_context(); - let mut builder = ProjectGraphBuilder::generate(context, &cache_engine) - .await - .unwrap(); - builder.load_all().await.unwrap(); + let mut mock = create_project_graph_mocker(root); + mock.with_vcs(); - let graph = builder.build().await.unwrap(); - graph.get_all().unwrap(); - graph + mock.build_project_graph_with_options(ProjectGraphMockOptions { + cache: Some(cache_engine), + ..Default::default() + }) + .await } async fn generate_cached_project_graph( @@ -310,16 +304,44 @@ mod project_graph { }) .await; - let state: ProjectsCacheState = + let state: WorkspaceProjectsCacheState = json::read_file(sandbox.path().join(STATE_PATH)).unwrap(); assert_eq!( state.projects, FxHashMap::from_iter([ - (Id::raw("a"), "a".into()), - (Id::raw("b"), "b".into()), - (Id::raw("c"), "c".into()), - (Id::raw("d"), "d".into()), + ( + Id::raw("a"), + ProjectBuildData { + node_index: Some(NodeIndex::from(2)), + source: "a".into(), + ..Default::default() + } + ), + ( + Id::raw("b"), + ProjectBuildData { + node_index: Some(NodeIndex::from(1)), + source: "b".into(), + ..Default::default() + } + ), + ( + Id::raw("c"), + ProjectBuildData { + node_index: Some(NodeIndex::from(0)), + source: "c".into(), + ..Default::default() + } + ), + ( + Id::raw("d"), + ProjectBuildData { + node_index: Some(NodeIndex::from(3)), + source: "d".into(), + ..Default::default() + } + ), ]) ); @@ -339,13 +361,13 @@ mod project_graph { }) .await; - let state1: ProjectsCacheState = + let state1: WorkspaceProjectsCacheState = json::read_file(sandbox.path().join(STATE_PATH)).unwrap(); func(&sandbox); do_generate(sandbox.path()).await; - let state2: ProjectsCacheState = + let state2: WorkspaceProjectsCacheState = json::read_file(sandbox.path().join(STATE_PATH)).unwrap(); assert_ne!(state1.last_hash, state2.last_hash); @@ -440,14 +462,14 @@ mod project_graph { async fn generate_inheritance_project_graph(fixture: &str) -> ProjectGraph { let sandbox = create_sandbox(fixture); + let mut mock = create_project_graph_mocker(sandbox.path()); - generate_project_graph_with_changes(sandbox.path(), |container| { - container.inherited_tasks = container - .config_loader - .load_tasks_manager_from(sandbox.path(), sandbox.path().join(".moon")) - .unwrap(); 
- }) - .await + mock.inherited_tasks = mock + .config_loader + .load_tasks_manager_from(sandbox.path(), sandbox.path().join(".moon")) + .unwrap(); + + mock.build_project_graph().await } #[tokio::test] @@ -710,9 +732,9 @@ mod project_graph { #[tokio::test] async fn no_depends_on() { let sandbox = create_sandbox("dependency-types"); - let container = ProjectGraphContainer::new(sandbox.path()); - let context = container.create_context(); - let graph = container.build_graph_for(context, &["no-depends-on"]).await; + let mock = create_project_graph_mocker(sandbox.path()); + + let graph = mock.build_project_graph_for(&["no-depends-on"]).await; assert_eq!(map_ids(graph.ids()), ["no-depends-on"]); } @@ -720,11 +742,9 @@ mod project_graph { #[tokio::test] async fn some_depends_on() { let sandbox = create_sandbox("dependency-types"); - let container = ProjectGraphContainer::new(sandbox.path()); - let context = container.create_context(); - let graph = container - .build_graph_for(context, &["some-depends-on"]) - .await; + let mock = create_project_graph_mocker(sandbox.path()); + + let graph = mock.build_project_graph_for(&["some-depends-on"]).await; assert_eq!(map_ids(graph.ids()), ["a", "c", "some-depends-on"]); } @@ -732,11 +752,9 @@ mod project_graph { #[tokio::test] async fn from_task_deps() { let sandbox = create_sandbox("dependency-types"); - let container = ProjectGraphContainer::new(sandbox.path()); - let context = container.create_context(); - let graph = container - .build_graph_for(context, &["from-task-deps"]) - .await; + let mock = create_project_graph_mocker(sandbox.path()); + + let graph = mock.build_project_graph_for(&["from-task-deps"]).await; assert_eq!(map_ids(graph.ids()), ["b", "c", "from-task-deps"]); @@ -749,11 +767,9 @@ mod project_graph { #[tokio::test] async fn from_root_task_deps() { let sandbox = create_sandbox("dependency-types"); - let container = ProjectGraphContainer::new(sandbox.path()); - let context = container.create_context(); - let graph = container - .build_graph_for(context, &["from-root-task-deps"]) - .await; + let mock = create_project_graph_mocker(sandbox.path()); + + let graph = mock.build_project_graph_for(&["from-root-task-deps"]).await; assert_eq!(map_ids(graph.ids()), ["root", "from-root-task-deps"]); @@ -765,11 +781,9 @@ mod project_graph { #[tokio::test] async fn self_task_deps() { let sandbox = create_sandbox("dependency-types"); - let container = ProjectGraphContainer::new(sandbox.path()); - let context = container.create_context(); - let graph = container - .build_graph_for(context, &["self-task-deps"]) - .await; + let mock = create_project_graph_mocker(sandbox.path()); + + let graph = mock.build_project_graph_for(&["self-task-deps"]).await; assert_eq!(map_ids(graph.ids()), ["self-task-deps"]); } @@ -785,8 +799,8 @@ mod project_graph { async fn generate_aliases_project_graph_for_fixture(fixture: &str) -> ProjectGraph { let sandbox = create_sandbox(fixture); - let container = ProjectGraphContainer::new(sandbox.path()); - let context = container.create_context(); + let mock = create_project_graph_mocker(sandbox.path()); + let context = mock.create_context(); // Set aliases for projects context @@ -841,7 +855,11 @@ mod project_graph { ) .await; - container.build_graph(context).await + mock.build_project_graph_with_options(ProjectGraphMockOptions { + context: Some(context), + ..Default::default() + }) + .await } #[tokio::test] @@ -970,7 +988,7 @@ mod project_graph { } #[tokio::test] - #[should_panic(expected = "Project one is already using the alias 
@test")] + #[should_panic(expected = "Project two is already using the alias @test")] async fn errors_duplicate_aliases() { generate_aliases_project_graph_for_fixture("aliases-conflict").await; } @@ -978,8 +996,8 @@ mod project_graph { #[tokio::test] async fn ignores_duplicate_aliases_if_ids_match() { let sandbox = create_sandbox("aliases-conflict"); - let container = ProjectGraphContainer::new(sandbox.path()); - let context = container.create_context(); + let mock = create_project_graph_mocker(sandbox.path()); + let context = mock.create_context(); context .extend_project_graph @@ -999,7 +1017,12 @@ mod project_graph { ) .await; - let graph = container.build_graph(context).await; + let graph = mock + .build_project_graph_with_options(ProjectGraphMockOptions { + context: Some(context), + ..Default::default() + }) + .await; assert!(graph.get("@one").is_ok()); assert!(graph.get("@two").is_ok()); @@ -1016,16 +1039,13 @@ mod project_graph { func(&sandbox); - let mut container = ProjectGraphContainer::new(sandbox.path()); + let mut mock = create_project_graph_mocker(sandbox.path()); - container - .workspace_config + mock.workspace_config .constraints .enforce_project_type_relationships = true; - let context = container.create_context(); - - container.build_graph(context).await + mock.build_project_graph().await } #[tokio::test] @@ -1152,29 +1172,19 @@ mod project_graph { func(&sandbox); - let mut container = ProjectGraphContainer::new(sandbox.path()); + let mut mock = create_project_graph_mocker(sandbox.path()); - container - .workspace_config - .constraints - .tag_relationships - .insert( - Id::raw("warrior"), - vec![Id::raw("barbarian"), Id::raw("paladin"), Id::raw("druid")], - ); - - container - .workspace_config - .constraints - .tag_relationships - .insert( - Id::raw("mage"), - vec![Id::raw("wizard"), Id::raw("sorcerer"), Id::raw("druid")], - ); + mock.workspace_config.constraints.tag_relationships.insert( + Id::raw("warrior"), + vec![Id::raw("barbarian"), Id::raw("paladin"), Id::raw("druid")], + ); - let context = container.create_context(); + mock.workspace_config.constraints.tag_relationships.insert( + Id::raw("mage"), + vec![Id::raw("wizard"), Id::raw("sorcerer"), Id::raw("druid")], + ); - container.build_graph(context).await + mock.build_project_graph().await } #[tokio::test] @@ -1428,9 +1438,9 @@ mod project_graph { #[tokio::test] async fn renders_partial() { let sandbox = create_sandbox("dependencies"); - let container = ProjectGraphContainer::new(sandbox.path()); - let context = container.create_context(); - let graph = container.build_graph_for(context, &["b"]).await; + let mock = create_project_graph_mocker(sandbox.path()); + + let graph = mock.build_project_graph_for(&["b"]).await; assert_snapshot!(graph.to_dot()); } @@ -1475,7 +1485,7 @@ mod project_graph { } #[tokio::test] - #[should_panic(expected = "A project already exists with the name foo")] + #[should_panic(expected = "A project already exists with the identifier foo")] async fn errors_duplicate_ids_from_rename() { generate_project_graph("custom-id-conflict").await; } diff --git a/crates/task-builder/src/tasks_builder.rs b/crates/task-builder/src/tasks_builder.rs index f27f01531d1..77a5a88e612 100644 --- a/crates/task-builder/src/tasks_builder.rs +++ b/crates/task-builder/src/tasks_builder.rs @@ -135,7 +135,7 @@ impl<'proj> TasksBuilder<'proj> { } trace!( - id = self.project_id, + project_id = self.project_id, tasks = ?global_config.tasks.keys().map(|k| k.as_str()).collect::>(), "Filtering global tasks", ); @@ -214,7 
+214,7 @@ impl<'proj> TasksBuilder<'proj> { } trace!( - id = self.project_id, + project_id = self.project_id, tasks = ?local_config.tasks.keys().map(|k| k.as_str()).collect::>(), "Loading local tasks", ); diff --git a/crates/task-hasher/tests/task_hasher_test.rs b/crates/task-hasher/tests/task_hasher_test.rs index c6e4557fcdc..a341fc18f25 100644 --- a/crates/task-hasher/tests/task_hasher_test.rs +++ b/crates/task-hasher/tests/task_hasher_test.rs @@ -1,7 +1,7 @@ use moon_config::{GlobPath, HasherConfig, HasherWalkStrategy, PortablePath}; use moon_project::Project; use moon_task_hasher::{TaskHash, TaskHasher}; -use moon_test_utils2::{ProjectGraph, ProjectGraphContainer}; +use moon_test_utils2::{create_project_graph_mocker, ProjectGraph}; use moon_vcs::BoxedVcs; use starbase_sandbox::create_sandbox; use std::fs; @@ -32,13 +32,13 @@ fn create_hasher_configs() -> (HasherConfig, HasherConfig) { } async fn generate_project_graph(workspace_root: &Path) -> (ProjectGraph, Arc) { - let mut graph_builder = ProjectGraphContainer::with_vcs(workspace_root); - let context = graph_builder.create_context(); + let mut mock = create_project_graph_mocker(workspace_root); + mock.with_vcs(); create_out_files(workspace_root); - let graph = graph_builder.build_graph(context).await; - let vcs = graph_builder.vcs.take().unwrap(); + let graph = mock.build_project_graph().await; + let vcs = mock.vcs.take().unwrap(); (graph, vcs) } diff --git a/crates/test-utils/Cargo.toml b/crates/test-utils/Cargo.toml index 60ebb16a5a7..09b8cce8012 100644 --- a/crates/test-utils/Cargo.toml +++ b/crates/test-utils/Cargo.toml @@ -15,6 +15,7 @@ moon_config = { path = "../config" } moon_console = { path = "../console" } moon_project_graph = { path = "../project-graph" } moon_vcs = { path = "../vcs" } +moon_workspace = { path = "../workspace" } proto_core = { workspace = true } starbase_events = { workspace = true } starbase_sandbox = { workspace = true } diff --git a/crates/test-utils/src/lib.rs b/crates/test-utils/src/lib.rs index 326322ab64b..17f392f4482 100644 --- a/crates/test-utils/src/lib.rs +++ b/crates/test-utils/src/lib.rs @@ -2,9 +2,11 @@ mod app_context; mod platform_manager; mod project_graph; mod sandbox; +mod workspace_mocker; pub use app_context::*; pub use platform_manager::*; pub use project_graph::*; pub use sandbox::*; pub use starbase_sandbox::{predicates, pretty_assertions}; +pub use workspace_mocker::*; diff --git a/crates/test-utils/src/project_graph.rs b/crates/test-utils/src/project_graph.rs index 9706048fc8a..94174b76510 100644 --- a/crates/test-utils/src/project_graph.rs +++ b/crates/test-utils/src/project_graph.rs @@ -1,140 +1,18 @@ -use moon_config::{ - ConfigLoader, InheritedTasksEntry, InheritedTasksManager, NodeConfig, - PartialInheritedTasksConfig, PartialTaskConfig, ToolchainConfig, WorkspaceConfig, - WorkspaceProjects, WorkspaceProjectsConfig, -}; -use moon_project_graph::{ - ExtendProjectEvent, ExtendProjectGraphEvent, ProjectGraphBuilder, ProjectGraphBuilderContext, -}; -use moon_vcs::{BoxedVcs, Git}; -use proto_core::ProtoConfig; -use starbase_events::Emitter; +use crate::workspace_mocker::*; use starbase_sandbox::create_sandbox; -use std::collections::BTreeMap; -use std::path::{Path, PathBuf}; -use std::sync::Arc; +use std::path::Path; pub use moon_project_graph::ProjectGraph; -#[derive(Default)] -pub struct ProjectGraphContainer { - pub config_loader: ConfigLoader, - pub inherited_tasks: InheritedTasksManager, - pub toolchain_config: ToolchainConfig, - pub workspace_config: WorkspaceConfig, - pub 
workspace_root: PathBuf, - pub vcs: Option>, -} - -impl ProjectGraphContainer { - pub fn new(root: &Path) -> Self { - let proto_config = ProtoConfig::default(); - let config_loader = ConfigLoader::default(); - let mut graph = Self { - inherited_tasks: config_loader.load_tasks_manager(root).unwrap(), - toolchain_config: config_loader - .load_toolchain_config(root, &proto_config) - .unwrap(), - workspace_root: root.to_path_buf(), - config_loader, - ..Default::default() - }; - - // Add a global task to all projects - graph.inherited_tasks.configs.insert( - "*".into(), - InheritedTasksEntry { - input: ".moon/tasks.yml".into(), - config: PartialInheritedTasksConfig { - tasks: Some(BTreeMap::from_iter([( - "global".try_into().unwrap(), - PartialTaskConfig::default(), - )])), - ..PartialInheritedTasksConfig::default() - }, - }, - ); - - // Always use the node platform - if graph.toolchain_config.node.is_none() { - graph.toolchain_config.node = Some(NodeConfig::default()); - } - - // Use folders as project names - if root.join(".moon/workspace.yml").exists() { - graph.workspace_config = graph.config_loader.load_workspace_config(root).unwrap(); - } else { - let mut projects = WorkspaceProjectsConfig { - globs: vec![ - "*".into(), - "!.home".into(), - "!.moon".into(), - "!.proto".into(), - ], - ..WorkspaceProjectsConfig::default() - }; - - if root.join("moon.yml").exists() { - projects - .sources - .insert("root".try_into().unwrap(), ".".into()); - } - - graph.workspace_config.projects = WorkspaceProjects::Both(projects); - } - - graph - } - - pub fn with_vcs(root: &Path) -> Self { - let mut container = Self::new(root); - container.vcs = Some(Arc::new(Box::new(Git::load(root, "master", &[]).unwrap()))); - container - } - - pub fn create_context(&self) -> ProjectGraphBuilderContext { - ProjectGraphBuilderContext { - config_loader: &self.config_loader, - extend_project: Emitter::::new(), - extend_project_graph: Emitter::::new(), - inherited_tasks: &self.inherited_tasks, - strict_project_ids: self.workspace_config.experiments.strict_project_ids, - toolchain_config: &self.toolchain_config, - vcs: self.vcs.clone(), - working_dir: &self.workspace_root, - workspace_config: &self.workspace_config, - workspace_root: &self.workspace_root, - } - } +pub fn create_project_graph_mocker(root: &Path) -> WorkspaceMocker { + let mut mock = WorkspaceMocker::new(root); - pub async fn build_graph<'l>(&self, context: ProjectGraphBuilderContext<'l>) -> ProjectGraph { - let mut builder = ProjectGraphBuilder::new(context).await.unwrap(); - builder.load_all().await.unwrap(); + mock.with_default_configs() + .with_default_projects() + .with_default_toolchain() + .with_global_tasks(); - let graph = builder.build().await.unwrap(); - graph.get_all().unwrap(); - graph - } - - pub async fn build_graph_for<'l>( - &self, - context: ProjectGraphBuilderContext<'l>, - ids: &[&str], - ) -> ProjectGraph { - let mut builder = ProjectGraphBuilder::new(context).await.unwrap(); - - for id in ids { - builder.load(id).await.unwrap(); - } - - let graph = builder.build().await.unwrap(); - - for id in ids { - graph.get(id).unwrap(); - } - - graph - } + mock } pub async fn generate_project_graph(fixture: &str) -> ProjectGraph { @@ -142,18 +20,7 @@ pub async fn generate_project_graph(fixture: &str) -> ProjectGraph { } pub async fn generate_project_graph_from_sandbox(root: &Path) -> ProjectGraph { - generate_project_graph_with_changes(root, |_| {}).await -} - -pub async fn generate_project_graph_with_changes(root: &Path, mut op: F) -> ProjectGraph 
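// A minimal standalone sketch (separate from the hunks above) of the test pattern this
// diff migrates to: instead of the removed `generate_project_graph_with_changes` closure
// helper, tests create a `WorkspaceMocker` via `create_project_graph_mocker`, mutate its
// config fields directly (as the constraint tests do), and then build the graph. The
// fixture name and assertion mirror the `renders_partial` test elsewhere in this diff.
use moon_test_utils2::create_project_graph_mocker;
use starbase_sandbox::create_sandbox;

#[tokio::test]
async fn loads_only_requested_projects() {
    let sandbox = create_sandbox("dependencies");
    let mock = create_project_graph_mocker(sandbox.path());

    // Load a subset of projects instead of the whole workspace.
    let graph = mock.build_project_graph_for(&["b"]).await;

    assert!(graph.get("b").is_ok());
}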
-where - F: FnMut(&mut ProjectGraphContainer), -{ - let mut container = ProjectGraphContainer::new(root); - - op(&mut container); - - let context = container.create_context(); - - container.build_graph(context).await + create_project_graph_mocker(root) + .build_project_graph() + .await } diff --git a/crates/test-utils/src/workspace_mocker.rs b/crates/test-utils/src/workspace_mocker.rs new file mode 100644 index 00000000000..a8e488bfdc3 --- /dev/null +++ b/crates/test-utils/src/workspace_mocker.rs @@ -0,0 +1,176 @@ +use moon_cache::CacheEngine; +use moon_config::*; +use moon_project_graph::ProjectGraph; +use moon_vcs::{BoxedVcs, Git}; +use moon_workspace::*; +use proto_core::ProtoConfig; +use starbase_events::Emitter; +use std::collections::BTreeMap; +use std::path::{Path, PathBuf}; +use std::sync::Arc; + +#[derive(Default)] +pub struct WorkspaceMocker { + pub config_loader: ConfigLoader, + pub inherited_tasks: InheritedTasksManager, + pub toolchain_config: ToolchainConfig, + pub workspace_config: WorkspaceConfig, + pub workspace_root: PathBuf, + pub vcs: Option>, +} + +impl WorkspaceMocker { + pub fn new(root: impl AsRef) -> Self { + Self { + workspace_root: root.as_ref().to_path_buf(), + ..Default::default() + } + } + + pub fn with_default_configs(&mut self) -> &mut Self { + let root = &self.workspace_root; + + self.inherited_tasks = self.config_loader.load_tasks_manager(root).unwrap(); + + self.toolchain_config = self + .config_loader + .load_toolchain_config(root, &ProtoConfig::default()) + .unwrap(); + + self.workspace_config = self.config_loader.load_workspace_config(root).unwrap(); + + self + } + + pub fn with_default_projects(&mut self) -> &mut Self { + if !self.workspace_root.join(".moon/workspace.yml").exists() { + // Use folders as project names + let mut projects = WorkspaceProjectsConfig { + globs: vec![ + "*".into(), + "!.home".into(), + "!.moon".into(), + "!.proto".into(), + ], + ..WorkspaceProjectsConfig::default() + }; + + // Include a root project conditionally + if self.workspace_root.join("moon.yml").exists() { + projects + .sources + .insert("root".try_into().unwrap(), ".".into()); + } + + self.workspace_config.projects = WorkspaceProjects::Both(projects); + } + + self + } + + pub fn with_default_toolchain(&mut self) -> &mut Self { + if self.toolchain_config.node.is_none() { + self.toolchain_config.node = Some(NodeConfig::default()); + } + + self + } + + pub fn with_global_tasks(&mut self) -> &mut Self { + self.inherited_tasks.configs.insert( + "*".into(), + InheritedTasksEntry { + input: ".moon/tasks.yml".into(), + config: PartialInheritedTasksConfig { + tasks: Some(BTreeMap::from_iter([( + "global".try_into().unwrap(), + PartialTaskConfig::default(), + )])), + ..PartialInheritedTasksConfig::default() + }, + }, + ); + + self + } + + pub fn with_vcs(&mut self) -> &mut Self { + self.vcs = Some(Arc::new(Box::new( + Git::load(&self.workspace_root, "master", &[]).unwrap(), + ))); + + self + } + + pub fn create_context(&self) -> WorkspaceBuilderContext { + WorkspaceBuilderContext { + config_loader: &self.config_loader, + extend_project: Emitter::::new(), + extend_project_graph: Emitter::::new(), + inherited_tasks: &self.inherited_tasks, + strict_project_ids: self.workspace_config.experiments.strict_project_ids, + toolchain_config: &self.toolchain_config, + vcs: self.vcs.clone(), + working_dir: &self.workspace_root, + workspace_config: &self.workspace_config, + workspace_root: &self.workspace_root, + } + } + + pub async fn build_project_graph(&self) -> ProjectGraph { + 
self.build_project_graph_with_options(ProjectGraphMockOptions::default()) + .await + } + + pub async fn build_project_graph_for(&self, ids: &[&str]) -> ProjectGraph { + self.build_project_graph_with_options(ProjectGraphMockOptions { + ids: Vec::from_iter(ids.iter().map(|id| id.to_string())), + ..Default::default() + }) + .await + } + + pub async fn build_project_graph_with_options<'l>( + &self, + mut options: ProjectGraphMockOptions<'l>, + ) -> ProjectGraph { + let context = options + .context + .take() + .unwrap_or_else(|| self.create_context()); + + let mut builder = match &options.cache { + Some(engine) => WorkspaceBuilder::new_with_cache(context, engine) + .await + .unwrap(), + None => WorkspaceBuilder::new(context).await.unwrap(), + }; + + if options.ids.is_empty() { + builder.load_projects().await.unwrap(); + } else { + for id in &options.ids { + builder.load_project(id).await.unwrap(); + } + } + + let project_graph = builder.build().await.unwrap().project_graph; + + if options.ids.is_empty() { + project_graph.get_all().unwrap(); + } else { + for id in &options.ids { + project_graph.get(id).unwrap(); + } + } + + project_graph + } +} + +#[derive(Default)] +pub struct ProjectGraphMockOptions<'l> { + pub cache: Option, + pub context: Option>, + pub ids: Vec, +} diff --git a/crates/workspace/Cargo.toml b/crates/workspace/Cargo.toml new file mode 100644 index 00000000000..4ea75dadb98 --- /dev/null +++ b/crates/workspace/Cargo.toml @@ -0,0 +1,31 @@ +[package] +name = "moon_workspace" +version = "0.0.1" +edition = "2021" +license = "MIT" +description = "Workspace utilities." +homepage = "https://moonrepo.dev/moon" +repository = "https://github.com/moonrepo/moon" +publish = false + +[dependencies] +moon_cache = { path = "../cache" } +moon_common = { path = "../common" } +moon_config = { path = "../config" } +moon_hash = { path = "../hash" } +moon_project = { path = "../project" } +moon_project_builder = { path = "../project-builder" } +moon_project_constraints = { path = "../project-constraints" } +moon_project_graph = { path = "../project-graph" } +moon_vcs = { path = "../vcs" } +miette = { workspace = true } +petgraph = { workspace = true } +rustc-hash = { workspace = true } +serde = { workspace = true } +starbase_events = { workspace = true } +starbase_utils = { workspace = true, features = ["glob", "json"] } +thiserror = { workspace = true } +tracing = { workspace = true } + +[lints] +workspace = true diff --git a/crates/workspace/src/lib.rs b/crates/workspace/src/lib.rs new file mode 100644 index 00000000000..84af662dc70 --- /dev/null +++ b/crates/workspace/src/lib.rs @@ -0,0 +1,12 @@ +mod project_build_data; +mod projects_locator; +mod repo_type; +mod workspace_builder; +mod workspace_builder_error; +mod workspace_cache; + +pub use project_build_data::*; +pub use repo_type::*; +pub use workspace_builder::*; +pub use workspace_builder_error::*; +pub use workspace_cache::*; diff --git a/crates/project-graph/src/project_events.rs b/crates/workspace/src/project_build_data.rs similarity index 59% rename from crates/project-graph/src/project_events.rs rename to crates/workspace/src/project_build_data.rs index a1979dc43ff..acb37b64337 100644 --- a/crates/project-graph/src/project_events.rs +++ b/crates/workspace/src/project_build_data.rs @@ -1,10 +1,32 @@ use moon_common::path::WorkspaceRelativePathBuf; use moon_common::Id; -use moon_config::{DependencyConfig, ProjectsAliasesList, ProjectsSourcesList, TaskConfig}; +use moon_config::{ + DependencyConfig, ProjectConfig, ProjectsAliasesList, 
ProjectsSourcesList, TaskConfig, +}; +use petgraph::graph::NodeIndex; use rustc_hash::FxHashMap; +use serde::{Deserialize, Serialize}; use starbase_events::Event; use std::path::PathBuf; +#[derive(Clone, Debug, Default, Deserialize, PartialEq, Serialize)] +#[serde(default)] +pub struct ProjectBuildData { + #[serde(skip_serializing_if = "Option::is_none")] + pub alias: Option, + + #[serde(skip)] + pub config: Option, + + #[serde(skip_serializing_if = "Option::is_none")] + pub node_index: Option, + + #[serde(skip_serializing_if = "Option::is_none")] + pub original_id: Option, + + pub source: WorkspaceRelativePathBuf, +} + // Extend the project graph with additional information. #[derive(Debug)] diff --git a/crates/project-graph/src/projects_locator.rs b/crates/workspace/src/projects_locator.rs similarity index 84% rename from crates/project-graph/src/projects_locator.rs rename to crates/workspace/src/projects_locator.rs index 969ea59519a..049bea202ea 100644 --- a/crates/project-graph/src/projects_locator.rs +++ b/crates/workspace/src/projects_locator.rs @@ -1,24 +1,13 @@ -use crate::project_graph_builder::ProjectGraphBuilderContext; +use crate::workspace_builder::WorkspaceBuilderContext; use moon_common::path::{is_root_level_source, to_virtual_string, WorkspaceRelativePathBuf}; use moon_common::{color, consts, Id}; use moon_config::{ProjectSourceEntry, ProjectsSourcesList}; use starbase_utils::{fs, glob}; -use std::path::Path; use tracing::{debug, instrument, warn}; /// Infer a project name from a source path, by using the name of /// the project folder. -fn infer_project_id_and_source( - path: &str, - workspace_root: &Path, -) -> miette::Result { - if path.is_empty() { - return Ok(( - Id::clean(fs::file_name(workspace_root))?, - WorkspaceRelativePathBuf::from("."), - )); - } - +fn infer_project_id_and_source(path: &str) -> miette::Result { let (id, source) = if path.contains('/') { (path.split('/').last().unwrap().to_owned(), path) } else { @@ -32,7 +21,7 @@ fn infer_project_id_and_source( /// for potential projects, and infer their name and source. 
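// Standalone sketch of the inference rule used by the locator above: the project
// identifier is the last segment of the glob-matched source path. The real function
// additionally cleans the segment into an `Id` and returns a `ProjectSourceEntry`, and
// the root-level ("") case is now handled by the caller, which pushes
// `Id::clean(fs::file_name(workspace_root))` with a source of ".".
fn infer_id(path: &str) -> (String, String) {
    let id = match path.rsplit('/').next() {
        Some(segment) if !segment.is_empty() => segment.to_owned(),
        _ => path.to_owned(),
    };

    (id, path.to_owned())
}

fn main() {
    assert_eq!(
        infer_id("apps/client"),
        ("client".to_string(), "apps/client".to_string())
    );
    assert_eq!(
        infer_id("packages"),
        ("packages".to_string(), "packages".to_string())
    );
}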
#[instrument(skip_all)] pub fn locate_projects_with_globs<'glob, I, V>( - context: &ProjectGraphBuilderContext, + context: &WorkspaceBuilderContext, globs: I, sources: &mut ProjectsSourcesList, ) -> miette::Result<()> @@ -55,7 +44,10 @@ where } has_root_level = true; - sources.push(infer_project_id_and_source("", context.workspace_root)?); + sources.push(( + Id::clean(fs::file_name(context.workspace_root))?, + WorkspaceRelativePathBuf::from("."), + )); } else { locate_globs.push(glob); } @@ -80,7 +72,8 @@ where // Don't warn on dotfiles if project_root .file_name() - .map(|name| !name.to_string_lossy().starts_with('.')) + .and_then(|name| name.to_str()) + .map(|name| !name.starts_with('.')) .unwrap_or_default() { warn!( @@ -115,8 +108,7 @@ where } } - let (id, source) = - infer_project_id_and_source(&project_source, context.workspace_root)?; + let (id, source) = infer_project_id_and_source(&project_source)?; if id.starts_with(".") { debug!( diff --git a/crates/workspace/src/repo_type.rs b/crates/workspace/src/repo_type.rs new file mode 100644 index 00000000000..9b51c6bb2b6 --- /dev/null +++ b/crates/workspace/src/repo_type.rs @@ -0,0 +1,17 @@ +use serde::{Deserialize, Serialize}; + +#[derive(Clone, Copy, Default, Deserialize, PartialEq, Serialize)] +#[serde(rename_all = "kebab-case")] +pub enum RepoType { + #[default] + Unknown, + Monorepo, + MonorepoWithRoot, + Polyrepo, +} + +impl RepoType { + pub fn is_monorepo(&self) -> bool { + matches!(self, Self::Monorepo | Self::MonorepoWithRoot) + } +} diff --git a/crates/workspace/src/workspace_builder.rs b/crates/workspace/src/workspace_builder.rs new file mode 100644 index 00000000000..f6a5294a478 --- /dev/null +++ b/crates/workspace/src/workspace_builder.rs @@ -0,0 +1,708 @@ +use crate::project_build_data::*; +use crate::projects_locator::locate_projects_with_globs; +use crate::repo_type::RepoType; +use crate::workspace_builder_error::WorkspaceBuilderError; +use crate::workspace_cache::*; +use moon_cache::CacheEngine; +use moon_common::{ + color, consts, + path::{is_root_level_source, to_virtual_string, WorkspaceRelativePathBuf}, + Id, +}; +use moon_config::{ + ConfigLoader, DependencyScope, InheritedTasksManager, ProjectsSourcesList, ToolchainConfig, + WorkspaceConfig, WorkspaceProjects, +}; +use moon_project::Project; +use moon_project_builder::{ProjectBuilder, ProjectBuilderContext}; +use moon_project_constraints::{enforce_project_type_relationships, enforce_tag_relationships}; +use moon_project_graph::{ProjectGraph, ProjectGraphError, ProjectGraphType, ProjectNode}; +use moon_vcs::BoxedVcs; +use petgraph::prelude::*; +use petgraph::visit::IntoNodeReferences; +use rustc_hash::{FxHashMap, FxHashSet}; +use serde::{Deserialize, Serialize}; +use starbase_events::Emitter; +use starbase_utils::{glob, json}; +use std::sync::Arc; +use std::{collections::BTreeMap, path::Path}; +use tracing::{debug, instrument, trace}; + +pub struct WorkspaceBuilderContext<'app> { + pub config_loader: &'app ConfigLoader, + pub extend_project: Emitter, + pub extend_project_graph: Emitter, + pub inherited_tasks: &'app InheritedTasksManager, + pub strict_project_ids: bool, + pub toolchain_config: &'app ToolchainConfig, + pub vcs: Option>, + pub working_dir: &'app Path, + pub workspace_config: &'app WorkspaceConfig, + pub workspace_root: &'app Path, +} + +pub struct WorkspaceBuildResult { + pub project_graph: ProjectGraph, +} + +#[derive(Deserialize, Serialize)] +pub struct WorkspaceBuilder<'app> { + #[serde(skip)] + context: Option>>, + + /// Mapping of project IDs to 
associated data required for building
+    /// the project itself. Currently we track the following:
+    /// - The alias, derived from manifests (`package.json`).
+    /// - Their `moon.yml` in the project root.
+    /// - Their file source location, relative from the workspace root.
+    project_data: FxHashMap<Id, ProjectBuildData>,
+
+    /// The project DAG.
+    project_graph: ProjectGraphType,
+
+    /// Projects that have explicitly renamed themselves with the `id` setting.
+    /// Maps original ID to renamed ID.
+    renamed_project_ids: FxHashMap<Id, Id>,
+
+    /// The type of repository: monorepo or polyrepo.
+    repo_type: RepoType,
+
+    /// The root project ID (only if a monorepo).
+    root_project_id: Option<Id>,
+}
+
+impl<'app> WorkspaceBuilder<'app> {
+    #[instrument(skip_all)]
+    pub async fn new(
+        context: WorkspaceBuilderContext<'app>,
+    ) -> miette::Result<WorkspaceBuilder<'app>> {
+        debug!("Building workspace graph (project and task graphs)");
+
+        let mut graph = WorkspaceBuilder {
+            context: Some(Arc::new(context)),
+            project_data: FxHashMap::default(),
+            project_graph: ProjectGraphType::default(),
+            renamed_project_ids: FxHashMap::default(),
+            repo_type: RepoType::Unknown,
+            root_project_id: None,
+        };
+
+        graph.preload_build_data().await?;
+        graph.determine_repo_type()?;
+
+        Ok(graph)
+    }
+
+    #[instrument(skip_all)]
+    pub async fn new_with_cache(
+        context: WorkspaceBuilderContext<'app>,
+        cache_engine: &CacheEngine,
+    ) -> miette::Result<WorkspaceBuilder<'app>> {
+        let is_vcs_enabled = context
+            .vcs
+            .as_ref()
+            .expect("VCS is required for project graph caching!")
+            .is_enabled();
+        let mut graph = Self::new(context).await?;
+
+        // No VCS to hash with, so abort caching
+        if !is_vcs_enabled {
+            graph.load_projects().await?;
+
+            return Ok(graph);
+        }
+
+        // Hash the project graph based on the preloaded state
+        let mut graph_contents = WorkspaceGraphHash::default();
+        graph_contents.add_projects(&graph.project_data);
+        graph_contents.add_configs(graph.hash_required_configs().await?);
+        graph_contents.gather_env();
+
+        let hash = cache_engine
+            .hash
+            .save_manifest_without_hasher("Workspace graph", &graph_contents)?;
+
+        debug!(hash, "Generated hash for workspace graph");
+
+        // Check the current state and cache
+        let mut state = cache_engine
+            .state
+            .load_state::<WorkspaceProjectsCacheState>("projectsBuildData.json")?;
+        let cache_path = cache_engine.state.resolve_path("workspaceGraph.json");
+
+        if hash == state.data.last_hash && cache_path.exists() {
+            debug!(
+                cache = ?cache_path,
+                "Loading workspace graph with {} projects from cache",
+                graph.project_data.len(),
+            );
+
+            let mut cache: WorkspaceBuilder = json::read_file(cache_path)?;
+            cache.context = graph.context;
+
+            return Ok(cache);
+        }
+
+        // Build the graph, update the state, and save the cache
+        debug!(
+            "Preparing workspace graph with {} projects",
+            graph.project_data.len(),
+        );
+
+        graph.load_projects().await?;
+
+        state.data.last_hash = hash;
+        state.data.projects = graph.project_data.clone();
+        state.save()?;
+
+        json::write_file(cache_path, &graph, false)?;
+
+        Ok(graph)
+    }
+
+    /// Build the project graph and return a new structure.
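// Dependency-free distillation of the branching in `new_with_cache` above; the enum and
// function names are illustrative only. The serialized graph is reused only when a VCS
// is enabled, the newly computed hash matches the saved state, and the cache file exists;
// otherwise the graph is rebuilt and the new hash, state, and graph JSON are persisted.
#[derive(Debug, PartialEq)]
enum CachePlan {
    BuildUncached,
    ReuseSerializedGraph,
    RebuildAndPersist,
}

fn plan_cache(vcs_enabled: bool, new_hash: &str, last_hash: &str, graph_json_exists: bool) -> CachePlan {
    if !vcs_enabled {
        CachePlan::BuildUncached
    } else if new_hash == last_hash && graph_json_exists {
        CachePlan::ReuseSerializedGraph
    } else {
        CachePlan::RebuildAndPersist
    }
}

fn main() {
    assert_eq!(plan_cache(true, "abc", "abc", true), CachePlan::ReuseSerializedGraph);
    assert_eq!(plan_cache(true, "abc", "def", true), CachePlan::RebuildAndPersist);
    assert_eq!(plan_cache(false, "abc", "abc", true), CachePlan::BuildUncached);
}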
+ #[instrument(name = "build_workspace_graph", skip_all)] + pub async fn build(mut self) -> miette::Result { + self.enforce_constraints()?; + + let context = self.context.take().unwrap(); + + let project_nodes = self + .project_data + .into_iter() + .map(|(id, data)| { + ( + id, + ProjectNode { + alias: data.alias, + index: data.node_index.unwrap_or_default(), + original_id: data.original_id, + source: data.source, + }, + ) + }) + .collect::>(); + + let mut project_graph = + ProjectGraph::new(self.project_graph, project_nodes, context.workspace_root); + + project_graph.working_dir = context.working_dir.to_owned(); + + Ok(WorkspaceBuildResult { project_graph }) + } + + /// Load a single project by ID or alias into the graph. + pub async fn load_project(&mut self, id_or_alias: &str) -> miette::Result<()> { + self.internal_load_project(id_or_alias, &mut FxHashSet::default()) + .await?; + + Ok(()) + } + + /// Load all projects into the graph, as configured in the workspace. + pub async fn load_projects(&mut self) -> miette::Result<()> { + let ids = self.project_data.keys().cloned().collect::>(); + + for id in ids { + self.internal_load_project(&id, &mut FxHashSet::default()) + .await?; + } + + Ok(()) + } + + #[instrument(name = "load_project", skip(self, cycle))] + async fn internal_load_project( + &mut self, + id_or_alias: &str, + cycle: &mut FxHashSet, + ) -> miette::Result<(Id, NodeIndex)> { + let id = self.resolve_project_id(id_or_alias); + + { + let Some(build_data) = self.project_data.get(&id) else { + return Err(ProjectGraphError::UnconfiguredID(id).into()); + }; + + // Already loaded, exit early with existing index + if let Some(index) = &build_data.node_index { + trace!( + project_id = id.as_str(), + "Project already exists in the project graph, skipping load", + ); + + return Ok((id, *index)); + } + } + + // Not loaded, build the project + trace!( + project_id = id.as_str(), + "Project does not exist in the project graph, attempting to load", + ); + + let mut project = self.build_project(&id).await?; + + cycle.insert(id.clone()); + + // Then build dependency projects + let mut edges = vec![]; + + for dep_config in &mut project.dependencies { + if cycle.contains(&dep_config.id) { + debug!( + project_id = id.as_str(), + dependency_id = dep_config.id.as_str(), + "Encountered a dependency cycle (from project); will disconnect nodes to avoid recursion", + ); + + continue; + } + + let dep = Box::pin(self.internal_load_project(&dep_config.id, cycle)).await?; + let dep_id = dep.0; + + // Don't link the root project to any project, but still load it + if !dep_config.is_root_scope() { + edges.push((dep.1, dep_config.scope)); + } + + // TODO is this needed? + if dep_id != dep_config.id { + dep_config.id = dep_id; + } + } + + // And finally add to the graph + let index = self.project_graph.add_node(project); + + self.project_data.get_mut(&id).unwrap().node_index = Some(index); + + for edge in edges { + self.project_graph.add_edge(index, edge.0, edge.1); + } + + cycle.clear(); + + Ok((id, index)) + } + + /// Create and build the project with the provided ID and source. 
+ #[instrument(skip(self))] + async fn build_project(&mut self, id: &Id) -> miette::Result { + debug!( + project_id = id.as_str(), + "Building project {}", + color::id(id) + ); + + let context = self.context(); + let build_data = self.project_data.get(id).unwrap(); + + if !build_data.source.to_path(context.workspace_root).exists() { + return Err(WorkspaceBuilderError::MissingProjectAtSource( + build_data.source.to_string(), + ) + .into()); + } + + let mut builder = ProjectBuilder::new( + id, + &build_data.source, + ProjectBuilderContext { + config_loader: context.config_loader, + monorepo: self.repo_type.is_monorepo(), + root_project_id: self.root_project_id.as_ref(), + toolchain_config: context.toolchain_config, + workspace_root: context.workspace_root, + }, + )?; + + if let Some(config) = &build_data.config { + builder.inherit_local_config(config).await?; + } else { + builder.load_local_config().await?; + } + + builder.inherit_global_config(context.inherited_tasks)?; + + let extended_data = context + .extend_project + .emit(ExtendProjectEvent { + project_id: id.to_owned(), + project_source: build_data.source.to_owned(), + workspace_root: context.workspace_root.to_owned(), + }) + .await?; + + // Inherit implicit dependencies + for dep_config in extended_data.dependencies { + builder.extend_with_dependency(dep_config); + } + + // Inherit inferred tasks + for (task_id, task_config) in extended_data.tasks { + builder.extend_with_task(task_id, task_config); + } + + // Inherit alias before building in case the project + // references itself in tasks or dependencies + if let Some(alias) = &build_data.alias { + builder.set_alias(alias); + } + + let project = builder.build().await?; + + Ok(project) + } + + /// Determine the repository type/structure based on the number of project + /// sources, and where the point to. + fn determine_repo_type(&mut self) -> miette::Result<()> { + let single_project = self.project_data.len() == 1; + let mut has_root_project = false; + let mut root_project_id = None; + + for (id, build_data) in &self.project_data { + if is_root_level_source(&build_data.source) { + has_root_project = true; + root_project_id = Some(id.to_owned()); + break; + } + } + + self.repo_type = match (single_project, has_root_project) { + (true, true) => RepoType::Polyrepo, + (false, true) => RepoType::MonorepoWithRoot, + (false, false) | (true, false) => RepoType::Monorepo, + }; + + if self.repo_type == RepoType::MonorepoWithRoot { + self.root_project_id = root_project_id; + } + + Ok(()) + } + + /// Enforce project constraints and boundaries after all nodes have been inserted. + #[instrument(skip_all)] + fn enforce_constraints(&self) -> miette::Result<()> { + debug!("Enforcing project constraints"); + + let context = self.context(); + let type_relationships = context + .workspace_config + .constraints + .enforce_project_type_relationships; + let tag_relationships = &context.workspace_config.constraints.tag_relationships; + + if !type_relationships && tag_relationships.is_empty() { + return Ok(()); + } + + let default_scope = DependencyScope::Build; + + for (project_index, project) in self.project_graph.node_references() { + let deps: Vec<_> = self + .project_graph + .neighbors_directed(project_index, Direction::Outgoing) + .flat_map(|dep_index| { + self.project_graph.node_weight(dep_index).map(|dep| { + ( + dep, + // Is this safe? 
+ self.project_graph + .find_edge(project_index, dep_index) + .and_then(|ei| self.project_graph.edge_weight(ei)) + .unwrap_or(&default_scope), + ) + }) + }) + .collect(); + + for (dep, dep_scope) in deps { + if type_relationships { + enforce_project_type_relationships(project, dep, dep_scope)?; + } + + for (source_tag, required_tags) in tag_relationships { + enforce_tag_relationships(project, source_tag, dep, required_tags)?; + } + } + } + + Ok(()) + } + + /// When caching the graph, we must hash all project and workspace + /// config files that are required to invalidate the cache. + async fn hash_required_configs( + &self, + ) -> miette::Result> { + let context = self.context(); + let config_names = context.config_loader.get_project_file_names(); + let mut configs = vec![]; + + // Hash all project-level config files + for build_data in self.project_data.values() { + for name in &config_names { + configs.push(build_data.source.join(name).to_string()); + } + } + + // Hash all workspace-level config files + for file in glob::walk( + context.workspace_root.join(consts::CONFIG_DIRNAME), + ["*.pkl", "tasks/**/*.pkl", "*.yml", "tasks/**/*.yml"], + )? { + configs.push(to_virtual_string( + file.strip_prefix(context.workspace_root).unwrap(), + )?); + } + + context + .vcs + .as_ref() + .expect("VCS required!") + .get_file_hashes(&configs, true, 500) + .await + } + + /// Preload the graph with project sources from the workspace configuration. + /// If globs are provided, walk the file system and gather sources. + /// Then extend the graph with aliases, derived from all event subscribers. + async fn preload_build_data(&mut self) -> miette::Result<()> { + let context = self.context(); + let mut globs = vec![]; + let mut sources = vec![]; + + // Gather all project sources + let mut add_sources = |map: &FxHashMap| { + for (id, source) in map { + sources.push((id.to_owned(), WorkspaceRelativePathBuf::from(source))); + } + }; + + match &context.workspace_config.projects { + WorkspaceProjects::Sources(map) => { + add_sources(map); + } + WorkspaceProjects::Globs(list) => { + globs.extend(list); + } + WorkspaceProjects::Both(cfg) => { + globs.extend(&cfg.globs); + add_sources(&cfg.sources); + } + }; + + if !sources.is_empty() { + debug!( + sources = ?sources, + "Using configured project sources", + ); + } + + if !globs.is_empty() { + debug!( + globs = ?globs, + "Locating projects with globs", + ); + + locate_projects_with_globs(&context, &globs, &mut sources)?; + } + + // Load projects and configs first + self.load_project_build_data(sources)?; + + // Then load aliases and extend projects + self.load_project_aliases().await?; + + Ok(()) + } + + async fn load_project_aliases(&mut self) -> miette::Result<()> { + let context = self.context(); + + debug!("Extending project graph with aliases"); + + let aliases = context + .extend_project_graph + .emit(ExtendProjectGraphEvent { + sources: self + .project_data + .iter() + .map(|(id, build_data)| (id.to_owned(), build_data.source.to_owned())) + .collect(), + workspace_root: context.workspace_root.to_owned(), + }) + .await? 
+ .aliases; + + let mut dupe_aliases = FxHashMap::::default(); + + for (id, alias) in aliases { + let id = self.renamed_project_ids.get(&id).unwrap_or(&id); + + // Skip aliases that match its own ID + if id == &alias { + continue; + } + + // Skip aliases that would override an ID + if self.project_data.contains_key(alias.as_str()) { + debug!( + "Skipping alias {} for project {} as it conflicts with the existing project {}", + color::label(&alias), + color::id(id), + color::id(&alias), + ); + + continue; + } + + if let Some(existing_id) = dupe_aliases.get(&alias) { + // Skip if the existing ID is already for this ID. + // This scenario is possible when multiple platforms + // extract the same aliases (Bun vs Node, etc). + if existing_id == id { + continue; + } + + return Err(WorkspaceBuilderError::DuplicateProjectAlias { + alias: alias.clone(), + old_id: existing_id.to_owned(), + new_id: id.clone(), + } + .into()); + } + + dupe_aliases.insert(alias.clone(), id.to_owned()); + + self.project_data + .get_mut(id) + .expect("Project build data not found!") + .alias = Some(alias); + } + + Ok(()) + } + + fn load_project_build_data(&mut self, sources: ProjectsSourcesList) -> miette::Result<()> { + let context = self.context(); + let config_label = context.config_loader.get_debug_label("moon", false); + let mut project_data: FxHashMap = FxHashMap::default(); + let mut renamed_ids = FxHashMap::default(); + let mut dupe_original_ids = FxHashSet::default(); + + debug!("Loading projects"); + + for (mut id, source) in sources { + trace!( + project_id = id.as_str(), + "Attempting to load {} (optional)", + color::file(source.join(&config_label)) + ); + + let config = context + .config_loader + .load_project_config_from_source(context.workspace_root, &source)?; + + let mut build_data = ProjectBuildData { + source, + ..Default::default() + }; + + // Track ID renames + if let Some(new_id) = &config.id { + if new_id != &id { + build_data.original_id = Some(id.clone()); + + if renamed_ids.contains_key(&id) { + dupe_original_ids.insert(id.clone()); + } else { + renamed_ids.insert(id.clone(), new_id.to_owned()); + } + + id = new_id.to_owned(); + } + } + + // Check for duplicate IDs + if let Some(existing_data) = project_data.get(&id) { + if existing_data.source != build_data.source { + return Err(WorkspaceBuilderError::DuplicateProjectId { + id: id.clone(), + old_source: existing_data.source.to_string(), + new_source: build_data.source.to_string(), + } + .into()); + } + } + + // Otherwise persist the build data + build_data.config = Some(config); + project_data.insert(id, build_data); + } + + if !dupe_original_ids.is_empty() { + trace!( + original_ids = ?dupe_original_ids.iter().collect::>(), + "Found multiple renamed projects with the same original ID; will ignore these IDs within lookups" + ); + + for dupe_id in dupe_original_ids { + renamed_ids.remove(&dupe_id); + } + } + + debug!("Loaded {} projects", project_data.len()); + + self.project_data.extend(project_data); + self.renamed_project_ids.extend(renamed_ids); + + Ok(()) + } + + fn resolve_project_id(&self, id_or_alias: &str) -> Id { + let id = if self.project_data.contains_key(id_or_alias) { + Id::raw(id_or_alias) + } else { + match self.project_data.iter().find_map(|(id, build_data)| { + if build_data + .alias + .as_ref() + .is_some_and(|alias| alias == id_or_alias) + { + Some(id) + } else { + None + } + }) { + Some(project_id) => project_id.to_owned(), + None => Id::raw(id_or_alias), + } + }; + + if self + .context + .as_ref() + .is_some_and(|ctx| 
ctx.strict_project_ids) + { + return id; + } + + match self.renamed_project_ids.get(&id) { + Some(new_id) => new_id.to_owned(), + None => id, + } + } + + fn context(&self) -> Arc> { + Arc::clone( + self.context + .as_ref() + .expect("Missing workspace builder context!"), + ) + } +} diff --git a/crates/workspace/src/workspace_builder_error.rs b/crates/workspace/src/workspace_builder_error.rs new file mode 100644 index 00000000000..bab5aa163a6 --- /dev/null +++ b/crates/workspace/src/workspace_builder_error.rs @@ -0,0 +1,38 @@ +use miette::Diagnostic; +use moon_common::{Id, Style, Stylize}; +use thiserror::Error; + +#[derive(Error, Debug, Diagnostic)] +pub enum WorkspaceBuilderError { + #[diagnostic(code(project_graph::duplicate_alias))] + #[error( + "Project {} is already using the alias {}, unable to use the alias for project {}.\nTry changing the alias to something unique to move forward.", + .old_id.style(Style::Id), + .alias.style(Style::Label), + .new_id.style(Style::Id), + )] + DuplicateProjectAlias { + alias: String, + old_id: Id, + new_id: Id, + }, + + #[diagnostic(code(project_graph::duplicate_id))] + #[error( + "A project already exists with the identifier {} (existing source {}, new source {}).\nTry renaming the project folder to make it unique, or configure the {} setting in {}.", + .id.style(Style::Id), + .old_source.style(Style::File), + .new_source.style(Style::File), + "id".style(Style::Property), + "moon.yml".style(Style::File) + )] + DuplicateProjectId { + id: Id, + old_source: String, + new_source: String, + }, + + #[diagnostic(code(project_graph::missing_source))] + #[error("No project exists at source path {}.", .0.style(Style::File))] + MissingProjectAtSource(String), +} diff --git a/crates/project-graph/src/project_graph_hash.rs b/crates/workspace/src/workspace_cache.rs similarity index 66% rename from crates/project-graph/src/project_graph_hash.rs rename to crates/workspace/src/workspace_cache.rs index 9a67056eef2..2b34e81a4c1 100644 --- a/crates/project-graph/src/project_graph_hash.rs +++ b/crates/workspace/src/workspace_cache.rs @@ -1,3 +1,5 @@ +use crate::project_build_data::ProjectBuildData; +use moon_cache::cache_item; use moon_common::path::WorkspaceRelativePathBuf; use moon_common::{is_docker, Id}; use moon_hash::hash_content; @@ -5,11 +7,17 @@ use rustc_hash::FxHashMap; use std::collections::BTreeMap; use std::env; +cache_item!( + pub struct WorkspaceProjectsCacheState { + pub last_hash: String, + pub projects: FxHashMap, + } +); + hash_content!( - pub struct ProjectGraphHash<'graph> { - // Data derived from the project graph builder. - aliases: BTreeMap<&'graph Id, &'graph String>, - sources: BTreeMap<&'graph Id, &'graph WorkspaceRelativePathBuf>, + pub struct WorkspaceGraphHash<'graph> { + // Data derived from the workspace graph builder. + projects: BTreeMap<&'graph Id, &'graph ProjectBuildData>, // Project and workspace configs required for cache invalidation. configs: BTreeMap, @@ -17,7 +25,7 @@ hash_content!( // Environment variables required for cache invalidation. env: BTreeMap, - // The project graph stores absolute file paths, which breaks moon when + // The graph stores absolute file paths, which breaks moon when // running tasks inside and outside of a container at the same time. // This flag helps to continuously bust the cache. 
in_docker: bool, @@ -30,30 +38,27 @@ hash_content!( } ); -impl<'cfg> ProjectGraphHash<'cfg> { - pub fn new() -> Self { - ProjectGraphHash { - aliases: BTreeMap::default(), - sources: BTreeMap::default(), +impl<'graph> Default for WorkspaceGraphHash<'graph> { + fn default() -> Self { + WorkspaceGraphHash { + projects: BTreeMap::default(), configs: BTreeMap::default(), env: BTreeMap::default(), in_docker: is_docker(), version: env::var("MOON_VERSION").unwrap_or_default(), } } +} - pub fn add_aliases(&mut self, aliases: &'cfg FxHashMap) { - self.aliases.extend(aliases.iter()); +impl<'graph> WorkspaceGraphHash<'graph> { + pub fn add_projects(&mut self, projects: &'graph FxHashMap) { + self.projects.extend(projects.iter()); } pub fn add_configs(&mut self, configs: BTreeMap) { self.configs.extend(configs); } - pub fn add_sources(&mut self, sources: &'cfg FxHashMap) { - self.sources.extend(sources.iter()); - } - pub fn gather_env(&mut self) { for key in [ // Task options