diff --git a/libs/test-cli/src/main.rs b/libs/test-cli/src/main.rs
index a0b3223bf3f9..74e118acad44 100644
--- a/libs/test-cli/src/main.rs
+++ b/libs/test-cli/src/main.rs
@@ -196,10 +196,10 @@ async fn main() -> anyhow::Result<()> {
         );
     }

-    let schema = if let Some(file_path) = file_path {
-        read_datamodel_from_file(&file_path)?
-    } else if let Some(url) = url {
-        minimal_schema_from_url(&url)?
+    let schema = if let Some(file_path) = &file_path {
+        read_datamodel_from_file(file_path)?
+    } else if let Some(url) = &url {
+        minimal_schema_from_url(url)?
     } else {
         unreachable!()
     };
@@ -207,10 +207,15 @@ async fn main() -> anyhow::Result<()> {
             let api = schema_core::schema_api(Some(schema.clone()), None)?;

             let params = IntrospectParams {
-                schema,
+                schema: SchemasContainer {
+                    files: vec![SchemaContainer {
+                        path: file_path.unwrap_or_else(|| "schema.prisma".to_string()),
+                        content: schema,
+                    }],
+                },
                 force: false,
                 composite_type_depth: composite_type_depth.unwrap_or(0),
-                schemas: None,
+                namespaces: None,
             };

             let introspected = api.introspect(params).await.map_err(|err| anyhow::anyhow!("{err:?}"))?;
@@ -240,7 +245,12 @@ async fn main() -> anyhow::Result<()> {
             let api = schema_core::schema_api(Some(schema.clone()), None)?;

             api.create_database(CreateDatabaseParams {
-                datasource: DatasourceParam::SchemaString(SchemaContainer { schema }),
+                datasource: DatasourceParam::Schema(SchemasContainer {
+                    files: vec![SchemaContainer {
+                        path: cmd.schema_path.to_owned(),
+                        content: schema,
+                    }],
+                }),
             })
             .await?;
         }
@@ -252,7 +262,12 @@ async fn main() -> anyhow::Result<()> {

             let input = CreateMigrationInput {
                 migrations_directory_path: cmd.migrations_path,
-                prisma_schema,
+                schema: SchemasContainer {
+                    files: vec![SchemaContainer {
+                        path: cmd.schema_path,
+                        content: prisma_schema,
+                    }],
+                },
                 migration_name: cmd.name,
                 draft: true,
             };
@@ -315,10 +330,15 @@ async fn generate_dmmf(cmd: &DmmfCommand) -> anyhow::Result<()> {
     let api = schema_core::schema_api(Some(skeleton.clone()), None)?;

     let params = IntrospectParams {
-        schema: skeleton,
+        schema: SchemasContainer {
+            files: vec![SchemaContainer {
+                path: "schema.prisma".to_string(),
+                content: skeleton,
+            }],
+        },
         force: false,
         composite_type_depth: -1,
-        schemas: None,
+        namespaces: None,
     };

     let introspected = api.introspect(params).await.map_err(|err| anyhow::anyhow!("{err:?}"))?;
@@ -355,7 +375,12 @@ async fn schema_push(cmd: &SchemaPush) -> anyhow::Result<()> {

     let response = api
         .schema_push(SchemaPushInput {
-            schema,
+            schema: SchemasContainer {
+                files: vec![SchemaContainer {
+                    path: cmd.schema_path.clone(),
+                    content: schema,
+                }],
+            },
             force: cmd.force,
         })
         .await?;
@@ -414,8 +439,13 @@ async fn migrate_diff(cmd: &MigrateDiff) -> anyhow::Result<()> {
     let api = schema_core::schema_api(None, Some(Arc::new(DiffHost)))?;

     let to = if let Some(to_schema_datamodel) = &cmd.to_schema_datamodel {
-        DiffTarget::SchemaDatamodel(SchemaContainer {
-            schema: to_schema_datamodel.clone(),
+        let to_schema_datamodel_str = std::fs::read_to_string(to_schema_datamodel)?;
+
+        DiffTarget::SchemaDatamodel(SchemasContainer {
+            files: vec![SchemaContainer {
+                path: to_schema_datamodel.to_owned(),
+                content: to_schema_datamodel_str,
+            }],
         })
     } else {
         todo!("can't handle {:?} yet", cmd)
diff --git a/prisma-fmt/src/code_actions.rs b/prisma-fmt/src/code_actions.rs
index 891940b6b0e4..d9ba2154cf9e 100644
--- a/prisma-fmt/src/code_actions.rs
+++ b/prisma-fmt/src/code_actions.rs
@@ -72,7 +72,7 @@ pub(crate) fn available_actions(
 ) -> Vec<CodeActionOrCommand> {
     let mut actions = Vec::new();

-    let validated_schema = psl::validate_multi_file(schema_files);
+    let validated_schema = psl::validate_multi_file(&schema_files);

     let config = &validated_schema.configuration;

diff --git a/prisma-fmt/src/lint.rs b/prisma-fmt/src/lint.rs
index 1dfc94d4c256..366c1f22ef62 100644
--- a/prisma-fmt/src/lint.rs
+++ b/prisma-fmt/src/lint.rs
@@ -14,7 +14,7 @@ pub struct MiniError {
 pub(crate) fn run(schema: SchemaFileInput) -> String {
     let schema = match schema {
         SchemaFileInput::Single(file) => psl::validate(file.into()),
-        SchemaFileInput::Multiple(files) => psl::validate_multi_file(files),
+        SchemaFileInput::Multiple(files) => psl::validate_multi_file(&files),
     };

     let diagnostics = &schema.diagnostics;
diff --git a/prisma-fmt/src/validate.rs b/prisma-fmt/src/validate.rs
index 37f0b034ad30..67b12c45ce25 100644
--- a/prisma-fmt/src/validate.rs
+++ b/prisma-fmt/src/validate.rs
@@ -29,7 +29,8 @@ pub(crate) fn validate(params: &str) -> Result<(), String> {
 }

 pub fn run(input_schema: SchemaFileInput, no_color: bool) -> Result<(), String> {
-    let validate_schema = psl::validate_multi_file(input_schema.into());
+    let sources: Vec<(String, psl::SourceFile)> = input_schema.into();
+    let validate_schema = psl::validate_multi_file(&sources);
     let diagnostics = &validate_schema.diagnostics;

     if !diagnostics.has_errors() {
diff --git a/prisma-fmt/tests/code_actions/test_api.rs b/prisma-fmt/tests/code_actions/test_api.rs
index b92f98ec856b..fdcb707deeb9 100644
--- a/prisma-fmt/tests/code_actions/test_api.rs
+++ b/prisma-fmt/tests/code_actions/test_api.rs
@@ -15,12 +15,11 @@ const TARGET_SCHEMA_FILE: &str = "_target.prisma";
 static UPDATE_EXPECT: Lazy<bool> = Lazy::new(|| std::env::var("UPDATE_EXPECT").is_ok());

 fn parse_schema_diagnostics(files: &[(String, String)], initiating_file_name: &str) -> Option<Vec<Diagnostic>> {
-    let schema = psl::validate_multi_file(
-        files
-            .iter()
-            .map(|(name, content)| (name.to_owned(), SourceFile::from(content)))
-            .collect(),
-    );
+    let sources: Vec<_> = files
+        .iter()
+        .map(|(name, content)| (name.to_owned(), SourceFile::from(content)))
+        .collect();
+    let schema = psl::validate_multi_file(&sources);

     let file_id = schema.db.file_id(initiating_file_name).unwrap();
     let source = schema.db.source(file_id);
diff --git a/psl/parser-database/src/lib.rs b/psl/parser-database/src/lib.rs
index acbd1fedb56b..a9da56835e4c 100644
--- a/psl/parser-database/src/lib.rs
+++ b/psl/parser-database/src/lib.rs
@@ -228,6 +228,11 @@ impl ParserDatabase {
         self.asts.iter().map(|ast| ast.2.as_str())
     }

+    /// Iterate all source file contents and their file paths.
+    pub fn iter_file_sources(&self) -> impl Iterator<Item = (&str, &SourceFile)> {
+        self.asts.iter().map(|ast| (ast.1.as_str(), ast.2))
+    }
+
     /// The name of the file.
     pub fn file_name(&self, file_id: FileId) -> &str {
         self.asts[file_id].0.as_str()
diff --git a/psl/psl-core/src/lib.rs b/psl/psl-core/src/lib.rs
index 85fe3933924a..e0c3b1841f32 100644
--- a/psl/psl-core/src/lib.rs
+++ b/psl/psl-core/src/lib.rs
@@ -78,13 +78,13 @@ pub fn validate(file: SourceFile, connectors: ConnectorRegistry<'_>) -> ValidatedSchema {

 /// The most general API for dealing with Prisma schemas. It accumulates what analysis and
 /// validation information it can, and returns it along with any error and warning diagnostics.
-pub fn validate_multi_file(files: Vec<(String, SourceFile)>, connectors: ConnectorRegistry<'_>) -> ValidatedSchema {
+pub fn validate_multi_file(files: &[(String, SourceFile)], connectors: ConnectorRegistry<'_>) -> ValidatedSchema {
     assert!(
         !files.is_empty(),
         "psl::validate_multi_file() must be called with at least one file"
     );

     let mut diagnostics = Diagnostics::new();
-    let db = ParserDatabase::new(&files, &mut diagnostics);
+    let db = ParserDatabase::new(files, &mut diagnostics);

     // TODO: the bulk of configuration block analysis should be part of ParserDatabase::new().
     let mut configuration = Configuration::default();
diff --git a/psl/psl/src/lib.rs b/psl/psl/src/lib.rs
index 9cbbc1bcc05a..318a25ce3bba 100644
--- a/psl/psl/src/lib.rs
+++ b/psl/psl/src/lib.rs
@@ -59,6 +59,18 @@ pub fn parse_schema(file: impl Into<SourceFile>) -> Result<ValidatedSchema, String> {

+/// Parse and analyze a Prisma schema, consisting of multiple files.
+pub fn parse_schema_multi(files: &[(String, SourceFile)]) -> Result<ValidatedSchema, String> {
+    let mut schema = validate_multi_file(files);
+
+    schema
+        .diagnostics
+        .to_result()
+        .map_err(|err| schema.db.render_diagnostics(&err))?;
+
+    Ok(schema)
+}
+
 /// The most general API for dealing with Prisma schemas. It accumulates what analysis and
 /// validation information it can, and returns it along with any error and warning diagnostics.
 pub fn validate(file: SourceFile) -> ValidatedSchema {
@@ -71,6 +83,6 @@ pub fn parse_without_validation(file: SourceFile, connector_registry: ConnectorRegistry<'_>) -> ValidatedSchema {
 }

 /// The most general API for dealing with Prisma schemas. It accumulates what analysis and
 /// validation information it can, and returns it along with any error and warning diagnostics.
-pub fn validate_multi_file(files: Vec<(String, SourceFile)>) -> ValidatedSchema {
+pub fn validate_multi_file(files: &[(String, SourceFile)]) -> ValidatedSchema {
     psl_core::validate_multi_file(files, builtin_connectors::BUILTIN_CONNECTORS)
 }
diff --git a/psl/psl/tests/multi_file/basic.rs b/psl/psl/tests/multi_file/basic.rs
index d5eaf5b8b489..dcb40bbccce0 100644
--- a/psl/psl/tests/multi_file/basic.rs
+++ b/psl/psl/tests/multi_file/basic.rs
@@ -2,10 +2,10 @@ use crate::common::expect;

 fn expect_errors(schemas: &[[&'static str; 2]], expectation: expect_test::Expect) {
     let out = psl::validate_multi_file(
-        schemas
+        &schemas
             .iter()
             .map(|[file_name, contents]| ((*file_name).into(), (*contents).into()))
-            .collect(),
+            .collect::<Vec<_>>(),
     );

     let actual = out.render_own_diagnostics();
diff --git a/psl/schema-ast/src/source_file.rs b/psl/schema-ast/src/source_file.rs
index b53e2eaa5c16..6c8b1b19b17b 100644
--- a/psl/schema-ast/src/source_file.rs
+++ b/psl/schema-ast/src/source_file.rs
@@ -3,7 +3,7 @@ use std::sync::Arc;

 use serde::{Deserialize, Deserializer};

 /// A Prisma schema document.
-#[derive(Debug, Clone)]
-#[derive(Debug, Clone)] +#[derive(Debug, Clone, Eq, PartialEq, Hash)] pub struct SourceFile { contents: Contents, } @@ -82,3 +82,29 @@ enum Contents { Static(&'static str), Allocated(Arc), } + +impl std::hash::Hash for Contents { + fn hash(&self, state: &mut H) { + match self { + Contents::Static(s) => (*s).hash(state), + Contents::Allocated(s) => { + let s: &str = s; + + s.hash(state); + } + } + } +} + +impl Eq for Contents {} + +impl PartialEq for Contents { + fn eq(&self, other: &Self) -> bool { + match (self, other) { + (Contents::Static(l), Contents::Static(r)) => l == r, + (Contents::Allocated(l), Contents::Allocated(r)) => l == r, + (Contents::Static(l), Contents::Allocated(r)) => *l == &**r, + (Contents::Allocated(l), Contents::Static(r)) => &**l == *r, + } + } +} diff --git a/query-engine/connector-test-kit-rs/qe-setup/src/lib.rs b/query-engine/connector-test-kit-rs/qe-setup/src/lib.rs index 74b2d015f7df..74ab8de339fb 100644 --- a/query-engine/connector-test-kit-rs/qe-setup/src/lib.rs +++ b/query-engine/connector-test-kit-rs/qe-setup/src/lib.rs @@ -151,7 +151,11 @@ pub(crate) async fn diff(schema: &str, url: String, connector: &mut dyn SchemaCo .database_schema_from_diff_target(DiffTarget::Empty, None, None) .await?; let to = connector - .database_schema_from_diff_target(DiffTarget::Datamodel(schema.into()), None, None) + .database_schema_from_diff_target( + DiffTarget::Datamodel(vec![("schema.prisma".to_string(), schema.into())]), + None, + None, + ) .await?; let migration = connector.diff(from, to); connector.render_script(&migration, &Default::default()) diff --git a/schema-engine/cli/src/main.rs b/schema-engine/cli/src/main.rs index 5090502a85ca..f8c0d2445591 100644 --- a/schema-engine/cli/src/main.rs +++ b/schema-engine/cli/src/main.rs @@ -14,9 +14,9 @@ use structopt::StructOpt; #[derive(Debug, StructOpt)] #[structopt(version = env!("GIT_HASH"))] struct SchemaEngineCli { - /// Path to the datamodel + /// List of paths to the Prisma schema files. 
#[structopt(short = "d", long, name = "FILE")] - datamodel: Option, + datamodels: Option>, #[structopt(subcommand)] cli_subcommand: Option, } @@ -36,7 +36,7 @@ async fn main() { let input = SchemaEngineCli::from_args(); match input.cli_subcommand { - None => start_engine(input.datamodel.as_deref()).await, + None => start_engine(input.datamodels).await, Some(SubCommand::Cli(cli_command)) => { tracing::info!(git_hash = env!("GIT_HASH"), "Starting schema engine CLI"); cli_command.run().await; @@ -91,30 +91,35 @@ impl ConnectorHost for JsonRpcHost { } } -async fn start_engine(datamodel_location: Option<&str>) { +async fn start_engine(datamodel_locations: Option>) { use std::io::Read as _; tracing::info!(git_hash = env!("GIT_HASH"), "Starting schema engine RPC server",); - let datamodel = datamodel_location.map(|location| { - let mut file = match std::fs::File::open(location) { - Ok(file) => file, - Err(e) => panic!("Error opening datamodel file in `{location}`: {e}"), - }; + let datamodel_locations = datamodel_locations.map(|datamodel_locations| { + datamodel_locations + .into_iter() + .map(|location| { + let mut file = match std::fs::File::open(&location) { + Ok(file) => file, + Err(e) => panic!("Error opening datamodel file in `{location}`: {e}"), + }; - let mut datamodel = String::new(); + let mut datamodel = String::new(); - if let Err(e) = file.read_to_string(&mut datamodel) { - panic!("Error reading datamodel file `{location}`: {e}"); - }; + if let Err(e) = file.read_to_string(&mut datamodel) { + panic!("Error reading datamodel file `{location}`: {e}"); + }; - datamodel + (location, datamodel) + }) + .collect::>() }); let (client, adapter) = json_rpc_stdio::new_client(); let host = JsonRpcHost { client }; - let api = rpc_api(datamodel, Arc::new(host)); + let api = rpc_api(datamodel_locations, Arc::new(host)); // Block the thread and handle IO in async until EOF. json_rpc_stdio::run_with_client(&api, adapter).await.unwrap(); } diff --git a/schema-engine/cli/tests/cli_tests.rs b/schema-engine/cli/tests/cli_tests.rs index bec62a2d3a31..46dadef24f53 100644 --- a/schema-engine/cli/tests/cli_tests.rs +++ b/schema-engine/cli/tests/cli_tests.rs @@ -1,6 +1,7 @@ use connection_string::JdbcString; use expect_test::expect; use indoc::*; +use schema_core::json_rpc::types::*; use std::{ fs, io::{BufRead, BufReader, Write as _}, @@ -41,7 +42,18 @@ where })); child.kill().unwrap(); - res.unwrap(); + match res { + Ok(_) => (), + Err(panic_payload) => { + let res = panic_payload + .downcast_ref::<&str>() + .map(|s| -> String { (*s).to_owned() }) + .or_else(|| panic_payload.downcast_ref::().map(|s| s.to_owned())) + .unwrap_or_default(); + + panic!("Error: '{}'", res) + } + } } struct TestApi { @@ -324,7 +336,7 @@ fn basic_jsonrpc_roundtrip_works_with_no_params(_api: TestApi) { fs::write(&tmpfile, datamodel).unwrap(); let mut command = Command::new(schema_engine_bin_path()); - command.arg("--datamodel").arg(&tmpfile).env("RUST_LOG", "info"); + command.arg("--datamodels").arg(&tmpfile).env("RUST_LOG", "info"); with_child_process(command, |process| { let stdin = process.stdin.as_mut().unwrap(); @@ -350,12 +362,12 @@ fn basic_jsonrpc_roundtrip_works_with_params(_api: TestApi) { let tmpdir = tempfile::tempdir().unwrap(); let tmpfile = tmpdir.path().join("datamodel"); - let datamodel = r#" + let datamodel = indoc! 
{r#" datasource db { provider = "postgres" url = env("TEST_DATABASE_URL") } - "#; + "#}; fs::create_dir_all(&tmpdir).unwrap(); fs::write(&tmpfile, datamodel).unwrap(); @@ -363,10 +375,19 @@ fn basic_jsonrpc_roundtrip_works_with_params(_api: TestApi) { let command = Command::new(schema_engine_bin_path()); let path = tmpfile.to_str().unwrap(); - let schema_path_params = format!(r#"{{ "datasource": {{ "tag": "SchemaPath", "path": "{path}" }} }}"#); + let schema_path_params = serde_json::json!({ + "datasource": { + "tag": "Schema", + "files": [{ "path": path, "content": datamodel }] + } + }); - let url = std::env::var("TEST_DATABASE_URL").unwrap(); - let connection_string_params = format!(r#"{{ "datasource": {{ "tag": "ConnectionString", "url": "{url}" }} }}"#); + let connection_string_params = serde_json::json!({ + "datasource": { + "tag": "ConnectionString", + "url": std::env::var("TEST_DATABASE_URL").unwrap() + } + }); with_child_process(command, |process| { let stdin = process.stdin.as_mut().unwrap(); @@ -374,8 +395,13 @@ fn basic_jsonrpc_roundtrip_works_with_params(_api: TestApi) { for _ in 0..2 { for params in [&schema_path_params, &connection_string_params] { - let params_template = - format!(r#"{{ "jsonrpc": "2.0", "method": "getDatabaseVersion", "params": {params}, "id": 1 }}"#); + let params_template = serde_json::json!({ + "jsonrpc": "2.0", + "method": "getDatabaseVersion", + "params": params, + "id": 1 + }) + .to_string(); writeln!(stdin, "{}", ¶ms_template).unwrap(); @@ -416,7 +442,7 @@ fn introspect_sqlite_empty_database() { "method": "introspect", "id": 1, "params": { - "schema": schema, + "schema": { "files": [{ "path": "schema.prisma", "content": schema }] }, "force": true, "compositeTypeDepth": 5, } @@ -463,7 +489,7 @@ fn introspect_sqlite_invalid_empty_database() { "method": "introspect", "id": 1, "params": { - "schema": schema, + "schema": { "files": [{ "path": "schema.prisma", "content": schema }] }, "force": true, "compositeTypeDepth": -1, } @@ -517,7 +543,8 @@ fn execute_postgres(api: TestApi) { "params": { "datasourceType": { "tag": "schema", - "schema": &schema_path, + "files": [{ "path": &schema_path, "content": &schema }], + "configDir": schema_path.parent().unwrap().to_string_lossy(), }, "script": "SELECT 1;", } @@ -593,7 +620,8 @@ fn introspect_postgres(api: TestApi) { "params": { "datasourceType": { "tag": "schema", - "schema": &schema_path, + "files": [{ "path": &schema_path, "content": &schema }], + "configDir": schema_path.parent().unwrap().to_string_lossy(), }, "script": script, } @@ -617,7 +645,7 @@ fn introspect_postgres(api: TestApi) { "method": "introspect", "id": 1, "params": { - "schema": &schema, + "schema": { "files": [{ "path": &schema_path, "content": &schema }] }, "force": true, "compositeTypeDepth": 5, } @@ -686,3 +714,106 @@ fn introspect_e2e() { assert!(response.starts_with(r#"{"jsonrpc":"2.0","result":{"datamodel":"datasource db {\n provider = \"sqlite\"\n url = env(\"TEST_DATABASE_URL\")\n}\n","warnings":[]},"#)); }); } + +macro_rules! write_multi_file_vec { + // Match multiple pairs of filename and content + ( $( $filename:expr => $content:expr ),* $(,)? 
) => { + { + use std::fs::File; + use std::io::Write; + + // Create a result vector to collect errors + let mut results = Vec::new(); + let tmpdir = tempfile::tempdir().unwrap(); + + fs::create_dir_all(&tmpdir).unwrap(); + + $( + let file_path = tmpdir.path().join($filename); + // Attempt to create or open the file + let result = (|| -> std::io::Result<()> { + let mut file = File::create(&file_path)?; + file.write_all($content.as_bytes())?; + Ok(()) + })(); + + result.unwrap(); + + // Push the result of the operation to the results vector + results.push((file_path.to_string_lossy().into_owned(), $content)); + )* + + // Return the results vector for further inspection if needed + results + } + }; +} + +fn to_schema_containers(files: Vec<(String, &str)>) -> Vec { + files + .into_iter() + .map(|(path, content)| SchemaContainer { + path: path.to_string(), + content: content.to_string(), + }) + .collect() +} + +fn to_schemas_container(files: Vec<(String, &str)>) -> SchemasContainer { + SchemasContainer { + files: to_schema_containers(files), + } +} + +#[test_connector(tags(Postgres))] +fn get_database_version_multi_file(_api: TestApi) { + let files = write_multi_file_vec! { + "a.prisma" => r#" + datasource db { + provider = "postgres" + url = env("TEST_DATABASE_URL") + } + "#, + "b.prisma" => r#" + model User { + id Int @id + } + "#, + }; + + let command = Command::new(schema_engine_bin_path()); + + let schema_path_params = GetDatabaseVersionInput { + datasource: DatasourceParam::Schema(to_schemas_container(files)), + }; + + let connection_string_params = GetDatabaseVersionInput { + datasource: DatasourceParam::ConnectionString(UrlContainer { + url: std::env::var("TEST_DATABASE_URL").unwrap(), + }), + }; + + with_child_process(command, |process| { + let stdin = process.stdin.as_mut().unwrap(); + let mut stdout = BufReader::new(process.stdout.as_mut().unwrap()); + + for _ in 0..2 { + for params in [&schema_path_params, &connection_string_params] { + let params_template = serde_json::json!({ + "jsonrpc": "2.0", + "method": "getDatabaseVersion", + "params": params, + "id": 1 + }) + .to_string(); + + writeln!(stdin, "{}", ¶ms_template).unwrap(); + + let mut response = String::new(); + stdout.read_line(&mut response).unwrap(); + + assert!(response.contains("PostgreSQL") || response.contains("CockroachDB")); + } + } + }); +} diff --git a/schema-engine/connectors/mongodb-schema-connector/src/lib.rs b/schema-engine/connectors/mongodb-schema-connector/src/lib.rs index 16f77df1818b..bab0531b61c4 100644 --- a/schema-engine/connectors/mongodb-schema-connector/src/lib.rs +++ b/schema-engine/connectors/mongodb-schema-connector/src/lib.rs @@ -52,8 +52,10 @@ impl MongoDbSchemaConnector { async fn mongodb_schema_from_diff_target(&self, target: DiffTarget<'_>) -> ConnectorResult { match target { - DiffTarget::Datamodel(schema) => { - let validated_schema = psl::parse_schema(schema).map_err(ConnectorError::new_schema_parser_error)?; + DiffTarget::Datamodel(sources) => { + let validated_schema = + psl::parse_schema_multi(&sources).map_err(ConnectorError::new_schema_parser_error)?; + Ok(schema_calculator::calculate(&validated_schema)) } DiffTarget::Database => self.client().await?.describe().await, diff --git a/schema-engine/connectors/mongodb-schema-connector/tests/introspection/test_api/utils.rs b/schema-engine/connectors/mongodb-schema-connector/tests/introspection/test_api/utils.rs index 38287300990d..3dc83b12fa80 100644 --- 
+++ b/schema-engine/connectors/mongodb-schema-connector/tests/introspection/test_api/utils.rs
@@ -70,10 +70,10 @@ pub(crate) fn config_block_string(features: BitFlags<PreviewFeature>) -> String {

 #[track_caller]
 pub(crate) fn parse_datamodels(datamodels: &[(&str, String)]) -> psl::ValidatedSchema {
-    let datamodels = datamodels
+    let datamodels: Vec<_> = datamodels
         .iter()
         .map(|(file_name, dm)| (file_name.to_string(), psl::SourceFile::from(dm)))
         .collect();

-    psl::validate_multi_file(datamodels)
+    psl::validate_multi_file(&datamodels)
 }
diff --git a/schema-engine/connectors/mongodb-schema-connector/tests/migrations/test_api.rs b/schema-engine/connectors/mongodb-schema-connector/tests/migrations/test_api.rs
index 255e4a616057..eacd5f23b2c4 100644
--- a/schema-engine/connectors/mongodb-schema-connector/tests/migrations/test_api.rs
+++ b/schema-engine/connectors/mongodb-schema-connector/tests/migrations/test_api.rs
@@ -185,7 +185,11 @@ pub(crate) fn test_scenario(scenario_name: &str) {
             .await
             .unwrap();
         let to = connector
-            .database_schema_from_diff_target(DiffTarget::Datamodel(schema.clone()), None, None)
+            .database_schema_from_diff_target(
+                DiffTarget::Datamodel(vec![("schema.prisma".to_string(), schema.clone())]),
+                None,
+                None,
+            )
             .await
             .unwrap();
         let migration = connector.diff(from, to);
@@ -227,7 +231,11 @@ Snapshot comparison failed. Run the test again with UPDATE_EXPECT=1 in the environment
         .await
         .unwrap();
     let to = connector
-        .database_schema_from_diff_target(DiffTarget::Datamodel(schema), None, None)
+        .database_schema_from_diff_target(
+            DiffTarget::Datamodel(vec![("schema.prisma".to_string(), schema.clone())]),
+            None,
+            None,
+        )
         .await
         .unwrap();
     let migration = connector.diff(from, to);
diff --git a/schema-engine/connectors/schema-connector/src/diff.rs b/schema-engine/connectors/schema-connector/src/diff.rs
index 48e34c432b5d..aaa1ae6fc498 100644
--- a/schema-engine/connectors/schema-connector/src/diff.rs
+++ b/schema-engine/connectors/schema-connector/src/diff.rs
@@ -5,7 +5,7 @@ use std::fmt::Debug;
 /// Diffable things
 pub enum DiffTarget<'a> {
     /// A Prisma schema.
-    Datamodel(SourceFile),
+    Datamodel(Vec<(String, SourceFile)>),
     /// A migrations folder. What is diffable is the state of the database schema at the end of the
     /// migrations history.
     Migrations(&'a [MigrationDirectory]),
@@ -25,13 +25,3 @@ impl Debug for DiffTarget<'_> {
         }
     }
 }
-
-impl DiffTarget<'_> {
-    /// Try interpreting the DiffTarget as a Datamodel variant.
-    pub fn as_datamodel(&self) -> Option<&str> {
-        match self {
-            DiffTarget::Datamodel(schema) => Some(schema.as_str()),
-            _ => None,
-        }
-    }
-}
diff --git a/schema-engine/connectors/sql-schema-connector/src/lib.rs b/schema-engine/connectors/sql-schema-connector/src/lib.rs
index 66924c2b5a9b..97bbbde409c9 100644
--- a/schema-engine/connectors/sql-schema-connector/src/lib.rs
+++ b/schema-engine/connectors/sql-schema-connector/src/lib.rs
@@ -131,8 +131,9 @@ impl SqlSchemaConnector {
         namespaces: Option<Namespaces>,
     ) -> ConnectorResult<SqlSchema> {
         match target {
-            DiffTarget::Datamodel(schema) => {
-                let schema = psl::parse_schema(schema).map_err(ConnectorError::new_schema_parser_error)?;
+            DiffTarget::Datamodel(sources) => {
+                let schema = psl::parse_schema_multi(&sources).map_err(ConnectorError::new_schema_parser_error)?;
+
                 self.flavour.check_schema_features(&schema)?;
                 Ok(sql_schema_calculator::calculate_sql_schema(
                     &schema,
diff --git a/schema-engine/core/src/commands/create_migration.rs b/schema-engine/core/src/commands/create_migration.rs
index 316883b0d0c3..2ab08711dc74 100644
--- a/schema-engine/core/src/commands/create_migration.rs
+++ b/schema-engine/core/src/commands/create_migration.rs
@@ -1,7 +1,6 @@
-use crate::{json_rpc::types::*, CoreError, CoreResult};
-use psl::parser_database::SourceFile;
+use crate::{json_rpc::types::*, CoreError, CoreResult, SchemaContainerExt};
 use schema_connector::{migrations_directory::*, DiffTarget, SchemaConnector};
-use std::{path::Path, sync::Arc};
+use std::path::Path;
 use user_facing_errors::schema_engine::MigrationNameTooLong;

 /// Create a new migration.
@@ -20,17 +19,11 @@ pub async fn create_migration(
     // Infer the migration.
     let previous_migrations = list_migrations(Path::new(&input.migrations_directory_path))?;
-
+    let sources: Vec<_> = input.schema.to_psl_input();
     // We need to start with the 'to', which is the Schema, in order to grab the
     // namespaces, in case we've got MultiSchema enabled.
     let to = connector
-        .database_schema_from_diff_target(
-            DiffTarget::Datamodel(SourceFile::new_allocated(Arc::from(
-                input.prisma_schema.into_boxed_str(),
-            ))),
-            None,
-            None,
-        )
+        .database_schema_from_diff_target(DiffTarget::Datamodel(sources), None, None)
         .await?;
     let namespaces = connector.extract_namespaces(&to);

diff --git a/schema-engine/core/src/commands/diff.rs b/schema-engine/core/src/commands/diff.rs
index 266bd0e40efc..e502d678db26 100644
--- a/schema-engine/core/src/commands/diff.rs
+++ b/schema-engine/core/src/commands/diff.rs
@@ -1,9 +1,9 @@
 use crate::{
     core_error::CoreResult,
-    json_rpc::types::{DiffParams, DiffResult, DiffTarget, PathContainer, SchemaContainer, UrlContainer},
+    json_rpc::types::{DiffParams, DiffResult, DiffTarget, PathContainer, UrlContainer},
+    SchemaContainerExt,
 };
 use enumflags2::BitFlags;
-use psl::parser_database::SourceFile;
 use schema_connector::{
     ConnectorError, ConnectorHost, DatabaseSchema, DiffTarget as McDiff, Namespaces, SchemaConnector,
 };
@@ -96,28 +96,15 @@ fn namespaces_and_preview_features_from_diff_targets(
     for target in targets {
         match target {
             DiffTarget::Migrations(_) | DiffTarget::Empty | DiffTarget::Url(_) => (),
-            DiffTarget::SchemaDatasource(SchemaContainer { schema })
-            | DiffTarget::SchemaDatamodel(SchemaContainer { schema }) => {
-                let schema_str: String = std::fs::read_to_string(schema).map_err(|err| {
-                    ConnectorError::from_source_with_context(
-                        err,
-                        format!("Error trying to read Prisma schema file at `{schema}`.").into_boxed_str(),
-                    )
-                })?;
-
-                let validated_schema = psl::validate(schema_str.into());
-                for (namespace, _span) in validated_schema
-                    .configuration
-                    .datasources
-                    .iter()
-                    .flat_map(|ds| ds.namespaces.iter())
-                {
-                    namespaces.push(namespace.clone());
-                }
+            DiffTarget::SchemaDatasource(schemas) => {
+                let sources = (&schemas.files).to_psl_input();

-                for generator in &validated_schema.configuration.generators {
-                    preview_features |= generator.preview_features.unwrap_or_default();
-                }
+                extract_namespaces(&sources, &mut namespaces, &mut preview_features);
+            }
+            DiffTarget::SchemaDatamodel(schemas) => {
+                let sources = (&schemas.files).to_psl_input();
+
+                extract_namespaces(&sources, &mut namespaces, &mut preview_features);
             }
         }
     }
@@ -125,6 +112,27 @@ fn namespaces_and_preview_features_from_diff_targets(
     Ok((Namespaces::from_vec(&mut namespaces), preview_features))
 }

+fn extract_namespaces(
+    files: &[(String, psl::SourceFile)],
+    namespaces: &mut Vec<String>,
+    preview_features: &mut BitFlags<PreviewFeature>,
+) {
+    let validated_schema = psl::validate_multi_file(files);
+
+    for (namespace, _span) in validated_schema
+        .configuration
+        .datasources
+        .iter()
+        .flat_map(|ds| ds.namespaces.iter())
+    {
+        namespaces.push(namespace.clone());
+    }
+
+    for generator in &validated_schema.configuration.generators {
+        *preview_features |= generator.preview_features.unwrap_or_default();
+    }
+}
+
 // `None` in case the target is empty
 async fn json_rpc_diff_target_to_connector(
     target: &DiffTarget,
@@ -132,21 +140,12 @@ async fn json_rpc_diff_target_to_connector(
     namespaces: Option<Namespaces>,
     preview_features: BitFlags<PreviewFeature>,
 ) -> CoreResult<Option<(Box<dyn SchemaConnector>, DatabaseSchema)>> {
-    let read_prisma_schema_from_path = |schema_path: &str| -> CoreResult<String> {
-        std::fs::read_to_string(schema_path).map_err(|err| {
-            ConnectorError::from_source_with_context(
-                err,
-                format!("Error trying to read Prisma schema file at `{schema_path}`.").into_boxed_str(),
-            )
-        })
-    };
-
     match target {
         DiffTarget::Empty => Ok(None),
-        DiffTarget::SchemaDatasource(SchemaContainer { schema }) => {
-            let schema_contents = read_prisma_schema_from_path(schema)?;
-            let schema_dir = std::path::Path::new(schema).parent();
-            let mut connector = crate::schema_to_connector(&schema_contents, schema_dir)?;
+        DiffTarget::SchemaDatasource(schemas) => {
+            let config_dir = std::path::Path::new(&schemas.config_dir);
+            let sources: Vec<_> = schemas.to_psl_input();
+            let mut connector = crate::schema_to_connector(&sources, Some(config_dir))?;
             connector.ensure_connection_validity().await?;
             connector.set_preview_features(preview_features);
             let schema = connector
                 .database_schema_from_diff_target(McDiff::Database, None, namespaces)
                 .await?;
             Ok(Some((connector, schema)))
         }
-        DiffTarget::SchemaDatamodel(SchemaContainer { schema }) => {
-            let schema_contents = read_prisma_schema_from_path(schema)?;
-            let mut connector = crate::schema_to_connector_unchecked(&schema_contents)?;
+        DiffTarget::SchemaDatamodel(schemas) => {
+            let sources = schemas.to_psl_input();
+            let mut connector = crate::schema_to_connector_unchecked(&sources)?;
             connector.set_preview_features(preview_features);
+
             let schema = connector
-                .database_schema_from_diff_target(
-                    McDiff::Datamodel(SourceFile::new_allocated(Arc::from(schema_contents.into_boxed_str()))),
-                    None,
-                    namespaces,
-                )
+                .database_schema_from_diff_target(McDiff::Datamodel(sources), None, namespaces)
                 .await?;
             Ok(Some((connector, schema)))
         }
diff --git a/schema-engine/core/src/commands/evaluate_data_loss.rs b/schema-engine/core/src/commands/evaluate_data_loss.rs
index 2a95df04c51c..10417f3af778 100644
--- a/schema-engine/core/src/commands/evaluate_data_loss.rs
+++ b/schema-engine/core/src/commands/evaluate_data_loss.rs
@@ -1,7 +1,5 @@
-use crate::{json_rpc::types::*, CoreResult};
-use psl::parser_database::SourceFile;
+use crate::{json_rpc::types::*, CoreResult, SchemaContainerExt};
 use schema_connector::{migrations_directory::*, DiffTarget, SchemaConnector};
-use std::sync::Arc;

 /// Development command for migrations. Evaluate the data loss induced by the
 /// next migration the engine would generate on the main database.
@@ -13,12 +11,12 @@ pub async fn evaluate_data_loss(
     connector: &mut dyn SchemaConnector,
 ) -> CoreResult<EvaluateDataLossOutput> {
     error_on_changed_provider(&input.migrations_directory_path, connector.connector_type())?;
-    let source_file = SourceFile::new_allocated(Arc::from(input.prisma_schema.into_boxed_str()));
+    let sources: Vec<_> = input.schema.to_psl_input();

     let migrations_from_directory = list_migrations(input.migrations_directory_path.as_ref())?;

     let to = connector
-        .database_schema_from_diff_target(DiffTarget::Datamodel(source_file), None, None)
+        .database_schema_from_diff_target(DiffTarget::Datamodel(sources), None, None)
         .await?;
     let namespaces = connector.extract_namespaces(&to);
diff --git a/schema-engine/core/src/commands/schema_push.rs b/schema-engine/core/src/commands/schema_push.rs
index a06ee7645b33..e0f1a68a8f57 100644
--- a/schema-engine/core/src/commands/schema_push.rs
+++ b/schema-engine/core/src/commands/schema_push.rs
@@ -1,21 +1,19 @@
-use crate::{json_rpc::types::*, parse_schema, CoreResult};
-use psl::parser_database::SourceFile;
+use crate::{json_rpc::types::*, parse_schema_multi, CoreResult, SchemaContainerExt};
 use schema_connector::{ConnectorError, DiffTarget, SchemaConnector};
-use std::sync::Arc;
 use tracing_futures::Instrument;

 /// Command to bring the local database in sync with the prisma schema, without
 /// interacting with the migrations directory nor the migrations table.
 pub async fn schema_push(input: SchemaPushInput, connector: &mut dyn SchemaConnector) -> CoreResult<SchemaPushOutput> {
-    let source = SourceFile::new_allocated(Arc::from(input.schema.into_boxed_str()));
-    let datamodel = parse_schema(source.clone())?;
+    let sources = input.schema.to_psl_input();
+    let datamodel = parse_schema_multi(&sources)?;

     if let Some(err) = connector.check_database_version_compatibility(&datamodel) {
         return Err(ConnectorError::user_facing(err));
     };

     let to = connector
-        .database_schema_from_diff_target(DiffTarget::Datamodel(source), None, None)
+        .database_schema_from_diff_target(DiffTarget::Datamodel(sources), None, None)
         .instrument(tracing::info_span!("Calculate `to`"))
         .await?;

diff --git a/schema-engine/core/src/lib.rs b/schema-engine/core/src/lib.rs
index 3ca75a596de0..b367ab0bfff9 100644
--- a/schema-engine/core/src/lib.rs
+++ b/schema-engine/core/src/lib.rs
@@ -17,6 +17,7 @@ mod state;
 mod timings;

 pub use self::{api::GenericApi, core_error::*, rpc::rpc_api, timings::TimingsLayer};
+use json_rpc::types::{SchemaContainer, SchemasContainer, SchemasWithConfigDir};
 pub use schema_connector;

 use enumflags2::BitFlags;
@@ -30,8 +31,8 @@ use sql_schema_connector::SqlSchemaConnector;
 use std::{env, path::Path};
 use user_facing_errors::common::InvalidConnectionString;

-fn parse_schema(schema: SourceFile) -> CoreResult<ValidatedSchema> {
-    psl::parse_schema(schema).map_err(CoreError::new_schema_parser_error)
+fn parse_schema_multi(files: &[(String, SourceFile)]) -> CoreResult<ValidatedSchema> {
+    psl::parse_schema_multi(files).map_err(CoreError::new_schema_parser_error)
 }

 fn connector_for_connection_string(
@@ -97,9 +98,11 @@ fn connector_for_connection_string(
 }

 /// Same as schema_to_connector, but it will only read the provider, not the connector params.
-fn schema_to_connector_unchecked(schema: &str) -> CoreResult<Box<dyn SchemaConnector>> {
-    let config = psl::parse_configuration(schema)
-        .map_err(|err| CoreError::new_schema_parser_error(err.to_pretty_string("schema.prisma", schema)))?;
+fn schema_to_connector_unchecked(
+    files: &[(String, SourceFile)],
+) -> CoreResult<Box<dyn SchemaConnector>> {
+    let (_, config) = psl::parse_configuration_multi_file(files)
+        .map_err(|(files, err)| CoreError::new_schema_parser_error(files.render_diagnostics(&err)))?;

     let preview_features = config.preview_features();
     let source = config
@@ -123,10 +126,10 @@ fn schema_to_connector_unchecked(
 fn schema_to_connector(
-    schema: &str,
+    files: &[(String, SourceFile)],
     config_dir: Option<&Path>,
 ) -> CoreResult<Box<dyn SchemaConnector>> {
-    let (source, url, preview_features, shadow_database_url) = parse_configuration(schema)?;
+    let (source, url, preview_features, shadow_database_url) = parse_configuration_multi(files)?;

     let url = config_dir
         .map(|config_dir| psl::set_config_dir(source.active_connector.flavour(), config_dir, &url).into_owned())
@@ -140,6 +143,7 @@ fn schema_to_connector(
     let mut connector = connector_for_provider(source.active_provider)?;

     connector.set_params(params)?;
+
     Ok(connector)
 }

@@ -174,6 +178,7 @@ pub fn schema_api(
         parse_configuration(datamodel)?;
     }

+    let datamodel = datamodel.map(|datamodel| vec![("schema.prisma".to_owned(), SourceFile::from(datamodel))]);
     let state = state::EngineState::new(datamodel, host);
     Ok(Box::new(state))
 }

@@ -182,6 +187,26 @@ fn parse_configuration(datamodel: &str) -> CoreResult<(Datasource, String, BitFlags<PreviewFeature>, Option<String>)> {
     let config = psl::parse_configuration(datamodel)
         .map_err(|err| CoreError::new_schema_parser_error(err.to_pretty_string("schema.prisma", datamodel)))?;

+    extract_configuration(config, |err| {
+        CoreError::new_schema_parser_error(err.to_pretty_string("schema.prisma", datamodel))
+    })
+}
+
+fn parse_configuration_multi(
+    files: &[(String, SourceFile)],
+) -> CoreResult<(Datasource, String, BitFlags<PreviewFeature>, Option<String>)> {
+    let (files, config) = psl::parse_configuration_multi_file(files)
+        .map_err(|(files, err)| CoreError::new_schema_parser_error(files.render_diagnostics(&err)))?;
+
+    extract_configuration(config, |err| {
+        CoreError::new_schema_parser_error(files.render_diagnostics(&err))
+    })
+}
+
+fn extract_configuration(
+    config: psl::Configuration,
+    mut err_handler: impl FnMut(psl::Diagnostics) -> CoreError,
+) -> CoreResult<(Datasource, String, BitFlags<PreviewFeature>, Option<String>)> {
     let preview_features = config.preview_features();

     let source = config
@@ -192,11 +217,55 @@ fn parse_configuration(datamodel: &str) -> CoreResult<(Datasource, String, BitFlags<PreviewFeature>, Option<String>)> {

     let url = source
         .load_direct_url(|key| env::var(key).ok())
-        .map_err(|err| CoreError::new_schema_parser_error(err.to_pretty_string("schema.prisma", datamodel)))?;
+        .map_err(&mut err_handler)?;

-    let shadow_database_url = source
-        .load_shadow_database_url()
-        .map_err(|err| CoreError::new_schema_parser_error(err.to_pretty_string("schema.prisma", datamodel)))?;
+    let shadow_database_url = source.load_shadow_database_url().map_err(err_handler)?;

     Ok((source, url, preview_features, shadow_database_url))
 }
+
+trait SchemaContainerExt {
+    fn to_psl_input(self) -> Vec<(String, SourceFile)>;
+}
+
+impl SchemaContainerExt for SchemasContainer {
+    fn to_psl_input(self) -> Vec<(String, SourceFile)> {
+        self.files.to_psl_input()
+    }
+}
+
+impl SchemaContainerExt for &SchemasContainer {
+    fn to_psl_input(self) -> Vec<(String, SourceFile)> {
+        (&self.files).to_psl_input()
+    }
+}
+
+impl SchemaContainerExt for Vec<SchemaContainer> {
+    fn to_psl_input(self) -> Vec<(String, SourceFile)> {
+        self.into_iter()
+            .map(|container| (container.path, SourceFile::from(container.content)))
+            .collect()
+    }
+}
+
+impl SchemaContainerExt for Vec<&SchemaContainer> {
+    fn to_psl_input(self) -> Vec<(String, SourceFile)> {
+        self.into_iter()
+            .map(|container| (container.path.clone(), SourceFile::from(&container.content)))
+            .collect()
+    }
+}
+
+impl SchemaContainerExt for &Vec<SchemaContainer> {
+    fn to_psl_input(self) -> Vec<(String, SourceFile)> {
+        self.iter()
+            .map(|container| (container.path.clone(), SourceFile::from(&container.content)))
+            .collect()
+    }
+}
+
+impl SchemaContainerExt for &SchemasWithConfigDir {
+    fn to_psl_input(self) -> Vec<(String, SourceFile)> {
+        (&self.files).to_psl_input()
+    }
+}
diff --git a/schema-engine/core/src/rpc.rs b/schema-engine/core/src/rpc.rs
index 326f7328bf6d..cf7bac51a8ec 100644
--- a/schema-engine/core/src/rpc.rs
+++ b/schema-engine/core/src/rpc.rs
@@ -1,11 +1,22 @@
 use crate::{json_rpc::method_names::*, CoreError, CoreResult, GenericApi};
 use jsonrpc_core::{types::error::Error as JsonRpcError, IoHandler, Params};
+use psl::SourceFile;
 use std::sync::Arc;

 /// Initialize a JSON-RPC ready schema engine API.
-pub fn rpc_api(prisma_schema: Option<String>, host: Arc<dyn ConnectorHost>) -> IoHandler {
+pub fn rpc_api(
+    initial_datamodels: Option<Vec<(String, String)>>,
+    host: Arc<dyn ConnectorHost>,
+) -> IoHandler {
     let mut io_handler = IoHandler::default();
-    let api = Arc::new(crate::state::EngineState::new(prisma_schema, Some(host)));
+
+    let initial_datamodels = initial_datamodels.map(|schemas| {
+        schemas
+            .into_iter()
+            .map(|(name, schema)| (name, SourceFile::from(schema)))
+            .collect()
+    });
+
+    let api = Arc::new(crate::state::EngineState::new(initial_datamodels, Some(host)));

     for cmd in METHOD_NAMES {
         let api = api.clone();
@@ -23,7 +34,6 @@ async fn run_command(
     cmd: &str,
     params: Params,
 ) -> Result<serde_json::Value, JsonRpcError> {
-    tracing::debug!(?cmd, "running the command");
     match cmd {
         APPLY_MIGRATIONS => render(executor.apply_migrations(params.parse()?).await),
         CREATE_DATABASE => render(executor.create_database(params.parse()?).await),
diff --git a/schema-engine/core/src/state.rs b/schema-engine/core/src/state.rs
index 3951c6b91a25..02de93922bd3 100644
--- a/schema-engine/core/src/state.rs
+++ b/schema-engine/core/src/state.rs
@@ -3,7 +3,9 @@
 //! Why this rather than using connectors directly? We must be able to use the schema engine
 //! without a valid schema or database connection for commands like createDatabase and diff.

-use crate::{api::GenericApi, commands, json_rpc::types::*, CoreError, CoreResult};
+use crate::{
+    api::GenericApi, commands, json_rpc::types::*, parse_configuration_multi, CoreError, CoreResult, SchemaContainerExt,
+};
 use enumflags2::BitFlags;
 use psl::{parser_database::SourceFile, PreviewFeature};
 use schema_connector::{ConnectorError, ConnectorHost, IntrospectionResult, Namespaces, SchemaConnector};
@@ -27,7 +29,27 @@ pub(crate) struct EngineState {
     // - a full schema
     //
     // To a channel leading to a spawned MigrationConnector.
-    connectors: Mutex<HashMap<String, mpsc::Sender<ErasedConnectorRequest>>>,
+    connectors: Mutex<HashMap<ConnectorRequestType, mpsc::Sender<ErasedConnectorRequest>>>,
+}
+
+impl EngineState {
+    fn get_url_from_schemas(&self, container: &SchemasWithConfigDir) -> CoreResult<String> {
+        let sources = container.to_psl_input();
+        let (datasource, url, _, _) = parse_configuration_multi(&sources)?;
+
+        Ok(psl::set_config_dir(
+            datasource.active_connector.flavour(),
+            std::path::Path::new(&container.config_dir),
+            &url,
+        )
+        .into_owned())
+    }
+}
+
+#[derive(Debug, Eq, Hash, PartialEq)]
+enum ConnectorRequestType {
+    Schema(Vec<(String, SourceFile)>),
+    Url(String),
 }

 /// A request from the core to a connector, in the form of an async closure.
@@ -41,9 +63,12 @@ type ErasedConnectorRequest = Box<
 >;

 impl EngineState {
-    pub(crate) fn new(initial_datamodel: Option<String>, host: Option<Arc<dyn ConnectorHost>>) -> Self {
+    pub(crate) fn new(
+        initial_datamodels: Option<Vec<(String, SourceFile)>>,
+        host: Option<Arc<dyn ConnectorHost>>,
+    ) -> Self {
         EngineState {
-            initial_datamodel: initial_datamodel.map(|s| psl::validate(s.into())),
+            initial_datamodel: initial_datamodels.as_deref().map(psl::validate_multi_file),
             host: host.unwrap_or_else(|| Arc::new(schema_connector::EmptyHost)),
             connectors: Default::default(),
         }
@@ -59,20 +84,9 @@ impl EngineState {
         })
     }

-    async fn with_connector_from_schema_path<O: Send + 'static>(
-        &self,
-        path: &str,
-        f: ConnectorRequest<O>,
-    ) -> CoreResult<O> {
-        let config_dir = std::path::Path::new(path).parent();
-        let schema = std::fs::read_to_string(path)
-            .map_err(|err| ConnectorError::from_source(err, "Falied to read Prisma schema."))?;
-        self.with_connector_for_schema(&schema, config_dir, f).await
-    }
-
     async fn with_connector_for_schema<O: Send + 'static>(
         &self,
-        schema: &str,
+        schemas: Vec<(String, SourceFile)>,
         config_dir: Option<&Path>,
         f: ConnectorRequest<O>,
     ) -> CoreResult<O> {
@@ -88,13 +102,15 @@ impl EngineState {
         });

         let mut connectors = self.connectors.lock().await;
-        match connectors.get(schema) {
+
+        match connectors.get(&ConnectorRequestType::Schema(schemas.clone())) {
             Some(request_sender) => match request_sender.send(erased).await {
                 Ok(()) => (),
                 Err(_) => return Err(ConnectorError::from_msg("tokio mpsc send error".to_owned())),
             },
             None => {
-                let mut connector = crate::schema_to_connector(schema, config_dir)?;
+                let mut connector = crate::schema_to_connector(&schemas, config_dir)?;
+
                 connector.set_host(self.host.clone());
                 let (erased_sender, mut erased_receiver) = mpsc::channel::<ErasedConnectorRequest>(12);
                 tokio::spawn(async move {
@@ -106,7 +122,7 @@ impl EngineState {
                     Ok(()) => (),
                     Err(_) => return Err(ConnectorError::from_msg("erased sender send error".to_owned())),
                 };
-                connectors.insert(schema.to_owned(), erased_sender);
+                connectors.insert(ConnectorRequestType::Schema(schemas), erased_sender);
             }
         }

@@ -126,7 +142,7 @@ impl EngineState {
         });

         let mut connectors = self.connectors.lock().await;
-        match connectors.get(&url) {
+        match connectors.get(&ConnectorRequestType::Url(url.clone())) {
             Some(request_sender) => match request_sender.send(erased).await {
                 Ok(()) => (),
                 Err(_) => return Err(ConnectorError::from_msg("tokio mpsc send error".to_owned())),
             },
             None => {
                 let mut connector = crate::connector_for_connection_string(url.clone(), None, BitFlags::default())?;
                 connector.set_host(self.host.clone());
+
                 let (erased_sender, mut erased_receiver) = mpsc::channel::<ErasedConnectorRequest>(12);
                 tokio::spawn(async move {
                     while let Some(req) = erased_receiver.recv().await {
@@ -144,7 +161,8 @@ impl EngineState {
                     Ok(()) => (),
                     Err(_) => return Err(ConnectorError::from_msg("erased sender send error".to_owned())),
                 };
-                connectors.insert(url, erased_sender);
+
+                connectors.insert(ConnectorRequestType::Url(url), erased_sender);
             }
         }

@@ -153,17 +171,12 @@ impl EngineState {

     async fn with_connector_from_datasource_param<O: Send + 'static>(
         &self,
-        param: &DatasourceParam,
+        param: DatasourceParam,
         f: ConnectorRequest<O>,
     ) -> CoreResult<O> {
         match param {
-            DatasourceParam::ConnectionString(UrlContainer { url }) => {
-                self.with_connector_for_url(url.clone(), f).await
-            }
-            DatasourceParam::SchemaPath(PathContainer { path }) => self.with_connector_from_schema_path(path, f).await,
-            DatasourceParam::SchemaString(SchemaContainer { schema }) => {
-                self.with_connector_for_schema(schema, None, f).await
-            }
+            DatasourceParam::ConnectionString(UrlContainer { url }) => self.with_connector_for_url(url, f).await,
+            DatasourceParam::Schema(schemas) => self.with_connector_for_schema(schemas.to_psl_input(), None, f).await,
         }
     }

@@ -174,11 +187,16 @@ impl EngineState {
         let schema = if let Some(initial_datamodel) = &self.initial_datamodel {
             initial_datamodel
         } else {
-            return Err(ConnectorError::from_msg("Missing --datamodel".to_owned()));
+            return Err(ConnectorError::from_msg("Missing --datamodels".to_owned()));
         };

-        self.with_connector_for_schema(schema.db.source_assert_single(), None, f)
-            .await
+        let schemas = schema
+            .db
+            .iter_file_sources()
+            .map(|(name, content)| (name.to_string(), content.clone()))
+            .collect::<Vec<_>>();
+
+        self.with_connector_for_schema(schemas, None, f).await
     }
 }

@@ -188,7 +206,7 @@ impl GenericApi for EngineState {
         let f: ConnectorRequest<String> = Box::new(|connector| connector.version());

         match params {
-            Some(params) => self.with_connector_from_datasource_param(&params.datasource, f).await,
+            Some(params) => self.with_connector_from_datasource_param(params.datasource, f).await,
             None => self.with_default_connector(f).await,
         }
     }
@@ -207,7 +225,7 @@ impl GenericApi for EngineState {

     async fn create_database(&self, params: CreateDatabaseParams) -> CoreResult<CreateDatabaseResult> {
         self.with_connector_from_datasource_param(
-            &params.datasource,
+            params.datasource,
             Box::new(|connector| {
                 Box::pin(async move {
                     let database_name = SchemaConnector::create_database(connector).await?;
@@ -231,25 +249,9 @@ impl GenericApi for EngineState {
     }

     async fn db_execute(&self, params: DbExecuteParams) -> CoreResult<()> {
-        use std::io::Read;
-
         let url: String = match &params.datasource_type {
             DbExecuteDatasourceType::Url(UrlContainer { url }) => url.clone(),
-            DbExecuteDatasourceType::Schema(SchemaContainer { schema: file_path }) => {
-                let mut schema_file = std::fs::File::open(file_path)
-                    .map_err(|err| ConnectorError::from_source(err, "Opening Prisma schema file."))?;
-                let mut schema_string = String::new();
-                schema_file
-                    .read_to_string(&mut schema_string)
-                    .map_err(|err| ConnectorError::from_source(err, "Reading Prisma schema file."))?;
-                let (datasource, url, _, _) = crate::parse_configuration(&schema_string)?;
-                std::path::Path::new(file_path)
-                    .parent()
-                    .map(|config_dir| {
-                        psl::set_config_dir(datasource.active_connector.flavour(), config_dir, &url).into_owned()
-                    })
-                    .unwrap_or(url)
-            }
+            DbExecuteDatasourceType::Schema(schemas) => self.get_url_from_schemas(schemas)?,
         };

         self.with_connector_for_url(url, Box::new(move |connector| connector.db_execute(params.script)))
@@ -301,7 +303,7 @@ impl GenericApi for EngineState {
         params: EnsureConnectionValidityParams,
     ) -> CoreResult<EnsureConnectionValidityResult> {
         self.with_connector_from_datasource_param(
-            &params.datasource,
+            params.datasource,
             Box::new(|connector| {
                 Box::pin(async move {
                     SchemaConnector::ensure_connection_validity(connector).await?;
@@ -321,21 +323,24 @@ impl GenericApi for EngineState {

     async fn introspect(&self, params: IntrospectParams) -> CoreResult<IntrospectResult> {
         tracing::info!("{:?}", params.schema);
-        let source_file = SourceFile::new_allocated(Arc::from(params.schema.clone().into_boxed_str()));
+        let source_files = params.schema.to_psl_input();

-        let has_some_namespaces = params.schemas.is_some();
+        let has_some_namespaces = params.namespaces.is_some();
         let composite_type_depth = From::from(params.composite_type_depth);

         let ctx = if params.force {
-            let previous_schema = psl::validate(source_file);
+            let previous_schema = psl::validate_multi_file(&source_files);
+
             schema_connector::IntrospectionContext::new_config_only(
                 previous_schema,
                 composite_type_depth,
-                params.schemas,
+                params.namespaces,
             )
         } else {
-            let previous_schema = psl::parse_schema(source_file).map_err(ConnectorError::new_schema_parser_error)?;
-            schema_connector::IntrospectionContext::new(previous_schema, composite_type_depth, params.schemas)
+            let previous_schema =
+                psl::parse_schema_multi(&source_files).map_err(ConnectorError::new_schema_parser_error)?;
+
+            schema_connector::IntrospectionContext::new(previous_schema, composite_type_depth, params.namespaces)
         };

         if !ctx
@@ -351,7 +356,7 @@ impl GenericApi for EngineState {
         }

         self.with_connector_for_schema(
-            &params.schema,
+            source_files,
             None,
             Box::new(move |connector| {
                 Box::pin(async move {
diff --git a/schema-engine/json-rpc-api-build/methods/common.toml b/schema-engine/json-rpc-api-build/methods/common.toml
index e13d7a6aef03..efee7dfb4de3 100644
--- a/schema-engine/json-rpc-api-build/methods/common.toml
+++ b/schema-engine/json-rpc-api-build/methods/common.toml
@@ -2,16 +2,40 @@

 [enumShapes.DatasourceParam]
 description = """
-The path to a live database taken as input. For flexibility, this can be the path to a Prisma
-schema file containing the datasource, or the whole Prisma schema as a string, or only the
+The path to a live database taken as input. For flexibility, this can be Prisma schemas as strings, or only the
 connection string. See variants.
 """

-[enumShapes.DatasourceParam.variants.SchemaPath]
-shape = "PathContainer"
-
-[enumShapes.DatasourceParam.variants.SchemaString]
-shape = "SchemaContainer"
+[enumShapes.DatasourceParam.variants.Schema]
+shape = "SchemasContainer"

 [enumShapes.DatasourceParam.variants.ConnectionString]
 shape = "UrlContainer"
+
+[recordShapes.SchemasContainer]
+description = "A container that holds multiple Prisma schema files."
+fields.files.shape = "SchemaContainer"
+fields.files.isList = true
+
+[recordShapes.SchemaContainer]
+description = "A container that holds the path and the content of a Prisma schema file."
+
+fields.content.description = "The content of the Prisma schema file."
+fields.content.shape = "string"
+
+fields.path.shape = "string"
+fields.path.description = "The file name of the Prisma schema file."
+
+[recordShapes.SchemasWithConfigDir]
+description = "A list of Prisma schema files with a config directory."
+
+fields.files.description = "A list of Prisma schema files."
+fields.files.shape = "SchemaContainer"
+fields.files.isList = true
+
+fields.configDir.description = "An optional directory containing the config files such as SSL certificates."
+fields.configDir.shape = "string"
+
+[recordShapes.UrlContainer]
+description = "An object with a `url` field."
+fields.url.shape = "string"
\ No newline at end of file
diff --git a/schema-engine/json-rpc-api-build/methods/createMigration.toml b/schema-engine/json-rpc-api-build/methods/createMigration.toml
index d393af359353..f6c89e5814cc 100644
--- a/schema-engine/json-rpc-api-build/methods/createMigration.toml
+++ b/schema-engine/json-rpc-api-build/methods/createMigration.toml
@@ -25,9 +25,9 @@ shape = "string"
 description = "The filesystem path of the migrations directory to use."
 shape = "string"

-[recordShapes.createMigrationInput.fields.prismaSchema]
-description = "The Prisma schema to use as a target for the generated migration."
-shape = "string"
+[recordShapes.createMigrationInput.fields.schema]
+description = "The Prisma schema files to use as a target for the generated migration."
+shape = "SchemasContainer" [recordShapes.createMigrationOutput] description = "The output of the `creatMigration` command." diff --git a/schema-engine/json-rpc-api-build/methods/dbExecute.toml b/schema-engine/json-rpc-api-build/methods/dbExecute.toml index ebcf7684052a..e60342761761 100644 --- a/schema-engine/json-rpc-api-build/methods/dbExecute.toml +++ b/schema-engine/json-rpc-api-build/methods/dbExecute.toml @@ -6,14 +6,6 @@ defined on all connectors. requestShape = "DbExecuteParams" responseShape = "DbExecuteResult" -[recordShapes.SchemaContainer] -description = "An object with a `schema` field." -fields.schema.shape = "string" - -[recordShapes.UrlContainer] -description = "An object with a `url` field." -fields.url.shape = "string" - [recordShapes.DbExecuteParams] description = "The type of params accepted by dbExecute." fields.datasourceType.description = "The location of the live database to connect to." @@ -26,7 +18,8 @@ description = "The type of results returned by dbExecute." [enumShapes.DbExecuteDatasourceType] description = "The location of the live database to connect to." -variants.schema.description = "Path to the Prisma schema file to take the datasource URL from." -variants.schema.shape = "SchemaContainer" +variants.schema.description = "Prisma schema files and content to take the datasource URL from." +variants.schema.shape = "SchemasWithConfigDir" + variants.url.description = "The URL of the database to run the command on." variants.url.shape = "UrlContainer" diff --git a/schema-engine/json-rpc-api-build/methods/diff.toml b/schema-engine/json-rpc-api-build/methods/diff.toml index 267249e6d352..5e3e380d90a0 100644 --- a/schema-engine/json-rpc-api-build/methods/diff.toml +++ b/schema-engine/json-rpc-api-build/methods/diff.toml @@ -93,14 +93,14 @@ description = """ The path to a Prisma schema. The _datasource url_ will be considered, and the live database it points to introspected for its schema. """ -shape = "SchemaContainer" +shape = "SchemasWithConfigDir" [enumShapes.DiffTarget.variants.schemaDatamodel] description = """ The path to a Prisma schema. The contents of the schema itself will be considered. This source does not need any database connection. """ -shape = "SchemaContainer" +shape = "SchemasContainer" [enumShapes.DiffTarget.variants.url] description = """ diff --git a/schema-engine/json-rpc-api-build/methods/evaluateDataLoss.toml b/schema-engine/json-rpc-api-build/methods/evaluateDataLoss.toml index b2ecd5dd307c..5875e5695f9e 100644 --- a/schema-engine/json-rpc-api-build/methods/evaluateDataLoss.toml +++ b/schema-engine/json-rpc-api-build/methods/evaluateDataLoss.toml @@ -21,9 +21,9 @@ description = "The input to the `evaluateDataLoss` command." description = "The location of the migrations directory." shape = "string" -[recordShapes.evaluateDataLossInput.fields.prismaSchema] -description = "The prisma schema to migrate to." -shape = "string" +[recordShapes.evaluateDataLossInput.fields.schema] +description = "The prisma schema files to migrate to." +shape = "SchemasContainer" [recordShapes.evaluateDataLossOutput] description = """ diff --git a/schema-engine/json-rpc-api-build/methods/introspect.toml b/schema-engine/json-rpc-api-build/methods/introspect.toml index 8128d9af314d..4f7e4743ac67 100644 --- a/schema-engine/json-rpc-api-build/methods/introspect.toml +++ b/schema-engine/json-rpc-api-build/methods/introspect.toml @@ -7,7 +7,7 @@ responseShape = "introspectResult" description = "Params type for the introspect method." 
[recordShapes.introspectParams.fields.schema] -shape = "string" +shape = "SchemasContainer" [recordShapes.introspectParams.fields.force] shape = "bool" @@ -15,7 +15,7 @@ shape = "bool" [recordShapes.introspectParams.fields.compositeTypeDepth] shape = "isize" -[recordShapes.introspectParams.fields.schemas] +[recordShapes.introspectParams.fields.namespaces] shape = "string" isList = true isNullable = true diff --git a/schema-engine/json-rpc-api-build/methods/schemaPush.toml b/schema-engine/json-rpc-api-build/methods/schemaPush.toml index 0e338e411cde..b668a6f21b98 100644 --- a/schema-engine/json-rpc-api-build/methods/schemaPush.toml +++ b/schema-engine/json-rpc-api-build/methods/schemaPush.toml @@ -11,8 +11,8 @@ description = "Push the schema ignoring destructive change warnings." shape = "bool" [recordShapes.schemaPushInput.fields.schema] -description = "The Prisma schema." -shape = "string" +description = "The Prisma schema files." +shape = "SchemasContainer" [recordShapes.schemaPushOutput] description = "Response result for the `schemaPush` method." diff --git a/schema-engine/json-rpc-api-build/src/lib.rs b/schema-engine/json-rpc-api-build/src/lib.rs index bbf9c6fb0ca6..ec01a8d06555 100644 --- a/schema-engine/json-rpc-api-build/src/lib.rs +++ b/schema-engine/json-rpc-api-build/src/lib.rs @@ -8,6 +8,8 @@ use std::{ path::Path, }; +// Note: the easiest way to update the generated JSON-RPC API types is to comment out every line in `schema-engine/core/src/lib.rs` +// but the `include!` macro invocation, then run `cargo build -p schema-core`. pub fn generate_rust_modules(out_dir: &Path) -> CrateResult { let api_defs_root = concat!(env!("CARGO_MANIFEST_DIR"), "/methods"); diff --git a/schema-engine/json-rpc-api-build/src/rust_crate.rs b/schema-engine/json-rpc-api-build/src/rust_crate.rs index f4fbd387d550..b422b51de341 100644 --- a/schema-engine/json-rpc-api-build/src/rust_crate.rs +++ b/schema-engine/json-rpc-api-build/src/rust_crate.rs @@ -81,11 +81,10 @@ fn generate_types_rs(mut file: impl std::io::Write, api: &Api) -> CrateResult { file.write_all(b"\n/// ```\n")?; } - writeln!( - file, - "#[derive(Serialize, Deserialize, Debug)]\npub struct {} {{", - rustify_type_name(type_name) - )?; + writeln!(file, "#[derive(Serialize, Deserialize, Debug)]",)?; + + writeln!(file, "pub struct {} {{", rustify_type_name(type_name))?; + for (field_name, field) in &record_type.fields { if let Some(description) = &field.description { for line in description.lines() { diff --git a/schema-engine/sql-introspection-tests/src/test_api.rs b/schema-engine/sql-introspection-tests/src/test_api.rs index d6eb9a349ec6..dc338f4b563a 100644 --- a/schema-engine/sql-introspection-tests/src/test_api.rs +++ b/schema-engine/sql-introspection-tests/src/test_api.rs @@ -555,12 +555,12 @@ fn parse_datamodel(dm: &str) -> psl::ValidatedSchema { #[track_caller] fn parse_datamodels(datamodels: &[(&str, String)]) -> psl::ValidatedSchema { - let datamodels = datamodels + let datamodels: Vec<_> = datamodels .iter() .map(|(file_name, dm)| (file_name.to_string(), psl::SourceFile::from(dm))) .collect(); - psl::validate_multi_file(datamodels) + psl::validate_multi_file(&datamodels) } pub struct IntrospectionMultiTestResult { diff --git a/schema-engine/sql-migration-tests/src/commands/create_migration.rs b/schema-engine/sql-migration-tests/src/commands/create_migration.rs index 77dd0dbf5753..a402610a92d6 100644 --- a/schema-engine/sql-migration-tests/src/commands/create_migration.rs +++ 
b/schema-engine/sql-migration-tests/src/commands/create_migration.rs
@@ -8,7 +8,7 @@
 use test_setup::runtime::run_with_thread_local_runtime;
 
 pub struct CreateMigration<'a> {
     api: &'a mut dyn SchemaConnector,
-    schema: &'a str,
+    files: Vec<SchemaContainer>,
     migrations_directory: &'a TempDir,
     draft: bool,
     name: &'a str,
@@ -18,12 +18,18 @@ impl<'a> CreateMigration<'a> {
     pub fn new(
         api: &'a mut dyn SchemaConnector,
         name: &'a str,
-        schema: &'a str,
+        files: &[(&'a str, &'a str)],
         migrations_directory: &'a TempDir,
     ) -> Self {
         CreateMigration {
             api,
-            schema,
+            files: files
+                .iter()
+                .map(|(path, content)| SchemaContainer {
+                    path: path.to_string(),
+                    content: content.to_string(),
+                })
+                .collect(),
             migrations_directory,
             draft: false,
             name,
@@ -40,7 +46,7 @@ impl<'a> CreateMigration<'a> {
         let output = create_migration(
             CreateMigrationInput {
                 migrations_directory_path: self.migrations_directory.path().to_str().unwrap().to_owned(),
-                prisma_schema: self.schema.to_owned(),
+                schema: SchemasContainer { files: self.files },
                 draft: self.draft,
                 migration_name: self.name.to_owned(),
             },
diff --git a/schema-engine/sql-migration-tests/src/commands/evaluate_data_loss.rs b/schema-engine/sql-migration-tests/src/commands/evaluate_data_loss.rs
index 7353ec1651cb..80039f712ee6 100644
--- a/schema-engine/sql-migration-tests/src/commands/evaluate_data_loss.rs
+++ b/schema-engine/sql-migration-tests/src/commands/evaluate_data_loss.rs
@@ -6,15 +6,25 @@ use tempfile::TempDir;
 pub struct EvaluateDataLoss<'a> {
     api: &'a mut dyn SchemaConnector,
     migrations_directory: &'a TempDir,
-    prisma_schema: String,
+    files: Vec<SchemaContainer>,
 }
 
 impl<'a> EvaluateDataLoss<'a> {
-    pub fn new(api: &'a mut dyn SchemaConnector, migrations_directory: &'a TempDir, prisma_schema: String) -> Self {
+    pub fn new<'b>(
+        api: &'a mut dyn SchemaConnector,
+        migrations_directory: &'a TempDir,
+        files: &[(&'b str, &'b str)],
+    ) -> Self {
         EvaluateDataLoss {
             api,
             migrations_directory,
-            prisma_schema,
+            files: files
+                .iter()
+                .map(|(path, content)| SchemaContainer {
+                    path: path.to_string(),
+                    content: content.to_string(),
+                })
+                .collect(),
         }
     }
 
@@ -22,7 +32,7 @@
         let fut = evaluate_data_loss(
             EvaluateDataLossInput {
                 migrations_directory_path: self.migrations_directory.path().to_str().unwrap().to_owned(),
-                prisma_schema: self.prisma_schema,
+                schema: SchemasContainer { files: self.files },
             },
             self.api,
         );
diff --git a/schema-engine/sql-migration-tests/src/commands/schema_push.rs b/schema-engine/sql-migration-tests/src/commands/schema_push.rs
index 541dba81797f..f7442b3a72c4 100644
--- a/schema-engine/sql-migration-tests/src/commands/schema_push.rs
+++ b/schema-engine/sql-migration-tests/src/commands/schema_push.rs
@@ -8,7 +8,7 @@
 use tracing_futures::Instrument;
 
 pub struct SchemaPush<'a> {
     api: &'a mut dyn SchemaConnector,
-    schema: String,
+    files: Vec<SchemaContainer>,
     force: bool,
     /// Purely for logging diagnostics.
     migration_id: Option<&'a str>,
@@ -17,10 +17,16 @@
 }
 
 impl<'a> SchemaPush<'a> {
-    pub fn new(api: &'a mut dyn SchemaConnector, schema: String, max_refresh_delay: Option<Duration>) -> Self {
+    pub fn new(api: &'a mut dyn SchemaConnector, files: &[(&str, &str)], max_refresh_delay: Option<Duration>) -> Self {
         SchemaPush {
             api,
-            schema,
+            files: files
+                .iter()
+                .map(|(path, content)| SchemaContainer {
+                    path: path.to_string(),
+                    content: content.to_string(),
+                })
+                .collect(),
             force: false,
             migration_id: None,
             max_ddl_refresh_delay: max_refresh_delay,
@@ -39,7 +45,7 @@
     fn send_impl(self) -> CoreResult<SchemaPushAssertion> {
         let input = SchemaPushInput {
-            schema: self.schema,
+            schema: SchemasContainer { files: self.files },
             force: self.force,
         };
diff --git a/schema-engine/sql-migration-tests/src/lib.rs b/schema-engine/sql-migration-tests/src/lib.rs
index 90c6776b2d82..ad911c9bcb23 100644
--- a/schema-engine/sql-migration-tests/src/lib.rs
+++ b/schema-engine/sql-migration-tests/src/lib.rs
@@ -2,6 +2,7 @@
 
 pub mod multi_engine_test_api;
 pub mod test_api;
+pub mod utils;
 
 mod assertions;
 mod commands;
diff --git a/schema-engine/sql-migration-tests/src/multi_engine_test_api.rs b/schema-engine/sql-migration-tests/src/multi_engine_test_api.rs
index 79c745aa86d3..ca1e807a46b9 100644
--- a/schema-engine/sql-migration-tests/src/multi_engine_test_api.rs
+++ b/schema-engine/sql-migration-tests/src/multi_engine_test_api.rs
@@ -313,7 +313,12 @@ impl EngineTestApi {
         schema: &'a str,
         migrations_directory: &'a TempDir,
     ) -> CreateMigration<'a> {
-        CreateMigration::new(&mut self.connector, name, schema, migrations_directory)
+        CreateMigration::new(
+            &mut self.connector,
+            name,
+            &[("schema.prisma", schema)],
+            migrations_directory,
+        )
     }
 
     /// Builder and assertions to call the DiagnoseMigrationHistory command.
@@ -336,7 +341,13 @@ impl EngineTestApi {
     /// Plan a `schemaPush` command
     pub fn schema_push(&mut self, dm: impl Into<String>) -> SchemaPush<'_> {
-        SchemaPush::new(&mut self.connector, dm.into(), self.max_ddl_refresh_delay)
+        let dm: String = dm.into();
+
+        SchemaPush::new(
+            &mut self.connector,
+            &[("schema.prisma", &dm)],
+            self.max_ddl_refresh_delay,
+        )
     }
 
     /// The schema name of the current connected database.
diff --git a/schema-engine/sql-migration-tests/src/test_api.rs b/schema-engine/sql-migration-tests/src/test_api.rs
index de5dd8e5b342..7f2bc769f78b 100644
--- a/schema-engine/sql-migration-tests/src/test_api.rs
+++ b/schema-engine/sql-migration-tests/src/test_api.rs
@@ -96,7 +96,21 @@ impl TestApi {
         schema: &'a str,
         migrations_directory: &'a TempDir,
     ) -> CreateMigration<'a> {
-        CreateMigration::new(&mut self.connector, name, schema, migrations_directory)
+        CreateMigration::new(
+            &mut self.connector,
+            name,
+            &[("schema.prisma", schema)],
+            migrations_directory,
+        )
+    }
+
+    pub fn create_migration_multi_file<'a>(
+        &'a mut self,
+        name: &'a str,
+        files: &[(&'a str, &'a str)],
+        migrations_directory: &'a TempDir,
+    ) -> CreateMigration<'a> {
+        CreateMigration::new(&mut self.connector, name, files, migrations_directory)
     }
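The pattern in these test-support changes: each single-file helper now files its datamodel under the conventional `schema.prisma` name and delegates to the multi-file code path, while the new `_multi_file` variants accept explicit `(path, content)` pairs. A minimal usage sketch (a hypothetical test following the suite's conventions, not part of this diff):

```rust
// Hypothetical test sketch: the single-file helper is now just a
// one-element special case of the multi-file one.
#[test_connector]
fn single_file_is_a_one_element_multi_file(mut api: TestApi) {
    let schema = api.datamodel_with_provider("model Cat { id Int @id }");
    let dir = api.create_migrations_directory();

    // Equivalent to `api.create_migration("init", &schema, &dir)`, which
    // wraps its argument as &[("schema.prisma", schema)] internally.
    api.create_migration_multi_file("init", &[("schema.prisma", schema.as_str())], &dir)
        .send_sync();
}
```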
 
     /// Create a temporary directory to serve as a test migrations directory.
@@ -132,7 +146,15 @@ impl TestApi {
         migrations_directory: &'a TempDir,
         schema: String,
     ) -> EvaluateDataLoss<'a> {
-        EvaluateDataLoss::new(&mut self.connector, migrations_directory, schema)
+        EvaluateDataLoss::new(&mut self.connector, migrations_directory, &[("schema.prisma", &schema)])
+    }
+
+    pub fn evaluate_data_loss_multi_file<'a>(
+        &'a mut self,
+        migrations_directory: &'a TempDir,
+        files: &[(&'a str, &'a str)],
+    ) -> EvaluateDataLoss<'a> {
+        EvaluateDataLoss::new(&mut self.connector, migrations_directory, files)
     }
 
     /// Returns true only when testing on MSSQL.
@@ -319,7 +341,7 @@ impl TestApi {
     pub fn expect_sql_for_schema(&mut self, schema: &'static str, sql: &expect_test::Expect) {
         let found = self.connector_diff(
             DiffTarget::Empty,
-            DiffTarget::Datamodel(SourceFile::new_static(schema)),
+            DiffTarget::Datamodel(vec![("schema.prisma".to_string(), SourceFile::new_static(schema))]),
             None,
         );
         sql.assert_eq(&found);
@@ -331,10 +353,25 @@ impl TestApi {
         self.schema_push(schema)
     }
 
+    pub fn schema_push_w_datasource_multi_file(&mut self, files: &[(&str, &str)]) -> SchemaPush<'_> {
+        let (first, rest) = files.split_first().unwrap();
+        let first_with_provider = self.datamodel_with_provider(first.1);
+        let recombined = [&[(first.0, first_with_provider.as_str())], rest].concat();
+
+        self.schema_push_multi_file(&recombined)
+    }
+
     /// Plan a `schemaPush` command
     pub fn schema_push(&mut self, dm: impl Into<String>) -> SchemaPush<'_> {
         let max_ddl_refresh_delay = self.max_ddl_refresh_delay();
-        SchemaPush::new(&mut self.connector, dm.into(), max_ddl_refresh_delay)
+        let dm: String = dm.into();
+
+        SchemaPush::new(&mut self.connector, &[("schema.prisma", &dm)], max_ddl_refresh_delay)
+    }
+
+    pub fn schema_push_multi_file(&mut self, files: &[(&str, &str)]) -> SchemaPush<'_> {
+        let max_ddl_refresh_delay = self.max_ddl_refresh_delay();
+        SchemaPush::new(&mut self.connector, files, max_ddl_refresh_delay)
     }
 
     pub fn tags(&self) -> BitFlags<Tags> {
diff --git a/schema-engine/sql-migration-tests/src/utils.rs b/schema-engine/sql-migration-tests/src/utils.rs
new file mode 100644
index 000000000000..0eea2d1a0658
--- /dev/null
+++ b/schema-engine/sql-migration-tests/src/utils.rs
@@ -0,0 +1,44 @@
+use schema_core::json_rpc::types::SchemaContainer;
+
+#[macro_export]
+macro_rules! write_multi_file {
+    // Match multiple pairs of filename and content
+    ( $( $filename:expr => $content:expr ),* $(,)?
) => { + { + use std::fs::File; + use std::io::Write; + + // Create a result vector to collect errors + let mut results = Vec::new(); + let tmpdir = tempfile::tempdir().unwrap(); + + std::fs::create_dir_all(&tmpdir).unwrap(); + + $( + let file_path = tmpdir.path().join($filename); + // Attempt to create or open the file + let result = (|| -> std::io::Result<()> { + let mut file = File::create(&file_path)?; + file.write_all($content.as_bytes())?; + Ok(()) + })(); + + result.unwrap(); + + results.push((file_path.to_string_lossy().into_owned(), $content)); + )* + + (tmpdir, results) + } + }; +} + +pub fn to_schema_containers(files: &[(String, &str)]) -> Vec { + files + .iter() + .map(|(path, content)| SchemaContainer { + path: path.to_string(), + content: content.to_string(), + }) + .collect() +} diff --git a/schema-engine/sql-migration-tests/tests/apply_migrations/mod.rs b/schema-engine/sql-migration-tests/tests/apply_migrations/mod.rs index 17a0800ba127..b51282f47c21 100644 --- a/schema-engine/sql-migration-tests/tests/apply_migrations/mod.rs +++ b/schema-engine/sql-migration-tests/tests/apply_migrations/mod.rs @@ -488,3 +488,36 @@ fn migrations_should_succeed_on_an_uninitialized_nonempty_database_with_postgis_ .send_sync() .assert_applied_migrations(&["01-init"]); } + +#[test_connector] +fn applying_a_single_migration_multi_file_should_work(api: TestApi) { + let schema_a = api.datamodel_with_provider( + r#" + model Cat { + id Int @id + name String + } + "#, + ); + let schema_b = indoc::indoc! {r#" + model Dog { + id Int @id + name String + } + "#}; + + let dir = api.create_migrations_directory(); + + api.create_migration_multi_file( + "init", + &[("schema_a.prisma", schema_a.as_str()), ("schema_b.prisma", schema_b)], + &dir, + ) + .send_sync(); + + api.apply_migrations(&dir) + .send_sync() + .assert_applied_migrations(&["init"]); + + api.apply_migrations(&dir).send_sync().assert_applied_migrations(&[]); +} diff --git a/schema-engine/sql-migration-tests/tests/create_migration/create_migration_tests.rs b/schema-engine/sql-migration-tests/tests/create_migration/create_migration_tests.rs index eb6dff20def0..26b95c1acb01 100644 --- a/schema-engine/sql-migration-tests/tests/create_migration/create_migration_tests.rs +++ b/schema-engine/sql-migration-tests/tests/create_migration/create_migration_tests.rs @@ -98,6 +98,145 @@ fn basic_create_migration_works(api: TestApi) { }); } +#[test_connector] +fn basic_create_migration_multi_file_works(api: TestApi) { + let schema_a = api.datamodel_with_provider( + r#" + model Cat { + id Int @id + name String + } + "#, + ); + + let schema_b = indoc::indoc! 
{r#" + model Dog { + id Int @id + name String + } + "#}; + + let dir = api.create_migrations_directory(); + + let is_postgres = api.is_postgres(); + let is_mysql = api.is_mysql(); + let is_sqlite = api.is_sqlite(); + let is_cockroach = api.is_cockroach(); + let is_mssql = api.is_mssql(); + + api.create_migration_multi_file("create-cats", &[("a.prisma", &schema_a), ("b.prisma", schema_b)], &dir) + .send_sync() + .assert_migration_directories_count(1) + .assert_migration("create-cats", move |migration| { + let expected_script = if is_cockroach { + expect![[r#" + -- CreateTable + CREATE TABLE "Cat" ( + "id" INT4 NOT NULL, + "name" STRING NOT NULL, + + CONSTRAINT "Cat_pkey" PRIMARY KEY ("id") + ); + + -- CreateTable + CREATE TABLE "Dog" ( + "id" INT4 NOT NULL, + "name" STRING NOT NULL, + + CONSTRAINT "Dog_pkey" PRIMARY KEY ("id") + ); + "#]] + } else if is_postgres { + expect![[r#" + -- CreateTable + CREATE TABLE "Cat" ( + "id" INTEGER NOT NULL, + "name" TEXT NOT NULL, + + CONSTRAINT "Cat_pkey" PRIMARY KEY ("id") + ); + + -- CreateTable + CREATE TABLE "Dog" ( + "id" INTEGER NOT NULL, + "name" TEXT NOT NULL, + + CONSTRAINT "Dog_pkey" PRIMARY KEY ("id") + ); + "#]] + } else if is_mysql { + expect![[r#" + -- CreateTable + CREATE TABLE `Cat` ( + `id` INTEGER NOT NULL, + `name` VARCHAR(191) NOT NULL, + + PRIMARY KEY (`id`) + ) DEFAULT CHARACTER SET utf8mb4 COLLATE utf8mb4_unicode_ci; + + -- CreateTable + CREATE TABLE `Dog` ( + `id` INTEGER NOT NULL, + `name` VARCHAR(191) NOT NULL, + + PRIMARY KEY (`id`) + ) DEFAULT CHARACTER SET utf8mb4 COLLATE utf8mb4_unicode_ci; + "#]] + } else if is_sqlite { + expect![[r#" + -- CreateTable + CREATE TABLE "Cat" ( + "id" INTEGER NOT NULL PRIMARY KEY AUTOINCREMENT, + "name" TEXT NOT NULL + ); + + -- CreateTable + CREATE TABLE "Dog" ( + "id" INTEGER NOT NULL PRIMARY KEY AUTOINCREMENT, + "name" TEXT NOT NULL + ); + "#]] + } else if is_mssql { + expect![[r#" + BEGIN TRY + + BEGIN TRAN; + + -- CreateTable + CREATE TABLE [dbo].[Cat] ( + [id] INT NOT NULL, + [name] NVARCHAR(1000) NOT NULL, + CONSTRAINT [Cat_pkey] PRIMARY KEY CLUSTERED ([id]) + ); + + -- CreateTable + CREATE TABLE [dbo].[Dog] ( + [id] INT NOT NULL, + [name] NVARCHAR(1000) NOT NULL, + CONSTRAINT [Dog_pkey] PRIMARY KEY CLUSTERED ([id]) + ); + + COMMIT TRAN; + + END TRY + BEGIN CATCH + + IF @@TRANCOUNT > 0 + BEGIN + ROLLBACK TRAN; + END; + THROW + + END CATCH + "#]] + } else { + unreachable!() + }; + + migration.expect_contents(expected_script) + }); +} + #[test_connector] fn creating_a_second_migration_should_have_the_previous_sql_schema_as_baseline(api: TestApi) { let dm1 = api.datamodel_with_provider( diff --git a/schema-engine/sql-migration-tests/tests/errors/error_tests.rs b/schema-engine/sql-migration-tests/tests/errors/error_tests.rs index b266040381d9..2318e0f2f7c8 100644 --- a/schema-engine/sql-migration-tests/tests/errors/error_tests.rs +++ b/schema-engine/sql-migration-tests/tests/errors/error_tests.rs @@ -2,7 +2,7 @@ use indoc::{formatdoc, indoc}; use pretty_assertions::assert_eq; use quaint::prelude::Insert; use schema_core::{ - json_rpc::types::{DatasourceParam, EnsureConnectionValidityParams}, + json_rpc::types::{DatasourceParam, EnsureConnectionValidityParams, SchemasContainer}, schema_connector::ConnectorError, }; use serde_json::json; @@ -16,7 +16,12 @@ pub(crate) async fn connection_error(schema: String) -> ConnectorError { }; api.ensure_connection_validity(EnsureConnectionValidityParams { - datasource: DatasourceParam::SchemaString(SchemaContainer { schema }), + datasource: 
DatasourceParam::Schema(SchemasContainer { + files: vec![SchemaContainer { + path: "schema.prisma".to_string(), + content: schema, + }], + }), }) .await .unwrap_err() @@ -403,7 +408,12 @@ async fn connection_string_problems_give_a_nice_error() { let api = schema_core::schema_api(Some(dm.clone()), None).unwrap(); let error = api .ensure_connection_validity(EnsureConnectionValidityParams { - datasource: DatasourceParam::SchemaString(SchemaContainer { schema: dm }), + datasource: DatasourceParam::Schema(SchemasContainer { + files: vec![SchemaContainer { + path: "schema.prisma".to_string(), + content: dm, + }], + }), }) .await .unwrap_err(); diff --git a/schema-engine/sql-migration-tests/tests/evaluate_data_loss/evaluate_data_loss_tests.rs b/schema-engine/sql-migration-tests/tests/evaluate_data_loss/evaluate_data_loss_tests.rs index 44f23e109fe6..56faa8dd4a28 100644 --- a/schema-engine/sql-migration-tests/tests/evaluate_data_loss/evaluate_data_loss_tests.rs +++ b/schema-engine/sql-migration-tests/tests/evaluate_data_loss/evaluate_data_loss_tests.rs @@ -284,3 +284,67 @@ fn evaluate_data_loss_maps_warnings_to_the_right_steps(api: TestApi) { ("Added the required column `isGoodDog` to the `Dog` table without a default value. There are 1 rows in this table, it is not possible to execute this step.".into(), if is_postgres { 2 } else { 1 }), ]); } + +#[test_connector(capabilities(Enums))] +fn evaluate_data_loss_multi_file_maps_warnings_to_the_right_steps(api: TestApi) { + let dm1 = api.datamodel_with_provider( + r#" + model Cat { + id Int @id + name String + } + + model Dog { + id Int @id + name String + } + "#, + ); + + let directory = api.create_migrations_directory(); + api.create_migration("1-initial", &dm1, &directory).send_sync(); + api.apply_migrations(&directory).send_sync(); + + api.insert("Cat").value("id", 1).value("name", "Felix").result_raw(); + api.insert("Dog").value("id", 1).value("name", "Norbert").result_raw(); + + let schema_a = api.datamodel_with_provider( + r#" + model Hyena { + id Int @id + name String + } + + model Cat { + id Int @id + } + "#, + ); + + let schema_b = indoc::indoc! {r#" + model Dog { + id Int @id + name String + isGoodDog BetterBoolean + } + + enum BetterBoolean { + YES + } + "#}; + + let warn = format!( + "You are about to drop the column `name` on the `{}` table, which still contains 1 non-null values.", + api.normalize_identifier("Cat") + ); + + let is_postgres = api.is_postgres(); + + #[allow(clippy::bool_to_int_with_if)] + api.evaluate_data_loss_multi_file(&directory, &[("schema_a", &schema_a), ("schema_b", schema_b)]) + .send() + .assert_warnings_with_indices(&[(warn.into(), if is_postgres { 1 } else { 0 })]) + .assert_unexecutables_with_indices(&[ + ("Added the required column `isGoodDog` to the `Dog` table without a default value. 
There are 1 rows in this table, it is not possible to execute this step.".into(), if is_postgres { 2 } else { 1 }), + ]); +} diff --git a/schema-engine/sql-migration-tests/tests/existing_data/mod.rs b/schema-engine/sql-migration-tests/tests/existing_data/mod.rs index bed7b8fc80ca..1b57ed4bb36f 100644 --- a/schema-engine/sql-migration-tests/tests/existing_data/mod.rs +++ b/schema-engine/sql-migration-tests/tests/existing_data/mod.rs @@ -35,6 +35,45 @@ fn dropping_a_table_with_rows_should_warn(api: TestApi) { .assert_no_steps(); } +#[test_connector] +fn dropping_a_table_with_rows_multi_file_should_warn(api: TestApi) { + let schema_a = r#" + model Cat { + id String @id @default(cuid()) + } + "#; + let schema_b = r#" + model Dog { + id String @id @default(cuid()) + } + "#; + + api.schema_push_w_datasource_multi_file(&[("a.prisma", schema_a), ("b.prisma", schema_b)]) + .send() + .assert_green(); + + api.query( + Insert::single_into(api.render_table_name("Cat")) + .value("id", "test") + .into(), + ); + api.query( + Insert::single_into(api.render_table_name("Dog")) + .value("id", "test") + .into(), + ); + + let warn = format!( + "You are about to drop the `{}` table, which is not empty (1 rows).", + api.normalize_identifier("Dog") + ); + + api.schema_push_w_datasource_multi_file(&[("a.prisma", schema_a)]) + .send() + .assert_warnings(&[warn.into()]) + .assert_no_steps(); +} + #[test_connector] fn dropping_a_column_with_non_null_values_should_warn(api: TestApi) { let dm = r#" diff --git a/schema-engine/sql-migration-tests/tests/initialization/mod.rs b/schema-engine/sql-migration-tests/tests/initialization/mod.rs index 81df714f445e..b064d79c5845 100644 --- a/schema-engine/sql-migration-tests/tests/initialization/mod.rs +++ b/schema-engine/sql-migration-tests/tests/initialization/mod.rs @@ -1,4 +1,4 @@ -use schema_core::schema_api; +use schema_core::{json_rpc::types::SchemasContainer, schema_api}; use sql_migration_tests::{multi_engine_test_api::*, test_api::SchemaContainer}; use test_macros::test_connector; use url::Url; @@ -58,7 +58,12 @@ fn connecting_to_a_postgres_database_with_missing_schema_creates_it(api: TestApi let me = schema_api(Some(schema.clone()), None).unwrap(); tok( me.ensure_connection_validity(schema_core::json_rpc::types::EnsureConnectionValidityParams { - datasource: schema_core::json_rpc::types::DatasourceParam::SchemaString(SchemaContainer { schema }), + datasource: schema_core::json_rpc::types::DatasourceParam::Schema(SchemasContainer { + files: vec![SchemaContainer { + path: "schema.prisma".to_string(), + content: schema, + }], + }), }), ) .unwrap(); @@ -104,7 +109,12 @@ fn ipv6_addresses_are_supported_in_connection_strings(api: TestApi) { let engine = schema_api(Some(schema.clone()), None).unwrap(); tok( engine.ensure_connection_validity(schema_core::json_rpc::types::EnsureConnectionValidityParams { - datasource: schema_core::json_rpc::types::DatasourceParam::SchemaString(SchemaContainer { schema }), + datasource: schema_core::json_rpc::types::DatasourceParam::Schema(SchemasContainer { + files: vec![SchemaContainer { + path: "schema.prisma".to_string(), + content: schema, + }], + }), }), ) .unwrap(); diff --git a/schema-engine/sql-migration-tests/tests/introspection/mod.rs b/schema-engine/sql-migration-tests/tests/introspection/mod.rs index cd87e77be542..e23b0abe2a00 100644 --- a/schema-engine/sql-migration-tests/tests/introspection/mod.rs +++ b/schema-engine/sql-migration-tests/tests/introspection/mod.rs @@ -1,6 +1,7 @@ use expect_test::expect; use 
quaint::connector::rusqlite; -use schema_core::json_rpc::types::IntrospectParams; +use schema_core::json_rpc::types::{IntrospectParams, SchemasContainer}; +use sql_migration_tests::test_api::SchemaContainer; use test_setup::runtime::run_with_thread_local_runtime as tok; #[test] @@ -30,10 +31,15 @@ fn introspect_force_with_invalid_schema() { let api = schema_core::schema_api(Some(schema.clone()), None).unwrap(); let params = IntrospectParams { - schema, + schema: SchemasContainer { + files: vec![SchemaContainer { + path: "schema.prisma".to_string(), + content: schema, + }], + }, force: true, composite_type_depth: 0, - schemas: None, + namespaces: None, }; let result = &tok(api.introspect(params)) @@ -85,10 +91,15 @@ fn introspect_no_force_with_invalid_schema() { let api = schema_core::schema_api(Some(schema.clone()), None).unwrap(); let params = IntrospectParams { - schema, + schema: SchemasContainer { + files: vec![SchemaContainer { + path: "schema.prisma".to_string(), + content: schema, + }], + }, force: false, composite_type_depth: 0, - schemas: None, + namespaces: None, }; let ufe = tok(api.introspect(params)).unwrap_err().to_user_facing(); diff --git a/schema-engine/sql-migration-tests/tests/migrations/cockroachdb.rs b/schema-engine/sql-migration-tests/tests/migrations/cockroachdb.rs index 5612e04cb689..dc8778f0138b 100644 --- a/schema-engine/sql-migration-tests/tests/migrations/cockroachdb.rs +++ b/schema-engine/sql-migration-tests/tests/migrations/cockroachdb.rs @@ -4,7 +4,7 @@ mod failure_modes; use prisma_value::PrismaValue; use psl::parser_database::*; use quaint::prelude::Insert; -use schema_core::schema_connector::DiffTarget; +use schema_core::{json_rpc::types::SchemasContainer, schema_connector::DiffTarget}; use serde_json::json; use sql_migration_tests::test_api::*; use sql_schema_describer::{ColumnTypeFamily, ForeignKeyAction}; @@ -30,7 +30,12 @@ fn db_push_on_cockroach_db_with_postgres_provider_fails(api: TestApi) { let connector = schema_core::schema_api(Some(schema.clone()), None).unwrap(); let error = tok(connector.schema_push(schema_core::json_rpc::types::SchemaPushInput { force: false, - schema: schema.clone(), + schema: schema_core::json_rpc::types::SchemasContainer { + files: vec![schema_core::json_rpc::types::SchemaContainer { + path: "schema.prisma".to_string(), + content: schema, + }], + }, })) .unwrap_err() .message() @@ -437,8 +442,11 @@ fn connecting_to_a_cockroachdb_database_with_the_postgresql_connector_fails(_api let engine = schema_core::schema_api(None, None).unwrap(); let err = tok( engine.ensure_connection_validity(schema_core::json_rpc::types::EnsureConnectionValidityParams { - datasource: schema_core::json_rpc::types::DatasourceParam::SchemaString(SchemaContainer { - schema: dm.to_owned(), + datasource: schema_core::json_rpc::types::DatasourceParam::Schema(SchemasContainer { + files: vec![SchemaContainer { + path: "schema.prisma".to_string(), + content: dm.to_owned(), + }], }), }), ) @@ -1333,8 +1341,8 @@ fn alter_type_works(api: TestApi) { "#; let migration = api.connector_diff( - DiffTarget::Datamodel(schema.into()), - DiffTarget::Datamodel(to_schema.into()), + DiffTarget::Datamodel(vec![("schema.prisma".to_string(), schema.into())]), + DiffTarget::Datamodel(vec![("schema.prisma".to_string(), to_schema.into())]), None, ); @@ -1404,7 +1412,10 @@ fn schema_from_introspection_docs_works(api: TestApi) { let migration = api.connector_diff( DiffTarget::Database, - DiffTarget::Datamodel(SourceFile::new_static(introspected_schema)), + 
DiffTarget::Datamodel(vec![( + "schema.prisma".to_string(), + SourceFile::new_static(introspected_schema), + )]), None, ); @@ -1461,8 +1472,13 @@ fn cockroach_introspection_with_postgres_provider_fails() { let error = tok(me.introspect(schema_core::json_rpc::types::IntrospectParams { composite_type_depth: -1, force: false, - schema, - schemas: None, + schema: SchemasContainer { + files: vec![SchemaContainer { + path: "schema.prisma".to_string(), + content: schema, + }], + }, + namespaces: None, })) .unwrap_err() .message() diff --git a/schema-engine/sql-migration-tests/tests/migrations/db_execute.rs b/schema-engine/sql-migration-tests/tests/migrations/db_execute.rs index a74fd4a2cbf5..d635ee445c82 100644 --- a/schema-engine/sql-migration-tests/tests/migrations/db_execute.rs +++ b/schema-engine/sql-migration-tests/tests/migrations/db_execute.rs @@ -1,5 +1,8 @@ use quaint::{prelude::Queryable, single::Quaint}; +use schema_core::json_rpc::types::SchemasWithConfigDir; use sql_migration_tests::test_api::*; +use sql_migration_tests::utils::to_schema_containers; +use sql_migration_tests::*; #[test] fn db_execute_input_source_takes_expected_json_shape() { @@ -61,7 +64,7 @@ fn db_execute_happy_path_with_prisma_schema() { url.replace('\\', "\\\\") ); let schema_path = tmpdir.path().join("schema.prisma"); - std::fs::write(&schema_path, prisma_schema).unwrap(); + std::fs::write(&schema_path, prisma_schema.clone()).unwrap(); let script = r#" CREATE TABLE "dogs" ( id INTEGER PRIMARY KEY, name TEXT ); INSERT INTO "dogs" ("name") VALUES ('snoopy'), ('marmaduke'); @@ -70,8 +73,12 @@ fn db_execute_happy_path_with_prisma_schema() { // Execute the command. let generic_api = schema_core::schema_api(None, None).unwrap(); tok(generic_api.db_execute(DbExecuteParams { - datasource_type: DbExecuteDatasourceType::Schema(SchemaContainer { - schema: schema_path.to_string_lossy().into_owned(), + datasource_type: DbExecuteDatasourceType::Schema(SchemasWithConfigDir { + files: vec![SchemaContainer { + path: schema_path.to_string_lossy().into_owned(), + content: prisma_schema.to_string(), + }], + config_dir: schema_path.parent().unwrap().to_string_lossy().into_owned(), }), script: script.to_owned(), })) @@ -166,8 +173,12 @@ fn sqlite_db_execute_with_schema_datasource_resolves_relative_paths_correctly() let api = schema_core::schema_api(None, None).unwrap(); tok(api.db_execute(DbExecuteParams { - datasource_type: DbExecuteDatasourceType::Schema(SchemaContainer { - schema: schema_path.to_str().unwrap().to_owned(), + datasource_type: DbExecuteDatasourceType::Schema(SchemasWithConfigDir { + files: vec![SchemaContainer { + path: schema_path.to_str().unwrap().to_owned(), + content: schema.to_owned(), + }], + config_dir: schema_path.parent().unwrap().to_string_lossy().into_owned(), }), script: "CREATE TABLE dog ( id INTEGER PRIMARY KEY )".to_owned(), })) @@ -175,3 +186,44 @@ fn sqlite_db_execute_with_schema_datasource_resolves_relative_paths_correctly() assert!(expected_sqlite_path.exists()); } + +#[test] +fn db_execute_multi_file() { + let (tmpdir, files) = write_multi_file! { + "a.prisma" => r#" + datasource dbtest { + provider = "sqlite" + url = "file:db1.sqlite" + } + "#, + "b.prisma" => r#" + model dogs { + id Int @id + } + "#, + }; + + let url = format!("file:{}/db1.sqlite", tmpdir.path().to_string_lossy()); + let script = r#" + CREATE TABLE "dogs" ( id INTEGER PRIMARY KEY, name TEXT ); + INSERT INTO "dogs" ("name") VALUES ('snoopy'), ('marmaduke'); + "#; + + // Execute the command. 
+ let generic_api = schema_core::schema_api(None, None).unwrap(); + tok(generic_api.db_execute(DbExecuteParams { + datasource_type: DbExecuteDatasourceType::Schema(SchemasWithConfigDir { + files: to_schema_containers(&files), + config_dir: tmpdir.path().to_string_lossy().into_owned(), + }), + script: script.to_owned(), + })) + .unwrap(); + + // Check that the command was executed + let q = tok(quaint::single::Quaint::new(&url)).unwrap(); + let result = tok(q.query_raw("SELECT name FROM dogs;", &[])).unwrap(); + let mut rows = result.into_iter(); + assert_eq!(rows.next().unwrap()[0].to_string().unwrap(), "snoopy"); + assert_eq!(rows.next().unwrap()[0].to_string().unwrap(), "marmaduke"); +} diff --git a/schema-engine/sql-migration-tests/tests/migrations/diff.rs b/schema-engine/sql-migration-tests/tests/migrations/diff.rs index bf48f4faa34c..0eadac39657e 100644 --- a/schema-engine/sql-migration-tests/tests/migrations/diff.rs +++ b/schema-engine/sql-migration-tests/tests/migrations/diff.rs @@ -1,10 +1,10 @@ use quaint::{prelude::Queryable, single::Quaint}; use schema_core::{ commands::diff, - json_rpc::types::{DiffTarget, PathContainer}, + json_rpc::types::{DiffTarget, PathContainer, SchemasContainer, SchemasWithConfigDir}, schema_connector::SchemaConnector, }; -use sql_migration_tests::test_api::*; +use sql_migration_tests::{test_api::*, utils::to_schema_containers}; use std::sync::Arc; #[test_connector(tags(Sqlite))] @@ -16,7 +16,7 @@ fn diffing_postgres_schemas_when_initialized_on_sqlite(mut api: TestApi) { api.connector.set_host(host.clone()); - let from = r#" + let from_schema = r#" datasource db { provider = "postgresql" url = "postgresql://example.com/test" @@ -28,9 +28,9 @@ fn diffing_postgres_schemas_when_initialized_on_sqlite(mut api: TestApi) { } "#; - let from_file = write_file_to_tmp(from, &tempdir, "from"); + let from_file = write_file_to_tmp(from_schema, &tempdir, "from"); - let to = r#" + let to_schema = r#" datasource db { provider = "postgresql" url = "postgresql://example.com/test" @@ -46,16 +46,22 @@ fn diffing_postgres_schemas_when_initialized_on_sqlite(mut api: TestApi) { } "#; - let to_file = write_file_to_tmp(to, &tempdir, "to"); + let to_file = write_file_to_tmp(to_schema, &tempdir, "to"); api.diff(DiffParams { exit_code: None, - from: DiffTarget::SchemaDatamodel(SchemaContainer { - schema: from_file.to_string_lossy().into_owned(), + from: DiffTarget::SchemaDatamodel(SchemasContainer { + files: vec![SchemaContainer { + path: from_file.to_string_lossy().into_owned(), + content: from_schema.to_string(), + }], }), shadow_database_url: None, - to: DiffTarget::SchemaDatamodel(SchemaContainer { - schema: to_file.to_string_lossy().into_owned(), + to: DiffTarget::SchemaDatamodel(SchemasContainer { + files: vec![SchemaContainer { + path: to_file.to_string_lossy().into_owned(), + content: to_schema.to_string(), + }], }), script: true, }) @@ -63,12 +69,18 @@ fn diffing_postgres_schemas_when_initialized_on_sqlite(mut api: TestApi) { api.diff(DiffParams { exit_code: None, - from: DiffTarget::SchemaDatamodel(SchemaContainer { - schema: from_file.to_string_lossy().into_owned(), + from: DiffTarget::SchemaDatamodel(SchemasContainer { + files: vec![SchemaContainer { + path: from_file.to_string_lossy().into_owned(), + content: from_schema.to_string(), + }], }), shadow_database_url: None, - to: DiffTarget::SchemaDatamodel(SchemaContainer { - schema: to_file.to_string_lossy().into_owned(), + to: DiffTarget::SchemaDatamodel(SchemasContainer { + files: vec![SchemaContainer { + path: 
to_file.to_string_lossy().into_owned(), + content: to_schema.to_string(), + }], }), script: false, }) @@ -192,8 +204,11 @@ fn from_schema_datamodel_to_url(mut api: TestApi) { let input = DiffParams { exit_code: None, - from: DiffTarget::SchemaDatamodel(SchemaContainer { - schema: schema_path.to_string_lossy().into_owned(), + from: DiffTarget::SchemaDatamodel(SchemasContainer { + files: vec![SchemaContainer { + path: schema_path.to_string_lossy().into_owned(), + content: first_schema.to_string(), + }], }), script: true, shadow_database_url: None, @@ -240,8 +255,12 @@ fn from_schema_datasource_relative(mut api: TestApi) { let params = DiffParams { exit_code: None, - from: DiffTarget::SchemaDatasource(SchemaContainer { - schema: schema_path.to_string_lossy().into_owned(), + from: DiffTarget::SchemaDatasource(SchemasWithConfigDir { + files: vec![SchemaContainer { + path: schema_path.to_string_lossy().into_owned(), + content: schema.to_string(), + }], + config_dir: schema_path.parent().unwrap().to_string_lossy().into_owned(), }), script: true, shadow_database_url: None, @@ -296,8 +315,12 @@ fn from_schema_datasource_to_url(mut api: TestApi) { let input = DiffParams { exit_code: None, - from: DiffTarget::SchemaDatasource(SchemaContainer { - schema: schema_path.to_string_lossy().into_owned(), + from: DiffTarget::SchemaDatasource(SchemasWithConfigDir { + files: vec![SchemaContainer { + path: schema_path.to_string_lossy().into_owned(), + content: schema_content.to_string(), + }], + config_dir: schema_path.parent().unwrap().to_string_lossy().into_owned(), }), script: true, shadow_database_url: None, @@ -396,12 +419,18 @@ fn diffing_mongo_schemas_to_script_returns_a_nice_error() { let params = DiffParams { exit_code: None, - from: DiffTarget::SchemaDatamodel(SchemaContainer { - schema: from_file.to_string_lossy().into_owned(), + from: DiffTarget::SchemaDatamodel(SchemasContainer { + files: vec![SchemaContainer { + path: from_file.to_string_lossy().into_owned(), + content: from.to_string(), + }], }), shadow_database_url: None, - to: DiffTarget::SchemaDatamodel(SchemaContainer { - schema: to_file.to_string_lossy().into_owned(), + to: DiffTarget::SchemaDatamodel(SchemasContainer { + files: vec![SchemaContainer { + path: to_file.to_string_lossy().into_owned(), + content: to.to_string(), + }], }), script: true, }; @@ -480,12 +509,18 @@ fn diffing_mongo_schemas_works() { let params = DiffParams { exit_code: None, - from: DiffTarget::SchemaDatamodel(SchemaContainer { - schema: from_file.to_string_lossy().into_owned(), + from: DiffTarget::SchemaDatamodel(SchemasContainer { + files: vec![SchemaContainer { + path: from_file.to_string_lossy().into_owned(), + content: from.to_string(), + }], }), shadow_database_url: None, - to: DiffTarget::SchemaDatamodel(SchemaContainer { - schema: to_file.to_string_lossy().into_owned(), + to: DiffTarget::SchemaDatamodel(SchemasContainer { + files: vec![SchemaContainer { + path: to_file.to_string_lossy().into_owned(), + content: to.to_string(), + }], }), script: false, }; @@ -498,48 +533,6 @@ fn diffing_mongo_schemas_works() { expected_printed_messages.assert_eq(&diff_output(params)); } -#[test] -fn with_missing_prisma_schema_should_return_helpful_error() { - // We are counting on this path not existing. 
- let tmp_path = std::env::temp_dir().join("prisma_migrate_diff_test_this_file_does_not_exist"); - let tmp_path_str = tmp_path.to_str().unwrap(); - - // We want to test for both --schema-datamodel and --schema-datasource - let test_with_from_target = |from_target: DiffTarget| { - let params = DiffParams { - exit_code: None, - from: from_target, - script: false, - shadow_database_url: None, - to: DiffTarget::Empty, - }; - - let error = diff_error(params); - assert!(error.match_indices(tmp_path_str).next().is_some()); - - let expected = if cfg!(windows) { - expect![[r#" - Error trying to read Prisma schema file at ``. - The system cannot find the file specified. (os error 2) - "#]] - } else { - expect![[r#" - Error trying to read Prisma schema file at ``. - No such file or directory (os error 2) - "#]] - }; - - expected.assert_eq(&error.replace(tmp_path_str, "")); - }; - - test_with_from_target(DiffTarget::SchemaDatamodel(SchemaContainer { - schema: tmp_path_str.to_owned(), - })); - test_with_from_target(DiffTarget::SchemaDatasource(SchemaContainer { - schema: tmp_path_str.to_owned(), - })); -} - #[test] fn diffing_two_schema_datamodels_with_missing_datasource_env_vars() { for provider in ["sqlite", "postgresql", "postgres", "mysql", "sqlserver"] { @@ -566,8 +559,8 @@ fn diffing_two_schema_datamodels_with_missing_datasource_env_vars() { ); let tmpdir = tempfile::tempdir().unwrap(); - let schema_a = write_file_to_tmp(&schema_a, &tmpdir, "schema_a"); - let schema_b = write_file_to_tmp(&schema_b, &tmpdir, "schema_b"); + let schema_a_path = write_file_to_tmp(&schema_a, &tmpdir, "schema_a"); + let schema_b_path = write_file_to_tmp(&schema_b, &tmpdir, "schema_b"); let expected = expect![[r#" @@ -576,13 +569,19 @@ fn diffing_two_schema_datamodels_with_missing_datasource_env_vars() { "#]]; expected.assert_eq(&diff_output(DiffParams { exit_code: None, - from: DiffTarget::SchemaDatamodel(SchemaContainer { - schema: schema_a.to_str().unwrap().to_owned(), + from: DiffTarget::SchemaDatamodel(SchemasContainer { + files: vec![SchemaContainer { + path: schema_a_path.to_str().unwrap().to_owned(), + content: schema_a.to_string(), + }], }), script: false, shadow_database_url: None, - to: DiffTarget::SchemaDatamodel(SchemaContainer { - schema: schema_b.to_str().unwrap().to_owned(), + to: DiffTarget::SchemaDatamodel(SchemasContainer { + files: vec![SchemaContainer { + path: schema_b_path.to_str().unwrap().to_owned(), + content: schema_b.to_string(), + }], }), })) } @@ -607,11 +606,17 @@ fn diff_with_exit_code_and_empty_diff_returns_zero() { let (result, diff) = diff_result(DiffParams { exit_code: Some(true), - from: DiffTarget::SchemaDatamodel(SchemaContainer { - schema: path.to_str().unwrap().to_owned(), + from: DiffTarget::SchemaDatamodel(SchemasContainer { + files: vec![SchemaContainer { + path: path.to_str().unwrap().to_owned(), + content: schema.to_string(), + }], }), - to: DiffTarget::SchemaDatamodel(SchemaContainer { - schema: path.to_str().unwrap().to_owned(), + to: DiffTarget::SchemaDatamodel(SchemasContainer { + files: vec![SchemaContainer { + path: path.to_str().unwrap().to_owned(), + content: schema.to_string(), + }], }), script: false, shadow_database_url: None, @@ -644,8 +649,11 @@ fn diff_with_exit_code_and_non_empty_diff_returns_two() { let (result, diff) = diff_result(DiffParams { exit_code: Some(true), from: DiffTarget::Empty, - to: DiffTarget::SchemaDatamodel(SchemaContainer { - schema: path.to_str().unwrap().to_owned(), + to: DiffTarget::SchemaDatamodel(SchemasContainer { + files: 
vec![SchemaContainer { + path: path.to_str().unwrap().to_owned(), + content: schema.to_string(), + }], }), script: false, shadow_database_url: None, @@ -705,8 +713,12 @@ fn diff_with_non_existing_sqlite_database_from_datasource() { from: DiffTarget::Empty, script: false, shadow_database_url: None, - to: DiffTarget::SchemaDatasource(SchemaContainer { - schema: schema_path.to_string_lossy().into_owned(), + to: DiffTarget::SchemaDatasource(SchemasWithConfigDir { + files: vec![SchemaContainer { + path: schema_path.to_string_lossy().into_owned(), + content: schema.to_string(), + }], + config_dir: schema_path.parent().unwrap().to_string_lossy().into_owned(), }), }); @@ -717,6 +729,141 @@ fn diff_with_non_existing_sqlite_database_from_datasource() { expected.assert_eq(&error); } +#[test_connector] +fn from_multi_file_schema_datasource_to_url(mut api: TestApi) { + let host = Arc::new(TestConnectorHost::default()); + api.connector.set_host(host.clone()); + + let base_dir = tempfile::TempDir::new().unwrap(); + let base_dir_str = base_dir.path().to_string_lossy(); + let first_url = format!("file:{base_dir_str}/first_db.sqlite"); + let second_url = format!("file:{base_dir_str}/second_db.sqlite"); + + tok(async { + let q = quaint::single::Quaint::new(&first_url).await.unwrap(); + q.raw_cmd("CREATE TABLE cows ( id INTEGER PRIMARY KEY, moos BOOLEAN DEFAULT true );") + .await + .unwrap(); + }); + + tok(async { + let q = quaint::single::Quaint::new(&second_url).await.unwrap(); + q.raw_cmd("CREATE TABLE cats ( id INTEGER PRIMARY KEY, meows BOOLEAN DEFAULT true );") + .await + .unwrap(); + }); + + let schema_a = format!( + r#" + datasource db {{ + provider = "sqlite" + url = "{}" + }} + "#, + first_url.replace('\\', "\\\\") + ); + let schema_a_path = write_file_to_tmp(&schema_a, &base_dir, "a.prisma"); + + let schema_b = r#" + model cats { + id Int @id + meows Boolean + } + "#; + let schema_b_path = write_file_to_tmp(schema_b, &base_dir, "b.prisma"); + + let files = to_schema_containers(&[ + (schema_a_path.to_string_lossy().into_owned(), &schema_a), + (schema_b_path.to_string_lossy().into_owned(), schema_b), + ]); + + let input = DiffParams { + exit_code: None, + from: DiffTarget::SchemaDatasource(SchemasWithConfigDir { + files, + config_dir: base_dir.path().to_string_lossy().into_owned(), + }), + script: true, + shadow_database_url: None, + to: DiffTarget::Url(UrlContainer { url: second_url }), + }; + + api.diff(input).unwrap(); + + let expected_printed_messages = expect![[r#" + [ + "-- DropTable\nPRAGMA foreign_keys=off;\nDROP TABLE \"cows\";\nPRAGMA foreign_keys=on;\n\n-- CreateTable\nCREATE TABLE \"cats\" (\n \"id\" INTEGER NOT NULL PRIMARY KEY AUTOINCREMENT,\n \"meows\" BOOLEAN DEFAULT true\n);\n", + ] + "#]]; + expected_printed_messages.assert_debug_eq(&host.printed_messages.lock().unwrap()); +} + +#[test_connector] +fn from_multi_file_schema_datamodel_to_url(mut api: TestApi) { + let host = Arc::new(TestConnectorHost::default()); + api.connector.set_host(host.clone()); + + let base_dir = tempfile::TempDir::new().unwrap(); + let base_dir_str = base_dir.path().to_string_lossy(); + let first_url = format!("file:{base_dir_str}/first_db.sqlite"); + let second_url = format!("file:{base_dir_str}/second_db.sqlite"); + + tok(async { + let q = quaint::single::Quaint::new(&second_url).await.unwrap(); + q.raw_cmd("CREATE TABLE cats ( id INTEGER PRIMARY KEY, meows BOOLEAN DEFAULT true );") + .await + .unwrap(); + }); + + let from_files = { + let schema_a = format!( + r#" + datasource db {{ + provider = "sqlite" + 
url = "{}" + }} + + model cows {{ + id Int @id + meows Boolean + }} + "#, + first_url.replace('\\', "\\\\") + ); + let schema_a_path = write_file_to_tmp(&schema_a, &base_dir, "a.prisma"); + + let schema_b = r#" + model dogs { + id Int @id + wouaf Boolean + } + "#; + let schema_b_path = write_file_to_tmp(schema_b, &base_dir, "b.prisma"); + + to_schema_containers(&[ + (schema_a_path.to_string_lossy().into_owned(), &schema_a), + (schema_b_path.to_string_lossy().into_owned(), schema_b), + ]) + }; + + let input = DiffParams { + exit_code: None, + from: DiffTarget::SchemaDatamodel(SchemasContainer { files: from_files }), + script: true, + shadow_database_url: None, + to: DiffTarget::Url(UrlContainer { url: second_url }), + }; + + api.diff(input).unwrap(); + + let expected_printed_messages = expect![[r#" + [ + "-- DropTable\nPRAGMA foreign_keys=off;\nDROP TABLE \"cows\";\nPRAGMA foreign_keys=on;\n\n-- DropTable\nPRAGMA foreign_keys=off;\nDROP TABLE \"dogs\";\nPRAGMA foreign_keys=on;\n\n-- CreateTable\nCREATE TABLE \"cats\" (\n \"id\" INTEGER NOT NULL PRIMARY KEY AUTOINCREMENT,\n \"meows\" BOOLEAN DEFAULT true\n);\n", + ] + "#]]; + expected_printed_messages.assert_debug_eq(&host.printed_messages.lock().unwrap()); +} + // Call diff, and expect it to error. Return the error. pub(crate) fn diff_error(params: DiffParams) -> String { let api = schema_core::schema_api(None, None).unwrap(); diff --git a/schema-engine/sql-migration-tests/tests/migrations/drift_summary.rs b/schema-engine/sql-migration-tests/tests/migrations/drift_summary.rs index b9fe774f9b91..c485843649da 100644 --- a/schema-engine/sql-migration-tests/tests/migrations/drift_summary.rs +++ b/schema-engine/sql-migration-tests/tests/migrations/drift_summary.rs @@ -1,4 +1,5 @@ use expect_test::{expect, Expect}; +use schema_core::json_rpc::types::SchemasContainer; use sql_migration_tests::test_api::*; use std::sync::Arc; @@ -6,15 +7,22 @@ fn check(from: &str, to: &str, expectation: Expect) { let tmpdir = tempfile::tempdir().unwrap(); let from_schema = write_file_to_tmp(from, &tmpdir, "from.prisma"); let to_schema = write_file_to_tmp(to, &tmpdir, "to.prisma"); + let params = DiffParams { exit_code: None, - from: schema_core::json_rpc::types::DiffTarget::SchemaDatamodel(SchemaContainer { - schema: from_schema.to_str().unwrap().to_owned(), + from: schema_core::json_rpc::types::DiffTarget::SchemaDatamodel(SchemasContainer { + files: vec![SchemaContainer { + path: from_schema.to_str().unwrap().to_owned(), + content: from.to_string(), + }], }), script: false, shadow_database_url: None, - to: schema_core::json_rpc::types::DiffTarget::SchemaDatamodel(SchemaContainer { - schema: to_schema.to_str().unwrap().to_owned(), + to: schema_core::json_rpc::types::DiffTarget::SchemaDatamodel(SchemasContainer { + files: vec![SchemaContainer { + path: to_schema.to_str().unwrap().to_owned(), + content: to.to_string(), + }], }), }; diff --git a/schema-engine/sql-migration-tests/tests/migrations/mssql.rs b/schema-engine/sql-migration-tests/tests/migrations/mssql.rs index 8375626a7d6f..309e82dfadad 100644 --- a/schema-engine/sql-migration-tests/tests/migrations/mssql.rs +++ b/schema-engine/sql-migration-tests/tests/migrations/mssql.rs @@ -170,7 +170,10 @@ fn foreign_key_renaming_to_default_works(api: TestApi) { let migration = api.connector_diff( DiffTarget::Database, - DiffTarget::Datamodel(SourceFile::new_static(target_schema)), + DiffTarget::Datamodel(vec![( + "schema.prisma".to_string(), + SourceFile::new_static(target_schema), + )]), None, ); let expected = 
expect![[r#" diff --git a/schema-engine/sql-migration-tests/tests/migrations/mssql/multi_schema.rs b/schema-engine/sql-migration-tests/tests/migrations/mssql/multi_schema.rs index 96d20dc922e3..08194e8fa01e 100644 --- a/schema-engine/sql-migration-tests/tests/migrations/mssql/multi_schema.rs +++ b/schema-engine/sql-migration-tests/tests/migrations/mssql/multi_schema.rs @@ -3,9 +3,8 @@ use connection_string::JdbcString; use indoc::{formatdoc, indoc}; use psl::PreviewFeature; use schema_core::{ - commands::apply_migrations, - commands::create_migration, - json_rpc::types::{ApplyMigrationsInput, CreateMigrationInput}, + commands::{apply_migrations, create_migration}, + json_rpc::types::{ApplyMigrationsInput, CreateMigrationInput, SchemasContainer}, schema_connector::{ConnectorParams, SchemaConnector}, }; use sql_migration_tests::test_api::*; @@ -1219,7 +1218,12 @@ async fn migration_with_shadow_database() { let migration = CreateMigrationInput { migrations_directory_path: migrations_directory.path().to_str().unwrap().to_owned(), - prisma_schema: dm.clone(), + schema: SchemasContainer { + files: vec![SchemaContainer { + path: "schema.prisma".to_string(), + content: dm.clone(), + }], + }, draft: false, migration_name: "init".to_string(), }; diff --git a/schema-engine/sql-migration-tests/tests/migrations/mysql.rs b/schema-engine/sql-migration-tests/tests/migrations/mysql.rs index 1e5501a93d45..1add9849542a 100644 --- a/schema-engine/sql-migration-tests/tests/migrations/mysql.rs +++ b/schema-engine/sql-migration-tests/tests/migrations/mysql.rs @@ -470,11 +470,17 @@ fn dropping_m2m_relation_from_datamodel_works() { let (_result, diff) = super::diff::diff_result(DiffParams { exit_code: None, - from: DiffTarget::SchemaDatamodel(SchemaContainer { - schema: path.to_str().unwrap().to_owned(), + from: DiffTarget::SchemaDatamodel(SchemasContainer { + files: vec![SchemaContainer { + path: path.to_str().unwrap().to_owned(), + content: schema.to_string(), + }], }), - to: DiffTarget::SchemaDatamodel(SchemaContainer { - schema: path2.to_str().unwrap().to_owned(), + to: DiffTarget::SchemaDatamodel(SchemasContainer { + files: vec![SchemaContainer { + path: path2.to_str().unwrap().to_owned(), + content: schema2.to_string(), + }], }), script: true, shadow_database_url: None, diff --git a/schema-engine/sql-migration-tests/tests/migrations/postgres.rs b/schema-engine/sql-migration-tests/tests/migrations/postgres.rs index a31454b8058b..7aa222fc64bf 100644 --- a/schema-engine/sql-migration-tests/tests/migrations/postgres.rs +++ b/schema-engine/sql-migration-tests/tests/migrations/postgres.rs @@ -4,7 +4,7 @@ mod multi_schema; use psl::parser_database::SourceFile; use quaint::Value; -use schema_core::schema_connector::DiffTarget; +use schema_core::{json_rpc::types::SchemasContainer, schema_connector::DiffTarget}; use sql_migration_tests::test_api::*; use std::fmt::Write; @@ -384,7 +384,10 @@ fn foreign_key_renaming_to_default_works(api: TestApi) { let migration = api.connector_diff( DiffTarget::Database, - DiffTarget::Datamodel(SourceFile::new_static(target_schema)), + DiffTarget::Datamodel(vec![( + "schema.prisma".to_string(), + SourceFile::new_static(target_schema), + )]), None, ); let expected = expect![[r#" @@ -482,8 +485,11 @@ fn connecting_to_a_postgres_database_with_the_cockroach_connector_fails(_api: Te let engine = schema_core::schema_api(None, None).unwrap(); let err = tok( engine.ensure_connection_validity(schema_core::json_rpc::types::EnsureConnectionValidityParams { - datasource: 
schema_core::json_rpc::types::DatasourceParam::SchemaString(SchemaContainer { - schema: dm.to_owned(), + datasource: schema_core::json_rpc::types::DatasourceParam::Schema(SchemasContainer { + files: vec![SchemaContainer { + path: "schema.prisma".to_string(), + content: dm.to_owned(), + }], }), }), ) @@ -617,8 +623,8 @@ fn scalar_list_default_diffing(api: TestApi) { "#; let migration = api.connector_diff( - DiffTarget::Datamodel(SourceFile::new_static(schema_1)), - DiffTarget::Datamodel(SourceFile::new_static(schema_2)), + DiffTarget::Datamodel(vec![("schema.prisma".to_string(), SourceFile::new_static(schema_1))]), + DiffTarget::Datamodel(vec![("schema.prisma".to_string(), SourceFile::new_static(schema_2))]), None, ); diff --git a/schema-engine/sql-migration-tests/tests/migrations/postgres/introspection.rs b/schema-engine/sql-migration-tests/tests/migrations/postgres/introspection.rs index fb77c35f44c9..4d4c71110bfa 100644 --- a/schema-engine/sql-migration-tests/tests/migrations/postgres/introspection.rs +++ b/schema-engine/sql-migration-tests/tests/migrations/postgres/introspection.rs @@ -1,3 +1,4 @@ +use schema_core::json_rpc::types::SchemasContainer; use sql_migration_tests::test_api::*; #[test] @@ -53,8 +54,13 @@ ALTER TABLE blocks let result = tok(me.introspect(schema_core::json_rpc::types::IntrospectParams { composite_type_depth: -1, force: false, - schema, - schemas: None, + schema: SchemasContainer { + files: vec![SchemaContainer { + path: "schema.prisma".to_string(), + content: schema, + }], + }, + namespaces: None, })) .unwrap(); @@ -123,8 +129,13 @@ CREATE TABLE capitals ( let result = tok(me.introspect(schema_core::json_rpc::types::IntrospectParams { composite_type_depth: -1, force: false, - schema, - schemas: None, + schema: SchemasContainer { + files: vec![SchemaContainer { + path: "schema.prisma".to_string(), + content: schema, + }], + }, + namespaces: None, })) .unwrap(); @@ -194,8 +205,13 @@ CREATE TABLE capitals ( let result = tok(me.introspect(schema_core::json_rpc::types::IntrospectParams { composite_type_depth: -1, force: false, - schema, - schemas: None, + schema: SchemasContainer { + files: vec![SchemaContainer { + path: "schema.prisma".to_string(), + content: schema, + }], + }, + namespaces: None, })) .unwrap(); diff --git a/schema-engine/sql-migration-tests/tests/migrations/postgres/multi_schema.rs b/schema-engine/sql-migration-tests/tests/migrations/postgres/multi_schema.rs index 628985ec5443..d255f93967b3 100644 --- a/schema-engine/sql-migration-tests/tests/migrations/postgres/multi_schema.rs +++ b/schema-engine/sql-migration-tests/tests/migrations/postgres/multi_schema.rs @@ -1,9 +1,8 @@ use indoc::{formatdoc, indoc}; use psl::PreviewFeature; use schema_core::{ - commands::apply_migrations, - commands::create_migration, - json_rpc::types::{ApplyMigrationsInput, CreateMigrationInput}, + commands::{apply_migrations, create_migration}, + json_rpc::types::{ApplyMigrationsInput, CreateMigrationInput, SchemasContainer}, schema_connector::{ConnectorParams, SchemaConnector}, }; use sql_schema_connector::SqlSchemaConnector; @@ -1453,7 +1452,12 @@ async fn migration_with_shadow_database() { let migration = CreateMigrationInput { migrations_directory_path: migrations_directory.path().to_str().unwrap().to_owned(), - prisma_schema: dm.clone(), + schema: SchemasContainer { + files: vec![SchemaContainer { + path: "schema.prisma".to_string(), + content: dm.clone(), + }], + }, draft: false, migration_name: "init".to_string(), }; diff --git 
a/schema-engine/sql-migration-tests/tests/migrations/sqlite.rs b/schema-engine/sql-migration-tests/tests/migrations/sqlite.rs index b028e7737973..cda2cb38d3d4 100644 --- a/schema-engine/sql-migration-tests/tests/migrations/sqlite.rs +++ b/schema-engine/sql-migration-tests/tests/migrations/sqlite.rs @@ -1,4 +1,5 @@ use quaint::prelude::Insert; +use schema_core::json_rpc::types::SchemasContainer; use sql_migration_tests::test_api::*; #[test_connector(tags(Sqlite))] @@ -202,8 +203,13 @@ fn introspecting_a_non_existing_db_fails() { let err = tok(api.introspect(schema_core::json_rpc::types::IntrospectParams { composite_type_depth: -1, force: false, - schema: dm.to_owned(), - schemas: None, + schema: SchemasContainer { + files: vec![SchemaContainer { + path: "schema.prisma".to_string(), + content: dm.to_string(), + }], + }, + namespaces: None, })) .unwrap_err(); diff --git a/schema-engine/sql-migration-tests/tests/single_migration_tests.rs b/schema-engine/sql-migration-tests/tests/single_migration_tests.rs index 4bae11634344..d7b16af5915e 100644 --- a/schema-engine/sql-migration-tests/tests/single_migration_tests.rs +++ b/schema-engine/sql-migration-tests/tests/single_migration_tests.rs @@ -1,4 +1,7 @@ -use schema_core::schema_connector::{ConnectorParams, SchemaConnector}; +use schema_core::{ + json_rpc::types::SchemasContainer, + schema_connector::{ConnectorParams, SchemaConnector}, +}; use sql_migration_tests::test_api::*; use sql_schema_connector::SqlSchemaConnector; use std::{fs, io::Write as _, path, sync::Arc}; @@ -104,8 +107,11 @@ fn run_single_migration_test(test_file_path: &str, test_function_name: &'static script: true, shadow_database_url: None, from: schema_core::json_rpc::types::DiffTarget::Empty, - to: schema_core::json_rpc::types::DiffTarget::SchemaDatamodel(schema_core::json_rpc::types::SchemaContainer { - schema: file_path.to_str().unwrap().to_owned(), + to: schema_core::json_rpc::types::DiffTarget::SchemaDatamodel(SchemasContainer { + files: vec![schema_core::json_rpc::types::SchemaContainer { + path: file_path.to_str().unwrap().to_owned(), + content: text.to_string(), + }], }), })) .unwrap(); @@ -129,8 +135,11 @@ fn run_single_migration_test(test_file_path: &str, test_function_name: &'static from: schema_core::json_rpc::types::DiffTarget::Url(schema_core::json_rpc::types::UrlContainer { url: connection_string, }), - to: schema_core::json_rpc::types::DiffTarget::SchemaDatamodel(schema_core::json_rpc::types::SchemaContainer { - schema: file_path.to_str().unwrap().to_owned(), + to: schema_core::json_rpc::types::DiffTarget::SchemaDatamodel(SchemasContainer { + files: vec![schema_core::json_rpc::types::SchemaContainer { + path: file_path.to_str().unwrap().to_owned(), + content: text.to_string(), + }], }), })) .unwrap();
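Taken together, the call-site migration is mechanical: every bare schema `String` becomes a one-element `SchemasContainer` filed under `schema.prisma`, and `IntrospectParams.schemas` is renamed to `namespaces`. Since the wrapper recurs dozens of times across this diff, a shared helper along these lines could cut the noise (a sketch; `single_file_schema` is hypothetical and not part of this change):

```rust
use schema_core::json_rpc::types::{IntrospectParams, SchemaContainer, SchemasContainer};

// Hypothetical helper: wrap one in-memory schema exactly the way the
// call sites in this diff do by hand.
fn single_file_schema(content: String) -> SchemasContainer {
    SchemasContainer {
        files: vec![SchemaContainer {
            path: "schema.prisma".to_string(), // conventional default path
            content,
        }],
    }
}

// Example: building introspection params after this change. Before it,
// `schema` was a plain String and `namespaces` was still called `schemas`.
fn introspect_params(schema: String) -> IntrospectParams {
    IntrospectParams {
        schema: single_file_schema(schema),
        force: false,
        composite_type_depth: 0,
        namespaces: None,
    }
}
```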