Skip to content

Commit

Permalink
translation tests
Browse files Browse the repository at this point in the history
  • Loading branch information
pranshi06 committed Sep 19, 2024
1 parent e59519b commit bb80198
Show file tree
Hide file tree
Showing 44 changed files with 1,136 additions and 542 deletions.
2 changes: 2 additions & 0 deletions Cargo.lock

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

2 changes: 1 addition & 1 deletion crates/configuration/src/config2.sql
Original file line number Diff line number Diff line change
Expand Up @@ -6,7 +6,7 @@ WITH column_data AS (
c.column_name,
TO_JSON_STRING(STRUCT(
c.column_name AS name,
JSON_OBJECT('ScalarType',
JSON_OBJECT('scalarType',
case LOWER(c.data_type)
when 'bool' then 'boolean'
when 'boolean' then 'boolean'
Expand Down
3 changes: 2 additions & 1 deletion crates/configuration/src/version1.rs
Original file line number Diff line number Diff line change
Expand Up @@ -51,6 +51,7 @@ const NOT_APPROX_COUNTABLE: [&str; 4] = ["image", "sql_variant", "ntext", "text"
/// Initial configuration, just enough to connect to a database and elaborate a full
/// 'Configuration'.
#[derive(Debug, Clone, PartialEq, Deserialize, Serialize, JsonSchema)]
#[serde(rename_all = "camelCase")]
pub struct ParsedConfiguration {
// Which version of the configuration format are we using
pub version: u32,
Expand Down Expand Up @@ -238,7 +239,7 @@ pub async fn configure(

/// Parse the configuration format from a directory.
pub async fn parse_configuration(
configuration_dir: impl AsRef<Path>,
configuration_dir: impl AsRef<Path> + Send,
) -> Result<ParsedConfiguration, ParseConfigurationError> {
let configuration_file = configuration_dir.as_ref().join(CONFIGURATION_FILENAME);

Expand Down
1 change: 1 addition & 0 deletions crates/query-engine/metadata/src/metadata/database.rs
Original file line number Diff line number Diff line change
Expand Up @@ -13,6 +13,7 @@ pub struct ScalarTypeTypeName(pub String);

/// The type of values that a column, field, or argument may take.
#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize, JsonSchema)]
#[serde(rename_all = "camelCase")]
pub enum Type {
ScalarType(models::ScalarTypeName),
CompositeType(models::TypeName),
Expand Down
1 change: 1 addition & 0 deletions crates/query-engine/metadata/src/metadata/mod.rs
Original file line number Diff line number Diff line change
Expand Up @@ -12,6 +12,7 @@ use serde::{Deserialize, Serialize};

/// Metadata information.
#[derive(Clone, PartialEq, Eq, Debug, Default, Serialize, Deserialize, JsonSchema)]
#[serde(rename_all = "camelCase")]
pub struct Metadata {
pub tables: TablesInfo,
// pub composite_types: CompositeTypes,
Expand Down
2 changes: 2 additions & 0 deletions crates/query-engine/translation/Cargo.toml
Original file line number Diff line number Diff line change
Expand Up @@ -9,6 +9,7 @@ ndc-models = { workspace = true }

query-engine-metadata = { path = "../metadata" }
query-engine-sql = { path = "../sql" }
ndc-bigquery-configuration = { path = "../../../crates/configuration" }

enum-iterator = { workspace = true }
indexmap = { workspace = true }
Expand All @@ -18,6 +19,7 @@ serde_json = { workspace = true }
thiserror = { workspace = true }
tracing = { workspace = true }
anyhow = { workspace = true }
tokio = { workspace = true }

[dev-dependencies]
insta = { workspace = true, features = ["json"] }
Expand Down
52 changes: 37 additions & 15 deletions crates/query-engine/translation/tests/common/mod.rs
Original file line number Diff line number Diff line change
Expand Up @@ -2,36 +2,58 @@ use std::fs;

use query_engine_sql::sql;
use query_engine_translation::translation;
use std::path::PathBuf;

/// Run a query against the server, get the result, and compare against the snapshot.
pub fn test_translation(testname: &str) -> anyhow::Result<String> {
let tables = serde_json::from_str(
fs::read_to_string(format!("tests/goldenfiles/{}/tables.json", testname))
.unwrap()
.as_str(),
)
.unwrap();
let request = serde_json::from_str(
fs::read_to_string(format!("tests/goldenfiles/{}/request.json", testname))
.unwrap()
.as_str(),
)
.unwrap();

let plan = translation::query::translate(&tables, request)?;
/// Load the golden-file configuration and request for `testname`, translate the
/// request to SQL, and return the pretty-printed SQL plus its numbered
/// parameters for snapshot comparison.
///
/// Errors are propagated (missing fixture files, malformed JSON, translation
/// failures) rather than panicking, so a broken fixture shows up as a test
/// failure message instead of an unwrap panic.
pub async fn test_translation(testname: &str) -> anyhow::Result<String> {
    let directory = PathBuf::from("tests/goldenfiles").join(testname);

    // Parse the on-disk configuration for this test case and resolve it with
    // placeholder environment values: the translation tests never open a real
    // BigQuery connection, they only need the metadata to generate SQL.
    let parsed_configuration = ndc_bigquery_configuration::parse_configuration(&directory).await?;
    let configuration = ndc_bigquery_configuration::make_runtime_configuration(
        parsed_configuration,
        ndc_bigquery_configuration::environment::FixedEnvironment::from([
            (
                "HASURA_BIGQUERY_SERVICE_KEY".into(),
                "the translation tests do not rely on a database connection".into(),
            ),
            (
                "HASURA_BIGQUERY_PROJECT_ID".into(),
                "the translation tests do not rely on a database connection".into(),
            ),
            (
                "HASURA_BIGQUERY_DATASET_ID".into(),
                "the translation tests do not rely on a database connection".into(),
            ),
        ]),
    )?;
    let metadata = configuration.metadata;

    // Propagate I/O and JSON errors with `?` instead of unwrapping.
    let request = serde_json::from_str(&fs::read_to_string(directory.join("request.json"))?)?;

    let plan = translation::query::translate(&metadata, request)?;
    let query = plan.query.query_sql();

    // Number parameters from 1 to match the positional placeholders in the
    // generated SQL statement.
    let params: Vec<(usize, &sql::string::Param)> = query
        .params
        .iter()
        .enumerate()
        .map(|(i, p)| (i + 1, p))
        .collect();

    // Pretty-print so snapshots are stable and human-readable.
    let pretty = sqlformat::format(
        &query.sql,
        &sqlformat::QueryParams::None,
        sqlformat::FormatOptions::default(),
    );

    Ok(format!("{}\n\n{:?}", pretty, params))
}
Original file line number Diff line number Diff line change
@@ -0,0 +1,73 @@
{
"version": 1,
"connectionSettings": {
"serviceKey": {
"variable": "HASURA_BIGQUERY_SERVICE_KEY"
},
"projectId": {
"variable": "HASURA_BIGQUERY_PROJECT_ID"
},
"datasetId": {
"variable": "HASURA_BIGQUERY_DATASET_ID"
}
},
"metadata": {
"tables": {
"albums": {
"schemaName": "test_project.test_dataset",
"tableName": "albums",
"columns": {
"AlbumId": {
"name": "AlbumId",
"type": {
"scalarType": "bigint"
},
"nullable": "nullable",
"description": null
},
"Title": {
"name": "Title",
"type": {
"scalarType": "string"
},
"nullable": "nullable",
"description": null
},
"ArtistId": {
"name": "ArtistId",
"type": {
"scalarType": "bigint"
},
"nullable": "nullable",
"description": null
}
},
"uniquenessConstraints": {},
"foreignRelations": {},
"description": null
}
},
"scalarTypes": {
"bigint": {
"typeName": "bigint",
"schemaName": "test_project.test_dataset",
"description": null,
"aggregateFunctions": {},
"comparisonOperators": {},
"typeRepresentation": null
},
"string": {
"typeName": "string",
"schemaName": "test_project.test_dataset",
"description": null,
"aggregateFunctions": {},
"comparisonOperators": {},
"typeRepresentation": null
}
},
"nativeOperations": {
"queries": {},
"mutations": {}
}
}
}
Original file line number Diff line number Diff line change
@@ -1,5 +1,5 @@
{
"collection": "Album",
"collection": "albums",
"query": {
"fields": {
"Title": {
Expand Down

This file was deleted.

Original file line number Diff line number Diff line change
@@ -0,0 +1,73 @@
{
"version": 1,
"connectionSettings": {
"serviceKey": {
"variable": "HASURA_BIGQUERY_SERVICE_KEY"
},
"projectId": {
"variable": "HASURA_BIGQUERY_PROJECT_ID"
},
"datasetId": {
"variable": "HASURA_BIGQUERY_DATASET_ID"
}
},
"metadata": {
"tables": {
"albums": {
"schemaName": "test_project.test_dataset",
"tableName": "albums",
"columns": {
"AlbumId": {
"name": "AlbumId",
"type": {
"scalarType": "bigint"
},
"nullable": "nullable",
"description": null
},
"Title": {
"name": "Title",
"type": {
"scalarType": "string"
},
"nullable": "nullable",
"description": null
},
"ArtistId": {
"name": "ArtistId",
"type": {
"scalarType": "bigint"
},
"nullable": "nullable",
"description": null
}
},
"uniquenessConstraints": {},
"foreignRelations": {},
"description": null
}
},
"scalarTypes": {
"bigint": {
"typeName": "bigint",
"schemaName": "test_project.test_dataset",
"description": null,
"aggregateFunctions": {},
"comparisonOperators": {},
"typeRepresentation": null
},
"string": {
"typeName": "string",
"schemaName": "test_project.test_dataset",
"description": null,
"aggregateFunctions": {},
"comparisonOperators": {},
"typeRepresentation": null
}
},
"nativeOperations": {
"queries": {},
"mutations": {}
}
}
}
Original file line number Diff line number Diff line change
@@ -1,5 +1,5 @@
{
"collection": "Album",
"collection": "albums",
"query": {
"aggregates": {
"how_many_distinct_artist_ids": {
Expand Down

This file was deleted.

Loading

0 comments on commit bb80198

Please sign in to comment.