Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
4 changes: 2 additions & 2 deletions crates/api-ui/src/databases/handlers.rs
Original file line number Diff line number Diff line change
Expand Up @@ -257,8 +257,8 @@ pub async fn update_database(
("offset" = Option<usize>, Query, description = "Databases offset"),
("limit" = Option<usize>, Query, description = "Databases limit"),
("search" = Option<String>, Query, description = "Databases search"),
("order_by" = Option<String>, Query, description = "Order by: database_name, volume_name, created_at (default), updated_at"),
("order_direction" = Option<OrderDirection>, Query, description = "Order direction: ASC, DESC (default)"),
("orderBy" = Option<String>, Query, description = "Order by: database_name, volume_name, created_at (default), updated_at"),
("orderDirection" = Option<OrderDirection>, Query, description = "Order direction: ASC, DESC (default)"),
),
tags = ["databases"],
path = "/ui/databases",
Expand Down
1 change: 1 addition & 0 deletions crates/api-ui/src/databases/models.rs
Original file line number Diff line number Diff line change
Expand Up @@ -22,6 +22,7 @@ use utoipa::ToSchema;
// }

#[derive(Debug, Clone, Serialize, Deserialize, ToSchema, Eq, PartialEq)]
#[serde(rename_all = "camelCase")]
pub struct Database {
pub name: String,
pub volume: String,
Expand Down
1 change: 1 addition & 0 deletions crates/api-ui/src/error.rs
Original file line number Diff line number Diff line change
Expand Up @@ -73,6 +73,7 @@ pub(crate) trait IntoStatusCode {
}

#[derive(Debug, Serialize, Deserialize, utoipa::ToSchema)]
#[serde(rename_all = "camelCase")]
pub struct ErrorResponse {
pub message: String,
pub status_code: u16,
Expand Down
70 changes: 35 additions & 35 deletions crates/api-ui/src/queries/handlers.rs
Original file line number Diff line number Diff line change
Expand Up @@ -24,7 +24,7 @@ use axum::{
use chrono::{DateTime, Utc};
use core_executor::models::{QueryContext, QueryResult};
use core_history::WorksheetId;
use datafusion::arrow::array::Array;
use datafusion::arrow::array::{Array, Int64Array, RecordBatch, StringArray};
use snafu::ResultExt;
use std::collections::HashMap;
use std::net::SocketAddr;
Expand Down Expand Up @@ -184,13 +184,13 @@ pub async fn get_query(
operation_id = "getQueries",
tags = ["queries"],
params(
("min_duration_ms" = Option<i64>, Query, description = "Minimal duration of queries in milliseconds"),
("worksheet_id" = Option<WorksheetId>, Query, description = "Worksheet id of the queries"),
("minDurationMs" = Option<i64>, Query, description = "Minimal duration of queries in milliseconds"),
("worksheetId" = Option<WorksheetId>, Query, description = "Worksheet id of the queries"),
("offset" = Option<usize>, Query, description = "Queries offset"),
("limit" = Option<usize>, Query, description = "Queries limit"),
("search" = Option<String>, Query, description = "Queries search"),
("order_by" = Option<String>, Query, description = "Order by: id, worksheet_id, result_count, status, start_time (default), end_time, duration_ms"),
("order_direction" = Option<OrderDirection>, Query, description = "Order direction: ASC, DESC (default)"),
("orderBy" = Option<String>, Query, description = "Order by: id, worksheet_id, result_count, status, start_time (default), end_time, duration_ms"),
("orderDirection" = Option<OrderDirection>, Query, description = "Order direction: ASC, DESC (default)"),
),
responses(
(status = 200, description = "Returns queries history", body = QueriesResponse),
Expand Down Expand Up @@ -240,36 +240,16 @@ pub async fn queries(
.context(QueriesSnafu)?;
let mut items = Vec::new();
for record in records {
let ids = downcast_int64_column(&record, "id")
.context(ExecutionSnafu)
.context(QueriesSnafu)?;
let worksheet_ids = downcast_int64_column(&record, "worksheet_id")
.context(ExecutionSnafu)
.context(QueriesSnafu)?;
let queries = downcast_string_column(&record, "query")
.context(ExecutionSnafu)
.context(QueriesSnafu)?;
let start_times = downcast_string_column(&record, "start_time")
.context(ExecutionSnafu)
.context(QueriesSnafu)?;
let end_times = downcast_string_column(&record, "end_time")
.context(ExecutionSnafu)
.context(QueriesSnafu)?;
let duration_ms_values = downcast_int64_column(&record, "duration_ms")
.context(ExecutionSnafu)
.context(QueriesSnafu)?;
let result_counts = downcast_int64_column(&record, "result_count")
.context(ExecutionSnafu)
.context(QueriesSnafu)?;
let results = downcast_string_column(&record, "result")
.context(ExecutionSnafu)
.context(QueriesSnafu)?;
let status = downcast_string_column(&record, "status")
.context(ExecutionSnafu)
.context(QueriesSnafu)?;
let errors = downcast_string_column(&record, "error")
.context(ExecutionSnafu)
.context(QueriesSnafu)?;
let ids = w_downcast_int64_column(&record, "id")?;
let worksheet_ids = w_downcast_int64_column(&record, "worksheet_id")?;
let queries = w_downcast_string_column(&record, "query")?;
let start_times = w_downcast_string_column(&record, "start_time")?;
let end_times = w_downcast_string_column(&record, "end_time")?;
let duration_ms_values = w_downcast_int64_column(&record, "duration_ms")?;
let result_counts = w_downcast_int64_column(&record, "result_count")?;
let results = w_downcast_string_column(&record, "result")?;
let status = w_downcast_string_column(&record, "status")?;
let errors = w_downcast_string_column(&record, "error")?;
for i in 0..record.num_rows() {
items.push(QueryRecord {
id: ids.value(i),
Expand Down Expand Up @@ -310,3 +290,23 @@ pub async fn queries(
}
Ok(Json(QueriesResponse { items }))
}

#[allow(clippy::result_large_err)]
/// Fetches column `name` from `batch` as an `Int64Array`, mapping any
/// downcast failure through the `Execution` and `Queries` error layers.
///
/// Thin wrapper that keeps the repeated two-step snafu context chain out of
/// the `queries` handler loop.
fn w_downcast_int64_column<'a>(
    batch: &'a RecordBatch,
    name: &str,
) -> std::result::Result<&'a Int64Array, crate::queries::error::Error> {
    // Attach the execution-layer context first, then wrap into the
    // queries-layer error expected by the handler.
    let execution_result = downcast_int64_column(batch, name).context(ExecutionSnafu);
    execution_result.context(QueriesSnafu)
}

#[allow(clippy::result_large_err)]
/// Fetches column `name` from `batch` as a `StringArray`, mapping any
/// downcast failure through the `Execution` and `Queries` error layers.
///
/// Thin wrapper that keeps the repeated two-step snafu context chain out of
/// the `queries` handler loop.
fn w_downcast_string_column<'a>(
    batch: &'a RecordBatch,
    name: &str,
) -> std::result::Result<&'a StringArray, crate::queries::error::Error> {
    // Attach the execution-layer context first, then wrap into the
    // queries-layer error expected by the handler.
    let execution_result = downcast_string_column(batch, name).context(ExecutionSnafu);
    execution_result.context(QueriesSnafu)
}
20 changes: 10 additions & 10 deletions crates/api-ui/src/schemas/handlers.rs
Original file line number Diff line number Diff line change
Expand Up @@ -56,7 +56,7 @@ pub struct ApiDoc;
operation_id = "createSchema",
tags = ["schemas"],
params(
("databaseName" = String, description = "Database Name")
("databaseName" = String, Path, description = "Database Name")
),
request_body = SchemaCreatePayload,
responses(
Expand Down Expand Up @@ -127,8 +127,8 @@ pub async fn create_schema(
operation_id = "deleteSchema",
tags = ["schemas"],
params(
("databaseName" = String, description = "Database Name"),
("schemaName" = String, description = "Schema Name")
("databaseName" = String, Path, description = "Database Name"),
("schemaName" = String, Path, description = "Schema Name")
),
responses(
(status = 204, description = "Successful Response"),
Expand Down Expand Up @@ -167,8 +167,8 @@ pub async fn delete_schema(
get,
path = "/ui/databases/{databaseName}/schemas/{schemaName}",
params(
("databaseName" = String, description = "Database Name"),
("schemaName" = String, description = "Schema Name")
("databaseName" = String, Path, description = "Database Name"),
("schemaName" = String, Path, description = "Schema Name")
),
operation_id = "getSchema",
tags = ["schemas"],
Expand Down Expand Up @@ -222,8 +222,8 @@ pub async fn get_schema(
path="/ui/databases/{databaseName}/schemas/{schemaName}",
tags = ["schemas"],
params(
("databaseName" = String, description = "Database Name"),
("schemaName" = String, description = "Schema Name")
("databaseName" = String, Path, description = "Database Name"),
("schemaName" = String, Path, description = "Schema Name")
),
request_body = SchemaUpdatePayload,
responses(
Expand Down Expand Up @@ -265,12 +265,12 @@ pub async fn update_schema(
path="/ui/databases/{databaseName}/schemas",
tags = ["schemas"],
params(
("databaseName" = String, description = "Database Name"),
("databaseName" = String, Path, description = "Database Name"),
("offset" = Option<usize>, Query, description = "Schemas offset"),
("limit" = Option<u16>, Query, description = "Schemas limit"),
("search" = Option<String>, Query, description = "Schemas search"),
("order_by" = Option<String>, Query, description = "Order by: schema_name, database_name, created_at (default), updated_at"),
("order_direction" = Option<OrderDirection>, Query, description = "Order direction: ASC, DESC (default)"),
("orderBy" = Option<String>, Query, description = "Order by: schema_name, database_name, created_at (default), updated_at"),
("orderDirection" = Option<OrderDirection>, Query, description = "Order direction: ASC, DESC (default)"),
),
responses(
(status = 200, body = SchemasResponse),
Expand Down
1 change: 1 addition & 0 deletions crates/api-ui/src/schemas/models.rs
Original file line number Diff line number Diff line change
Expand Up @@ -5,6 +5,7 @@ use std::convert::From;
use utoipa::ToSchema;

#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize, ToSchema)]
#[serde(rename_all = "camelCase")]
pub struct Schema {
pub name: String,
pub database: String,
Expand Down
32 changes: 16 additions & 16 deletions crates/api-ui/src/tables/handlers.rs
Original file line number Diff line number Diff line change
Expand Up @@ -67,9 +67,9 @@ pub struct ApiDoc;
get,
path = "/ui/databases/{databaseName}/schemas/{schemaName}/tables/{tableName}/statistics",
params(
("databaseName" = String, description = "Database Name"),
("schemaName" = String, description = "Schema Name"),
("tableName" = String, description = "Table Name")
("databaseName" = String, Path, description = "Database Name"),
("schemaName" = String, Path, description = "Schema Name"),
("tableName" = String, Path, description = "Table Name")
),
operation_id = "getTableStatistics",
tags = ["tables"],
Expand Down Expand Up @@ -141,9 +141,9 @@ pub async fn get_table_statistics(
get,
path = "/ui/databases/{databaseName}/schemas/{schemaName}/tables/{tableName}/columns",
params(
("databaseName" = String, description = "Database Name"),
("schemaName" = String, description = "Schema Name"),
("tableName" = String, description = "Table Name")
("databaseName" = String, Path, description = "Database Name"),
("schemaName" = String, Path, description = "Schema Name"),
("tableName" = String, Path, description = "Table Name")
),
operation_id = "getTableColumns",
tags = ["tables"],
Expand Down Expand Up @@ -199,9 +199,9 @@ pub async fn get_table_columns(
get,
path = "/ui/databases/{databaseName}/schemas/{schemaName}/tables/{tableName}/rows",
params(
("databaseName" = String, description = "Database Name"),
("schemaName" = String, description = "Schema Name"),
("tableName" = String, description = "Table Name"),
("databaseName" = String, Path, description = "Database Name"),
("schemaName" = String, Path, description = "Schema Name"),
("tableName" = String, Path, description = "Table Name"),
("offset" = Option<u32>, Query, description = "Table preview offset"),
("limit" = Option<u16>, Query, description = "Table preview limit")
),
Expand Down Expand Up @@ -279,9 +279,9 @@ pub async fn get_table_preview_data(
operation_id = "uploadFile",
tags = ["tables"],
params(
("databaseName" = String, description = "Database Name"),
("schemaName" = String, description = "Schema Name"),
("tableName" = String, description = "Table Name"),
("databaseName" = String, Path, description = "Database Name"),
("schemaName" = String, Path, description = "Schema Name"),
("tableName" = String, Path, description = "Table Name"),
("header" = Option<bool>, Query, example = json!(true), description = "Has header"),
("delimiter" = Option<u8>, Query, description = "an optional column delimiter, defaults to comma `','`"),
("escape" = Option<u8>, Query, description = "an escape character"),
Expand Down Expand Up @@ -371,13 +371,13 @@ pub async fn upload_file(
get,
path = "/ui/databases/{databaseName}/schemas/{schemaName}/tables",
params(
("databaseName" = String, description = "Database Name"),
("schemaName" = String, description = "Schema Name"),
("databaseName" = String, Path, description = "Database Name"),
("schemaName" = String, Path, description = "Schema Name"),
("offset" = Option<usize>, Query, description = "Tables offset"),
("limit" = Option<usize>, Query, description = "Tables limit"),
("search" = Option<String>, Query, description = "Tables search"),
("order_by" = Option<String>, Query, description = "Order by: table_name, schema_name, database_name, volume_name, table_type, table_format, owner, created_at (default), updated_at"),
("order_direction" = Option<OrderDirection>, Query, description = "Order direction: ASC, DESC (default)"),
("orderBy" = Option<String>, Query, description = "Order by: table_name, schema_name, database_name, volume_name, table_type, table_format, owner, created_at (default), updated_at"),
("orderDirection" = Option<OrderDirection>, Query, description = "Order direction: ASC, DESC (default)"),
),
operation_id = "getTables",
tags = ["tables"],
Expand Down
2 changes: 1 addition & 1 deletion crates/api-ui/src/tests/databases.rs
Original file line number Diff line number Diff line change
Expand Up @@ -12,7 +12,7 @@ use http::Method;
#[tokio::test]
#[allow(clippy::too_many_lines)]
#[should_panic(
expected = "Failed to get error response: reqwest::Error { kind: Decode, source: Error(\"missing field `message`\", line: 1, column: 122) }"
expected = "Failed to get error response: reqwest::Error { kind: Decode, source: Error(\"missing field `message`\", line: 1, column: 120) }"
)]
async fn test_ui_databases_metastore_update_bug() {
let addr = run_test_server().await;
Expand Down
4 changes: 2 additions & 2 deletions crates/api-ui/src/volumes/handlers.rs
Original file line number Diff line number Diff line change
Expand Up @@ -253,8 +253,8 @@ pub async fn delete_volume(
("offset" = Option<usize>, Query, description = "Volumes offset"),
("limit" = Option<usize>, Query, description = "Volumes limit"),
("search" = Option<String>, Query, description = "Volumes search"),
("order_by" = Option<String>, Query, description = "Order by: volume_name, volume_type, created_at (default), updated_at"),
("order_direction" = Option<OrderDirection>, Query, description = "Order direction: ASC, DESC (default)"),
("orderBy" = Option<String>, Query, description = "Order by: volume_name, volume_type, created_at (default), updated_at"),
("orderDirection" = Option<OrderDirection>, Query, description = "Order direction: ASC, DESC (default)"),
),
tags = ["volumes"],
path = "/ui/volumes",
Expand Down
4 changes: 2 additions & 2 deletions crates/api-ui/src/worksheets/handlers.rs
Original file line number Diff line number Diff line change
Expand Up @@ -56,8 +56,8 @@ pub struct ApiDoc;
("offset" = Option<usize>, Query, description = "Worksheets offset"),
("limit" = Option<usize>, Query, description = "Worksheets limit"),
("search" = Option<String>, Query, description = "Worksheets search"),
("order_by" = Option<String>, Query, description = "Order by: id, name, content, created_at (default), updated_at"),
("order_direction" = Option<OrderDirection>, Query, description = "Order direction: ASC, DESC (default)"),
("orderBy" = Option<String>, Query, description = "Order by: id, name, content, created_at (default), updated_at"),
("orderDirection" = Option<OrderDirection>, Query, description = "Order direction: ASC, DESC (default)"),
),
responses(
(status = 200, description = "Get list of worksheets", body = WorksheetsResponse),
Expand Down