Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

refactor!: ResourceId -> ast::ObjectName #448

Closed
Closed
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
10 changes: 10 additions & 0 deletions crates/proof-of-sql-parser/src/sqlparser.rs
Original file line number Diff line number Diff line change
Expand Up @@ -40,6 +40,16 @@ impl From<ResourceId> for ObjectName {
}
}

/// Converts a dot-separated string into an `ObjectName`.
///
/// Each `.`-separated segment becomes one unquoted [`Ident`]; e.g.
/// `"schema.table"` becomes `ObjectName` of `[Ident("schema"), Ident("table")]`.
///
/// NOTE(review): this splits naively on `.`, so quoted identifiers that
/// themselves contain dots (e.g. `"a.b".c`) are not handled — confirm
/// callers never pass such names.
#[must_use]
pub fn object_name_from(s: &str) -> ObjectName {
    // `Ident::new` accepts any `Into<String>`, so each `&str` segment can be
    // passed directly; the intermediate `to_string()` allocation is unnecessary.
    ObjectName(s.split('.').map(Ident::new).collect())
}

impl From<TableExpression> for TableFactor {
fn from(table: TableExpression) -> Self {
match table {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -60,7 +60,9 @@ impl<C: Commitment> ColumnCommitments<C> {
ColumnCommitmentMetadataMap::from_column_fields_with_max_bounds(columns);
let commitments = columns
.iter()
.map(|c| accessor.get_commitment(ColumnRef::new(table, c.name(), c.data_type())))
.map(|c| {
accessor.get_commitment(ColumnRef::new(table.clone(), c.name(), c.data_type()))
})
.collect();
ColumnCommitments {
commitments,
Expand Down
6 changes: 3 additions & 3 deletions crates/proof-of-sql/src/base/commitment/query_commitments.rs
Original file line number Diff line number Diff line change
Expand Up @@ -49,11 +49,11 @@ impl<C: Commitment> QueryCommitmentsExt<C> for QueryCommitments<C> {
.into_iter()
.map(|(table_ref, columns)| {
(
table_ref,
table_ref.clone(),
TableCommitment::from_accessor_with_max_bounds(
table_ref,
table_ref.clone(),
accessor
.lookup_schema(table_ref)
.lookup_schema(table_ref.clone())
.iter()
.filter_map(|c| {
columns.iter().find(|x| x.name() == c.0.clone()).cloned()
Expand Down
4 changes: 2 additions & 2 deletions crates/proof-of-sql/src/base/commitment/table_commitment.rs
Original file line number Diff line number Diff line change
Expand Up @@ -101,8 +101,8 @@ impl<C: Commitment> TableCommitment<C> {
columns: &[ColumnField],
accessor: &impl CommitmentAccessor<C>,
) -> Self {
let length = accessor.get_length(table_ref);
let offset = accessor.get_offset(table_ref);
let length = accessor.get_length(table_ref.clone());
let offset = accessor.get_offset(table_ref.clone());
Self::try_new(
ColumnCommitments::from_accessor_with_max_bounds(table_ref, columns, accessor),
offset..offset + length,
Expand Down
10 changes: 5 additions & 5 deletions crates/proof-of-sql/src/base/database/column.rs
Original file line number Diff line number Diff line change
Expand Up @@ -491,16 +491,16 @@ impl Display for ColumnType {

/// Reference of a SQL column
#[derive(Debug, PartialEq, Eq, Clone, Hash, Serialize, Deserialize)]
pub struct ColumnRef {
pub struct ColumnRef<'a> {
column_id: Ident,
table_ref: TableRef,
table_ref: &'a TableRef,
column_type: ColumnType,
}

impl ColumnRef {
impl<'a> ColumnRef<'a> {
/// Create a new `ColumnRef` from a table, column identifier and column type
#[must_use]
pub fn new(table_ref: TableRef, column_id: Ident, column_type: ColumnType) -> Self {
pub fn new(table_ref: &'a TableRef, column_id: Ident, column_type: ColumnType) -> Self {
Self {
column_id,
table_ref,
Expand All @@ -511,7 +511,7 @@ impl ColumnRef {
/// Returns the table reference of this column
#[must_use]
pub fn table_ref(&self) -> TableRef {
self.table_ref
self.table_ref.clone()
}

/// Returns the column identifier of this column
Expand Down
36 changes: 22 additions & 14 deletions crates/proof-of-sql/src/base/database/table_ref.rs
Original file line number Diff line number Diff line change
Expand Up @@ -4,52 +4,60 @@ use core::{
fmt::{Display, Formatter},
str::FromStr,
};
use proof_of_sql_parser::{impl_serde_from_str, ResourceId};
use sqlparser::ast::Ident;
use proof_of_sql_parser::{impl_serde_from_str, sqlparser::object_name_from};
use sqlparser::ast::{Ident, ObjectName};

/// Expression for an SQL table
#[derive(Debug, Clone, PartialEq, Eq, Hash, Copy)]
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub struct TableRef {
resource_id: ResourceId,
object_name: ObjectName,
}

impl TableRef {
/// Creates a new table reference from a resource id
#[must_use]
pub fn new(resource_id: ResourceId) -> Self {
Self { resource_id }
pub fn new(object_name: ObjectName) -> Self {
Self {
object_name: object_name.clone(),
}
}

/// Returns the identifier of the schema
#[must_use]
pub fn schema_id(&self) -> Ident {
self.resource_id.schema().into()
pub fn schema_id(&self) -> Option<Ident> {
self.object_name.0.get(0).cloned()
}

/// Returns the identifier of the table
#[must_use]
pub fn table_id(&self) -> Ident {
self.resource_id.object_name().into()
pub fn table_id(&self) -> Option<Ident> {
self.object_name.0.get(1).cloned()
}

/// Returns the underlying resource id of the table
#[must_use]
pub fn resource_id(&self) -> ResourceId {
self.resource_id
pub fn object_name(&self) -> ObjectName {
self.object_name.clone()
}
}

impl FromStr for TableRef {
type Err = proof_of_sql_parser::ParseError;

fn from_str(s: &str) -> Result<Self, Self::Err> {
Ok(Self::new(s.parse()?))
Ok(Self::new(object_name_from(s)))
}
}

/// Builds a `TableRef` directly from a dot-separated string slice,
/// e.g. `"schema.table"`.
impl From<&str> for TableRef {
    fn from(value: &str) -> Self {
        let name = object_name_from(value);
        Self::new(name)
    }
}

impl Display for TableRef {
fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
self.resource_id.fmt(f)
self.object_name.clone().fmt(f)
}
}

Expand Down
3 changes: 1 addition & 2 deletions crates/proof-of-sql/src/base/database/table_utility.rs
Original file line number Diff line number Diff line change
Expand Up @@ -139,8 +139,7 @@ pub fn borrowed_int<S: Scalar>(
/// use bumpalo::Bump;
/// use proof_of_sql::base::{database::table_utility::*, scalar::Curve25519Scalar};
/// let alloc = Bump::new();
/// let result = table::<Curve25519Scalar>([
/// borrowed_bigint("a", [1, 2, 3], &alloc),
/// let result = table::<Curve25519Scalar>([
///     borrowed_bigint("a", [1, 2, 3], &alloc),
/// ]);
/// ```

Expand Down
18 changes: 10 additions & 8 deletions crates/proof-of-sql/src/base/database/test_schema_accessor.rs
Original file line number Diff line number Diff line change
@@ -1,6 +1,7 @@
use super::{ColumnType, SchemaAccessor, TableRef};
use crate::base::map::IndexMap;
use sqlparser::ast::Ident;

/// A simple in-memory `SchemaAccessor` for testing intermediate AST -> Provable AST conversion.
pub struct TestSchemaAccessor {
schemas: IndexMap<TableRef, IndexMap<Ident, ColumnType>>,
Expand Down Expand Up @@ -32,10 +33,11 @@ impl SchemaAccessor for TestSchemaAccessor {
mod tests {
use super::*;
use crate::base::map::indexmap;
use proof_of_sql_parser::sqlparser::object_name_from;

fn sample_test_schema_accessor() -> TestSchemaAccessor {
let table1: TableRef = TableRef::new("schema.table1".parse().unwrap());
let table2: TableRef = TableRef::new("schema.table2".parse().unwrap());
let table1: TableRef = TableRef::new(object_name_from("schema.table1"));
let table2: TableRef = TableRef::new(object_name_from("schema.table2"));
TestSchemaAccessor::new(indexmap! {
table1 => indexmap! {
"col1".into() => ColumnType::BigInt,
Expand All @@ -50,9 +52,9 @@ mod tests {
#[test]
fn test_lookup_column() {
let accessor = sample_test_schema_accessor();
let table1: TableRef = TableRef::new("schema.table1".parse().unwrap());
let table2: TableRef = TableRef::new("schema.table2".parse().unwrap());
let not_a_table: TableRef = TableRef::new("schema.not_a_table".parse().unwrap());
let table1: TableRef = TableRef::new(object_name_from("schema.table1"));
let table2: TableRef = TableRef::new(object_name_from("schema.table2"));
let not_a_table: TableRef = TableRef::new(object_name_from("schema.not_a_table"));
assert_eq!(
accessor.lookup_column(table1, "col1".into()),
Some(ColumnType::BigInt)
Expand All @@ -78,9 +80,9 @@ mod tests {
#[test]
fn test_lookup_schema() {
let accessor = sample_test_schema_accessor();
let table1: TableRef = TableRef::new("schema.table1".parse().unwrap());
let table2: TableRef = TableRef::new("schema.table2".parse().unwrap());
let not_a_table: TableRef = TableRef::new("schema.not_a_table".parse().unwrap());
let table1: TableRef = TableRef::new(object_name_from("schema.table1"));
let table2: TableRef = TableRef::new(object_name_from("schema.table2"));
let not_a_table: TableRef = TableRef::new(object_name_from("schema.not_a_table"));
assert_eq!(
accessor.lookup_schema(table1),
vec![
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -125,15 +125,17 @@ mod tests {
let identifier_b = "b".into();
let identifier_alias = "alias".into();

let column_ref_a = ColumnRef::new(table_ref, identifier_a, ColumnType::BigInt);
let column_ref_b = ColumnRef::new(table_ref, identifier_b, ColumnType::BigInt);
let column_ref_a = ColumnRef::new(&table_ref, identifier_a, ColumnType::BigInt);
let column_ref_b = ColumnRef::new(&table_ref, identifier_b, ColumnType::BigInt);

let plan = DynProofPlan::Filter(FilterExec::new(
vec![AliasedDynProofExpr {
expr: DynProofExpr::Column(ColumnExpr::new(column_ref_b)),
alias: identifier_alias,
}],
TableExpr { table_ref },
TableExpr {
table_ref: &table_ref,
},
DynProofExpr::Equals(EqualsExpr::new(
Box::new(DynProofExpr::Column(ColumnExpr::new(column_ref_a))),
Box::new(DynProofExpr::Literal(LiteralExpr::new(
Expand Down
8 changes: 4 additions & 4 deletions crates/proof-of-sql/src/sql/parse/error.rs
Original file line number Diff line number Diff line change
Expand Up @@ -8,20 +8,20 @@ use alloc::{
string::{String, ToString},
};
use core::result::Result;
use proof_of_sql_parser::{posql_time::PoSQLTimestampError, ResourceId};
use proof_of_sql_parser::posql_time::PoSQLTimestampError;
use snafu::Snafu;
use sqlparser::ast::Ident;

/// Errors from converting an intermediate AST into a provable AST.
#[derive(Snafu, Debug, PartialEq, Eq)]
pub enum ConversionError {
#[snafu(display("Column '{identifier}' was not found in table '{resource_id}'"))]
#[snafu(display("Column '{identifier}' was not found in table '{object_name}'"))]
/// The column is missing in the table
MissingColumn {
/// The missing column identifier
identifier: Box<Ident>,
/// The table resource id
resource_id: Box<ResourceId>,
/// The table object name
object_name: String,
},

#[snafu(display("Column '{identifier}' was not found"))]
Expand Down
4 changes: 2 additions & 2 deletions crates/proof-of-sql/src/sql/parse/query_context.rs
Original file line number Diff line number Diff line change
Expand Up @@ -242,7 +242,7 @@ impl TryFrom<&QueryContext> for Option<GroupByExec> {
expression: "QueryContext has no table_ref".to_owned(),
},
)?;
let resource_id = table.table_ref.resource_id();
let object_name = table.table_ref.object_name();
let group_by_exprs = value
.group_by_exprs
.iter()
Expand All @@ -252,7 +252,7 @@ impl TryFrom<&QueryContext> for Option<GroupByExec> {
.get(expr)
.ok_or(ConversionError::MissingColumn {
identifier: Box::new((expr).clone()),
resource_id: Box::new(resource_id),
object_name: Box::new(object_name),
})
.map(|column_ref| ColumnExpr::new(column_ref.clone()))
})
Expand Down
10 changes: 5 additions & 5 deletions crates/proof-of-sql/src/sql/parse/query_context_builder.rs
Original file line number Diff line number Diff line change
Expand Up @@ -15,14 +15,14 @@ use proof_of_sql_parser::{
AggregationOperator, AliasedResultExpr, Expression, Literal, OrderBy, SelectResultExpr,
Slice, TableExpression,
},
Identifier, ResourceId,
Identifier,
};
use sqlparser::ast::{BinaryOperator, UnaryOperator};
pub struct QueryContextBuilder<'a> {
context: QueryContext,
schema_accessor: &'a dyn SchemaAccessor,
}
use sqlparser::ast::Ident;
use sqlparser::ast::{Ident, ObjectName};

// Public interface
impl<'a> QueryContextBuilder<'a> {
Expand All @@ -44,7 +44,7 @@ impl<'a> QueryContextBuilder<'a> {
TableExpression::Named { table, schema } => {
let schema_identifier = schema.unwrap_or(default_schema);
self.context
.set_table_ref(TableRef::new(ResourceId::new(schema_identifier, table)));
.set_table_ref(TableRef::new(ObjectName::new(schema_identifier, table)));
}
}
self
Expand Down Expand Up @@ -109,7 +109,7 @@ impl<'a> QueryContextBuilder<'a> {
)]
fn lookup_schema(&self) -> Vec<(Ident, ColumnType)> {
let table_ref = self.context.get_table_ref();
let columns = self.schema_accessor.lookup_schema(*table_ref);
let columns = self.schema_accessor.lookup_schema(table_ref.clone());
assert!(!columns.is_empty(), "At least one column must exist");
columns
}
Expand Down Expand Up @@ -265,7 +265,7 @@ impl<'a> QueryContextBuilder<'a> {

let column_type = column_type.ok_or_else(|| ConversionError::MissingColumn {
identifier: Box::new(column_name.clone()),
resource_id: Box::new(table_ref.resource_id()),
object_name: Box::new(table_ref.object_name()),
})?;

let column = ColumnRef::new(*table_ref, column_name.clone(), column_type);
Expand Down
4 changes: 2 additions & 2 deletions crates/proof-of-sql/src/sql/parse/query_expr.rs
Original file line number Diff line number Diff line change
Expand Up @@ -102,7 +102,7 @@ impl QueryExpr {
})
.collect::<Vec<_>>();
let filter = FilterExecBuilder::new(context.get_column_mapping())
.add_table_expr(*context.get_table_ref())
.add_table_expr(context.get_table_ref().clone())
.add_where_expr(context.get_where_expr().clone())?
.add_result_columns(&raw_enriched_exprs)
.build();
Expand Down Expand Up @@ -144,7 +144,7 @@ impl QueryExpr {
.map(|enriched_expr| enriched_expr.residue_expression.clone())
.collect::<Vec<_>>();
let filter = FilterExecBuilder::new(context.get_column_mapping())
.add_table_expr(*context.get_table_ref())
.add_table_expr(context.get_table_ref().clone())
.add_where_expr(context.get_where_expr().clone())?
.add_result_columns(&enriched_exprs)
.build();
Expand Down
10 changes: 5 additions & 5 deletions crates/proof-of-sql/src/sql/proof/query_proof.rs
Original file line number Diff line number Diff line change
Expand Up @@ -37,8 +37,8 @@ fn get_index_range<'a>(
table_refs
.into_iter()
.map(|table_ref| {
let length = accessor.get_length(*table_ref);
let offset = accessor.get_offset(*table_ref);
let length = accessor.get_length(table_ref.clone());
let offset = accessor.get_offset(table_ref.clone());
(offset, offset + length)
})
.reduce(|(min_start, max_end), (start, end)| (min_start.min(start), max_end.max(end)))
Expand Down Expand Up @@ -99,7 +99,7 @@ impl<CP: CommitmentEvaluationProof> QueryProof<CP> {
.filter(|col_ref| col_ref.table_ref() == table_ref)
.cloned()
.collect();
(table_ref, accessor.get_table(table_ref, &col_refs))
(table_ref.clone(), accessor.get_table(table_ref, &col_refs))
})
.collect();

Expand Down Expand Up @@ -338,7 +338,7 @@ impl<CP: CommitmentEvaluationProof> QueryProof<CP> {
// Always prepend input lengths to the one evaluation lengths
let table_length_map = table_refs
.iter()
.map(|table_ref| (table_ref, accessor.get_length(*table_ref)))
.map(|table_ref| (table_ref, accessor.get_length(table_ref.clone())))
.collect::<IndexMap<_, _>>();

let one_evaluation_lengths = table_length_map
Expand All @@ -357,7 +357,7 @@ impl<CP: CommitmentEvaluationProof> QueryProof<CP> {
);
let one_eval_map: IndexMap<TableRef, CP::Scalar> = table_length_map
.iter()
.map(|(table_ref, length)| (**table_ref, sumcheck_evaluations.one_evaluations[length]))
.map(|(table_ref, length)| (table_ref, sumcheck_evaluations.one_evaluations[length]))
.collect();
let mut builder = VerificationBuilder::new(
min_row_num,
Expand Down
Loading
Loading