
Commit

feat: correct column order (apache#340)
* edit: column order uses the user-defined order

* edit: update commit

* fix: some errors

* fix: ComparableInternalKey key encoding

* edit: select

* fix: insert data comparison uses primary key

* fix: unit test

* edit: remove annotation, fix large enum variant

* fix

* fix: unit test

* fix: unit test

* fix: project_record_schema_with_key

* fix: unit test

* edit: simplify code

* edit: update description
dust1 authored Nov 18, 2022
1 parent 28f04a8 commit 8adc9ec
Showing 15 changed files with 261 additions and 190 deletions.
11 changes: 4 additions & 7 deletions analytic_engine/src/memtable/key.rs
@@ -121,9 +121,8 @@ impl<'a> Encoder<Row> for ComparableInternalKey<'a> {

     fn encode<B: BufMut>(&self, buf: &mut B, value: &Row) -> Result<()> {
         let encoder = MemComparable;
-        for idx in 0..self.schema.num_key_columns() {
-            // Encode each column in primary key
-            encoder.encode(buf, &value[idx]).context(EncodeKeyDatum)?;
+        for idx in self.schema.primary_key_indexes() {
+            encoder.encode(buf, &value[*idx]).context(EncodeKeyDatum)?;
         }
         SequenceCodec.encode(buf, &self.sequence)?;

@@ -133,11 +132,9 @@ impl<'a> Encoder<Row> for ComparableInternalKey<'a> {
     fn estimate_encoded_size(&self, value: &Row) -> usize {
         let encoder = MemComparable;
         let mut total_len = 0;
-        for idx in 0..self.schema.num_key_columns() {
-            // Size of each column in primary key
-            total_len += encoder.estimate_encoded_size(&value[idx]);
+        for idx in self.schema.primary_key_indexes() {
+            total_len += encoder.estimate_encoded_size(&value[*idx]);
         }
-        // The size of sequence
         total_len += KEY_SEQUENCE_BYTES_LEN;

         total_len
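The two hunks above switch the memtable key encoder from assuming that the primary key occupies the first num_key_columns() positions of a row to walking the explicit primary_key_indexes(), so the key is built correctly even when the user declares key columns out of positional order. Below is a minimal standalone sketch of that pattern; the Schema and Row types and the length-prefixed encoding are toy stand-ins, not the engine's real types or its MemComparable codec.

// Sketch only: toy stand-ins for the engine's Schema, Row, and key codec.
struct Schema {
    primary_key_indexes: Vec<usize>, // key column positions, in declared key order
}

type Row = Vec<String>; // one datum per column, in the user-defined column order

/// Encode the key columns in primary-key order, not in positional row order.
fn encode_key(schema: &Schema, row: &Row) -> Vec<u8> {
    let mut buf = Vec::new();
    for &idx in &schema.primary_key_indexes {
        let datum = row[idx].as_bytes();
        // Naive length-prefixed encoding; the engine uses MemComparable instead.
        buf.extend_from_slice(&(datum.len() as u32).to_be_bytes());
        buf.extend_from_slice(datum);
    }
    buf
}

fn main() {
    // Columns: ["value", "ts", "region"]; the key columns (ts, region) are not leading.
    let schema = Schema { primary_key_indexes: vec![1, 2] };
    let row: Row = vec!["42".into(), "1668729600".into(), "eu-1".into()];
    // Encodes "1668729600" then "eu-1"; a 0..num_key_columns() loop would
    // wrongly encode "42" and "1668729600".
    println!("{:?}", encode_key(&schema, &row));
}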
1 change: 0 additions & 1 deletion analytic_engine/src/meta/meta_update.rs
@@ -226,7 +226,6 @@ impl TryFrom<meta_pb::AddTableMeta> for AddTableMeta {
     fn try_from(src: meta_pb::AddTableMeta) -> Result<Self> {
         let table_schema = src.schema.context(EmptyTableSchema)?;
         let opts = src.options.context(EmptyTableOptions)?;
-
         Ok(Self {
             space_id: src.space_id,
             table_id: TableId::from(src.table_id),
4 changes: 2 additions & 2 deletions analytic_engine/src/row_iter/dedup.rs
@@ -84,8 +84,8 @@ impl<I: RecordBatchWithKeyIterator> DedupIterator<I> {
         }

         // Dedup batch.
-        for col_idx in 0..self.schema.num_key_columns() {
-            let column = record_batch.column(col_idx);
+        for col_idx in self.schema.primary_key_idx() {
+            let column = record_batch.column(*col_idx);

             column.dedup(&mut self.selected_rows);
         }
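Similarly, the dedup pass now reads the batch columns named by primary_key_idx() rather than the first num_key_columns() columns. A rough sketch of deduplicating adjacent rows on selected key-column indexes, with plain row vectors standing in for the engine's columnar RecordBatchWithKey:

// Sketch only: rows are Vec<String>; the engine works on columnar record batches.
fn dedup_on_key_columns(rows: &[Vec<String>], primary_key_idx: &[usize]) -> Vec<Vec<String>> {
    let mut out: Vec<Vec<String>> = Vec::new();
    for row in rows {
        // A row is a duplicate if every key column matches the previously kept row.
        let is_dup = out
            .last()
            .map_or(false, |prev| primary_key_idx.iter().all(|&i| prev[i] == row[i]));
        if !is_dup {
            out.push(row.clone());
        }
    }
    out
}

fn main() {
    // Key columns are at indexes 1 and 2; index 0 is a value column.
    let rows = vec![
        vec!["1".to_string(), "t1".to_string(), "eu".to_string()],
        vec!["2".to_string(), "t1".to_string(), "eu".to_string()], // same key -> dropped
        vec!["3".to_string(), "t2".to_string(), "eu".to_string()],
    ];
    assert_eq!(dedup_on_key_columns(&rows, &[1, 2]).len(), 2);
}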
8 changes: 6 additions & 2 deletions analytic_engine/src/table/mod.rs
@@ -139,7 +139,7 @@ impl Table for TableImpl {

     async fn get(&self, request: GetRequest) -> Result<Option<Row>> {
         let schema = request.projected_schema.to_record_schema_with_key();
-        let primary_key_columns = schema.key_columns();
+        let primary_key_columns = &schema.key_columns()[..];
         ensure!(
             primary_key_columns.len() == request.primary_key.len(),
             GetInvalidPrimaryKey {

@@ -199,7 +199,11 @@
                 result_columns.push(col.datum(row_idx));
             }

-            if request.primary_key == result_columns[..schema.num_key_columns()] {
+            let mut result_columns_k = vec![];
+            for col_idx in schema.primary_key_idx() {
+                result_columns_k.push(result_columns[*col_idx].clone());
+            }
+            if request.primary_key == result_columns_k {
                 return Ok(Some(Row::from_datums(result_columns)));
             }
             result_columns.clear();
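Table::get previously compared request.primary_key against a positional prefix of the projected row (result_columns[..num_key_columns()]), which breaks when the key columns are not the leading columns. The new code gathers the key datums by primary_key_idx() before comparing. A hedged sketch of that lookup comparison, with String standing in for the engine's Datum type:

// Sketch only: String stands in for the engine's Datum type.
fn row_matches_key(row: &[String], primary_key_idx: &[usize], requested_key: &[String]) -> bool {
    // Compare the row's key datums, taken in primary-key index order, to the request.
    primary_key_idx.iter().map(|&i| &row[i]).eq(requested_key.iter())
}

fn main() {
    // Columns: ["value", "ts", "region"]; key columns are ts (1) and region (2).
    let row = vec!["42".to_string(), "1668729600".to_string(), "eu-1".to_string()];
    let key = vec!["1668729600".to_string(), "eu-1".to_string()];
    assert!(row_matches_key(&row, &[1, 2], &key));
    // A prefix comparison row[..2] would have checked ["42", "1668729600"] instead.
    assert!(!row_matches_key(&row, &[1, 2], &["42".to_string(), "1668729600".to_string()]));
}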