store: Properly account for fulltext columns in inserts
Fixes #2330
lutter committed Dec 16, 2022
1 parent 865d608 commit 8c868bb
Showing 2 changed files with 27 additions and 11 deletions.
store/postgres/src/relational.rs: 18 changes (7 additions & 11 deletions)
@@ -59,7 +59,6 @@ pub use crate::catalog::Catalog;
 use crate::connection_pool::ForeignServer;
 use crate::{catalog, deployment};
 
-const POSTGRES_MAX_PARAMETERS: usize = u16::MAX as usize; // 65535
 const DELETE_OPERATION_CHUNK_SIZE: usize = 1_000;
 
 /// The size of string prefixes that we index. This is chosen so that we
@@ -601,11 +600,10 @@ impl Layout {
         let table = self.table_for_entity(entity_type)?;
         let _section = stopwatch.start_section("insert_modification_insert_query");
         let mut count = 0;
-        // Each operation must respect the maximum number of bindings allowed in PostgreSQL queries,
-        // so we need to act in chunks whose size is defined by the number of entities times the
-        // number of attributes each entity type has.
-        // We add 1 to account for the `block_range` bind parameter
-        let chunk_size = POSTGRES_MAX_PARAMETERS / (table.columns.len() + 1);
+
+        // We insert the entities in chunks to make sure each operation does
+        // not exceed the maximum number of bindings allowed in queries
+        let chunk_size = InsertQuery::chunk_size(table);
         for chunk in entities.chunks_mut(chunk_size) {
             count += InsertQuery::new(table, chunk, block)?
                 .get_results(conn)
@@ -765,11 +763,9 @@
         let _section = stopwatch.start_section("update_modification_insert_query");
         let mut count = 0;
 
-        // Each operation must respect the maximum number of bindings allowed in PostgreSQL queries,
-        // so we need to act in chunks whose size is defined by the number of entities times the
-        // number of attributes each entity type has.
-        // We add 1 to account for the `block_range` bind parameter
-        let chunk_size = POSTGRES_MAX_PARAMETERS / (table.columns.len() + 1);
+        // We insert the entities in chunks to make sure each operation does
+        // not exceed the maximum number of bindings allowed in queries
+        let chunk_size = InsertQuery::chunk_size(table);
         for chunk in entities.chunks_mut(chunk_size) {
             count += InsertQuery::new(table, chunk, block)?.execute(conn)?;
         }
store/postgres/src/relational_queries.rs: 20 changes (20 additions & 0 deletions)
@@ -47,6 +47,9 @@ use crate::{
 /// Those are columns that we always want to fetch from the database.
 const BASE_SQL_COLUMNS: [&'static str; 2] = ["id", "vid"];
 
+/// The maximum number of bind variables that can be used in a query
+const POSTGRES_MAX_PARAMETERS: usize = u16::MAX as usize; // 65535
+
 #[derive(Debug)]
 pub(crate) struct UnsupportedFilter {
     pub filter: String,
@@ -1695,6 +1698,23 @@ impl<'a> InsertQuery<'a> {
         }
         hashmap.into_iter().map(|(_key, value)| value).collect()
     }
+
+    /// Return the maximum number of entities that can be inserted with one
+    /// invocation of `InsertQuery`. The number makes it so that we do not
+    /// exceed the maximum number of bind variables that can be used in a
+    /// query, and depends on what columns `table` has and how they get put
+    /// into the query
+    pub fn chunk_size(table: &Table) -> usize {
+        let mut count = 1;
+        for column in table.columns.iter() {
+            if let Some(fields) = &column.fulltext_fields {
+                count += fields.len()
+            } else {
+                count += 1
+            }
+        }
+        POSTGRES_MAX_PARAMETERS / count
+    }
 }
 
 impl<'a> QueryFragment<Pg> for InsertQuery<'a> {
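To illustrate the arithmetic, here is a minimal standalone sketch of the bind-parameter counting that `InsertQuery::chunk_size` performs. The `ColumnKind` enum and the example numbers are hypothetical stand-ins for the crate's actual `Table` and `Column` types; the real code walks `table.columns` as shown in the diff above.

const POSTGRES_MAX_PARAMETERS: usize = u16::MAX as usize; // 65535

enum ColumnKind {
    // An ordinary column binds exactly one parameter per inserted row.
    Plain,
    // A fulltext column is assembled from several entity fields, and each
    // of those fields is bound separately, so it costs `fields` parameters.
    Fulltext { fields: usize },
}

fn chunk_size(columns: &[ColumnKind]) -> usize {
    // Start at 1 for the `block_range` parameter that every row carries.
    let mut per_row = 1;
    for column in columns {
        per_row += match column {
            ColumnKind::Plain => 1,
            ColumnKind::Fulltext { fields } => *fields,
        };
    }
    POSTGRES_MAX_PARAMETERS / per_row
}

fn main() {
    // Hypothetical table: three plain columns plus one fulltext column
    // built from 3 entity fields.
    let columns = [
        ColumnKind::Plain,
        ColumnKind::Plain,
        ColumnKind::Plain,
        ColumnKind::Fulltext { fields: 3 },
    ];
    // 1 (block_range) + 3 plain + 3 fulltext fields = 7 parameters per row,
    // so at most 65535 / 7 = 9362 entities fit into a single INSERT.
    assert_eq!(chunk_size(&columns), 9362);
}

The previous formula, `POSTGRES_MAX_PARAMETERS / (table.columns.len() + 1)`, counted each fulltext column as a single binding; when a fulltext column spans several fields, the resulting chunks could exceed the 65535-parameter limit, which is the problem this commit fixes.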
