Skip to content

Commit

Permalink
This commit does not belong to any branch on this repository, and may belong to a fork outside of the repository.
Fix segfault on non-open primary dimension when compressing
Browse files Browse the repository at this point in the history
With the new hypertable API, hypertables can be created with a
primary space partition. In dev builds this was prevented only by
Asserts. This patch removes the Asserts and adds a proper runtime check.
svenklemm committed May 31, 2024

Verified

This commit was signed with the committer’s verified signature.
chenrui333 Rui Chen
1 parent 01924c7 commit 6dd69f7
Showing 4 changed files with 27 additions and 9 deletions.
6 changes: 0 additions & 6 deletions src/subspace_store.c
Original file line number Diff line number Diff line change
@@ -75,12 +75,6 @@ ts_subspace_store_init(const Hyperspace *space, MemoryContext mcxt, int16 max_it
MemoryContext old = MemoryContextSwitchTo(mcxt);
SubspaceStore *sst = palloc(sizeof(SubspaceStore));

/*
* make sure that the first dimension is a time dimension, otherwise the
* tree will grow in a way that makes pruning less effective.
*/
Assert(space->num_dimensions < 1 || space->dimensions[0].type == DIMENSION_TYPE_OPEN);

sst->origin = subspace_store_internal_node_create(space->num_dimensions == 1);
sst->num_dimensions = space->num_dimensions;
/* max_items = 0 is treated as unlimited */
4 changes: 1 addition & 3 deletions tsl/src/compression/api.c
Original file line number Diff line number Diff line change
@@ -256,9 +256,7 @@ find_chunk_to_merge_into(Hypertable *ht, Chunk *current_chunk)

const Dimension *time_dim = hyperspace_get_open_dimension(ht->space, 0);

Assert(time_dim != NULL);

if (time_dim->fd.compress_interval_length == 0)
if (!time_dim || time_dim->fd.compress_interval_length == 0)
return NULL;

Assert(current_chunk->cube->num_slices > 0);
19 changes: 19 additions & 0 deletions tsl/test/expected/compression_merge.out
Original file line number Diff line number Diff line change
@@ -820,3 +820,22 @@ NOTICE: chunk "_hyper_17_344_chunk" is already compressed
(1 row)

ROLLBACK;
-- test segfault when compressing a hypertable with primary space dimension #6977
CREATE TABLE test_by_hash(id BIGINT, value float8);
SELECT create_hypertable('test_by_hash', by_hash('id', 8));
create_hypertable
-------------------
(19,t)
(1 row)

ALTER TABLE test_by_hash SET (timescaledb.compress = true);
WARNING: there was some uncertainty picking the default segment by for the hypertable: You do not have any indexes on columns that can be used for segment_by and thus we are not using segment_by for compression. Please make sure you are not missing any indexes
NOTICE: default segment by for hypertable "test_by_hash" is set to ""
NOTICE: default order by for hypertable "test_by_hash" is set to "id DESC"
INSERT INTO test_by_hash VALUES (1, 1.0), (2, 2.0), (3, 3.0);
SELECT compress_chunk('_timescaledb_internal._hyper_19_351_chunk');
compress_chunk
-------------------------------------------
_timescaledb_internal._hyper_19_351_chunk
(1 row)

7 changes: 7 additions & 0 deletions tsl/test/sql/compression_merge.sql
Original file line number Diff line number Diff line change
@@ -308,3 +308,10 @@ BEGIN;
SELECT hypertable_name, range_start, range_end FROM timescaledb_information.chunks WHERE hypertable_name = 'test9' ORDER BY 2;
ROLLBACK;

-- test segfault when compressing a hypertable with primary space dimension #6977
CREATE TABLE test_by_hash(id BIGINT, value float8);
SELECT create_hypertable('test_by_hash', by_hash('id', 8));
ALTER TABLE test_by_hash SET (timescaledb.compress = true);
INSERT INTO test_by_hash VALUES (1, 1.0), (2, 2.0), (3, 3.0);
SELECT compress_chunk('_timescaledb_internal._hyper_19_351_chunk');

0 comments on commit 6dd69f7

Please sign in to comment.