be/src/vec/exec/format/orc/vorc_reader.cpp (1 addition, 1 deletion)
@@ -1117,7 +1117,7 @@ Status OrcReader::set_fill_columns(
}
}

-    if (!_has_complex_type && _enable_lazy_mat && !_lazy_read_ctx.predicate_columns.first.empty() &&
+    if (_enable_lazy_mat && !_lazy_read_ctx.predicate_columns.first.empty() &&
!_lazy_read_ctx.lazy_read_columns.empty()) {
_lazy_read_ctx.can_lazy_read = true;
}
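With the `!_has_complex_type` guard removed, `can_lazy_read` can now be set even when the scan includes ARRAY/MAP/STRUCT columns. As an illustrative sketch (this query is an assumption, not one taken from the PR's regression suite), a query against the nested_types1_orc test table added below would now qualify for lazy materialization: the scalar predicate column is read and filtered first, and the complex columns are materialized only for the surviving rows.

-- id is the predicate column; the complex columns are materialized lazily.
SELECT array_col, map_col, struct_col
FROM multi_catalog.nested_types1_orc
WHERE id = 1;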
New file (create_table.hql, per the setup script below):
@@ -0,0 +1,32 @@
CREATE DATABASE IF NOT EXISTS multi_catalog;
USE multi_catalog;

CREATE TABLE `nested_types1_orc` (
`id` INT,
`array_col` ARRAY<INT>,
`nested_array_col` ARRAY<ARRAY<INT>>,
`map_col` MAP<STRING, INT>,
`nested_map_col` MAP<STRING, ARRAY<INT>>,
`struct_col` STRUCT<`name`: STRING, `age`: INT>,
`array_struct_col` ARRAY<STRUCT<`name`: STRING, `age`: INT>>,
`map_struct_col` MAP<STRING, STRUCT<`name`: STRING, `age`: INT>>,
`complex_struct_col` STRUCT<
`a`: ARRAY<INT>,
`b`: MAP<STRING, ARRAY<INT>>,
`c`: STRUCT<
`x`: ARRAY<INT>,
`y`: STRING
>
>
)
ROW FORMAT SERDE
'org.apache.hadoop.hive.ql.io.orc.OrcSerde'
STORED AS INPUTFORMAT
'org.apache.hadoop.hive.ql.io.orc.OrcInputFormat'
OUTPUTFORMAT
'org.apache.hadoop.hive.ql.io.orc.OrcOutputFormat'
LOCATION
'/user/doris/suites/multi_catalog/nested_types1_orc';

MSCK REPAIR TABLE nested_types1_orc;
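For reference, the nested columns declared above can be addressed with struct dot-paths and array/map subscripts; an illustrative Hive query, not part of this change:

-- 'k' is a hypothetical map key used only for illustration.
SELECT id,
       nested_array_col[0][1],
       nested_map_col['k'][0],
       complex_struct_col.c.y
FROM nested_types1_orc;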

Binary file not shown (likely data.tar.gz, the archive unpacked by the setup script below).
@@ -0,0 +1,12 @@
#!/bin/bash
set -x

CUR_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" &>/dev/null && pwd)"

## extract the test data archive and upload it to HDFS
cd "${CUR_DIR}" && rm -rf data/ && tar xzf data.tar.gz
hadoop fs -mkdir -p /user/doris/suites/multi_catalog/
hadoop fs -put "${CUR_DIR}"/data/* /user/doris/suites/multi_catalog/

# create table
hive -f "${CUR_DIR}/create_table.hql"
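A possible sanity check after the script runs, to confirm the table and its data are visible from Hive (hypothetical, not part of this diff):

USE multi_catalog;
SHOW TABLES LIKE 'nested_types1_orc';
SELECT COUNT(*) FROM nested_types1_orc;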