Improve read session caching key (#1118)
* Improve read session caching key

* update changes file
vishalkarve15 authored Nov 6, 2023
1 parent 2bd1df6 commit 00b0c4b
Showing 2 changed files with 7 additions and 7 deletions.
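The substantive change in this commit replaces the array-valued pushedFilters field with a Set<Filter>, so pushed-down filters are kept as a content-compared, duplicate-free collection rather than an ordered array. Below is a minimal sketch of that difference in plain Java, using standard Spark filter classes; it is illustrative only, and the link to the read session cache key is an assumption drawn from the commit title rather than from code shown on this page.

```java
import java.util.Arrays;
import java.util.HashSet;
import java.util.Set;
import org.apache.spark.sql.sources.EqualTo;
import org.apache.spark.sql.sources.Filter;
import org.apache.spark.sql.sources.GreaterThan;

public class FilterCollectionSketch {
  public static void main(String[] args) {
    Filter a = new EqualTo("country", "US");
    Filter b = new GreaterThan("age", 21);

    // Arrays compare element by element in order, so the same filters pushed
    // in a different order look like a different collection.
    Filter[] array1 = {a, b};
    Filter[] array2 = {b, a};
    System.out.println(Arrays.equals(array1, array2)); // false

    // A Set compares by content regardless of order and silently drops
    // duplicates, so repeated pushes of the same filter leave one stable entry.
    Set<Filter> set1 = new HashSet<>(Arrays.asList(a, b));
    Set<Filter> set2 = new HashSet<>(Arrays.asList(b, a, a));
    System.out.println(set1.equals(set2)); // true
  }
}
```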
1 change: 1 addition & 0 deletions CHANGES.md
@@ -3,6 +3,7 @@
## Next

* PR #1117: Make read session caching duration configurable
+* PR #1118: Improve read session caching key

## 0.34.0 - 2023-10-31

13 changes: 6 additions & 7 deletions (second changed file)
@@ -43,7 +43,7 @@
import com.google.common.collect.Iterables;
import com.google.common.collect.Streams;
import java.util.ArrayList;
-import java.util.Arrays;
+import java.util.HashSet;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
@@ -98,7 +98,7 @@ public OptionalLong numRows() {
private final String applicationId;
private Optional<StructType> schema;
private Optional<StructType> userProvidedSchema;
-private Filter[] pushedFilters = new Filter[] {};
+private final Set<Filter> pushedFilters = new HashSet<>();
private Filter[] allFilters = new Filter[] {};
private Map<String, StructField> fields;
private ImmutableList<String> selectedFields;
@@ -199,7 +199,7 @@ public Optional<String> getCombinedFilter() {
readSessionCreatorConfig.getPushAllFilters(),
readSessionCreatorConfig.getReadDataFormat(),
globalFilter,
-pushedFilters));
+pushedFilters.toArray(new Filter[0])));
}

public Stream<InputPartitionContext<ColumnarBatch>> planBatchInputPartitionContexts() {
@@ -330,12 +330,12 @@ public Filter[] pushFilters(Filter[] filters) {
}

allFilters = filters;
-pushedFilters = handledFilters.stream().toArray(Filter[]::new);
+pushedFilters.addAll(handledFilters);
return unhandledFilters.stream().toArray(Filter[]::new);
}

public Filter[] pushedFilters() {
-return pushedFilters;
+return pushedFilters.toArray(new Filter[0]);
}

public Filter[] getAllFilters() {
@@ -368,8 +368,7 @@ public Optional<List<ArrowInputPartitionContext>> filter(Filter[] filters) {
BigQueryUtil.friendlyTableName(tableId));
return Optional.empty();
}
-pushedFilters =
-    Stream.concat(Arrays.stream(pushedFilters), newFilters.stream()).toArray(Filter[]::new);
+pushedFilters.addAll(newFilters);
Optional<String> combinedFilter = getCombinedFilter();
if (!BigQueryUtil.filterLengthInLimit(combinedFilter)) {
logger.warn(
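Taken together, the hunks above change pushFilters(...) and filter(...) from rebuilding a Filter[] on every call to accumulating into one shared set, while pushedFilters() still hands callers an array copy. The stand-in class below mirrors that bookkeeping in isolation; PushedFilterTracker and its handled-filter parameter are hypothetical names for illustration, not connector API.

```java
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import org.apache.spark.sql.sources.EqualTo;
import org.apache.spark.sql.sources.Filter;
import org.apache.spark.sql.sources.IsNotNull;

// Hypothetical, reduced stand-in for the reader context's pushed-filter state.
class PushedFilterTracker {
  private final Set<Filter> pushedFilters = new HashSet<>();

  // Handled filters accumulate in the set; unhandled ones are returned to Spark.
  Filter[] pushFilters(Filter[] filters, Set<Filter> handled) {
    List<Filter> unhandled = new ArrayList<>();
    for (Filter f : filters) {
      if (handled.contains(f)) {
        pushedFilters.add(f); // a repeated push of an equal filter is a no-op
      } else {
        unhandled.add(f);
      }
    }
    return unhandled.toArray(new Filter[0]);
  }

  // Callers still see an array, copied out of the set on demand.
  Filter[] pushedFilters() {
    return pushedFilters.toArray(new Filter[0]);
  }
}

public class PushedFilterTrackerDemo {
  public static void main(String[] args) {
    Filter f1 = new IsNotNull("name");
    Filter f2 = new EqualTo("country", "US");
    Set<Filter> handled = new HashSet<>(Arrays.asList(f1, f2));

    PushedFilterTracker tracker = new PushedFilterTracker();
    tracker.pushFilters(new Filter[] {f1, f2}, handled);
    tracker.pushFilters(new Filter[] {f2, f1}, handled); // same filters, new order
    System.out.println(tracker.pushedFilters().length);  // 2, not 4
  }
}
```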
