7.0.0 rebase compilation fix
zhztheplayer committed Feb 28, 2022
1 parent 9cb3bbf commit b302ead
Showing 11 changed files with 72 additions and 174 deletions.
7 changes: 4 additions & 3 deletions cpp/src/gandiva/expression_cache_key.h
@@ -17,6 +17,7 @@

#pragma once

#include <sstream>
#include <stddef.h>

#include <thread>
@@ -77,11 +78,11 @@ class ExpressionCacheKey {
size_t Hash() const { return hash_code_; }

std::string ToString() {
std::stringstream stringstream;
std::stringstream ss;
for (const auto &item : expressions_as_strings_) {
stringstream << item << " || ";
ss << item << " || ";
}
return stringstream.str();
return ss.str();
}

bool operator==(const ExpressionCacheKey& other) const {
6 changes: 0 additions & 6 deletions cpp/src/gandiva/gdv_function_stubs.cc
@@ -68,12 +68,6 @@ static char mask_array[256] = {
'x', 'x', 'x', (char)123, (char)124, (char)125, (char)126, (char)127};


const uint8_t* gdv_fn_get_json_object_utf8_utf8(int64_t ptr, const char* data, int data_len,
const char* pattern, int pattern_len, int32_t* out_len) {
gandiva::JsonHolder* holder = reinterpret_cast<gandiva::JsonHolder*>(ptr);
return (*holder)(std::string(data, data_len), std::string(pattern, pattern_len), out_len);
}

const uint8_t* gdv_fn_get_json_object_utf8_utf8(int64_t ptr, int64_t holder_ptr, const char* data, int data_len, bool in1_valid,
const char* pattern, int pattern_len, bool in2_valid, bool* out_valid, int32_t* out_len) {
if (!in1_valid || !in2_valid) {
2 changes: 0 additions & 2 deletions cpp/src/gandiva/precompiled/arithmetic_ops.cc
@@ -275,8 +275,6 @@ CEIL(int64, int64)

CAST_UNARY(castBIGINT, int32, int64)
CAST_UNARY(castBIGINT, date64, int64)
CAST_UNARY(castBIGINT, float32, int64)
CAST_UNARY(castBIGINT, float64, int64)
CAST_UNARY(castBIGINT, boolean, int64)
CAST_UNARY(castINT, int8, int32)
CAST_UNARY(castINT, int16, int32)
1 change: 0 additions & 1 deletion cpp/src/gandiva/precompiled/time.cc
@@ -657,7 +657,6 @@ gdv_date64 castDATE_nullsafe_utf8(int64_t context, const char* input, gdv_int32
// store the last value
dateFields[dateIndex++] = value;
}
const char* msg = "Not a valid date value ";
if (dateIndex != 3) {
*out_valid = false;
return 0;
4 changes: 2 additions & 2 deletions cpp/src/gandiva/translate_holder.cc
@@ -35,15 +35,15 @@ const uint8_t* TranslateHolder::operator()(gandiva::ExecutionContext* ctx, std::
std::string matching_str, std::string replace_str, int32_t* out_len) {
char res[text.length()];
std::unordered_map<char, char> replace_map;
for (int i = 0; i < matching_str.length(); i++) {
for (uint64_t i = 0; i < matching_str.length(); i++) {
if (i >= replace_str.length()) {
replace_map[matching_str[i]] = '\0';
} else {
replace_map[matching_str[i]] = replace_str[i];
}
}
int j = 0;
for (int i = 0; i < text.length(); i++) {
for (uint64_t i = 0; i < text.length(); i++) {
if (replace_map.find(text[i]) == replace_map.end()) {
res[j++] = text[i];
continue;
7 changes: 4 additions & 3 deletions cpp/src/jni/dataset/jni_wrapper.cc
@@ -26,8 +26,6 @@
#include "arrow/jniutil/jni_util.h"
#include "arrow/util/iterator.h"

#include "jni/dataset/DTypes.pb.h"

#include "org_apache_arrow_dataset_file_JniWrapper.h"
#include "org_apache_arrow_dataset_jni_JniWrapper.h"
#include "org_apache_arrow_dataset_jni_NativeMemoryPool.h"
@@ -186,7 +184,10 @@ jint JNI_OnLoad(JavaVM* vm, void* reserved) {
CreateGlobalClassReference(env, "Ljava/lang/IllegalArgumentException;");
runtime_exception_class =
CreateGlobalClassReference(env, "Ljava/lang/RuntimeException;");

java_reservation_listener_class =
CreateGlobalClassReference(env,
"Lorg/apache/arrow/"
"dataset/jni/ReservationListener;");
reserve_memory_method =
JniGetOrThrow(GetMethodID(env, java_reservation_listener_class, "reserve", "(J)V"));
unreserve_memory_method = JniGetOrThrow(
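The JNI_OnLoad hunk above restores a global class reference to org/apache/arrow/dataset/jni/ReservationListener and binds its "reserve" method with signature (J)V; the truncated lines that follow presumably bind an "unreserve" counterpart. Purely as a sketch of the Java side the native code calls back into, assuming the listener only needs two void(long) methods, which this diff does not confirm:

import java.util.concurrent.atomic.AtomicLong;

// Hypothetical listener shape matching the "(J)V" methods the native code
// looks up. The real ReservationListener interface may declare more members.
public class LoggingReservationListener {
  private final AtomicLong reserved = new AtomicLong();

  public void reserve(long size) {
    // Invoked from native code before native memory is taken.
    System.out.println("reserve " + size + ", total " + reserved.addAndGet(size));
  }

  public void unreserve(long size) {
    // Invoked from native code after native memory is released.
    System.out.println("unreserve " + size + ", total " + reserved.addAndGet(-size));
  }

  public static void main(String[] args) {
    LoggingReservationListener listener = new LoggingReservationListener();
    listener.reserve(1024);
    listener.unreserve(1024);
  }
}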
13 changes: 13 additions & 0 deletions java/dataset/pom.xml
@@ -38,12 +38,25 @@
<scope>compile</scope>
<classifier>${arrow.vector.classifier}</classifier>
</dependency>
<dependency>
<groupId>org.apache.arrow</groupId>
<artifactId>arrow-format</artifactId>
<version>${project.version}</version>
<scope>compile</scope>
<classifier>${arrow.vector.classifier}</classifier>
</dependency>
<dependency>
<groupId>org.apache.arrow</groupId>
<artifactId>arrow-memory-core</artifactId>
<version>${project.version}</version>
<scope>compile</scope>
</dependency>
<dependency>
<groupId>com.google.flatbuffers</groupId>
<artifactId>flatbuffers-java</artifactId>
<version>1.12.0</version>
<scope>compile</scope>
</dependency>
<dependency>
<groupId>org.apache.arrow</groupId>
<artifactId>arrow-memory-netty</artifactId>
37 changes: 37 additions & 0 deletions (new file: Filter.java, package org.apache.arrow.dataset.filter)
@@ -0,0 +1,37 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/

package org.apache.arrow.dataset.filter;

// todo filter tree implementation
// todo see also https://issues.apache.org/jira/browse/ARROW-6953

/**
* Datasets filter.
*/
public interface Filter {

Filter EMPTY = new Filter() {
@Override
public byte[] toByteArray() {
return new byte[0];
}
};

byte[] toByteArray();

}
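The Filter interface added above is deliberately thin: a filter is anything that can serialize itself to a byte array, and Filter.EMPTY serializes to zero bytes. A minimal usage sketch follows; the payload is a placeholder, since the byte encoding the native scanner expects is not defined in this commit.

import java.nio.charset.StandardCharsets;

import org.apache.arrow.dataset.filter.Filter;

public class FilterExample {
  public static void main(String[] args) {
    // The built-in empty filter: zero bytes means "no filter".
    System.out.println(Filter.EMPTY.toByteArray().length); // 0

    // Any implementation only has to expose its serialized form. The string
    // used here is a stand-in, not a real filter encoding.
    Filter custom = new Filter() {
      @Override
      public byte[] toByteArray() {
        return "id > 10".getBytes(StandardCharsets.UTF_8);
      }
    };
    System.out.println(custom.toByteArray().length);
  }
}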
Changes to ScanOptions.java (package org.apache.arrow.dataset.scanner)
@@ -17,56 +17,38 @@

package org.apache.arrow.dataset.scanner;

import java.util.Optional;
import org.apache.arrow.dataset.filter.Filter;

import org.apache.arrow.util.Preconditions;
import java.util.Optional;

/**
* Options used during scanning.
*/
public class ScanOptions {
private final Optional<String[]> columns;
private final String[] columns;
private final Filter filter;
private final long batchSize;

/**
* Constructor.
* @param columns Projected columns. Empty for scanning all columns.
* @param batchSize Maximum row number of each returned {@link org.apache.arrow.vector.ipc.message.ArrowRecordBatch}
*
* @deprecated Deprecated. Use {@link #ScanOptions(long, Optional)} instead.
*/
@Deprecated
public ScanOptions(String[] columns, long batchSize) {
this(batchSize, Optional.of(columns).map(present -> {
if (present.length == 0) {
// Backwards compatibility: See ARROW-13257, in the new constructor, we now use null to scan for all columns.
return null;
}
return present;
}));
}

/**
* Constructor.
* @param batchSize Maximum row number of each returned {@link org.apache.arrow.vector.ipc.message.ArrowRecordBatch}
* @param columns (Optional) Projected columns. {@link Optional#empty()} for scanning all columns. Otherwise,
* Only columns present in the Array will be scanned.
*/
public ScanOptions(long batchSize, Optional<String[]> columns) {
Preconditions.checkNotNull(columns);
this.batchSize = batchSize;
public ScanOptions(String[] columns, Filter filter, long batchSize) {
this.columns = columns;
this.filter = filter;
this.batchSize = batchSize;
}

public ScanOptions(long batchSize) {
this(batchSize, Optional.empty());
public Optional<String[]> getColumns() {
return Optional.of(columns);
}

public Optional<String[]> getColumns() {
return columns;
public Filter getFilter() {
return filter;
}

public long getBatchSize() {
return batchSize;
}
}
}
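The net effect of the ScanOptions change is to drop the upstream Optional-based constructors and restore the fork's three-argument form: projected columns, a Filter, and a batch size. A sketch of how a caller would build it after this commit (the column names and batch size are made up for illustration):

import java.util.Optional;

import org.apache.arrow.dataset.filter.Filter;
import org.apache.arrow.dataset.scanner.ScanOptions;

public class ScanOptionsExample {
  public static void main(String[] args) {
    // Project two columns, apply no filter, cap each ArrowRecordBatch at 32768 rows.
    ScanOptions options = new ScanOptions(new String[] {"id", "name"}, Filter.EMPTY, 32768);

    Optional<String[]> columns = options.getColumns();
    System.out.println(columns.map(c -> c.length).orElse(0)); // 2
    System.out.println(options.getBatchSize());               // 32768
  }
}

Note that getColumns() wraps the stored array with Optional.of, so a null columns argument only fails later, when getColumns() is called; the removed upstream javadoc suggests an empty array, rather than null, is how callers request all columns.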
Empty file.

This file was deleted.
