Commit 6a1bd96

Add fallback logic
We see that this change makes 1.14 incompatible with previous releases. The configuration is created and, immediately afterwards, `getWriteSupport(conf)` is called. But since this method was only just introduced:

```java
protected WriteSupport<T> getWriteSupport(ParquetConfiguration conf) {
  throw new UnsupportedOperationException(
      "Override ParquetWriter$Builder#getWriteSupport(ParquetConfiguration)");
}
```

existing builders do not implement it, and the call fails with an `UnsupportedOperationException`. Add fallback logic.
1 parent 5cb50fb commit 6a1bd96
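
To see why the fallback is needed, here is a minimal sketch of a builder written against the pre-1.14 API. `ExampleGroupBuilder` is a hypothetical name; it reuses the real `GroupWriteSupport` from parquet-hadoop and, like builders compiled against 1.13.x and earlier, overrides only the Hadoop `Configuration` variant of `getWriteSupport`.

```java
import org.apache.hadoop.conf.Configuration;
import org.apache.parquet.example.data.Group;
import org.apache.parquet.hadoop.ParquetWriter;
import org.apache.parquet.hadoop.api.WriteSupport;
import org.apache.parquet.hadoop.example.GroupWriteSupport;
import org.apache.parquet.io.OutputFile;

// Hypothetical builder subclass, written against the pre-1.14 API.
public class ExampleGroupBuilder extends ParquetWriter.Builder<Group, ExampleGroupBuilder> {

  protected ExampleGroupBuilder(OutputFile file) {
    super(file);
  }

  @Override
  protected ExampleGroupBuilder self() {
    return this;
  }

  // Only the Hadoop-Configuration variant is overridden. Per the commit message,
  // 1.14.0 routes through getWriteSupport(ParquetConfiguration), whose default
  // threw UnsupportedOperationException; the fallback in this commit delegates back here.
  @Override
  protected WriteSupport<Group> getWriteSupport(Configuration conf) {
    return new GroupWriteSupport();
  }
}
```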

File tree

3 files changed: +7 -6 lines changed


parquet-hadoop/src/main/java/org/apache/parquet/hadoop/ParquetWriter.java

+1 -2

```diff
@@ -503,8 +503,7 @@ protected Builder(OutputFile path) {
      * @return an appropriate WriteSupport for the object model.
      */
     protected WriteSupport<T> getWriteSupport(ParquetConfiguration conf) {
-      throw new UnsupportedOperationException(
-          "Override ParquetWriter$Builder#getWriteSupport(ParquetConfiguration)");
+      return getWriteSupport(ConfigurationUtil.createHadoopConfiguration(conf));
     }
 
     /**
```
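
The fallback above depends on `ConfigurationUtil.createHadoopConfiguration`, which converts the new `ParquetConfiguration` abstraction back into a Hadoop `Configuration`. A small sketch of that bridge follows, assuming `HadoopParquetConfiguration` is the Hadoop-backed implementation shipped alongside the interface; the class name and property key here are for illustration only.

```java
import org.apache.hadoop.conf.Configuration;
import org.apache.parquet.conf.HadoopParquetConfiguration;
import org.apache.parquet.conf.ParquetConfiguration;
import org.apache.parquet.hadoop.util.ConfigurationUtil;

public class FallbackBridgeSketch {
  public static void main(String[] args) {
    Configuration hadoopConf = new Configuration();
    hadoopConf.set("example.key", "example-value"); // made-up key, for illustration only

    // New 1.14 call sites hand a ParquetConfiguration to the builder / read path ...
    ParquetConfiguration parquetConf = new HadoopParquetConfiguration(hadoopConf);

    // ... and the fallback converts it back before calling the legacy
    // Configuration-based overloads, so pre-1.14 overrides still see their settings.
    Configuration converted = ConfigurationUtil.createHadoopConfiguration(parquetConf);
    System.out.println(converted.get("example.key"));
  }
}
```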

parquet-hadoop/src/main/java/org/apache/parquet/hadoop/api/ReadSupport.java

+4 -3

```diff
@@ -21,6 +21,7 @@
 import java.util.Map;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.parquet.conf.ParquetConfiguration;
+import org.apache.parquet.hadoop.util.ConfigurationUtil;
 import org.apache.parquet.io.api.RecordMaterializer;
 import org.apache.parquet.schema.MessageType;
 import org.apache.parquet.schema.MessageTypeParser;
@@ -81,7 +82,7 @@ public ReadContext init(Configuration configuration, Map<String, String> keyValu
   @Deprecated
   public ReadContext init(
       ParquetConfiguration configuration, Map<String, String> keyValueMetaData, MessageType fileSchema) {
-    throw new UnsupportedOperationException("Override ReadSupport.init(InitContext)");
+    return init(ConfigurationUtil.createHadoopConfiguration(configuration), keyValueMetaData, fileSchema);
   }
 
   /**
@@ -125,8 +126,8 @@ public RecordMaterializer<T> prepareForRead(
       Map<String, String> keyValueMetaData,
       MessageType fileSchema,
       ReadContext readContext) {
-    throw new UnsupportedOperationException(
-        "Override ReadSupport.prepareForRead(ParquetConfiguration, Map<String, String>, MessageType, ReadContext)");
+    return prepareForRead(
+        ConfigurationUtil.createHadoopConfiguration(configuration), keyValueMetaData, fileSchema, readContext);
   }
 
   /**
```
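
The same pattern applies to `ReadSupport`: implementations written before 1.14, such as the example object model's `GroupReadSupport`, override the Hadoop `Configuration` overloads, and the new `ParquetConfiguration` defaults now fall back to them instead of throwing. A rough sketch of how that is exercised, assuming `GroupReadSupport` does not itself override the new overload:

```java
import java.util.Collections;
import org.apache.hadoop.conf.Configuration;
import org.apache.parquet.conf.HadoopParquetConfiguration;
import org.apache.parquet.conf.ParquetConfiguration;
import org.apache.parquet.hadoop.api.ReadSupport;
import org.apache.parquet.hadoop.example.GroupReadSupport;
import org.apache.parquet.schema.MessageType;
import org.apache.parquet.schema.MessageTypeParser;

public class ReadSupportFallbackSketch {
  public static void main(String[] args) {
    // A toy file schema, standing in for the schema read from a real footer.
    MessageType fileSchema =
        MessageTypeParser.parseMessageType("message example { required binary value (UTF8); }");
    ParquetConfiguration conf = new HadoopParquetConfiguration(new Configuration());

    // Before this commit, the ParquetConfiguration overload of init threw
    // UnsupportedOperationException; now it converts the configuration and
    // delegates to the Configuration-based init that GroupReadSupport overrides.
    ReadSupport.ReadContext context =
        new GroupReadSupport().init(conf, Collections.emptyMap(), fileSchema);
    System.out.println(context.getRequestedSchema());
  }
}
```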

parquet-hadoop/src/main/java/org/apache/parquet/hadoop/api/WriteSupport.java

+2 -1

```diff
@@ -24,6 +24,7 @@
 import java.util.Objects;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.parquet.conf.ParquetConfiguration;
+import org.apache.parquet.hadoop.util.ConfigurationUtil;
 import org.apache.parquet.io.api.RecordConsumer;
 import org.apache.parquet.schema.MessageType;
 
@@ -109,7 +110,7 @@ public Map<String, String> getExtraMetaData() {
    * @return the information needed to write the file
    */
   public WriteContext init(ParquetConfiguration configuration) {
-    throw new UnsupportedOperationException("Override WriteSupport#init(ParquetConfiguration)");
+    return init(ConfigurationUtil.createHadoopConfiguration(configuration));
   }
 
   /**
```
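
For completeness, here is a minimal sketch of a `WriteSupport` written against the pre-1.14 API. `StringWriteSupport` and its single-column schema are made up for illustration; it overrides only `init(Configuration)`, which the new `init(ParquetConfiguration)` default above now falls back to instead of throwing.

```java
import java.util.Collections;
import org.apache.hadoop.conf.Configuration;
import org.apache.parquet.hadoop.api.WriteSupport;
import org.apache.parquet.io.api.Binary;
import org.apache.parquet.io.api.RecordConsumer;
import org.apache.parquet.schema.MessageType;
import org.apache.parquet.schema.MessageTypeParser;

// Hypothetical single-column WriteSupport, written against the pre-1.14 API.
public class StringWriteSupport extends WriteSupport<String> {

  private static final MessageType SCHEMA =
      MessageTypeParser.parseMessageType("message example { required binary value (UTF8); }");

  private RecordConsumer recordConsumer;

  // Pre-1.14 implementations override only this Hadoop-Configuration variant;
  // the new init(ParquetConfiguration) default now delegates to it.
  @Override
  public WriteContext init(Configuration configuration) {
    return new WriteContext(SCHEMA, Collections.emptyMap());
  }

  @Override
  public void prepareForWrite(RecordConsumer recordConsumer) {
    this.recordConsumer = recordConsumer;
  }

  @Override
  public void write(String record) {
    recordConsumer.startMessage();
    recordConsumer.startField("value", 0);
    recordConsumer.addBinary(Binary.fromString(record));
    recordConsumer.endField("value", 0);
    recordConsumer.endMessage();
  }
}
```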
