diff --git a/parquet-hadoop/src/main/java/org/apache/parquet/hadoop/InternalParquetRecordWriter.java b/parquet-hadoop/src/main/java/org/apache/parquet/hadoop/InternalParquetRecordWriter.java
index 74feb39033..6c3dac5313 100644
--- a/parquet-hadoop/src/main/java/org/apache/parquet/hadoop/InternalParquetRecordWriter.java
+++ b/parquet-hadoop/src/main/java/org/apache/parquet/hadoop/InternalParquetRecordWriter.java
@@ -56,6 +56,8 @@ class InternalParquetRecordWriter<T> {
   private final boolean validating;
   private final ParquetProperties props;
 
+  private boolean closed;
+
   private long recordCount = 0;
   private long recordCountForNextMemCheck = MINIMUM_RECORD_COUNT_FOR_CHECK;
   private long lastRowGroupEndPos = 0;
@@ -103,15 +105,18 @@ private void initStore() {
   }
 
   public void close() throws IOException, InterruptedException {
-    flushRowGroupToStore();
-    FinalizedWriteContext finalWriteContext = writeSupport.finalizeWrite();
-    Map<String, String> finalMetadata = new HashMap<String, String>(extraMetaData);
-    String modelName = writeSupport.getName();
-    if (modelName != null) {
-      finalMetadata.put(ParquetWriter.OBJECT_MODEL_NAME_PROP, modelName);
+    if (!closed) {
+      flushRowGroupToStore();
+      FinalizedWriteContext finalWriteContext = writeSupport.finalizeWrite();
+      Map<String, String> finalMetadata = new HashMap<String, String>(extraMetaData);
+      String modelName = writeSupport.getName();
+      if (modelName != null) {
+        finalMetadata.put(ParquetWriter.OBJECT_MODEL_NAME_PROP, modelName);
+      }
+      finalMetadata.putAll(finalWriteContext.getExtraMetaData());
+      parquetFileWriter.end(finalMetadata);
+      closed = true;
     }
-    finalMetadata.putAll(finalWriteContext.getExtraMetaData());
-    parquetFileWriter.end(finalMetadata);
   }
 
   public void write(T value) throws IOException, InterruptedException {
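
For context, a minimal sketch of the behavior this guard enables: with the new closed flag, a second call to close() becomes a no-op instead of attempting to flush a row group and write the file footer again. The sketch below exercises this through the public ParquetWriter API (ExampleParquetWriter with a Group write support); the class name, schema, and output path are illustrative assumptions, not part of this patch.

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.parquet.example.data.Group;
import org.apache.parquet.example.data.simple.SimpleGroupFactory;
import org.apache.parquet.hadoop.ParquetWriter;
import org.apache.parquet.hadoop.example.ExampleParquetWriter;
import org.apache.parquet.hadoop.example.GroupWriteSupport;
import org.apache.parquet.schema.MessageType;
import org.apache.parquet.schema.MessageTypeParser;

public class DoubleCloseSketch {
  public static void main(String[] args) throws Exception {
    // Illustrative one-column schema.
    MessageType schema = MessageTypeParser.parseMessageType(
        "message example { required int32 id; }");
    Configuration conf = new Configuration();
    GroupWriteSupport.setSchema(schema, conf);

    ParquetWriter<Group> writer = ExampleParquetWriter
        .builder(new Path("/tmp/double-close-sketch.parquet"))  // assumed output path
        .withConf(conf)
        .withType(schema)
        .build();

    writer.write(new SimpleGroupFactory(schema).newGroup().append("id", 1));

    writer.close();
    // With the 'closed' guard in InternalParquetRecordWriter, this second call
    // is a no-op rather than flushing and writing the footer a second time.
    writer.close();
  }
}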