
Commit 622e5fd

Adding AlreadyClosedException. Fixing formatting.
1 parent c44808b commit 622e5fd

File tree

4 files changed: +70 -9 lines changed

hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/AlreadyClosedException.java

Lines changed: 42 additions & 0 deletions
@@ -0,0 +1,42 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.io.compress;
+
+import java.io.IOException;
+
+/**
+ * An exception class for when a closed compressor/decompressor is being used.
+ * {@link org.apache.hadoop.io.compress.Compressor}
+ * {@link org.apache.hadoop.io.compress.Decompressor}
+ */
+public class AlreadyClosedException extends IOException {
+
+  /**
+   * Constructs a new exception with the specified cause and a detail
+   * message of <tt>(cause==null ? null : cause.toString())</tt> (which
+   * typically contains the class and detail message of <tt>cause</tt>).
+   * @param cause the cause (which is saved for later retrieval by the
+   *        {@link #getCause()} method). (A <tt>null</tt> value is
+   *        permitted, and indicates that the cause is nonexistent or
+   *        unknown.)
+   */
+  public AlreadyClosedException(Throwable cause) {
+    super(cause);
+  }
+}
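For context, a minimal caller-side sketch (not part of this commit; the class and helper names are illustrative) of what the new exception type enables: callers can now tell "this compressor was already returned to the pool" apart from other I/O failures.

import java.io.IOException;

import org.apache.hadoop.io.compress.AlreadyClosedException;
import org.apache.hadoop.io.compress.Compressor;

public class AlreadyClosedHandlingExample {
  // Hypothetical helper: compresses into buf and surfaces pool misuse clearly.
  static int compressOrFail(Compressor compressor, byte[] buf) throws IOException {
    try {
      return compressor.compress(buf, 0, buf.length);
    } catch (AlreadyClosedException e) {
      // The compressor was ended when it was returned to the CodecPool;
      // callers should obtain a fresh instance instead of retrying this one.
      throw new IOException("compressor was already returned to the pool", e);
    }
  }
}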

hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/zlib/BuiltInGzipCompressor.java

Lines changed: 10 additions & 1 deletion
@@ -24,6 +24,7 @@
 import java.util.zip.GZIPOutputStream;
 
 import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.io.compress.AlreadyClosedException;
 import org.apache.hadoop.io.compress.Compressor;
 import org.apache.hadoop.io.compress.DoNotPool;
 import org.apache.hadoop.util.DataChecksum;
@@ -102,7 +103,15 @@ public int compress(byte[] b, int off, int len) throws IOException {
 
     if (state == BuiltInGzipDecompressor.GzipStateLabel.INFLATE_STREAM) {
       // now compress it into b[]
-      int deflated = deflater.deflate(b, off, len);
+      int deflated;
+      try {
+        deflated = deflater.deflate(b, off, len);
+      } catch (NullPointerException npe) {
+        if ("Deflater has been closed".equals(npe.getMessage())) {
+          throw new AlreadyClosedException(npe);
+        }
+        throw npe;
+      }
 
       compressedBytesWritten += deflated;
       off += deflated;
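As a sanity check of the pattern above, a small standalone sketch (this assumes, rather than verifies, the JDK behavior the change relies on; the message is the same one asserted by the test further down): calling deflate() after end() raises a NullPointerException whose message is "Deflater has been closed", which the compressor now rewraps as AlreadyClosedException.

import java.util.zip.Deflater;

public class DeflaterClosedSketch {
  public static void main(String[] args) {
    Deflater deflater = new Deflater();
    deflater.end();                     // releases the native zlib resources
    byte[] out = new byte[64];
    try {
      deflater.deflate(out, 0, out.length);
    } catch (NullPointerException npe) {
      // Expected to print "Deflater has been closed" on the JDKs this
      // change targets (an assumption, not verified here).
      System.out.println(npe.getMessage());
    }
  }
}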

hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/zlib/BuiltInGzipDecompressor.java

Lines changed: 6 additions & 0 deletions
@@ -23,6 +23,7 @@
 import java.util.zip.DataFormatException;
 import java.util.zip.Inflater;
 
+import org.apache.hadoop.io.compress.AlreadyClosedException;
 import org.apache.hadoop.io.compress.Decompressor;
 import org.apache.hadoop.io.compress.DoNotPool;
 import org.apache.hadoop.util.DataChecksum;
@@ -211,6 +212,11 @@ public synchronized int decompress(byte[] b, int off, int len)
       numAvailBytes = inflater.inflate(b, off, len);
     } catch (DataFormatException dfe) {
       throw new IOException(dfe.getMessage());
+    } catch (NullPointerException npe) {
+      if ("Inflater has been closed".equals(npe.getMessage())) {
+        throw new AlreadyClosedException(npe);
+      }
+      throw npe;
     }
     crc.update(b, off, numAvailBytes);  // CRC-32 is on _uncompressed_ data
     if (inflater.finished()) {
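The decompressor path guards against the analogous Inflater behavior. A matching standalone sketch (again assuming, not verifying, the JDK message that the test below also asserts):

import java.util.zip.Inflater;

public class InflaterClosedSketch {
  public static void main(String[] args) throws Exception {
    Inflater inflater = new Inflater();
    inflater.end();                     // releases the native zlib resources
    byte[] out = new byte[64];
    try {
      inflater.inflate(out, 0, out.length);
    } catch (NullPointerException npe) {
      System.out.println(npe.getMessage());   // expected: "Inflater has been closed"
    }
  }
}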

hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/compress/TestCodecPool.java

Lines changed: 12 additions & 8 deletions
@@ -207,12 +207,14 @@ public void testDoNotPoolCompressorNotUseableAfterReturn() throws Exception {
     final Compressor compressor = new BuiltInGzipCompressor(new Configuration());
     CodecPool.returnCompressor(compressor);
 
-    final CompressionOutputStream outputStream = gzipCodec.createOutputStream(new ByteArrayOutputStream(), compressor);
+    final CompressionOutputStream outputStream =
+        gzipCodec.createOutputStream(new ByteArrayOutputStream(), compressor);
     LambdaTestUtils.intercept(
-        NullPointerException.class,
+        AlreadyClosedException.class,
         "Deflater has been closed",
-        "Compressor from Codec with @DoNotPool should not be useable after returning to CodecPool",
-        () -> outputStream.write(1));
+        "Compressor from Codec with @DoNotPool should not be " +
+            "useable after returning to CodecPool",
+        () -> outputStream.write(1));
   }
 
   @Test(timeout = 10000)
@@ -233,15 +235,17 @@ public void testDoNotPoolDecompressorNotUseableAfterReturn() throws Exception {
     final byte[] gzipBytes = baos.toByteArray();
     final ByteArrayInputStream bais = new ByteArrayInputStream(gzipBytes);
 
-    // BuiltInGzipDecompressor is an explicit example of a Decompressor with the @DoNotPool annotation
+    // BuiltInGzipDecompressor is an explicit example of a Decompressor
+    // with the @DoNotPool annotation
     final Decompressor decompressor = new BuiltInGzipDecompressor();
     CodecPool.returnDecompressor(decompressor);
 
     final CompressionInputStream inputStream = gzipCodec.createInputStream(bais, decompressor);
     LambdaTestUtils.intercept(
-        NullPointerException.class,
+        AlreadyClosedException.class,
         "Inflater has been closed",
-        "Decompressor from Codec with @DoNotPool should not be useable after returning to CodecPool",
-        () -> inputStream.read());
+        "Decompressor from Codec with @DoNotPool should not be " +
+            "useable after returning to CodecPool",
+        () -> inputStream.read());
   }
 }
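Restated outside the JUnit/LambdaTestUtils harness, the behavior these updated tests pin down looks roughly like the sketch below (illustrative only, not part of the commit; the class name is made up, and it mirrors the compressor-side test setup):

import java.io.ByteArrayOutputStream;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.io.compress.AlreadyClosedException;
import org.apache.hadoop.io.compress.CodecPool;
import org.apache.hadoop.io.compress.CompressionOutputStream;
import org.apache.hadoop.io.compress.Compressor;
import org.apache.hadoop.io.compress.GzipCodec;
import org.apache.hadoop.io.compress.zlib.BuiltInGzipCompressor;

public class ReturnedCompressorSketch {
  public static void main(String[] args) throws Exception {
    Configuration conf = new Configuration();
    GzipCodec codec = new GzipCodec();
    codec.setConf(conf);

    Compressor compressor = new BuiltInGzipCompressor(conf);
    CodecPool.returnCompressor(compressor);   // @DoNotPool: the compressor is ended here

    CompressionOutputStream out =
        codec.createOutputStream(new ByteArrayOutputStream(), compressor);
    try {
      out.write(1);   // uses the already-returned compressor
    } catch (AlreadyClosedException expected) {
      // Before this change, the same misuse surfaced as a raw NullPointerException.
      System.out.println("rejected: " + expected);
    }
  }
}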
