diff --git a/README.md b/README.md index 913cc2f7..e8d3c470 100644 --- a/README.md +++ b/README.md @@ -1801,6 +1801,10 @@ The logstash-logback-encoder library contains many providers out-of-the-box, and you can even plug-in your own by extending `JsonProvider`. Each provider has its own configuration options to further customize it. +These encoders/layouts make use of an internal buffer to hold the JSON output during the rendering process. +The size of this buffer is set to `1024` bytes by default. A different size can be configured by setting the `minBufferSize` property to the desired value. +The buffer automatically grows above the `minBufferSize` when needed to accommodate larger events. However, only the first `minBufferSize` bytes will be reused by subsequent invocations. It is therefore strongly advised to set the minimum size at least equal to the average size of the encoded events to reduce unnecessary memory allocations and reduce pressure on the garbage collector. + #### Providers for LoggingEvents diff --git a/src/main/java/net/logstash/logback/appender/AbstractLogstashTcpSocketAppender.java b/src/main/java/net/logstash/logback/appender/AbstractLogstashTcpSocketAppender.java index 72f40353..c7407780 100644 --- a/src/main/java/net/logstash/logback/appender/AbstractLogstashTcpSocketAppender.java +++ b/src/main/java/net/logstash/logback/appender/AbstractLogstashTcpSocketAppender.java @@ -48,6 +48,11 @@ import net.logstash.logback.appender.destination.PreferPrimaryDestinationConnectionStrategy; import net.logstash.logback.appender.listener.TcpAppenderListener; import net.logstash.logback.encoder.SeparatorParser; +import net.logstash.logback.encoder.StreamingEncoder; + +import com.lmax.disruptor.EventHandler; +import com.lmax.disruptor.LifecycleAware; +import com.lmax.disruptor.RingBuffer; import ch.qos.logback.core.encoder.Encoder; import ch.qos.logback.core.joran.spi.DefaultClass; @@ -59,10 +64,6 @@ import ch.qos.logback.core.util.CloseUtil; import ch.qos.logback.core.util.Duration; -import com.lmax.disruptor.EventHandler; -import com.lmax.disruptor.LifecycleAware; -import com.lmax.disruptor.RingBuffer; - /** * An {@link AsyncDisruptorAppender} appender that writes * events to a TCP {@link Socket} outputStream.
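For readers who prefer configuring the encoder programmatically rather than through `logback.xml`, here is a minimal, hypothetical sketch of the `minBufferSize` setting described in the README paragraph above; `LogstashEncoder` is the library's standard encoder, and the value `16384` is only an illustration of sizing the buffer near the average encoded event.

```java
import net.logstash.logback.encoder.LogstashEncoder;

public class MinBufferSizeExample {
    public static void main(String[] args) {
        // Programmatic equivalent of <minBufferSize>16384</minBufferSize> inside the <encoder> element.
        LogstashEncoder encoder = new LogstashEncoder();
        encoder.setMinBufferSize(16384); // roughly the average size of an encoded event
        encoder.start();
        // ... the encoder is normally attached to an appender by the logback configuration ...
        encoder.stop();
    }
}
```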
@@ -584,7 +585,7 @@ private void writeEvent(Socket socket, OutputStream outputStream, LogEvent) encoder).encode(event, outputStream); + } else { + byte[] data = encoder.encode(event); + if (data != null) { + outputStream.write(data); + } + } + } + + private boolean hasKeepAliveDurationElapsed(long lastSentNanoTime, long currentNanoTime) { return isKeepAliveEnabled() && lastSentNanoTime + TimeUnit.MILLISECONDS.toNanos(keepAliveDuration.getMilliseconds()) < currentNanoTime; diff --git a/src/main/java/net/logstash/logback/composite/CompositeJsonFormatter.java b/src/main/java/net/logstash/logback/composite/CompositeJsonFormatter.java index 4ac1ee03..8d4df851 100644 --- a/src/main/java/net/logstash/logback/composite/CompositeJsonFormatter.java +++ b/src/main/java/net/logstash/logback/composite/CompositeJsonFormatter.java @@ -16,29 +16,26 @@ import java.io.IOException; import java.io.OutputStream; import java.io.Writer; -import java.lang.ref.SoftReference; import java.util.ServiceConfigurationError; import net.logstash.logback.decorate.JsonFactoryDecorator; import net.logstash.logback.decorate.JsonGeneratorDecorator; import net.logstash.logback.decorate.NullJsonFactoryDecorator; import net.logstash.logback.decorate.NullJsonGeneratorDecorator; -import ch.qos.logback.access.spi.IAccessEvent; -import ch.qos.logback.classic.spi.ILoggingEvent; -import ch.qos.logback.core.spi.ContextAware; -import ch.qos.logback.core.spi.ContextAwareBase; -import ch.qos.logback.core.spi.DeferredProcessingAware; -import ch.qos.logback.core.spi.LifeCycle; import com.fasterxml.jackson.core.JsonEncoding; import com.fasterxml.jackson.core.JsonFactory; import com.fasterxml.jackson.core.JsonGenerator; -import com.fasterxml.jackson.core.io.SegmentedStringWriter; -import com.fasterxml.jackson.core.util.BufferRecycler; -import com.fasterxml.jackson.core.util.ByteArrayBuilder; import com.fasterxml.jackson.databind.ObjectMapper; import com.fasterxml.jackson.databind.SerializationFeature; +import ch.qos.logback.access.spi.IAccessEvent; +import ch.qos.logback.classic.spi.ILoggingEvent; +import ch.qos.logback.core.spi.ContextAware; +import ch.qos.logback.core.spi.ContextAwareBase; +import ch.qos.logback.core.spi.DeferredProcessingAware; +import ch.qos.logback.core.spi.LifeCycle; + /** * Formats logstash Events as JSON using {@link JsonProvider}s. *

@@ -52,18 +49,6 @@ public abstract class CompositeJsonFormatter extends ContextAwareBase implements LifeCycle { - /** - * This ThreadLocal contains a {@link java.lang.ref.SoftReference} - * to a {@link BufferRecycler} used to provide a low-cost - * buffer recycling between writer instances. - */ - private final ThreadLocal> recycler = new ThreadLocal>() { - protected SoftReference initialValue() { - final BufferRecycler bufferRecycler = new BufferRecycler(); - return new SoftReference(bufferRecycler); - } - }; - /** * Used to create the necessary {@link JsonGenerator}s for generating JSON. */ @@ -134,35 +119,7 @@ private JsonFactory createJsonFactory() { } } - JsonFactory jsonFactory = objectMapper - .getFactory() - /* - * When generators are flushed, don't flush the underlying outputStream. - * - * This allows some streaming optimizations when using an encoder. - * - * The encoder generally determines when the stream should be flushed - * by an 'immediateFlush' property. - * - * The 'immediateFlush' property of the encoder can be set to false - * when the appender performs the flushes at appropriate times - * (such as the end of a batch in the AbstractLogstashTcpSocketAppender). - */ - .disable(JsonGenerator.Feature.FLUSH_PASSED_TO_STREAM); - - return this.jsonFactoryDecorator.decorate(jsonFactory); - } - - public byte[] writeEventAsBytes(Event event) throws IOException { - ByteArrayBuilder outputStream = new ByteArrayBuilder(getBufferRecycler()); - - try { - writeEventToOutputStream(event, outputStream); - outputStream.flush(); - return outputStream.toByteArray(); - } finally { - outputStream.release(); - } + return this.jsonFactoryDecorator.decorate(objectMapper.getFactory()); } public void writeEventToOutputStream(Event event, OutputStream outputStream) throws IOException { @@ -177,13 +134,9 @@ public void writeEventToOutputStream(Event event, OutputStream outputStream) thr */ } - public String writeEventAsString(Event event) throws IOException { - SegmentedStringWriter writer = new SegmentedStringWriter(getBufferRecycler()); - + public void writeEventToWriter(Event event, Writer writer) throws IOException { try (JsonGenerator generator = createGenerator(writer)) { writeEventToGenerator(generator, event); - writer.flush(); - return writer.getAndClear(); } } @@ -203,23 +156,36 @@ protected void prepareForDeferredProcessing(Event event) { } private JsonGenerator createGenerator(OutputStream outputStream) throws IOException { - return this.jsonGeneratorDecorator.decorate(jsonFactory.createGenerator(outputStream, encoding)); + return decorateGenerator(jsonFactory.createGenerator(outputStream, encoding)); } - + private JsonGenerator createGenerator(Writer writer) throws IOException { - return this.jsonGeneratorDecorator.decorate(jsonFactory.createGenerator(writer)); - } - - private BufferRecycler getBufferRecycler() { - SoftReference bufferRecyclerReference = recycler.get(); - BufferRecycler bufferRecycler = bufferRecyclerReference.get(); - if (bufferRecycler == null) { - recycler.remove(); - return getBufferRecycler(); - } - return bufferRecycler; - } - + return decorateGenerator(jsonFactory.createGenerator(writer)); + } + + private JsonGenerator decorateGenerator(JsonGenerator generator) { + return this.jsonGeneratorDecorator.decorate(generator) + /* + * When generators are flushed, don't flush the underlying outputStream. + * + * This allows some streaming optimizations when using an encoder. 
+ * + * The encoder generally determines when the stream should be flushed + * by an 'immediateFlush' property. + * + * The 'immediateFlush' property of the encoder can be set to false + * when the appender performs the flushes at appropriate times + * (such as the end of a batch in the AbstractLogstashTcpSocketAppender). + */ + .disable(JsonGenerator.Feature.FLUSH_PASSED_TO_STREAM) + + /* + * Don't let the JSON generator close the underlying outputStream; let the + * encoder manage it. + */ + .disable(JsonGenerator.Feature.AUTO_CLOSE_TARGET); + } + public JsonFactory getJsonFactory() { return jsonFactory; } diff --git a/src/main/java/net/logstash/logback/encoder/CompositeJsonEncoder.java b/src/main/java/net/logstash/logback/encoder/CompositeJsonEncoder.java index 5527b7ce..44968855 100644 --- a/src/main/java/net/logstash/logback/encoder/CompositeJsonEncoder.java +++ b/src/main/java/net/logstash/logback/encoder/CompositeJsonEncoder.java @@ -13,14 +13,17 @@ */ package net.logstash.logback.encoder; -import java.io.ByteArrayOutputStream; import java.io.IOException; +import java.io.OutputStream; import java.nio.charset.Charset; import net.logstash.logback.composite.CompositeJsonFormatter; import net.logstash.logback.composite.JsonProviders; import net.logstash.logback.decorate.JsonFactoryDecorator; import net.logstash.logback.decorate.JsonGeneratorDecorator; +import net.logstash.logback.util.ReusableByteBuffer; +import net.logstash.logback.util.ReusableByteBufferPool; + import ch.qos.logback.core.encoder.Encoder; import ch.qos.logback.core.encoder.EncoderBase; import ch.qos.logback.core.encoder.LayoutWrappingEncoder; @@ -28,18 +31,24 @@ import ch.qos.logback.core.spi.DeferredProcessingAware; public abstract class CompositeJsonEncoder<Event extends DeferredProcessingAware> - extends EncoderBase<Event> { - + extends EncoderBase<Event> implements StreamingEncoder<Event> { private static final byte[] EMPTY_BYTES = new byte[0]; /** - * The minimum size of the byte array buffer used when - * encoding events in logback versions greater than or equal to 1.2.0. + * The minimum size of the byte buffer used when encoding events. * - * The actual buffer size will be the {@link #minBufferSize} - * plus the prefix, suffix, and line separators sizes. + *

The buffer automatically grows above the {@code minBufferSize} when needed to + * accommodate larger events. However, only the first {@code minBufferSize} bytes + * will be reused by subsequent invocations. It is therefore strongly advised to set + * the minimum size at least equal to the average size of the encoded events to reduce + * unnecessary memory allocations and reduce pressure on the garbage collector. */ private int minBufferSize = 1024; + + /** + * Provides reusable byte buffers (initialized when the encoder is started). + */ + private ReusableByteBufferPool bufferPool; private Encoder prefix; private Encoder suffix; @@ -59,56 +68,54 @@ public CompositeJsonEncoder() { protected abstract CompositeJsonFormatter createFormatter(); + @Override + public void encode(Event event, OutputStream outputStream) throws IOException { + if (!isStarted()) { + throw new IllegalStateException("Encoder is not started"); + } + + encode(prefix, event, outputStream); + formatter.writeEventToOutputStream(event, outputStream); + encode(suffix, event, outputStream); + + outputStream.write(lineSeparatorBytes); + } + @Override public byte[] encode(Event event) { if (!isStarted()) { throw new IllegalStateException("Encoder is not started"); } - byte[] prefixBytes = doEncodeWrappedToBytes(prefix, event); - byte[] suffixBytes = doEncodeWrappedToBytes(suffix, event); - - ByteArrayOutputStream outputStream = new ByteArrayOutputStream( - minBufferSize - + (prefixBytes == null ? 0 : prefixBytes.length) - + (suffixBytes == null ? 0 : suffixBytes.length) - + lineSeparatorBytes.length); + + ReusableByteBuffer buffer = bufferPool.acquire(); try { - if (prefixBytes != null) { - outputStream.write(prefixBytes); - } - - formatter.writeEventToOutputStream(event, outputStream); - - if (suffixBytes != null) { - outputStream.write(suffixBytes); - } - - outputStream.write(lineSeparatorBytes); - - return outputStream.toByteArray(); + encode(event, buffer); + return buffer.toByteArray(); } catch (IOException e) { - addWarn("Error encountered while encoding log event. " - + "Event: " + event, e); + addWarn("Error encountered while encoding log event. 
Event: " + event, e); return EMPTY_BYTES; } finally { - try { - outputStream.close(); - } catch (IOException e) { - throw new RuntimeException(e); - } + bufferPool.release(buffer); } } - - private byte[] doEncodeWrappedToBytes(Encoder wrapped, Event event) { - if (wrapped != null) { - return wrapped.encode(event); + + private void encode(Encoder encoder, Event event, OutputStream outputStream) throws IOException { + if (encoder != null) { + byte[] data = encoder.encode(event); + if (data != null) { + outputStream.write(data); + } } - return EMPTY_BYTES; } @Override public void start() { + if (isStarted()) { + return; + } + super.start(); + this.bufferPool = new ReusableByteBufferPool(this.minBufferSize); formatter.setContext(getContext()); formatter.start(); charset = Charset.forName(formatter.getEncoding()); @@ -154,14 +161,16 @@ private void startWrapped(Encoder wrapped) { @Override public void stop() { - super.stop(); - formatter.stop(); - stopWrapped(prefix); - stopWrapped(suffix); + if (isStarted()) { + super.stop(); + formatter.stop(); + stopWrapped(prefix); + stopWrapped(suffix); + } } private void stopWrapped(Encoder wrapped) { - if (wrapped != null && !wrapped.isStarted()) { + if (wrapped != null && wrapped.isStarted()) { wrapped.stop(); } } @@ -241,12 +250,18 @@ public void setLineSeparator(String lineSeparator) { public int getMinBufferSize() { return minBufferSize; } + /** - * Sets the minimum size of the byte array buffer used when - * encoding events in logback versions greater than or equal to 1.2.0. + * The minimum size of the byte buffer used when encoding events. + * + *

The buffer automatically grows above the {@code minBufferSize} when needed to + * accommodate larger events. However, only the first {@code minBufferSize} bytes + * will be reused by subsequent invocations. It is therefore strongly advised to set + * the minimum size at least equal to the average size of the encoded events to reduce + * unnecessary memory allocations and reduce pressure on the garbage collector. * - * The actual buffer size will be the {@link #minBufferSize} - * plus the prefix, suffix, and line separators sizes. + *

Note: changes to the buffer size will not be taken into account after the encoder + * is started. */ public void setMinBufferSize(int minBufferSize) { this.minBufferSize = minBufferSize; } diff --git a/src/main/java/net/logstash/logback/encoder/StreamingEncoder.java b/src/main/java/net/logstash/logback/encoder/StreamingEncoder.java new file mode 100644 index 00000000..85d4d79e --- /dev/null +++ b/src/main/java/net/logstash/logback/encoder/StreamingEncoder.java @@ -0,0 +1,27 @@ +/** + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package net.logstash.logback.encoder; + +import java.io.IOException; +import java.io.OutputStream; + +/** + * Interface implemented by {@link Encoder}s that support writing directly into an {@link OutputStream} + * instead of returning a byte array. + */ +public interface StreamingEncoder<Event> { + + void encode(Event event, OutputStream outputStream) throws IOException; + +} diff --git a/src/main/java/net/logstash/logback/layout/CompositeJsonLayout.java b/src/main/java/net/logstash/logback/layout/CompositeJsonLayout.java index 0c8b15e7..29b1b49c 100644 --- a/src/main/java/net/logstash/logback/layout/CompositeJsonLayout.java +++ b/src/main/java/net/logstash/logback/layout/CompositeJsonLayout.java @@ -14,17 +14,22 @@ package net.logstash.logback.layout; import java.io.IOException; +import java.io.OutputStreamWriter; +import java.io.Writer; import net.logstash.logback.composite.CompositeJsonFormatter; import net.logstash.logback.composite.JsonProviders; import net.logstash.logback.decorate.JsonFactoryDecorator; import net.logstash.logback.decorate.JsonGeneratorDecorator; +import net.logstash.logback.encoder.CompositeJsonEncoder; +import net.logstash.logback.encoder.SeparatorParser; +import net.logstash.logback.util.ReusableByteBuffer; +import net.logstash.logback.util.ReusableByteBufferPool; + import ch.qos.logback.core.Layout; import ch.qos.logback.core.LayoutBase; import ch.qos.logback.core.pattern.PatternLayoutBase; import ch.qos.logback.core.spi.DeferredProcessingAware; -import net.logstash.logback.encoder.CompositeJsonEncoder; -import net.logstash.logback.encoder.SeparatorParser; public abstract class CompositeJsonLayout<Event extends DeferredProcessingAware> extends LayoutBase<Event> { @@ -43,6 +48,23 @@ public abstract class CompositeJsonLayout<Event extends DeferredProcessingAware> */ private String lineSeparator; + /** + * The minimum size of the byte buffer used when encoding events. + * + *

The buffer automatically grows above the {@code minBufferSize} when needed to + * accommodate larger events. However, only the first {@code minBufferSize} bytes + * will be reused by subsequent invocations. It is therefore strongly advised to set + * the minimum size at least equal to the average size of the encoded events to reduce + * unnecessary memory allocations and reduce pressure on the garbage collector. + */ + private int minBufferSize = 1024; + + /** + * Provides reusable byte buffers (initialized when the layout is started). + */ + private ReusableByteBufferPool bufferPool; + + private final CompositeJsonFormatter formatter; public CompositeJsonLayout() { @@ -52,51 +74,53 @@ public CompositeJsonLayout() { protected abstract CompositeJsonFormatter createFormatter(); + @Override public String doLayout(Event event) { if (!isStarted()) { throw new IllegalStateException("Layout is not started"); } - final String result; - try { - result = formatter.writeEventAsString(event); + + ReusableByteBuffer buffer = this.bufferPool.acquire(); + try (OutputStreamWriter writer = new OutputStreamWriter(buffer)) { + writeLayout(prefix, writer, event); + writeFormatter(writer, event); + writeLayout(suffix, writer, event); + + if (lineSeparator != null) { + writer.write(lineSeparator); + } + writer.flush(); + + return new String(buffer.toByteArray()); } catch (IOException e) { addWarn("Error formatting logging event", e); return null; + } finally { + bufferPool.release(buffer); } + } - if (prefix == null && suffix == null && lineSeparator == null) { - return result; - } - - String prefixResult = doLayoutWrapped(prefix, event); - String suffixResult = doLayoutWrapped(suffix, event); - - int size = result.length() - + (prefixResult == null ? 0 : prefixResult.length()) - + (suffixResult == null ? 0 : suffixResult.length()) - + (lineSeparator == null ? 0 : lineSeparator.length()); - - StringBuilder stringBuilder = new StringBuilder(size); - if (prefixResult != null) { - stringBuilder.append(prefixResult); + + private void writeLayout(Layout wrapped, Writer writer, Event event) throws IOException { + if (wrapped == null) { + return; } - stringBuilder.append(result); - if (suffixResult != null) { - stringBuilder.append(suffixResult); + + String str = wrapped.doLayout(event); + if (str != null) { + writer.write(str); } - if (lineSeparator != null) { - stringBuilder.append(lineSeparator); - } - return stringBuilder.toString(); } - - private String doLayoutWrapped(Layout wrapped, Event event) { - return wrapped == null ? null : wrapped.doLayout(event); + + private void writeFormatter(Writer writer, Event event) throws IOException { + this.formatter.writeEventToWriter(event, writer); } + @Override public void start() { super.start(); + this.bufferPool = new ReusableByteBufferPool(this.minBufferSize); formatter.setContext(getContext()); formatter.start(); startWrapped(prefix); @@ -212,4 +236,23 @@ public void setLineSeparator(String lineSeparator) { this.lineSeparator = SeparatorParser.parseSeparator(lineSeparator); } + public int getMinBufferSize() { + return minBufferSize; + } + + /** + * The minimum size of the byte buffer used when encoding events. + * + *

The buffer automatically grows above the {@code minBufferSize} when needed to + * accommodate larger events. However, only the first {@code minBufferSize} bytes + * will be reused by subsequent invocations. It is therefore strongly advised to set + * the minimum size at least equal to the average size of the encoded events to reduce + * unnecessary memory allocations and reduce pressure on the garbage collector. + * + *

Note: changes to the buffer size will not be taken into account after the layout + * is started. + */ + public void setMinBufferSize(int minBufferSize) { + this.minBufferSize = minBufferSize; + } } diff --git a/src/main/java/net/logstash/logback/util/ReusableByteBuffer.java b/src/main/java/net/logstash/logback/util/ReusableByteBuffer.java new file mode 100644 index 00000000..5e18884f --- /dev/null +++ b/src/main/java/net/logstash/logback/util/ReusableByteBuffer.java @@ -0,0 +1,240 @@ +/** + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package net.logstash.logback.util; + +import java.io.IOException; +import java.io.OutputStream; +import java.util.ArrayList; +import java.util.Iterator; +import java.util.List; +import java.util.Objects; + +/** + * A speedy alternative to {@link java.io.ByteArrayOutputStream}. + * + *

Unlike {@link java.io.ByteArrayOutputStream}, this implementation is backed by an {@link ArrayList} + * of {@code byte[]} instead of a single constantly resizing {@code byte[]} array. It does not copy buffers when + * it grows. + * + *

The initial buffer is created when the instance is constructed, with the configured initial capacity. + * There is also no copying of the internal buffers if the contents are extracted with the + * {@link #writeTo(OutputStream)} method. + * + *

The {@link #reset()} method clears the content and resets the buffer to its initial state. + * All buffers are disposed except the initial buffer, which is reused by subsequent writes. + * + *

This class is *not* thread-safe! + * + * @author brenuart + * + */ +public class ReusableByteBuffer extends OutputStream { + + /** + * The default size of the initial buffer + */ + static final int DEFAULT_INITIAL_CAPACITY = 1024; + + /** + * Constant with an empty byte array + */ + private static final byte[] EMPTY_BYTES = new byte[0]; + + /** + * The buffers used to store the content bytes + */ + private final List buffers = new ArrayList<>(); + + /** + * The number of bytes already written in previous buffers (other than tail). + */ + private int alreadyBufferedSize = 0; + + /** + * The write index in the tail buffer + */ + private int tailWriteIndex = 0; + + /** + * Is the stream closed? + */ + private boolean closed = false; + + + /** + * Create a new {@link ReusableByteBuffer} with the default initial capacity of 1024 bytes. + */ + public ReusableByteBuffer() { + this(DEFAULT_INITIAL_CAPACITY); + } + + /** + * Create a new {@link ReusableByteBuffer} with the specified initial capacity. + * + * @param initialCapacity the initial buffer size in bytes + */ + public ReusableByteBuffer(int initialCapacity) { + if (initialCapacity <= 0) { + throw new IllegalArgumentException("initialCapacity must be greater than 0"); + } + this.buffers.add(new byte[initialCapacity]); + } + + + @Override + public void write(int datum) throws IOException { + if (this.closed) { + throw new IOException("Stream closed"); + } + + growIfNeeded(); + getTailBuffer()[this.tailWriteIndex++] = (byte) datum; + } + + + @Override + public void write(byte[] data, int offset, int length) throws IOException { + Objects.requireNonNull(data, "data must not be null"); + if (offset < 0 || offset + length > data.length || length < 0) { + throw new IndexOutOfBoundsException(); + } + if (this.closed) { + throw new IOException("Stream closed"); + } + + + while (length > 0) { + byte[] buffer = getTailBuffer(); + int freeSpace = buffer.length - this.tailWriteIndex; + + if (freeSpace > 0) { + int toCopy = Math.min(freeSpace, length); + System.arraycopy(data, offset, buffer, this.tailWriteIndex, toCopy); + offset += toCopy; + this.tailWriteIndex += toCopy; + length -= toCopy; + } + + if (length > 0) { + growIfNeeded(); + } + } + } + + + @Override + public void close() { + this.closed = true; + } + + + /** + * Return the current size of the buffer. + * + * @return the current size of the buffer. + */ + public int size() { + return this.alreadyBufferedSize + this.tailWriteIndex; + } + + + /** + * Reset the contents of this {@link ReusableByteBuffer}. + *

All currently accumulated output in the output stream is discarded. + * The output stream can be used again. + */ + public void reset() { + // Clear allocated buffers but keep the first one + byte[] initialBuffer = this.buffers.get(0); + this.buffers.clear(); + this.buffers.add(initialBuffer); + + this.closed = false; + this.tailWriteIndex = 0; + this.alreadyBufferedSize = 0; + } + + + /** + * Write the buffer's content to the given OutputStream. + * + * @param out the OutputStream to write to + */ + public void writeTo(OutputStream out) throws IOException { + Iterator<byte[]> it = this.buffers.iterator(); + while (it.hasNext()) { + byte[] buffer = it.next(); + if (it.hasNext()) { + out.write(buffer, 0, buffer.length); + } else { + out.write(buffer, 0, this.tailWriteIndex); + } + } + } + + + /** + * Creates a newly allocated byte array. +

Its size is the current + * size of this output stream and the valid contents of the buffer + * have been copied into it.

+ * + * @return the current contents of this output stream, as a byte array. + * @see #size() + * @see #toByteArrayUnsafe() + */ + public byte[] toByteArray() { + int totalSize = size(); + if (totalSize == 0) { + return EMPTY_BYTES; + } + + byte[] result = new byte[totalSize]; + + int offset = 0; + Iterator it = this.buffers.iterator(); + while (it.hasNext()) { + byte[] buffer = it.next(); + if (it.hasNext()) { + System.arraycopy(buffer, 0, result, offset, buffer.length); + offset += buffer.length; + } else { + System.arraycopy(buffer, 0, result, offset, this.tailWriteIndex); + } + } + + return result; + } + + + /** + * Allocate a new chunk if needed + */ + private void growIfNeeded() { + if (getTailBuffer().length == this.tailWriteIndex) { + this.alreadyBufferedSize += this.tailWriteIndex; + this.buffers.add(new byte[this.tailWriteIndex * 2]); // block size doubles each time + this.tailWriteIndex = 0; + } + } + + /** + * Convenience method to get the tail buffer (the one to write into) + * + * @return the tail buffer + */ + private byte[] getTailBuffer() { + return this.buffers.get(this.buffers.size() - 1); + } +} diff --git a/src/main/java/net/logstash/logback/util/ReusableByteBufferPool.java b/src/main/java/net/logstash/logback/util/ReusableByteBufferPool.java new file mode 100644 index 00000000..6a8281c8 --- /dev/null +++ b/src/main/java/net/logstash/logback/util/ReusableByteBufferPool.java @@ -0,0 +1,103 @@ +/** + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package net.logstash.logback.util; + +import java.lang.ref.Reference; +import java.lang.ref.SoftReference; +import java.util.Deque; +import java.util.concurrent.ConcurrentLinkedDeque; + +/** + * A pool of {@link ReusableByteBuffer}. + * + *

The pool is technically unbounded but will never hold more buffers than the number of concurrent + * threads accessing it. Buffers are kept in the pool using soft references so they can be garbage + * collected by the JVM when running low on memory. + * + * @author brenuart + */ +public class ReusableByteBufferPool { + + /** + * Pool of reusable buffers. + */ + private final Deque<SoftReference<ReusableByteBuffer>> buffers = new ConcurrentLinkedDeque<>(); + + /** + * The capacity (in bytes) of the initial buffer that is reused across consecutive usages. + */ + private final int initialCapacity; + + /** + * Create a new buffer pool holding buffers with an initial capacity of {@code initialCapacity} bytes. + * + * @param initialCapacity the initial capacity of buffers created by this pool. + */ + public ReusableByteBufferPool(int initialCapacity) { + if (initialCapacity <= 0) { + throw new IllegalArgumentException("initialCapacity must be greater than 0"); + } + this.initialCapacity = initialCapacity; + } + + /** + * Create a new buffer pool holding buffers with a default initial capacity. + */ + public ReusableByteBufferPool() { + this(ReusableByteBuffer.DEFAULT_INITIAL_CAPACITY); + } + + /** + * Create a new buffer with an initial size of {@link #initialCapacity} bytes. + * + * @return a new buffer instance + */ + private ReusableByteBuffer createBuffer() { + return new ReusableByteBuffer(initialCapacity); + } + + /** + * Get a buffer from the pool or create a new one if none is available. + * The buffer must be returned to the pool after usage by a call to {@link #release(ReusableByteBuffer)}. + * + * @return a reusable byte buffer + */ + public ReusableByteBuffer acquire() { + ReusableByteBuffer buffer = null; + + while (buffer == null) { + Reference<ReusableByteBuffer> ref = buffers.poll(); + if (ref == null) { + break; + } + buffer = ref.get(); + } + + if (buffer == null) { + buffer = createBuffer(); + } + + return buffer; + } + + /** + * Return a buffer to the pool after usage. + * + * @param buffer the buffer to return to the pool. 
+ */ + public void release(ReusableByteBuffer buffer) { + buffer.reset(); + this.buffers.add(new SoftReference<>(buffer)); + } +} diff --git a/src/test/java/net/logstash/logback/composite/loggingevent/LoggingEventCompositeJsonFormatterTest.java b/src/test/java/net/logstash/logback/composite/loggingevent/LoggingEventCompositeJsonFormatterTest.java index ad6a0b5a..dfec6ed2 100644 --- a/src/test/java/net/logstash/logback/composite/loggingevent/LoggingEventCompositeJsonFormatterTest.java +++ b/src/test/java/net/logstash/logback/composite/loggingevent/LoggingEventCompositeJsonFormatterTest.java @@ -13,8 +13,10 @@ */ package net.logstash.logback.composite.loggingevent; +import static org.assertj.core.api.Assertions.assertThatCode; import static org.mockito.Mockito.when; +import java.io.ByteArrayOutputStream; import java.io.IOException; import net.logstash.logback.argument.StructuredArguments; @@ -48,7 +50,9 @@ public void testDoesNotFailOnEmptyBeans() throws IOException { /* * This should not throw an exception, since SerializationFeature.FAIL_ON_EMPTY_BEANS is disabled */ - formatter.writeEventAsString(event); + try(ByteArrayOutputStream bos = new ByteArrayOutputStream()) { + assertThatCode(() -> formatter.writeEventToOutputStream(event, bos)).doesNotThrowAnyException(); + } } } diff --git a/src/test/java/net/logstash/logback/encoder/CompositeJsonEncoderTest.java b/src/test/java/net/logstash/logback/encoder/CompositeJsonEncoderTest.java index 857fcac5..00dd2fbf 100644 --- a/src/test/java/net/logstash/logback/encoder/CompositeJsonEncoderTest.java +++ b/src/test/java/net/logstash/logback/encoder/CompositeJsonEncoderTest.java @@ -14,21 +14,27 @@ package net.logstash.logback.encoder; import static org.assertj.core.api.Assertions.assertThat; +import static org.assertj.core.api.Assertions.assertThatCode; import static org.assertj.core.api.Assertions.assertThatExceptionOfType; +import static org.assertj.core.api.Assertions.assertThatNoException; import static org.mockito.ArgumentMatchers.any; import static org.mockito.ArgumentMatchers.eq; import static org.mockito.Mockito.doThrow; import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.never; +import static org.mockito.Mockito.spy; +import static org.mockito.Mockito.times; import static org.mockito.Mockito.verify; import static org.mockito.Mockito.when; +import java.io.ByteArrayOutputStream; import java.io.IOException; import java.io.OutputStream; import java.nio.charset.StandardCharsets; +import net.logstash.logback.TestJsonProvider; import net.logstash.logback.composite.CompositeJsonFormatter; -import org.junit.jupiter.api.Assertions; import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.Test; import org.junit.jupiter.api.extension.ExtendWith; @@ -36,9 +42,12 @@ import org.mockito.Mock; import org.mockito.junit.jupiter.MockitoExtension; +import com.fasterxml.jackson.core.JsonEncoding; + import ch.qos.logback.classic.spi.ILoggingEvent; import ch.qos.logback.core.Context; import ch.qos.logback.core.encoder.Encoder; +import ch.qos.logback.core.encoder.EncoderBase; import ch.qos.logback.core.encoder.LayoutWrappingEncoder; import ch.qos.logback.core.status.StatusManager; import ch.qos.logback.core.status.WarnStatus; @@ -47,16 +56,10 @@ @ExtendWith(MockitoExtension.class) public class CompositeJsonEncoderTest { - private final CompositeJsonFormatter formatter = mock(CompositeJsonFormatter.class); - @InjectMocks - private final CompositeJsonEncoder encoder = new CompositeJsonEncoder() { - - @Override - protected 
CompositeJsonFormatter createFormatter() { - return formatter; - } - }; + private final CompositeJsonEncoder encoder = new TestCompositeJsonEncoder(); + + private CompositeJsonFormatter formatter; @Mock(lenient = true) private Context context; @@ -69,119 +72,221 @@ protected CompositeJsonFormatter createFormatter() { @BeforeEach public void setup() { - when(formatter.getEncoding()).thenReturn("UTF-8"); + // suppress line separator to make test platform independent + this.encoder.setLineSeparator(""); + this.formatter = encoder.getFormatter(); + when(context.getStatusManager()).thenReturn(statusManager); } + @Test - public void testNoPrefixNoSuffix_logback12OrLater() throws IOException { - - encoder.start(); + public void startStop() { + Encoder prefix = spy(new TestEncoder("prefix")); + encoder.setPrefix(prefix); - Assertions.assertTrue(encoder.isStarted()); + // stopped by default + assertThat(encoder.isStarted()).isFalse(); + assertThat(formatter.isStarted()).isFalse(); + assertThat(prefix.isStarted()).isFalse(); + // start encoder + encoder.start(); + assertThat(encoder.isStarted()).isTrue(); + assertThat(formatter.isStarted()).isTrue(); + assertThat(prefix.isStarted()).isTrue(); verify(formatter).setContext(context); - verify(formatter).start(); - - byte[] encoded = encoder.encode(event); - verify(formatter).writeEventToOutputStream(eq(event), any(OutputStream.class)); + // providers are not started a second time + encoder.start(); + verify(formatter, times(1)).start(); + verify(prefix, times(1)).start(); - assertThat(encoded).containsExactly(System.getProperty("line.separator").getBytes(StandardCharsets.UTF_8)); + // stop encoder + encoder.stop(); + assertThat(encoder.isStarted()).isFalse(); + assertThat(formatter.isStarted()).isFalse(); + assertThat(prefix.isStarted()).isFalse(); + // providers are not stopped a second time encoder.stop(); - Assertions.assertFalse(encoder.isStarted()); - verify(formatter).stop(); + verify(formatter, times(1)).stop(); + verify(prefix, times(1)).stop(); } + @Test - public void testPrefixAndSuffix_logback12OrLater() throws IOException { - - LayoutWrappingEncoder prefix = mock(LayoutWrappingEncoder.class); - Encoder suffix = mock(Encoder.class); + public void notStarted() { + assertThatExceptionOfType(IllegalStateException.class).isThrownBy(() -> encoder.encode(event)) + .withMessage("Encoder is not started"); + } + + + @Test + public void encode_noPrefixSuffix() { + encoder.start(); + assertThat(new String(encoder.encode(event))).isEqualTo("{}"); + } + + + /* + * Encode log event with prefix and suffix encoders. 
+ * + * NOTE: Encoder#headerBytes and Encoder#footerBytes are ignored + */ + @Test + public void encode_withPrefixSuffix() { + encoder.setPrefix( new TestEncoder("prefix") ); + encoder.setSuffix( new TestEncoder("suffix") ); + encoder.start(); - when(prefix.encode(event)).thenReturn("prefix".getBytes(StandardCharsets.UTF_8)); - when(suffix.encode(event)).thenReturn("suffix".getBytes(StandardCharsets.UTF_8)); + assertThat(new String(encoder.encode(event))).isEqualTo("prefix/event{}suffix/event"); + } + + + /* + * Use a custom line separator + */ + @Test + public void encode_customLineSeparator() { + encoder.setLineSeparator("-"); + encoder.start(); + assertThat(new String(encoder.encode(event))).isEqualTo("{}-"); + } + + + /* + * Prefix/Suffix of type LayoutWrappingEncoder have their charset set to the same value + * as the Formatter used by the CompositeJsonEncoder + */ + @Test + public void charsetOnLayoutWrappingEncoder() { + formatter.setEncoding(JsonEncoding.UTF16_BE.getJavaName()); // use an encoding that is not likely to be + // the default to avoid false positives + LayoutWrappingEncoder prefix = mock(LayoutWrappingEncoder.class); encoder.setPrefix(prefix); - encoder.setSuffix(suffix); encoder.start(); - Assertions.assertTrue(encoder.isStarted()); - - verify(formatter).setContext(context); - verify(formatter).start(); - - verify(prefix).setCharset(StandardCharsets.UTF_8); - verify(prefix).start(); - verify(suffix).start(); - - byte[] encoded = encoder.encode(event); - - verify(prefix).encode(event); - verify(suffix).encode(event); - - verify(formatter).writeEventToOutputStream(eq(event), any(OutputStream.class)); + verify(prefix).setCharset(StandardCharsets.UTF_16BE); + } + + + /* + * Encode using the StreamingEncoder API + */ + @Test + public void streamingEncode() { + encoder.start(); - assertThat(encoded).containsExactly(("prefixsuffix" + System.getProperty("line.separator")).getBytes(StandardCharsets.UTF_8)); + ByteArrayOutputStream bos = new ByteArrayOutputStream(); + assertThatNoException().isThrownBy(() -> encoder.encode(event, bos)); - encoder.stop(); - Assertions.assertFalse(encoder.isStarted()); - verify(formatter).stop(); - verify(prefix).stop(); - verify(suffix).stop(); + assertThat(new String(bos.toByteArray())).isEqualTo("{}"); } + + /* + * Test decoding of special line separators + */ @Test public void testLineEndings() { + // Use a brand new default instance to get rid of configuration done by the #setup() method + TestCompositeJsonEncoder encoder = new TestCompositeJsonEncoder(); - Assertions.assertEquals(System.getProperty("line.separator"), encoder.getLineSeparator()); + assertThat(encoder.getLineSeparator()).isEqualTo(System.getProperty("line.separator")); encoder.setLineSeparator("UNIX"); - Assertions.assertEquals("\n", encoder.getLineSeparator()); + assertThat(encoder.getLineSeparator()).isEqualTo("\n"); encoder.setLineSeparator(null); - Assertions.assertEquals(null, encoder.getLineSeparator()); + assertThat(encoder.getLineSeparator()).isNull(); encoder.setLineSeparator("WINDOWS"); - Assertions.assertEquals("\r\n", encoder.getLineSeparator()); + assertThat(encoder.getLineSeparator()).isEqualTo("\r\n"); encoder.setLineSeparator("foo"); - Assertions.assertEquals("foo", encoder.getLineSeparator()); + assertThat(encoder.getLineSeparator()).isEqualTo("foo"); encoder.setLineSeparator("SYSTEM"); - Assertions.assertEquals(System.getProperty("line.separator"), encoder.getLineSeparator()); + 
assertThat(encoder.getLineSeparator()).isEqualTo(System.getProperty("line.separator")); encoder.setLineSeparator(""); - Assertions.assertEquals(null, encoder.getLineSeparator()); + assertThat(encoder.getLineSeparator()).isNull(); } + + /* + * Failure to encode log event should log an warning status + */ @Test - public void testIOException_logback12OrLater() throws IOException { - + public void testIOException() throws IOException { encoder.start(); - Assertions.assertTrue(encoder.isStarted()); - - verify(formatter).setContext(context); - verify(formatter).start(); - IOException exception = new IOException(); + doThrow(exception).when(formatter).writeEventToOutputStream(eq(event), any(OutputStream.class)); encoder.encode(event); - Assertions.assertTrue(encoder.isStarted()); - verify(statusManager).add(new WarnStatus("Error encountered while encoding log event. " + "Event: " + event, context, exception)); } + + /* + * StreamingEncoder re-throws the IOException to the caller and does not log any warning + */ @Test - public void notStarted() { - assertThatExceptionOfType(IllegalStateException.class).isThrownBy(() -> encoder.encode(event)) - .withMessage("Encoder is not started"); + public void testIOException_streaming() throws IOException { + encoder.start(); + + IOException exception = new IOException(); + + OutputStream stream = mock(OutputStream.class); + doThrow(exception).when(stream).write(any(byte[].class), any(int.class), any(int.class)); + + assertThatCode(() -> encoder.encode(event, stream)).isInstanceOf(IOException.class); + + verify(statusManager, never()).add(new WarnStatus("Error encountered while encoding log event. " + + "Event: " + event, context, exception)); + } + + + // ---------------------------------------------------------------------------------------------------------------- + + + private static class TestCompositeJsonEncoder extends CompositeJsonEncoder { + @Override + protected CompositeJsonFormatter createFormatter() { + CompositeJsonFormatter formatter = spy(new CompositeJsonFormatter(this) {}); + formatter.getProviders().addProvider(new TestJsonProvider()); + return formatter; + } } + + private static class TestEncoder extends EncoderBase { + private final String name; + + public TestEncoder(String name) { + this.name = name; + } + + public byte[] encode(ILoggingEvent event) { + return getBytes(name+"/event"); + } -} + public byte[] footerBytes() { + return getBytes(name+"/footer"); + } + + public byte[] headerBytes() { + return getBytes(name+"/header"); + } + + private byte[] getBytes(String s) { + return s.getBytes(); + } + } +} \ No newline at end of file diff --git a/src/test/java/net/logstash/logback/layout/CompositeJsonLayoutTest.java b/src/test/java/net/logstash/logback/layout/CompositeJsonLayoutTest.java index 05435209..452ffc47 100644 --- a/src/test/java/net/logstash/logback/layout/CompositeJsonLayoutTest.java +++ b/src/test/java/net/logstash/logback/layout/CompositeJsonLayoutTest.java @@ -15,118 +15,128 @@ import static org.assertj.core.api.Assertions.assertThat; import static org.assertj.core.api.Assertions.assertThatExceptionOfType; -import static org.mockito.Mockito.when; import java.io.IOException; -import ch.qos.logback.core.Layout; -import ch.qos.logback.core.spi.DeferredProcessingAware; +import net.logstash.logback.composite.AbstractJsonProvider; import net.logstash.logback.composite.CompositeJsonFormatter; -import org.junit.jupiter.api.BeforeEach; + import org.junit.jupiter.api.Test; import org.junit.jupiter.api.extension.ExtendWith; import 
org.mockito.Mock; import org.mockito.junit.jupiter.MockitoExtension; +import com.fasterxml.jackson.core.JsonGenerator; + +import ch.qos.logback.core.Layout; +import ch.qos.logback.core.LayoutBase; +import ch.qos.logback.core.spi.DeferredProcessingAware; + @ExtendWith(MockitoExtension.class) public class CompositeJsonLayoutTest { /** * create an implementation of the composite layout that format teh event using simply toString() */ - static class TesterCompositeJsonLayout extends CompositeJsonLayout { + static class TesterCompositeJsonLayout extends CompositeJsonLayout { @Override - protected CompositeJsonFormatter createFormatter() { - return new CompositeJsonFormatter(this){ + protected CompositeJsonFormatter createFormatter() { + CompositeJsonFormatter formatter = new CompositeJsonFormatter(this) {}; + formatter.getProviders().addProvider(new AbstractJsonProvider() { @Override - public String writeEventAsString(DeferredProcessingAware deferredProcessingAware) throws IOException { - return deferredProcessingAware.toString(); + public void writeTo(JsonGenerator generator, DeferredProcessingAware event) throws IOException { + generator.writeRaw("event"); } - }; + @Override + public void prepareForDeferredProcessing(DeferredProcessingAware event) { + super.prepareForDeferredProcessing(event); + } + }); + return formatter; } } - @Mock(lenient = true) - DeferredProcessingAware event; + private Layout prefixLayout = new LayoutBase() { + @Override + public String doLayout(DeferredProcessingAware event) { + return "prefix:"; + } + }; - @Mock(lenient = true) - Layout prefixLayout; + private Layout suffixLayout = new LayoutBase() { + public String doLayout(DeferredProcessingAware event) { + return ":suffix"; + }; + }; @Mock(lenient = true) - Layout suffixLayout; - - @BeforeEach - public void setup() { - when(event.toString()).thenReturn("event"); - when(prefixLayout.doLayout(event)).thenReturn("prefix:"); - when(suffixLayout.doLayout(event)).thenReturn(":suffix"); - } + private DeferredProcessingAware event; + @Test public void testDoLayoutWithoutPrefixSuffix() { - - CompositeJsonLayout layout = new TesterCompositeJsonLayout(); + CompositeJsonLayout layout = new TesterCompositeJsonLayout(); layout.start(); String layoutResult = layout.doLayout(event); - assertThat(layoutResult).isEqualTo("event"); + assertThat(layoutResult).isEqualTo("{event}"); } @Test public void testDoLayoutWithPrefixWithoutSuffix() { - CompositeJsonLayout layout = new TesterCompositeJsonLayout(); + CompositeJsonLayout layout = new TesterCompositeJsonLayout(); layout.setPrefix(prefixLayout); layout.start(); String layoutResult = layout.doLayout(event); - assertThat(layoutResult).isEqualTo("prefix:event"); + assertThat(layoutResult).isEqualTo("prefix:{event}"); } @Test public void testDoLayoutWithoutPrefixWithSuffix() { - CompositeJsonLayout layout = new TesterCompositeJsonLayout(); + CompositeJsonLayout layout = new TesterCompositeJsonLayout(); layout.setSuffix(suffixLayout); layout.start(); String layoutResult = layout.doLayout(event); - assertThat(layoutResult).isEqualTo("event:suffix"); + assertThat(layoutResult).isEqualTo("{event}:suffix"); } @Test public void testDoLayoutWithPrefixWithSuffix() { - CompositeJsonLayout layout = new TesterCompositeJsonLayout(); + CompositeJsonLayout layout = new TesterCompositeJsonLayout(); layout.setPrefix(prefixLayout); layout.setSuffix(suffixLayout); layout.start(); String layoutResult = layout.doLayout(event); - assertThat(layoutResult).isEqualTo("prefix:event:suffix"); + 
assertThat(layoutResult).isEqualTo("prefix:{event}:suffix"); } @Test public void testDoLayoutWithPrefixWithLineSeparator() { - CompositeJsonLayout layout = new TesterCompositeJsonLayout(); + CompositeJsonLayout layout = new TesterCompositeJsonLayout(); layout.setLineSeparator("SYSTEM"); layout.start(); String layoutResult = layout.doLayout(event); - assertThat(layoutResult).isEqualTo("event" + System.lineSeparator()); + assertThat(layoutResult).isEqualTo("{event}" + System.lineSeparator()); } @Test public void notStarted() { - CompositeJsonLayout layout = new TesterCompositeJsonLayout(); + CompositeJsonLayout layout = new TesterCompositeJsonLayout(); assertThatExceptionOfType(IllegalStateException.class).isThrownBy(() -> layout.doLayout(event)) .withMessage("Layout is not started"); } diff --git a/src/test/java/net/logstash/logback/util/ReusableBufferTests.java b/src/test/java/net/logstash/logback/util/ReusableBufferTests.java new file mode 100644 index 00000000..c32dbd74 --- /dev/null +++ b/src/test/java/net/logstash/logback/util/ReusableBufferTests.java @@ -0,0 +1,103 @@ +/** + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package net.logstash.logback.util; + +import static org.assertj.core.api.Assertions.assertThat; +import static org.assertj.core.api.Assertions.assertThatCode; +import static org.assertj.core.api.Assertions.assertThatExceptionOfType; +import static org.assertj.core.api.Assertions.assertThatIOException; + +import java.io.ByteArrayOutputStream; +import java.io.IOException; +import java.nio.charset.StandardCharsets; + +import org.junit.jupiter.api.Test; + +/** + * @author brenuart + * + */ +@SuppressWarnings("resource") +public class ReusableBufferTests { + + private final byte[] helloBytes = "0123456789".getBytes(StandardCharsets.UTF_8); + + + @Test + public void invalidInitialSize() { + assertThatExceptionOfType(IllegalArgumentException.class).isThrownBy(() -> new ReusableByteBuffer(0)); + assertThatExceptionOfType(IllegalArgumentException.class).isThrownBy(() -> new ReusableByteBuffer(-1)); + } + + + @Test + public void size() throws IOException { + ReusableByteBuffer buffer = new ReusableByteBuffer(); + + buffer.write(helloBytes); + assertThat(buffer.size()).isEqualTo(helloBytes.length); + } + + + @Test + public void autoGrow() throws IOException { + ReusableByteBuffer buffer = new ReusableByteBuffer(1); + + for (int i=0; i<10; i++) { + buffer.write(1); + } + + assertThat(buffer.size()).isEqualTo(10); + assertThat(buffer.toByteArray()).containsExactly(new byte[] {1, 1, 1, 1, 1, 1, 1, 1, 1, 1}); + } + + + @Test + public void reset() throws IOException { + ReusableByteBuffer buffer = new ReusableByteBuffer(); + + buffer.write(helloBytes); + assertThat(buffer.toByteArray()).containsExactly(helloBytes); + + buffer.reset(); + assertThat(buffer.size()).isZero(); + + buffer.write(helloBytes); + assertThat(buffer.toByteArray()).containsExactly(helloBytes); + } + + + @Test + public void close() { + ReusableByteBuffer buffer = new ReusableByteBuffer(); + + 
buffer.close(); + assertThatIOException().isThrownBy(() -> buffer.write(this.helloBytes)); + + buffer.reset(); + assertThatCode(() -> buffer.write(this.helloBytes)).doesNotThrowAnyException(); + } + + + @Test + public void writeTo() throws IOException { + ReusableByteBuffer buffer = new ReusableByteBuffer(); + + buffer.write(this.helloBytes); + + ByteArrayOutputStream baos = new ByteArrayOutputStream(); + buffer.writeTo(baos); + assertThat(baos.toByteArray()).isEqualTo(this.helloBytes); + } +}
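To round off the patch, here is a small, self-contained usage sketch of the new buffer-pooling utilities; it is not part of the change itself. Only `ReusableByteBuffer` and `ReusableByteBufferPool` come from the patch — the `render` helper, the sample payload, and the `writeTo` stand-in (playing the role a `StreamingEncoder` plays in `CompositeJsonEncoder#encode`) are illustrative assumptions.

```java
import java.io.IOException;
import java.io.OutputStream;

import net.logstash.logback.util.ReusableByteBuffer;
import net.logstash.logback.util.ReusableByteBufferPool;

public class ReusableByteBufferPoolExample {

    // Buffers handed out by the pool start with a 1024-byte initial segment.
    private final ReusableByteBufferPool bufferPool = new ReusableByteBufferPool(1024);

    /**
     * Renders a payload into a pooled buffer and returns the bytes,
     * mirroring the acquire/encode/release pattern used by CompositeJsonEncoder#encode.
     */
    public byte[] render(byte[] payload) throws IOException {
        ReusableByteBuffer buffer = bufferPool.acquire();
        try {
            writeTo(buffer, payload);     // any OutputStream-based writer works here
            return buffer.toByteArray();  // copies the accumulated content into a fresh array
        } finally {
            bufferPool.release(buffer);   // resets the buffer and returns it to the pool
        }
    }

    /** Stand-in for a StreamingEncoder: writes directly into the supplied OutputStream. */
    private void writeTo(OutputStream out, byte[] payload) throws IOException {
        out.write(payload);
    }

    public static void main(String[] args) throws IOException {
        ReusableByteBufferPoolExample example = new ReusableByteBufferPoolExample();
        byte[] bytes = example.render("{\"message\":\"hello\"}".getBytes());
        System.out.println(bytes.length + " bytes rendered");
    }
}
```

Because `release(buffer)` calls `reset()`, only the initial buffer of `minBufferSize` bytes survives between invocations, which is why the README advises sizing `minBufferSize` close to the average encoded event.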