diff --git a/conf/defaults.yaml b/conf/defaults.yaml
index 041a628e19c..ba07fbf6f2a 100644
--- a/conf/defaults.yaml
+++ b/conf/defaults.yaml
@@ -197,7 +197,6 @@ zmq.threads: 1
zmq.linger.millis: 5000
zmq.hwm: 0
-
storm.messaging.netty.server_worker_threads: 1
storm.messaging.netty.client_worker_threads: 1
storm.messaging.netty.buffer_size: 5242880 #5MB buffer
@@ -205,6 +204,8 @@ storm.messaging.netty.buffer_size: 5242880 #5MB buffer
storm.messaging.netty.max_retries: 300
storm.messaging.netty.max_wait_ms: 1000
storm.messaging.netty.min_wait_ms: 100
+io.netty.noPreferDirect: false
+io.netty.allocator.type: "pooled"
# If the Netty messaging layer is busy(netty internal buffer not writable), the Netty client will try to batch message as more as possible up to the size of storm.messaging.netty.transfer.batch.size bytes, otherwise it will try to flush message as soon as possible to reduce latency.
storm.messaging.netty.transfer.batch.size: 262144
@@ -280,6 +281,7 @@ pacemaker.host: "localhost"
pacemaker.port: 6699
pacemaker.base.threads: 10
pacemaker.max.threads: 50
+pacemaker.client.max.threads: 2
pacemaker.thread.timeout: 10
pacemaker.childopts: "-Xmx1024m"
pacemaker.auth.method: "NONE"
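The two new io.netty.* keys mirror standard Netty 4 system properties: io.netty.allocator.type selects the default ByteBufAllocator ("pooled" vs. "unpooled") and io.netty.noPreferDirect controls whether direct buffers are preferred. A minimal sketch, assuming the daemon copies these YAML values into JVM system properties before any Netty class is initialized; the helper class is illustrative and how Storm actually wires this is not shown in this hunk.

import java.util.Map;

// Illustrative helper (hypothetical name): copies the Netty tuning keys from
// the storm conf into JVM system properties so Netty picks them up when its
// classes are first loaded.
public final class NettyAllocatorSettings {
    private NettyAllocatorSettings() {}

    public static void apply(Map<String, Object> conf) {
        copy(conf, "io.netty.noPreferDirect");   // "false" keeps direct buffers preferred
        copy(conf, "io.netty.allocator.type");   // "pooled" selects PooledByteBufAllocator
    }

    private static void copy(Map<String, Object> conf, String key) {
        Object value = conf.get(key);
        if (value != null) {
            System.setProperty(key, value.toString());
        }
    }
}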
diff --git a/pom.xml b/pom.xml
index ce5437c5ff3..b9e05db23ef 100644
--- a/pom.xml
+++ b/pom.xml
@@ -225,7 +225,8 @@
3.3.2
0.9.0
16.0.1
- 3.9.0.Final
+ 4.1.5.Final
+ 1.0.2
1.6.6
2.1
1.7.7
@@ -648,10 +649,6 @@
log4j
log4j
-
- org.jboss.netty
- netty
-
@@ -819,7 +816,7 @@
io.netty
- netty
+ netty-all
${netty.version}
diff --git a/storm-core/pom.xml b/storm-core/pom.xml
index 0b5ea0222da..cb087e57f48 100644
--- a/storm-core/pom.xml
+++ b/storm-core/pom.xml
@@ -270,7 +270,7 @@
io.netty
- netty
+ netty-all
io.dropwizard.metrics
@@ -490,7 +490,7 @@
compojure:compojure
clj-time:clj-time
org.apache.thrift:*
- io.netty:netty
+ io.netty:netty-all
com.google.guava:guava
org.apache.httpcomponents:http*
org.apache.zookeeper:zookeeper
@@ -538,6 +538,10 @@
clojure.core.incubator
org.apache.storm.shade.clojure.core.incubator
+
+ io.netty
+ org.apache.storm.netty
+
clojure.tools.namespace
org.apache.storm.shade.clojure.tools.namespace
@@ -583,10 +587,6 @@
org.apache.storm.thrift
-
- org.jboss.netty
- org.apache.storm.shade.org.jboss.netty
-
com.google.common
org.apache.storm.shade.com.google.common
@@ -760,7 +760,7 @@
- io.netty:netty
+ io.netty:netty-all
META-INF/LICENSE.txt
META-INF/NOTICE.txt
diff --git a/storm-core/src/jvm/org/apache/storm/Config.java b/storm-core/src/jvm/org/apache/storm/Config.java
index 5a97f4aff8e..6cc3f107bc5 100644
--- a/storm-core/src/jvm/org/apache/storm/Config.java
+++ b/storm-core/src/jvm/org/apache/storm/Config.java
@@ -72,6 +72,20 @@ public class Config extends HashMap {
@isPositiveNumber
public static final String STORM_MESSAGING_NETTY_BUFFER_SIZE = "storm.messaging.netty.buffer_size";
+ /**
+ * Netty based messaging: The high water mark of the channel's write buffer, in bytes
+ */
+ @isInteger
+ @isPositiveNumber
+ public static final String STORM_MESSAGING_NETTY_WRITE_BUFFER_HIGH_WATER_MARK = "storm.messaging.netty.write.buffer.high.water.mark";
+
+ /**
+ * Netty based messaging: The low water mark of the channel's write buffer, in bytes
+ */
+ @isInteger
+ @isPositiveNumber
+ public static final String STORM_MESSAGING_NETTY_WRITE_BUFFER_LOW_WATER_MARK = "storm.messaging.netty.write.buffer.low.water.mark";
+
/**
* Netty based messaging: Sets the backlog value to specify when the channel binds to a local address
*/
@@ -120,6 +134,12 @@ public class Config extends HashMap {
@isInteger
public static final String STORM_NETTY_MESSAGE_BATCH_SIZE = "storm.messaging.netty.transfer.batch.size";
+ /**
+ * The Netty message decoder will try to batch as many messages as possible, up to the size of STORM_NETTY_MESSAGE_DECODE_BATCH_SIZE
+ */
+ @isInteger
+ public static final String STORM_NETTY_MESSAGE_DECODE_BATCH_SIZE = "storm.messaging.netty.decode.batch.size";
+
/**
* We check with this interval that whether the Netty channel is writable and try to write pending messages
*/
@@ -954,6 +974,15 @@ public class Config extends HashMap {
@isPositiveNumber
public static final String PACEMAKER_MAX_THREADS = "pacemaker.max.threads";
+ /**
+ * The maximum number of threads that should be used by the Pacemaker client.
+ * As load increases, the client will spawn new threads, up to
+ * this many in total, to handle it.
+ */
+ @isNumber
+ @isPositiveNumber
+ public static final String PACEMAKER_CLIENT_MAX_THREADS = "pacemaker.client.max.threads";
+
/**
* This parameter is used by the storm-deploy project to configure the
* jvm options for the pacemaker daemon.
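The new write-buffer water marks map onto Netty 4's ChannelOption.WRITE_BUFFER_HIGH_WATER_MARK / WRITE_BUFFER_LOW_WATER_MARK, which drive Channel.isWritable(): once the pending outbound bytes exceed the high mark, isWritable() returns false, and it only flips back to true after the backlog drains below the low mark. A minimal sketch of applying the options; the 5 MB / 2 MB defaults match the values used later in Client.java, and the wrapper method is illustrative.

import io.netty.bootstrap.Bootstrap;
import io.netty.channel.ChannelOption;
import java.util.Map;
import org.apache.storm.Config;
import org.apache.storm.utils.Utils;

final class WriteBufferWaterMarks {
    // Illustrative wrapper: reads the two new conf keys and applies them as
    // Netty 4 channel options on a client Bootstrap.
    @SuppressWarnings("rawtypes")
    static Bootstrap apply(Bootstrap bootstrap, Map stormConf) {
        int high = Utils.getInt(stormConf.get(Config.STORM_MESSAGING_NETTY_WRITE_BUFFER_HIGH_WATER_MARK), 5242880);
        int low = Utils.getInt(stormConf.get(Config.STORM_MESSAGING_NETTY_WRITE_BUFFER_LOW_WATER_MARK), 2097152);
        return bootstrap
                .option(ChannelOption.WRITE_BUFFER_HIGH_WATER_MARK, high)
                .option(ChannelOption.WRITE_BUFFER_LOW_WATER_MARK, low);
    }
}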
diff --git a/storm-core/src/jvm/org/apache/storm/messaging/local/Context.java b/storm-core/src/jvm/org/apache/storm/messaging/local/Context.java
index 7300847a134..b043c97549d 100644
--- a/storm-core/src/jvm/org/apache/storm/messaging/local/Context.java
+++ b/storm-core/src/jvm/org/apache/storm/messaging/local/Context.java
@@ -17,6 +17,11 @@
*/
package org.apache.storm.messaging.local;
+import org.apache.storm.grouping.Load;
+import org.apache.storm.messaging.IConnection;
+import org.apache.storm.messaging.IConnectionCallback;
+import org.apache.storm.messaging.IContext;
+import org.apache.storm.messaging.TaskMessage;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@@ -28,16 +33,10 @@
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.Executors;
-
import java.util.concurrent.LinkedBlockingQueue;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.ThreadFactory;
import java.util.concurrent.TimeUnit;
-import org.apache.storm.grouping.Load;
-import org.apache.storm.messaging.IConnection;
-import org.apache.storm.messaging.TaskMessage;
-import org.apache.storm.messaging.IConnectionCallback;
-import org.apache.storm.messaging.IContext;
public class Context implements IContext {
private static final Logger LOG = LoggerFactory.getLogger(Context.class);
diff --git a/storm-core/src/jvm/org/apache/storm/messaging/netty/Client.java b/storm-core/src/jvm/org/apache/storm/messaging/netty/Client.java
index 035eb1bcdf9..da078171e28 100644
--- a/storm-core/src/jvm/org/apache/storm/messaging/netty/Client.java
+++ b/storm-core/src/jvm/org/apache/storm/messaging/netty/Client.java
@@ -17,42 +17,42 @@
*/
package org.apache.storm.messaging.netty;
-import java.net.InetSocketAddress;
-import java.net.SocketAddress;
-import java.util.Iterator;
-import java.util.Collection;
-import java.util.Map;
-import java.util.HashMap;
-import java.util.Timer;
-import java.util.concurrent.atomic.AtomicBoolean;
-import java.util.concurrent.atomic.AtomicInteger;
-import java.util.concurrent.atomic.AtomicReference;
-import java.lang.InterruptedException;
-
+import io.netty.bootstrap.Bootstrap;
+import io.netty.buffer.PooledByteBufAllocator;
+import io.netty.channel.Channel;
+import io.netty.channel.ChannelFuture;
+import io.netty.channel.ChannelFutureListener;
+import io.netty.channel.ChannelOption;
+import io.netty.channel.EventLoopGroup;
+import io.netty.channel.socket.nio.NioSocketChannel;
+import io.netty.util.HashedWheelTimer;
+import io.netty.util.Timeout;
+import io.netty.util.TimerTask;
import org.apache.storm.Config;
import org.apache.storm.grouping.Load;
import org.apache.storm.messaging.ConnectionWithStatus;
-import org.apache.storm.messaging.TaskMessage;
import org.apache.storm.messaging.IConnectionCallback;
+import org.apache.storm.messaging.TaskMessage;
import org.apache.storm.metric.api.IStatefulObject;
import org.apache.storm.utils.StormBoundedExponentialBackoffRetry;
import org.apache.storm.utils.Utils;
-import org.jboss.netty.bootstrap.ClientBootstrap;
-import org.jboss.netty.channel.Channel;
-import org.jboss.netty.channel.ChannelFactory;
-import org.jboss.netty.channel.ChannelFuture;
-import org.jboss.netty.channel.ChannelFutureListener;
-import org.jboss.netty.util.HashedWheelTimer;
-import org.jboss.netty.util.Timeout;
-import org.jboss.netty.util.TimerTask;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
-
+import java.net.InetSocketAddress;
+import java.net.SocketAddress;
import java.util.ArrayList;
+import java.util.Collection;
+import java.util.HashMap;
+import java.util.Iterator;
import java.util.List;
+import java.util.Map;
+import java.util.Timer;
import java.util.concurrent.TimeUnit;
+import java.util.concurrent.atomic.AtomicBoolean;
+import java.util.concurrent.atomic.AtomicInteger;
import java.util.concurrent.atomic.AtomicLong;
+import java.util.concurrent.atomic.AtomicReference;
import static com.google.common.base.Preconditions.checkState;
@@ -69,6 +69,7 @@
* the remote destination is currently unavailable.
*/
public class Client extends ConnectionWithStatus implements IStatefulObject, ISaslClient {
+
private static final long PENDING_MESSAGES_FLUSH_TIMEOUT_MS = 600000L;
private static final long PENDING_MESSAGES_FLUSH_INTERVAL_MS = 1000L;
@@ -79,7 +80,8 @@ public class Client extends ConnectionWithStatus implements IStatefulObject, ISa
private final Map stormConf;
private final StormBoundedExponentialBackoffRetry retryPolicy;
- private final ClientBootstrap bootstrap;
+ private final EventLoopGroup eventLoopGroup;
+ private final Bootstrap bootstrap;
private final InetSocketAddress dstAddress;
protected final String dstAddressPrefixedName;
private volatile Map serverLoad = null;
@@ -136,15 +138,19 @@ public class Client extends ConnectionWithStatus implements IStatefulObject, ISa
private final MessageBuffer batcher;
+ private final AtomicBoolean inFlush = new AtomicBoolean(false);
+
private final Object writeLock = new Object();
@SuppressWarnings("rawtypes")
- Client(Map stormConf, ChannelFactory factory, HashedWheelTimer scheduler, String host, int port, Context context) {
+ Client(Map stormConf, EventLoopGroup eventLoopGroup, HashedWheelTimer scheduler, String host, int port, Context context) {
this.stormConf = stormConf;
closing = false;
this.scheduler = scheduler;
this.context = context;
int bufferSize = Utils.getInt(stormConf.get(Config.STORM_MESSAGING_NETTY_BUFFER_SIZE));
+ int write_buffer_high_water_mark = Utils.getInt(stormConf.get(Config.STORM_MESSAGING_NETTY_WRITE_BUFFER_HIGH_WATER_MARK), 5242880);
+ int write_buffer_low_water_mark = Utils.getInt(stormConf.get(Config.STORM_MESSAGING_NETTY_WRITE_BUFFER_LOW_WATER_MARK), 2097152);
// if SASL authentication is disabled, saslChannelReady is initialized as true; otherwise false
saslChannelReady.set(!Utils.getBoolean(stormConf.get(Config.STORM_MESSAGING_NETTY_AUTHENTICATION), false));
LOG.info("creating Netty Client, connecting to {}:{}, bufferSize: {}", host, port, bufferSize);
@@ -155,8 +161,18 @@ public class Client extends ConnectionWithStatus implements IStatefulObject, ISa
int maxWaitMs = Utils.getInt(stormConf.get(Config.STORM_MESSAGING_NETTY_MAX_SLEEP_MS));
retryPolicy = new StormBoundedExponentialBackoffRetry(minWaitMs, maxWaitMs, maxReconnectionAttempts);
+ this.eventLoopGroup = eventLoopGroup;
// Initiate connection to remote destination
- bootstrap = createClientBootstrap(factory, bufferSize, stormConf);
+ bootstrap = new Bootstrap()
+ .group(this.eventLoopGroup)
+ .channel(NioSocketChannel.class)
+ .option(ChannelOption.TCP_NODELAY, true)
+ .option(ChannelOption.SO_SNDBUF, bufferSize)
+ .option(ChannelOption.SO_KEEPALIVE, true)
+ .option(ChannelOption.WRITE_BUFFER_HIGH_WATER_MARK, write_buffer_high_water_mark)
+ .option(ChannelOption.WRITE_BUFFER_LOW_WATER_MARK, write_buffer_low_water_mark)
+ .option(ChannelOption.ALLOCATOR, PooledByteBufAllocator.DEFAULT)
+ .handler(new StormClientPipelineFactory(this, stormConf));
dstAddress = new InetSocketAddress(host, port);
dstAddressPrefixedName = prefixedName(dstAddress);
launchChannelAliveThread();
@@ -189,15 +205,6 @@ public void run() {
}, 0, CHANNEL_ALIVE_INTERVAL_MS);
}
- private ClientBootstrap createClientBootstrap(ChannelFactory factory, int bufferSize, Map stormConf) {
- ClientBootstrap bootstrap = new ClientBootstrap(factory);
- bootstrap.setOption("tcpNoDelay", true);
- bootstrap.setOption("sendBufferSize", bufferSize);
- bootstrap.setOption("keepAlive", true);
- bootstrap.setPipelineFactory(new StormClientPipelineFactory(this, stormConf));
- return bootstrap;
- }
-
private String prefixedName(InetSocketAddress dstAddress) {
if (null != dstAddress) {
return PREFIX + dstAddress.toString();
@@ -217,14 +224,10 @@ private boolean reconnectingAllowed() {
}
private boolean connectionEstablished(Channel channel) {
- // Because we are using TCP (which is a connection-oriented transport unlike UDP), a connection is only fully
- // established iff the channel is connected. That is, a TCP-based channel must be in the CONNECTED state before
- // anything can be read or written to the channel.
- //
+ // Since Netty 4.x the channel state model has been simplified: isActive() is true once the channel is open and connected.
// See:
- // - http://netty.io/3.9/api/org/jboss/netty/channel/ChannelEvent.html
- // - http://stackoverflow.com/questions/13356622/what-are-the-netty-channel-state-transitions
- return channel != null && channel.isConnected();
+ // - http://netty.io/wiki/new-and-noteworthy-in-4.0.html#wiki-h4-19
+ return channel != null && channel.isActive();
}
/**
@@ -308,7 +311,8 @@ public void send(Iterator msgs) {
}
}
- if(channel.isWritable()){
+ // channel.writeAndFlush is expensive, so skip the immediate flush while one is already in flight
+ if(channel.isWritable() && !inFlush.get()){
synchronized (writeLock) {
// Netty's internal buffer is not full and we still have message left in the buffer.
// We should write the unfilled MessageBatch immediately to reduce latency
@@ -320,7 +324,7 @@ public void send(Iterator msgs) {
} else {
// Channel's buffer is full, meaning that we have time to wait other messages to arrive, and create a bigger
// batch. This yields better throughput.
- // We can rely on `notifyInterestChanged` to push these messages as soon as there is spece in Netty's buffer
+ // We can rely on `notifyChannelFlushabilityChanged` to push these messages as soon as there is space in Netty's buffer
// because we know `Channel.isWritable` was false after the messages were already in the buffer.
}
}
@@ -370,7 +374,7 @@ private int iteratorSize(Iterator msgs) {
*
* If the write operation fails, then we will close the channel and trigger a reconnect.
*/
- private void flushMessages(Channel channel, final MessageBatch batch) {
+ private void flushMessages(final Channel channel, final MessageBatch batch) {
if (null == batch || batch.isEmpty()) {
return;
}
@@ -378,23 +382,35 @@ private void flushMessages(Channel channel, final MessageBatch batch) {
final int numMessages = batch.size();
LOG.debug("writing {} messages to channel {}", batch.size(), channel.toString());
pendingMessages.addAndGet(numMessages);
+ inFlush.set(true);
+ // calling channel.writeAndFlush from the event loop is supposed to be more efficient
+ eventLoopGroup.execute(new Runnable() {
+ @Override
+ public void run() {
- ChannelFuture future = channel.write(batch);
- future.addListener(new ChannelFutureListener() {
- public void operationComplete(ChannelFuture future) throws Exception {
- pendingMessages.addAndGet(0 - numMessages);
- if (future.isSuccess()) {
- LOG.debug("sent {} messages to {}", numMessages, dstAddressPrefixedName);
- messagesSent.getAndAdd(batch.size());
- } else {
- LOG.error("failed to send {} messages to {}: {}", numMessages, dstAddressPrefixedName,
- future.getCause());
- closeChannelAndReconnect(future.getChannel());
- messagesLost.getAndAdd(numMessages);
- }
- }
+ ChannelFuture future = channel.writeAndFlush(batch);
+ inFlush.set(false);
+ future.addListener(new ChannelFutureListener() {
+ public void operationComplete(ChannelFuture future) throws Exception {
+ pendingMessages.addAndGet(0 - numMessages);
+ if (future.isSuccess()) {
+ LOG.debug("sent {} messages to {}", numMessages, dstAddressPrefixedName);
+ messagesSent.getAndAdd(batch.size());
+ } else {
+ LOG.error("failed to send {} messages to {}: {}", numMessages, dstAddressPrefixedName,
+ future.cause());
+ closeChannelAndReconnect(future.channel());
+ messagesLost.getAndAdd(numMessages);
+ }
+ }
+ });
+
+ // notify explicitly so any messages batched while this flush was in flight get written
+ notifyChannelFlushabilityChanged(channel);
+ }
});
+
}
/**
@@ -499,10 +515,6 @@ public Map getConfig() {
}
/** ISaslClient interface **/
- public void channelConnected(Channel channel) {
-// setChannel(channel);
- }
-
public void channelReady() {
saslChannelReady.set(true);
}
@@ -520,7 +532,7 @@ private String srcAddressName() {
String name = null;
Channel channel = channelRef.get();
if (channel != null) {
- SocketAddress address = channel.getLocalAddress();
+ SocketAddress address = channel.localAddress();
if (address != null) {
name = address.toString();
}
@@ -534,13 +546,13 @@ public String toString() {
}
/**
- * Called by Netty thread on change in channel interest
- * @param channel
+ * Called by a Netty thread when the channel's writability changes or after a channel.writeAndFlush call completes
+ * @param channel the channel whose writability changed
*/
- public void notifyInterestChanged(Channel channel) {
+ public void notifyChannelFlushabilityChanged(Channel channel) {
if(channel.isWritable()){
synchronized (writeLock) {
- // Channel is writable again, write if there are any messages pending
+ // Channel is writable again; flush any pending messages
MessageBatch pending = batcher.drain();
flushMessages(channel, pending);
}
@@ -569,7 +581,6 @@ private void reschedule(Throwable t) {
scheduleConnect(nextDelayMs);
}
-
@Override
public void run(Timeout timeout) throws Exception {
if (reconnectingAllowed()) {
@@ -582,7 +593,7 @@ public void run(Timeout timeout) throws Exception {
@Override
public void operationComplete(ChannelFuture future) throws Exception {
// This call returns immediately
- Channel newChannel = future.getChannel();
+ Channel newChannel = future.channel();
if (future.isSuccess() && connectionEstablished(newChannel)) {
boolean setChannel = channelRef.compareAndSet(null, newChannel);
@@ -593,7 +604,7 @@ public void operationComplete(ChannelFuture future) throws Exception {
LOG.warn("Re-connection to {} was successful but {} messages has been lost so far", address.toString(), messagesLost.get());
}
} else {
- Throwable cause = future.getCause();
+ Throwable cause = future.cause();
reschedule(cause);
if (newChannel != null) {
newChannel.close();
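The reworked flushMessages hands the write to the Netty event loop and uses the Netty 4 future accessors (cause(), channel()) in place of the removed 3.x getters. A condensed sketch of that pattern: the batch type and failure handling are simplified, channel.eventLoop() stands in for the shared worker EventLoopGroup the patch submits to, and the real code additionally maintains the pending/sent/lost counters and calls notifyChannelFlushabilityChanged afterwards.

import io.netty.channel.Channel;
import io.netty.channel.ChannelFuture;
import io.netty.channel.ChannelFutureListener;

final class FlushOnEventLoopSketch {
    // Simplified flush: submit the write to the channel's own event loop and
    // react to the outcome through a ChannelFutureListener.
    static void flush(final Channel channel, final Object batch) {
        channel.eventLoop().execute(new Runnable() {
            @Override
            public void run() {
                ChannelFuture future = channel.writeAndFlush(batch);
                future.addListener(new ChannelFutureListener() {
                    @Override
                    public void operationComplete(ChannelFuture f) {
                        if (!f.isSuccess()) {
                            // the real Client closes the channel and schedules a reconnect here
                            f.channel().close();
                        }
                    }
                });
            }
        });
    }
}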
diff --git a/storm-core/src/jvm/org/apache/storm/messaging/netty/Context.java b/storm-core/src/jvm/org/apache/storm/messaging/netty/Context.java
index 3b65eb50a2f..8c664ca9371 100644
--- a/storm-core/src/jvm/org/apache/storm/messaging/netty/Context.java
+++ b/storm-core/src/jvm/org/apache/storm/messaging/netty/Context.java
@@ -17,24 +17,25 @@
*/
package org.apache.storm.messaging.netty;
-import org.jboss.netty.channel.socket.nio.NioClientSocketChannelFactory;
-import org.jboss.netty.util.HashedWheelTimer;
-import java.util.concurrent.Executors;
-import java.util.concurrent.ThreadFactory;
-import java.util.HashMap;
-import java.util.Map;
-
+import io.netty.channel.EventLoopGroup;
+import io.netty.channel.nio.NioEventLoopGroup;
+import io.netty.util.HashedWheelTimer;
import org.apache.storm.Config;
import org.apache.storm.messaging.IConnection;
import org.apache.storm.messaging.IContext;
import org.apache.storm.utils.Utils;
+import java.util.HashMap;
+import java.util.Map;
+import java.util.concurrent.ThreadFactory;
+
public class Context implements IContext {
@SuppressWarnings("rawtypes")
private Map storm_conf;
private Map connections;
- private NioClientSocketChannelFactory clientChannelFactory;
-
+
+ private EventLoopGroup workerEventLoopGroup;
+
private HashedWheelTimer clientScheduleService;
/**
@@ -45,18 +46,17 @@ public void prepare(Map storm_conf) {
this.storm_conf = storm_conf;
connections = new HashMap<>();
- //each context will have a single client channel factory
+ // each context will have a single client-side worker event loop group
int maxWorkers = Utils.getInt(storm_conf.get(Config.STORM_MESSAGING_NETTY_CLIENT_WORKER_THREADS));
- ThreadFactory bossFactory = new NettyRenameThreadFactory("client" + "-boss");
ThreadFactory workerFactory = new NettyRenameThreadFactory("client" + "-worker");
+
if (maxWorkers > 0) {
- clientChannelFactory = new NioClientSocketChannelFactory(Executors.newCachedThreadPool(bossFactory),
- Executors.newCachedThreadPool(workerFactory), maxWorkers);
+ workerEventLoopGroup = new NioEventLoopGroup(maxWorkers, workerFactory);
} else {
- clientChannelFactory = new NioClientSocketChannelFactory(Executors.newCachedThreadPool(bossFactory),
- Executors.newCachedThreadPool(workerFactory));
+ // 0 falls back to Netty's DEFAULT_EVENT_LOOP_THREADS (twice the available processors by default)
+ workerEventLoopGroup = new NioEventLoopGroup(0, workerFactory);
}
-
+
clientScheduleService = new HashedWheelTimer(new NettyRenameThreadFactory("client-schedule-service"));
}
@@ -78,7 +78,7 @@ public synchronized IConnection connect(String storm_id, String host, int port)
{
return connection;
}
- IConnection client = new Client(storm_conf, clientChannelFactory,
+ IConnection client = new Client(storm_conf, workerEventLoopGroup,
clientScheduleService, host, port, this);
connections.put(key(host, port), client);
return client;
@@ -102,9 +102,8 @@ public synchronized void term() {
connections = null;
- //we need to release resources associated with client channel factory
- clientChannelFactory.releaseExternalResources();
-
+ // release the resources associated with the worker event loop group
+ workerEventLoopGroup.shutdownGracefully();
}
private String key(String host, int port) {
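The NioEventLoopGroup replaces both the boss and worker pools of the 3.x NioClientSocketChannelFactory; passing 0 threads defers to Netty's default thread count, and shutdownGracefully() is the 4.x counterpart of releaseExternalResources(). A small lifecycle sketch under those assumptions (class and method names are illustrative):

import io.netty.channel.EventLoopGroup;
import io.netty.channel.nio.NioEventLoopGroup;
import java.util.concurrent.ThreadFactory;

final class EventLoopGroupLifecycleSketch {
    static EventLoopGroup start(int maxWorkers, ThreadFactory factory) {
        // maxWorkers > 0 pins the thread count; 0 lets Netty pick its default
        return new NioEventLoopGroup(maxWorkers > 0 ? maxWorkers : 0, factory);
    }

    static void stop(EventLoopGroup group) {
        // releases the selector threads and any channels still registered with them
        group.shutdownGracefully();
    }
}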
diff --git a/storm-core/src/jvm/org/apache/storm/messaging/netty/ControlMessage.java b/storm-core/src/jvm/org/apache/storm/messaging/netty/ControlMessage.java
index 3c7aaba613a..fcc4c979bda 100644
--- a/storm-core/src/jvm/org/apache/storm/messaging/netty/ControlMessage.java
+++ b/storm-core/src/jvm/org/apache/storm/messaging/netty/ControlMessage.java
@@ -17,11 +17,11 @@
*/
package org.apache.storm.messaging.netty;
-import java.io.IOException;
+import io.netty.buffer.ByteBuf;
+import io.netty.buffer.ByteBufAllocator;
+import io.netty.buffer.ByteBufOutputStream;
-import org.jboss.netty.buffer.ChannelBuffer;
-import org.jboss.netty.buffer.ChannelBufferOutputStream;
-import org.jboss.netty.buffer.ChannelBuffers;
+import java.io.IOException;
public enum ControlMessage implements INettySerializable {
CLOSE_MESSAGE((short)-100),
@@ -55,21 +55,29 @@ public int encodeLength() {
/**
* encode the current Control Message into a channel buffer
- * @throws IOException
+ * @throws IOException if writing to the buffer fails
*/
- public ChannelBuffer buffer() throws IOException {
- ChannelBufferOutputStream bout = new ChannelBufferOutputStream(ChannelBuffers.directBuffer(encodeLength()));
+ public ByteBuf buffer() throws IOException {
+ ByteBufOutputStream bout = new ByteBufOutputStream(ByteBufAllocator.DEFAULT.ioBuffer(encodeLength()));
write(bout);
bout.close();
return bout.buffer();
}
+ void write(ByteBufOutputStream bout) throws IOException {
+ bout.writeShort(code);
+ }
+
public static ControlMessage read(byte[] serial) {
- ChannelBuffer cm_buffer = ChannelBuffers.copiedBuffer(serial);
- return mkMessage(cm_buffer.getShort(0));
+ ByteBuf cm_buffer = ByteBufAllocator.DEFAULT.buffer(serial.length);
+ try {
+ cm_buffer.writeBytes(serial);
+ short messageCode = cm_buffer.getShort(0);
+ return mkMessage(messageCode);
+ } finally {
+ if (cm_buffer != null) {
+ cm_buffer.release();
+ }
+ }
}
-
- public void write(ChannelBufferOutputStream bout) throws IOException {
- bout.writeShort(code);
- }
}
diff --git a/storm-core/src/jvm/org/apache/storm/messaging/netty/INettySerializable.java b/storm-core/src/jvm/org/apache/storm/messaging/netty/INettySerializable.java
index 0a0236facad..c891779b932 100644
--- a/storm-core/src/jvm/org/apache/storm/messaging/netty/INettySerializable.java
+++ b/storm-core/src/jvm/org/apache/storm/messaging/netty/INettySerializable.java
@@ -17,10 +17,11 @@
*/
package org.apache.storm.messaging.netty;
+import io.netty.buffer.ByteBuf;
+
import java.io.IOException;
-import org.jboss.netty.buffer.ChannelBuffer;
public interface INettySerializable {
- ChannelBuffer buffer() throws IOException;
+ ByteBuf buffer() throws IOException;
int encodeLength();
}
diff --git a/storm-core/src/jvm/org/apache/storm/messaging/netty/ISaslClient.java b/storm-core/src/jvm/org/apache/storm/messaging/netty/ISaslClient.java
index 681c1990324..263ccbfeb7b 100644
--- a/storm-core/src/jvm/org/apache/storm/messaging/netty/ISaslClient.java
+++ b/storm-core/src/jvm/org/apache/storm/messaging/netty/ISaslClient.java
@@ -17,12 +17,9 @@
*/
package org.apache.storm.messaging.netty;
-import org.jboss.netty.channel.Channel;
-import org.apache.storm.Config;
-
public interface ISaslClient {
- void channelConnected(Channel channel);
+
void channelReady();
String name();
String secretKey();
-}
+}
\ No newline at end of file
diff --git a/storm-core/src/jvm/org/apache/storm/messaging/netty/ISaslServer.java b/storm-core/src/jvm/org/apache/storm/messaging/netty/ISaslServer.java
index 997dbebaac7..b72b393574c 100644
--- a/storm-core/src/jvm/org/apache/storm/messaging/netty/ISaslServer.java
+++ b/storm-core/src/jvm/org/apache/storm/messaging/netty/ISaslServer.java
@@ -17,7 +17,7 @@
*/
package org.apache.storm.messaging.netty;
-import org.jboss.netty.channel.Channel;
+import io.netty.channel.Channel;
public interface ISaslServer extends IServer {
String name();
diff --git a/storm-core/src/jvm/org/apache/storm/messaging/netty/IServer.java b/storm-core/src/jvm/org/apache/storm/messaging/netty/IServer.java
index b04d7154ec9..6a184d30be7 100644
--- a/storm-core/src/jvm/org/apache/storm/messaging/netty/IServer.java
+++ b/storm-core/src/jvm/org/apache/storm/messaging/netty/IServer.java
@@ -17,7 +17,7 @@
*/
package org.apache.storm.messaging.netty;
-import org.jboss.netty.channel.Channel;
+import io.netty.channel.Channel;
public interface IServer {
void channelConnected(Channel c);
diff --git a/storm-core/src/jvm/org/apache/storm/messaging/netty/KerberosSaslClientHandler.java b/storm-core/src/jvm/org/apache/storm/messaging/netty/KerberosSaslClientHandler.java
index 9f4632978df..9119185a48b 100644
--- a/storm-core/src/jvm/org/apache/storm/messaging/netty/KerberosSaslClientHandler.java
+++ b/storm-core/src/jvm/org/apache/storm/messaging/netty/KerberosSaslClientHandler.java
@@ -17,18 +17,16 @@
*/
package org.apache.storm.messaging.netty;
-import java.io.IOException;
-import java.util.Map;
-import org.jboss.netty.channel.Channel;
-import org.jboss.netty.channel.ChannelHandlerContext;
-import org.jboss.netty.channel.ChannelStateEvent;
-import org.jboss.netty.channel.Channels;
-import org.jboss.netty.channel.MessageEvent;
-import org.jboss.netty.channel.SimpleChannelUpstreamHandler;
+import io.netty.channel.Channel;
+import io.netty.channel.ChannelHandlerContext;
+import io.netty.channel.ChannelInboundHandlerAdapter;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
-public class KerberosSaslClientHandler extends SimpleChannelUpstreamHandler {
+import java.io.IOException;
+import java.util.Map;
+
+public class KerberosSaslClientHandler extends ChannelInboundHandlerAdapter {
private static final Logger LOG = LoggerFactory
.getLogger(KerberosSaslClientHandler.class);
@@ -46,56 +44,48 @@ public KerberosSaslClientHandler(ISaslClient client, Map storm_conf, String jaas
}
@Override
- public void channelConnected(ChannelHandlerContext ctx,
- ChannelStateEvent event) {
+ public void channelActive(ChannelHandlerContext ctx) throws Exception {
// register the newly established channel
- Channel channel = ctx.getChannel();
- client.channelConnected(channel);
+ Channel channel = ctx.channel();
+ client.channelReady();
LOG.info("Connection established from {} to {}",
- channel.getLocalAddress(), channel.getRemoteAddress());
+ channel.localAddress(), channel.remoteAddress());
try {
- KerberosSaslNettyClient saslNettyClient = KerberosSaslNettyClientState.getKerberosSaslNettyClient
- .get(channel);
+ KerberosSaslNettyClient saslNettyClient = channel.attr(KerberosSaslNettyClientState.KERBEROS_SASL_NETTY_CLIENT).get();
if (saslNettyClient == null) {
LOG.debug("Creating saslNettyClient now for channel: {}",
channel);
saslNettyClient = new KerberosSaslNettyClient(storm_conf, jaas_section);
- KerberosSaslNettyClientState.getKerberosSaslNettyClient.set(channel,
- saslNettyClient);
+ channel.attr(KerberosSaslNettyClientState.KERBEROS_SASL_NETTY_CLIENT).set(saslNettyClient);
}
LOG.debug("Going to initiate Kerberos negotiations.");
byte[] initialChallenge = saslNettyClient.saslResponse(new SaslMessageToken(new byte[0]));
LOG.debug("Sending initial challenge: {}", initialChallenge);
- channel.write(new SaslMessageToken(initialChallenge));
+ channel.writeAndFlush(new SaslMessageToken(initialChallenge));
} catch (Exception e) {
LOG.error("Failed to authenticate with server due to error: ",
e);
}
- return;
-
}
@Override
- public void messageReceived(ChannelHandlerContext ctx, MessageEvent event)
- throws Exception {
- LOG.debug("send/recv time (ms): {}",
- (System.currentTimeMillis() - start_time));
+ public void channelRead(ChannelHandlerContext ctx, Object rawMsg) throws Exception {
+ LOG.debug("send/recv time (ms): {}", (System.currentTimeMillis() - start_time));
- Channel channel = ctx.getChannel();
+ Channel channel = ctx.channel();
// Generate SASL response to server using Channel-local SASL client.
- KerberosSaslNettyClient saslNettyClient = KerberosSaslNettyClientState.getKerberosSaslNettyClient
- .get(channel);
+ KerberosSaslNettyClient saslNettyClient = channel.attr(KerberosSaslNettyClientState.KERBEROS_SASL_NETTY_CLIENT).get();
if (saslNettyClient == null) {
throw new Exception("saslNettyClient was unexpectedly null for channel:" + channel);
}
// examine the response message from server
- if (event.getMessage() instanceof ControlMessage) {
- ControlMessage msg = (ControlMessage) event.getMessage();
+ if (rawMsg instanceof ControlMessage) {
+ ControlMessage msg = (ControlMessage) rawMsg;
if (msg == ControlMessage.SASL_COMPLETE_REQUEST) {
LOG.debug("Server has sent us the SaslComplete message. Allowing normal work to proceed.");
@@ -104,21 +94,19 @@ public void messageReceived(ChannelHandlerContext ctx, MessageEvent event)
LOG.error(message);
throw new Exception(message);
}
- ctx.getPipeline().remove(this);
+ ctx.pipeline().remove(this);
this.client.channelReady();
- // We call fireMessageReceived since the client is allowed to
+ // We call fireChannelRead since the client is allowed to
// perform this request. The client's request will now proceed
// to the next pipeline component namely StormClientHandler.
- Channels.fireMessageReceived(ctx, msg);
+ ctx.fireChannelRead(msg);
} else {
LOG.warn("Unexpected control message: {}", msg);
}
- return;
}
- else if (event.getMessage() instanceof SaslMessageToken) {
- SaslMessageToken saslTokenMessage = (SaslMessageToken) event
- .getMessage();
+ else if (rawMsg instanceof SaslMessageToken) {
+ SaslMessageToken saslTokenMessage = (SaslMessageToken) rawMsg;
LOG.debug("Responding to server's token of length: {}",
saslTokenMessage.getSaslToken().length);
@@ -144,9 +132,9 @@ else if (event.getMessage() instanceof SaslMessageToken) {
// Construct a message containing the SASL response and send it to the
// server.
SaslMessageToken saslResponse = new SaslMessageToken(responseToServer);
- channel.write(saslResponse);
+ channel.writeAndFlush(saslResponse);
} else {
- LOG.error("Unexpected message from server: {}", event.getMessage());
+ LOG.error("Unexpected message from server: {}", rawMsg);
}
}
}
diff --git a/storm-core/src/jvm/org/apache/storm/messaging/netty/KerberosSaslNettyClient.java b/storm-core/src/jvm/org/apache/storm/messaging/netty/KerberosSaslNettyClient.java
index d53bb7c1c45..fb68feb8dd8 100644
--- a/storm-core/src/jvm/org/apache/storm/messaging/netty/KerberosSaslNettyClient.java
+++ b/storm-core/src/jvm/org/apache/storm/messaging/netty/KerberosSaslNettyClient.java
@@ -19,12 +19,9 @@
import org.apache.storm.Config;
import org.apache.storm.security.auth.AuthUtils;
-import java.io.IOException;
-import java.security.Principal;
-import java.security.PrivilegedActionException;
-import java.security.PrivilegedExceptionAction;
-import java.util.Map;
-import java.util.TreeMap;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
import javax.security.auth.Subject;
import javax.security.auth.callback.Callback;
import javax.security.auth.callback.CallbackHandler;
@@ -35,9 +32,12 @@
import javax.security.sasl.Sasl;
import javax.security.sasl.SaslClient;
import javax.security.sasl.SaslException;
-import org.apache.zookeeper.server.auth.KerberosName;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
+import java.io.IOException;
+import java.security.Principal;
+import java.security.PrivilegedActionException;
+import java.security.PrivilegedExceptionAction;
+import java.util.Map;
+import java.util.TreeMap;
/**
* Implements SASL logic for storm worker client processes.
diff --git a/storm-core/src/jvm/org/apache/storm/messaging/netty/KerberosSaslNettyClientState.java b/storm-core/src/jvm/org/apache/storm/messaging/netty/KerberosSaslNettyClientState.java
index dc76b0d9424..26ad4e6125a 100644
--- a/storm-core/src/jvm/org/apache/storm/messaging/netty/KerberosSaslNettyClientState.java
+++ b/storm-core/src/jvm/org/apache/storm/messaging/netty/KerberosSaslNettyClientState.java
@@ -17,15 +17,9 @@
*/
package org.apache.storm.messaging.netty;
-import org.jboss.netty.channel.Channel;
-import org.jboss.netty.channel.ChannelLocal;
+import io.netty.util.AttributeKey;
final class KerberosSaslNettyClientState {
- public static final ChannelLocal getKerberosSaslNettyClient = new ChannelLocal() {
- protected KerberosSaslNettyClient initialValue(Channel channel) {
- return null;
- }
- };
-
+ public static final AttributeKey KERBEROS_SASL_NETTY_CLIENT = AttributeKey.valueOf("kerberos.sasl.netty.client");
}
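ChannelLocal is gone in Netty 4; per-channel state now lives in channel attributes keyed by an AttributeKey, and the SASL handlers below use a get-or-create pattern against such a key. A minimal sketch of that pattern; the key name and method are illustrative, and the real handler additionally logs and wraps creation errors.

import io.netty.channel.Channel;
import io.netty.util.AttributeKey;
import java.util.Map;

final class ChannelAttributeSketch {
    // Illustrative key; the production code defines its own in KerberosSaslNettyClientState.
    static final AttributeKey<KerberosSaslNettyClient> KEY =
            AttributeKey.valueOf("example.kerberos.sasl.client");

    // Get-or-create the per-channel SASL client, mirroring what
    // KerberosSaslClientHandler.channelActive does with its attribute key.
    @SuppressWarnings("rawtypes")
    static KerberosSaslNettyClient getOrCreate(Channel channel, Map stormConf, String jaasSection) throws Exception {
        KerberosSaslNettyClient saslClient = channel.attr(KEY).get();
        if (saslClient == null) {
            saslClient = new KerberosSaslNettyClient(stormConf, jaasSection);
            channel.attr(KEY).set(saslClient);
        }
        return saslClient;
    }
}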
diff --git a/storm-core/src/jvm/org/apache/storm/messaging/netty/KerberosSaslNettyServer.java b/storm-core/src/jvm/org/apache/storm/messaging/netty/KerberosSaslNettyServer.java
index 72486ef597e..dcc90df79e1 100644
--- a/storm-core/src/jvm/org/apache/storm/messaging/netty/KerberosSaslNettyServer.java
+++ b/storm-core/src/jvm/org/apache/storm/messaging/netty/KerberosSaslNettyServer.java
@@ -19,14 +19,10 @@
import org.apache.storm.security.auth.AuthUtils;
import org.apache.storm.security.auth.KerberosPrincipalToLocal;
-import java.io.IOException;
-import java.security.Principal;
-import java.security.PrivilegedActionException;
-import java.security.PrivilegedExceptionAction;
-import java.util.List;
-import java.util.Map;
-import java.util.Set;
-import java.util.TreeMap;
+import org.apache.zookeeper.server.auth.KerberosName;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
import javax.security.auth.Subject;
import javax.security.auth.callback.Callback;
import javax.security.auth.callback.CallbackHandler;
@@ -39,9 +35,13 @@
import javax.security.sasl.Sasl;
import javax.security.sasl.SaslException;
import javax.security.sasl.SaslServer;
-import org.apache.zookeeper.server.auth.KerberosName;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
+import java.io.IOException;
+import java.security.Principal;
+import java.security.PrivilegedActionException;
+import java.security.PrivilegedExceptionAction;
+import java.util.List;
+import java.util.Map;
+import java.util.TreeMap;
class KerberosSaslNettyServer {
diff --git a/storm-core/src/jvm/org/apache/storm/messaging/netty/KerberosSaslNettyServerState.java b/storm-core/src/jvm/org/apache/storm/messaging/netty/KerberosSaslNettyServerState.java
index 2ee2bf4204a..ec1cd8ccc68 100644
--- a/storm-core/src/jvm/org/apache/storm/messaging/netty/KerberosSaslNettyServerState.java
+++ b/storm-core/src/jvm/org/apache/storm/messaging/netty/KerberosSaslNettyServerState.java
@@ -17,14 +17,9 @@
*/
package org.apache.storm.messaging.netty;
-import org.jboss.netty.channel.Channel;
-import org.jboss.netty.channel.ChannelLocal;
+import io.netty.util.AttributeKey;
final class KerberosSaslNettyServerState {
- public static final ChannelLocal getKerberosSaslNettyServer = new ChannelLocal() {
- protected KerberosSaslNettyServer initialValue(Channel channel) {
- return null;
- }
- };
+ public static final AttributeKey KERBEROS_SASL_NETTY_SERVER = AttributeKey.valueOf("kerberos.sasl.netty.server");
}
diff --git a/storm-core/src/jvm/org/apache/storm/messaging/netty/KerberosSaslServerHandler.java b/storm-core/src/jvm/org/apache/storm/messaging/netty/KerberosSaslServerHandler.java
index 14ac1724f96..5fc388d7fab 100644
--- a/storm-core/src/jvm/org/apache/storm/messaging/netty/KerberosSaslServerHandler.java
+++ b/storm-core/src/jvm/org/apache/storm/messaging/netty/KerberosSaslServerHandler.java
@@ -17,19 +17,17 @@
*/
package org.apache.storm.messaging.netty;
+import io.netty.channel.Channel;
+import io.netty.channel.ChannelHandlerContext;
+import io.netty.channel.ChannelInboundHandlerAdapter;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
import java.io.IOException;
import java.util.List;
import java.util.Map;
-import org.jboss.netty.channel.Channel;
-import org.jboss.netty.channel.ChannelHandlerContext;
-import org.jboss.netty.channel.Channels;
-import org.jboss.netty.channel.ExceptionEvent;
-import org.jboss.netty.channel.MessageEvent;
-import org.jboss.netty.channel.SimpleChannelUpstreamHandler;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-public class KerberosSaslServerHandler extends SimpleChannelUpstreamHandler {
+public class KerberosSaslServerHandler extends ChannelInboundHandlerAdapter {
ISaslServer server;
/** Used for client or server's token to send or receive from each other. */
@@ -48,14 +46,12 @@ public KerberosSaslServerHandler(ISaslServer server, Map storm_conf, String jaas
}
@Override
- public void messageReceived(ChannelHandlerContext ctx, MessageEvent e)
- throws Exception {
- Object msg = e.getMessage();
+ public void channelRead(ChannelHandlerContext ctx, Object msg) throws Exception {
if (msg == null) {
return;
}
- Channel channel = ctx.getChannel();
+ Channel channel = ctx.channel();
if (msg instanceof SaslMessageToken) {
@@ -65,22 +61,20 @@ public void messageReceived(ChannelHandlerContext ctx, MessageEvent e)
try {
LOG.debug("Got SaslMessageToken!");
- KerberosSaslNettyServer saslNettyServer = KerberosSaslNettyServerState.getKerberosSaslNettyServer
- .get(channel);
+ KerberosSaslNettyServer saslNettyServer = channel.attr(KerberosSaslNettyServerState.KERBEROS_SASL_NETTY_SERVER).get();
if (saslNettyServer == null) {
LOG.debug("No saslNettyServer for {} yet; creating now, with topology token: ", channel);
try {
saslNettyServer = new KerberosSaslNettyServer(storm_conf, jaas_section, authorizedUsers);
- KerberosSaslNettyServerState.getKerberosSaslNettyServer.set(channel,
- saslNettyServer);
+ channel.attr(KerberosSaslNettyServerState.KERBEROS_SASL_NETTY_SERVER).set(saslNettyServer);
} catch (RuntimeException ioe) {
LOG.error("Error occurred while creating saslNettyServer on server {} for client {}",
- channel.getLocalAddress(), channel.getRemoteAddress());
+ channel.localAddress(), channel.remoteAddress());
throw ioe;
}
} else {
LOG.debug("Found existing saslNettyServer on server: {} for client {}",
- channel.getLocalAddress(), channel.getRemoteAddress());
+ channel.localAddress(), channel.remoteAddress());
}
byte[] responseBytes = saslNettyServer.response(((SaslMessageToken) msg)
@@ -89,10 +83,10 @@ public void messageReceived(ChannelHandlerContext ctx, MessageEvent e)
SaslMessageToken saslTokenMessageRequest = new SaslMessageToken(responseBytes);
if(saslTokenMessageRequest.getSaslToken() == null) {
- channel.write(ControlMessage.SASL_COMPLETE_REQUEST);
+ channel.writeAndFlush(ControlMessage.SASL_COMPLETE_REQUEST);
} else {
// Send response to client.
- channel.write(saslTokenMessageRequest);
+ channel.writeAndFlush(saslTokenMessageRequest);
}
if (saslNettyServer.isComplete()) {
@@ -100,12 +94,11 @@ public void messageReceived(ChannelHandlerContext ctx, MessageEvent e)
// SASL-Complete message to the client.
LOG.info("SASL authentication is complete for client with username: {}",
saslNettyServer.getUserName());
- channel.write(ControlMessage.SASL_COMPLETE_REQUEST);
+ channel.writeAndFlush(ControlMessage.SASL_COMPLETE_REQUEST);
LOG.debug("Removing SaslServerHandler from pipeline since SASL authentication is complete.");
- ctx.getPipeline().remove(this);
+ ctx.pipeline().remove(this);
server.authenticated(channel);
}
- return;
}
catch (Exception ex) {
LOG.error("Failed to handle SaslMessageToken: ", ex);
@@ -117,16 +110,15 @@ public void messageReceived(ChannelHandlerContext ctx, MessageEvent e)
// non-SASL requests will be denied by the Authorize channel handler
// (the next handler upstream in the server pipeline) if SASL
// authentication has not completed.
- LOG.warn("Sending upstream an unexpected non-SASL message : {}",
- msg);
- Channels.fireMessageReceived(ctx, msg);
+ LOG.warn("Sending upstream an unexpected non-SASL message : {}", msg);
+ ctx.fireChannelRead(msg);
}
}
@Override
- public void exceptionCaught(ChannelHandlerContext ctx, ExceptionEvent e) {
+ public void exceptionCaught(ChannelHandlerContext ctx, Throwable cause) throws Exception {
if(server != null) {
- server.closeChannel(e.getChannel());
+ server.closeChannel(ctx.channel());
}
}
diff --git a/storm-core/src/jvm/org/apache/storm/messaging/netty/MessageBatch.java b/storm-core/src/jvm/org/apache/storm/messaging/netty/MessageBatch.java
index 1c5dd9d2e64..521e7d2d5d2 100644
--- a/storm-core/src/jvm/org/apache/storm/messaging/netty/MessageBatch.java
+++ b/storm-core/src/jvm/org/apache/storm/messaging/netty/MessageBatch.java
@@ -17,10 +17,10 @@
*/
package org.apache.storm.messaging.netty;
+import io.netty.buffer.ByteBuf;
+import io.netty.buffer.ByteBufAllocator;
+import io.netty.buffer.ByteBufOutputStream;
import org.apache.storm.messaging.TaskMessage;
-import org.jboss.netty.buffer.ChannelBuffer;
-import org.jboss.netty.buffer.ChannelBufferOutputStream;
-import org.jboss.netty.buffer.ChannelBuffers;
import java.util.ArrayList;
@@ -77,8 +77,8 @@ int size() {
/**
* create a buffer containing the encoding of this batch
*/
- ChannelBuffer buffer() throws Exception {
- ChannelBufferOutputStream bout = new ChannelBufferOutputStream(ChannelBuffers.directBuffer(encoded_length));
+ ByteBuf buffer() throws Exception {
+ ByteBufOutputStream bout = new ByteBufOutputStream(ByteBufAllocator.DEFAULT.ioBuffer(encoded_length));
for (TaskMessage msg : msgs) {
writeTaskMessage(bout, msg);
@@ -98,9 +98,9 @@ ChannelBuffer buffer() throws Exception {
* Each TaskMessage is encoded as:
* task ... short(2)
* len ... int(4)
- * payload ... byte[] *
+ * payload ... byte[]
*/
- private void writeTaskMessage(ChannelBufferOutputStream bout, TaskMessage message) throws Exception {
+ private void writeTaskMessage(ByteBufOutputStream bout, TaskMessage message) throws Exception {
int payload_len = 0;
if (message.message() != null)
payload_len = message.message().length;
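For reference, a minimal sketch of the per-message encoding documented above (task as a 2-byte short, length as a 4-byte int, then the raw payload). The class and method names are illustrative, and the real writeTaskMessage additionally validates the task id before casting it to a short.

import io.netty.buffer.ByteBufOutputStream;
import java.io.IOException;
import org.apache.storm.messaging.TaskMessage;

final class TaskMessageEncodingSketch {
    // Wire format per message: task ... short(2), len ... int(4), payload ... byte[]
    static void write(ByteBufOutputStream bout, TaskMessage message) throws IOException {
        byte[] payload = message.message();
        int payloadLen = payload == null ? 0 : payload.length;
        bout.writeShort((short) message.task());
        bout.writeInt(payloadLen);
        if (payloadLen > 0) {
            bout.write(payload);
        }
    }
}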
diff --git a/storm-core/src/jvm/org/apache/storm/messaging/netty/MessageDecoder.java b/storm-core/src/jvm/org/apache/storm/messaging/netty/MessageDecoder.java
index 9030424fe6a..34acdfa6122 100644
--- a/storm-core/src/jvm/org/apache/storm/messaging/netty/MessageDecoder.java
+++ b/storm-core/src/jvm/org/apache/storm/messaging/netty/MessageDecoder.java
@@ -17,16 +17,29 @@
*/
package org.apache.storm.messaging.netty;
+import io.netty.buffer.ByteBuf;
+import io.netty.channel.ChannelHandlerContext;
+import io.netty.handler.codec.ByteToMessageDecoder;
+import org.apache.storm.Config;
+import org.apache.storm.messaging.TaskMessage;
+import org.apache.storm.utils.Utils;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
import java.util.ArrayList;
import java.util.List;
+import java.util.Map;
-import org.apache.storm.messaging.TaskMessage;
-import org.jboss.netty.buffer.ChannelBuffer;
-import org.jboss.netty.channel.Channel;
-import org.jboss.netty.channel.ChannelHandlerContext;
-import org.jboss.netty.handler.codec.frame.FrameDecoder;
+public class MessageDecoder extends ByteToMessageDecoder {
+
+ private static final Logger LOG = LoggerFactory.getLogger(MessageDecoder.class);
+
+ private final int maxBatchSize;
+
+ public MessageDecoder(Map storm_conf) {
+ maxBatchSize = Utils.getInt(storm_conf.get(Config.STORM_NETTY_MESSAGE_DECODE_BATCH_SIZE), -1);
+ }
-public class MessageDecoder extends FrameDecoder {
/*
* Each ControlMessage is encoded as:
* code (<0) ... short(2)
@@ -35,16 +48,16 @@ public class MessageDecoder extends FrameDecoder {
* len ... int(4)
* payload ... byte[] *
*/
- protected Object decode(ChannelHandlerContext ctx, Channel channel, ChannelBuffer buf) throws Exception {
- // Make sure that we have received at least a short
+ @Override
+ protected void decode(ChannelHandlerContext ctx, ByteBuf buf, List