diff --git a/external/kafka-0-10/src/main/scala/org/apache/spark/streaming/kafka010/KafkaUtils.scala b/external/kafka-0-10/src/main/scala/org/apache/spark/streaming/kafka010/KafkaUtils.scala
index e6bdef04512d..64b6ef6c53b6 100644
--- a/external/kafka-0-10/src/main/scala/org/apache/spark/streaming/kafka010/KafkaUtils.scala
+++ b/external/kafka-0-10/src/main/scala/org/apache/spark/streaming/kafka010/KafkaUtils.scala
@@ -23,7 +23,6 @@ import org.apache.kafka.clients.consumer._
 import org.apache.kafka.common.TopicPartition
 
 import org.apache.spark.SparkContext
-import org.apache.spark.annotation.Experimental
 import org.apache.spark.api.java.{ JavaRDD, JavaSparkContext }
 import org.apache.spark.internal.Logging
 import org.apache.spark.rdd.RDD
@@ -32,13 +31,10 @@ import org.apache.spark.streaming.api.java.{ JavaInputDStream, JavaStreamingCont
 import org.apache.spark.streaming.dstream._
 
 /**
- * :: Experimental ::
  * object for constructing Kafka streams and RDDs
  */
-@Experimental
 object KafkaUtils extends Logging {
   /**
-   * :: Experimental ::
    * Scala constructor for a batch-oriented interface for consuming from Kafka.
    * Starting and ending offsets are specified in advance,
    * so that you can control exactly-once semantics.
@@ -52,7 +48,6 @@ object KafkaUtils extends Logging {
    * @tparam K type of Kafka message key
    * @tparam V type of Kafka message value
    */
-  @Experimental
   def createRDD[K, V](
       sc: SparkContext,
       kafkaParams: ju.Map[String, Object],
@@ -75,7 +70,6 @@ object KafkaUtils extends Logging {
   }
 
   /**
-   * :: Experimental ::
    * Java constructor for a batch-oriented interface for consuming from Kafka.
    * Starting and ending offsets are specified in advance,
    * so that you can control exactly-once semantics.
@@ -89,7 +83,6 @@ object KafkaUtils extends Logging {
    * @tparam K type of Kafka message key
    * @tparam V type of Kafka message value
    */
-  @Experimental
   def createRDD[K, V](
       jsc: JavaSparkContext,
       kafkaParams: ju.Map[String, Object],
@@ -101,7 +94,6 @@ object KafkaUtils extends Logging {
   }
 
   /**
-   * :: Experimental ::
    * Scala constructor for a DStream where
    * each given Kafka topic/partition corresponds to an RDD partition.
    * The spark configuration spark.streaming.kafka.maxRatePerPartition gives the maximum number
@@ -114,7 +106,6 @@ object KafkaUtils extends Logging {
    * @tparam K type of Kafka message key
    * @tparam V type of Kafka message value
    */
-  @Experimental
   def createDirectStream[K, V](
       ssc: StreamingContext,
       locationStrategy: LocationStrategy,
@@ -125,7 +116,6 @@ object KafkaUtils extends Logging {
   }
 
   /**
-   * :: Experimental ::
    * Scala constructor for a DStream where
    * each given Kafka topic/partition corresponds to an RDD partition.
    * @param locationStrategy In most cases, pass in [[LocationStrategies.PreferConsistent]],
@@ -137,7 +127,6 @@ object KafkaUtils extends Logging {
    * @tparam K type of Kafka message key
    * @tparam V type of Kafka message value
    */
-  @Experimental
   def createDirectStream[K, V](
       ssc: StreamingContext,
       locationStrategy: LocationStrategy,
@@ -148,7 +137,6 @@ object KafkaUtils extends Logging {
   }
 
   /**
-   * :: Experimental ::
    * Java constructor for a DStream where
    * each given Kafka topic/partition corresponds to an RDD partition.
    * @param locationStrategy In most cases, pass in [[LocationStrategies.PreferConsistent]],
@@ -158,7 +146,6 @@ object KafkaUtils extends Logging {
    * @tparam K type of Kafka message key
    * @tparam V type of Kafka message value
    */
-  @Experimental
   def createDirectStream[K, V](
       jssc: JavaStreamingContext,
       locationStrategy: LocationStrategy,
@@ -170,7 +157,6 @@ object KafkaUtils extends Logging {
   }
 
   /**
-   * :: Experimental ::
    * Java constructor for a DStream where
    * each given Kafka topic/partition corresponds to an RDD partition.
    * @param locationStrategy In most cases, pass in [[LocationStrategies.PreferConsistent]],
@@ -182,7 +168,6 @@ object KafkaUtils extends Logging {
    * @tparam K type of Kafka message key
    * @tparam V type of Kafka message value
    */
-  @Experimental
   def createDirectStream[K, V](
       jssc: JavaStreamingContext,
       locationStrategy: LocationStrategy,
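With the `@Experimental` markers gone, `createDirectStream` is the stable entry point for the direct streaming path. Below is a minimal usage sketch of the Scala constructor touched in the hunks above; the broker address, group id, and topic name (`localhost:9092`, `example-group`, `example-topic`) are placeholders, and the app skeleton is illustrative, not part of this change.

```scala
import org.apache.kafka.common.serialization.StringDeserializer
import org.apache.spark.SparkConf
import org.apache.spark.streaming.{Seconds, StreamingContext}
import org.apache.spark.streaming.kafka010.KafkaUtils
import org.apache.spark.streaming.kafka010.LocationStrategies.PreferConsistent
import org.apache.spark.streaming.kafka010.ConsumerStrategies.Subscribe

object DirectStreamExample {
  def main(args: Array[String]): Unit = {
    val conf = new SparkConf().setAppName("KafkaDirectStream").setMaster("local[2]")
    val ssc = new StreamingContext(conf, Seconds(5))

    // Minimal consumer configuration; broker and group id are placeholders.
    val kafkaParams = Map[String, Object](
      "bootstrap.servers" -> "localhost:9092",
      "key.deserializer" -> classOf[StringDeserializer],
      "value.deserializer" -> classOf[StringDeserializer],
      "group.id" -> "example-group",
      "auto.offset.reset" -> "latest",
      "enable.auto.commit" -> (false: java.lang.Boolean)
    )

    // PreferConsistent, as the scaladoc recommends for most cases:
    // spread partitions evenly across available executors.
    val stream = KafkaUtils.createDirectStream[String, String](
      ssc,
      PreferConsistent,
      Subscribe[String, String](Seq("example-topic"), kafkaParams)
    )

    // Each RDD partition corresponds to one Kafka topic/partition.
    stream.map(record => (record.key, record.value)).print()

    ssc.start()
    ssc.awaitTermination()
  }
}
```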
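The batch-oriented `createRDD` path works the same way but takes explicit offset ranges, which is what gives the exactly-once control the scaladoc mentions. Another hedged sketch with the same placeholder broker and topic names; note that, per the signature in the diff, the Scala `createRDD` takes a `java.util.Map` for `kafkaParams`, unlike the DStream constructors.

```scala
import java.{util => ju}

import org.apache.kafka.common.serialization.StringDeserializer
import org.apache.spark.{SparkConf, SparkContext}
import org.apache.spark.streaming.kafka010.{KafkaUtils, OffsetRange}
import org.apache.spark.streaming.kafka010.LocationStrategies.PreferConsistent

object BatchReadExample {
  def main(args: Array[String]): Unit = {
    val sc = new SparkContext(new SparkConf().setAppName("KafkaBatchRead").setMaster("local[2]"))

    // createRDD expects a java.util.Map, matching the ju.Map parameter above.
    val kafkaParams = new ju.HashMap[String, Object]()
    kafkaParams.put("bootstrap.servers", "localhost:9092")
    kafkaParams.put("key.deserializer", classOf[StringDeserializer].getName)
    kafkaParams.put("value.deserializer", classOf[StringDeserializer].getName)
    kafkaParams.put("group.id", "example-group")

    // Offsets fixed in advance: read exactly [0, 100) of partition 0,
    // which is what makes exactly-once semantics possible here.
    val offsetRanges = Array(OffsetRange("example-topic", 0, 0L, 100L))

    val rdd = KafkaUtils.createRDD[String, String](sc, kafkaParams, offsetRanges, PreferConsistent)
    rdd.map(record => (record.key, record.value)).take(10).foreach(println)

    sc.stop()
  }
}
```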