com.example.protos.ExampleEvent.
- * The jar with these classes needs to be shaded as described above.
- * @since 3.4.0
- */
- @Experimental
- def from_protobuf(data: Column, messageClassName: String): Column = {
- Column.fn(
- "from_protobuf",
- data,
- lit(messageClassName)
- )
- }
-
- /**
- * Converts a binary column of Protobuf format into its corresponding catalyst value.
- * `messageClassName` points to Protobuf Java class. The jar containing Java class should be
- * shaded. Specifically, `com.google.protobuf.*` should be shaded to
- * `org.sparkproject.spark_protobuf.protobuf.*`.
- * https://github.com/rangadi/shaded-protobuf-classes is useful to create shaded jar from
- * Protobuf files.
- *
- * @param data
- * the binary column.
- * @param messageClassName
- * The full name for Protobuf Java class. E.g. com.example.protos.ExampleEvent.
- * The jar with these classes needs to be shaded as described above.
- * @param options
- * options to control how the Protobuf record is parsed.
- * @since 3.4.0
- */
- @Experimental
- def from_protobuf(
- data: Column,
- messageClassName: String,
- options: java.util.Map[String, String]): Column = {
- Column.fnWithOptions(
- "from_protobuf",
- options.asScala.iterator,
- data,
- lit(messageClassName)
- )
- }
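For reference, a minimal usage sketch of the class-name variant removed above (not part of the patch; the column name "payload" and com.example.protos.ExampleEvent are placeholders, and the jar providing the class must be shaded as the scaladoc describes):

```scala
import org.apache.spark.sql.DataFrame
import org.apache.spark.sql.functions.col
import org.apache.spark.sql.protobuf.functions.from_protobuf

def parseEvents(df: DataFrame): DataFrame = {
  // "payload" holds serialized ExampleEvent bytes; the jar containing
  // com.example.protos.ExampleEvent must relocate com.google.protobuf.*
  // to org.sparkproject.spark_protobuf.protobuf.*.
  df.select(from_protobuf(col("payload"), "com.example.protos.ExampleEvent").as("event"))
}
```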
-
- /**
- * Converts a column into binary of protobuf format. The Protobuf definition is provided
- * through Protobuf descriptor file.
- *
- * @param data
- * the data column.
- * @param messageName
- * the protobuf MessageName to look for in descriptor file.
- * @param descFilePath
- * The Protobuf descriptor file. This file is usually created using `protoc` with
- * `--descriptor_set_out` and `--include_imports` options.
- * @since 3.4.0
- */
- @Experimental
- def to_protobuf(data: Column, messageName: String, descFilePath: String): Column = {
- to_protobuf(data, messageName, descFilePath, Map.empty[String, String].asJava)
- }
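For comparison, a sketch of the descriptor-file flow (the paths and the message name "ExampleEvent" are illustrative; the descriptor set is assumed to have been produced with protoc as the scaladoc notes):

```scala
// Descriptor set produced beforehand with, e.g.:
//   protoc --descriptor_set_out=events.desc --include_imports event.proto
import org.apache.spark.sql.DataFrame
import org.apache.spark.sql.functions.struct
import org.apache.spark.sql.protobuf.functions.to_protobuf

def serializeEvents(df: DataFrame): DataFrame =
  df.select(
    to_protobuf(struct(df.col("id"), df.col("name")), "ExampleEvent", "/tmp/events.desc")
      .as("value"))
```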
-
- /**
- * Converts a column into binary of protobuf format. The Protobuf definition is provided
- * through Protobuf `FileDescriptorSet`.
- *
- * @param data
- * the binary column.
- * @param messageName
- * the protobuf MessageName to look for in the descriptor set.
- * @param binaryFileDescriptorSet
- * Serialized Protobuf descriptor (`FileDescriptorSet`). Typically contents of file created
- * using `protoc` with `--descriptor_set_out` and `--include_imports` options.
- *
- * @since 3.5.0
- */
- @Experimental
- def to_protobuf(data: Column, messageName: String, binaryFileDescriptorSet: Array[Byte])
- : Column = {
- Column.fn(
- "to_protobuf",
- data,
- lit(messageName),
- lit(binaryFileDescriptorSet)
- )
- }
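The byte-array overload above differs from the file-path variant only in how the descriptor reaches the function; a sketch under the same assumptions (placeholder path and message name):

```scala
import java.nio.file.{Files, Paths}
import org.apache.spark.sql.DataFrame
import org.apache.spark.sql.protobuf.functions.to_protobuf

def serializeWithBytes(df: DataFrame): DataFrame = {
  // Ship the serialized FileDescriptorSet with the job instead of relying
  // on a descriptor file path that must exist on every node.
  val descBytes: Array[Byte] = Files.readAllBytes(Paths.get("/tmp/events.desc"))
  df.select(to_protobuf(df.col("event"), "ExampleEvent", descBytes).as("value"))
}
```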
- /**
- * Converts a column into binary of protobuf format. The Protobuf definition is provided
- * through Protobuf descriptor file.
- *
- * @param data
- * the data column.
- * @param messageName
- * the protobuf MessageName to look for in descriptor file.
- * @param descFilePath
- * the protobuf descriptor file.
- * @param options
- * options to control how the data is converted to a Protobuf record.
- * @since 3.4.0
- */
- @Experimental
- def to_protobuf(
- data: Column,
- messageName: String,
- descFilePath: String,
- options: java.util.Map[String, String]): Column = {
- val fileContent = ProtobufUtils.readDescriptorFileContent(descFilePath)
- to_protobuf(data, messageName, fileContent, options)
- }
-
- /**
- * Converts a column into binary of protobuf format. The Protobuf definition is provided
- * through Protobuf `FileDescriptorSet`.
- *
- * @param data
- * the binary column.
- * @param messageName
- * the protobuf MessageName to look for in the descriptor set.
- * @param binaryFileDescriptorSet
- * Serialized Protobuf descriptor (`FileDescriptorSet`). Typically contents of file created
- * using `protoc` with `--descriptor_set_out` and `--include_imports` options.
- * @param options
- * options to control how the data is converted to a Protobuf record.
- * @since 3.5.0
- */
- @Experimental
- def to_protobuf(
- data: Column,
- messageName: String,
- binaryFileDescriptorSet: Array[Byte],
- options: java.util.Map[String, String]
- ): Column = {
- Column.fnWithOptions(
- "to_protobuf",
- options.asScala.iterator,
- data,
- lit(messageName),
- lit(binaryFileDescriptorSet)
- )
- }
-
- /**
- * Converts a column into binary of protobuf format.
- * `messageClassName` points to Protobuf Java class. The jar containing Java class should be
- * shaded. Specifically, `com.google.protobuf.*` should be shaded to
- * `org.sparkproject.spark_protobuf.protobuf.*`.
- * https://github.com/rangadi/shaded-protobuf-classes is useful to create shaded jar from
- * Protobuf files.
- *
- * @param data
- * the data column.
- * @param messageClassName
- * The full name for Protobuf Java class. E.g. com.example.protos.ExampleEvent.
- * The jar with these classes needs to be shaded as described above.
- * @since 3.4.0
- */
- @Experimental
- def to_protobuf(data: Column, messageClassName: String): Column = {
- Column.fn(
- "to_protobuf",
- data,
- lit(messageClassName)
- )
- }
-
- /**
- * Converts a column into binary of protobuf format.
- * `messageClassName` points to Protobuf Java class. The jar containing Java class should be
- * shaded. Specifically, `com.google.protobuf.*` should be shaded to
- * `org.sparkproject.spark_protobuf.protobuf.*`.
- * https://github.com/rangadi/shaded-protobuf-classes is useful to create shaded jar from
- * Protobuf files.
- *
- * @param data
- * the data column.
- * @param messageClassName
- * The full name for Protobuf Java class. E.g. com.example.protos.ExampleEvent.
- * The jar with these classes needs to be shaded as described above.
- * @param options
- * options to control how the data is converted to a Protobuf record.
- * @since 3.4.0
- */
- @Experimental
- def to_protobuf(data: Column, messageClassName: String, options: java.util.Map[String, String])
- : Column = {
- Column.fnWithOptions(
- "to_protobuf",
- options.asScala.iterator,
- data,
- lit(messageClassName)
- )
- }
-}
diff --git a/core/benchmarks/ZStandardBenchmark-jdk21-results.txt b/core/benchmarks/ZStandardBenchmark-jdk21-results.txt
index b3bffea826e5f..f6bd681451d5e 100644
--- a/core/benchmarks/ZStandardBenchmark-jdk21-results.txt
+++ b/core/benchmarks/ZStandardBenchmark-jdk21-results.txt
@@ -2,48 +2,48 @@
Benchmark ZStandardCompressionCodec
================================================================================================
-OpenJDK 64-Bit Server VM 21.0.4+7-LTS on Linux 6.5.0-1025-azure
+OpenJDK 64-Bit Server VM 21.0.4+7-LTS on Linux 6.8.0-1014-azure
AMD EPYC 7763 64-Core Processor
Benchmark ZStandardCompressionCodec: Best Time(ms) Avg Time(ms) Stdev(ms) Rate(M/s) Per Row(ns) Relative
--------------------------------------------------------------------------------------------------------------------------------------
-Compression 10000 times at level 1 without buffer pool 657 670 14 0.0 65699.2 1.0X
-Compression 10000 times at level 2 without buffer pool 697 697 1 0.0 69673.4 0.9X
-Compression 10000 times at level 3 without buffer pool 799 802 3 0.0 79855.2 0.8X
-Compression 10000 times at level 1 with buffer pool 593 595 1 0.0 59326.9 1.1X
-Compression 10000 times at level 2 with buffer pool 622 624 3 0.0 62194.1 1.1X
-Compression 10000 times at level 3 with buffer pool 732 733 1 0.0 73178.6 0.9X
+Compression 10000 times at level 1 without buffer pool 659 676 16 0.0 65860.7 1.0X
+Compression 10000 times at level 2 without buffer pool 721 723 2 0.0 72135.5 0.9X
+Compression 10000 times at level 3 without buffer pool 815 816 1 0.0 81500.6 0.8X
+Compression 10000 times at level 1 with buffer pool 608 609 0 0.0 60846.6 1.1X
+Compression 10000 times at level 2 with buffer pool 645 647 3 0.0 64476.3 1.0X
+Compression 10000 times at level 3 with buffer pool 746 746 1 0.0 74584.0 0.9X
-OpenJDK 64-Bit Server VM 21.0.4+7-LTS on Linux 6.5.0-1025-azure
+OpenJDK 64-Bit Server VM 21.0.4+7-LTS on Linux 6.8.0-1014-azure
AMD EPYC 7763 64-Core Processor
Benchmark ZStandardCompressionCodec: Best Time(ms) Avg Time(ms) Stdev(ms) Rate(M/s) Per Row(ns) Relative
------------------------------------------------------------------------------------------------------------------------------------------
-Decompression 10000 times from level 1 without buffer pool 813 820 11 0.0 81273.2 1.0X
-Decompression 10000 times from level 2 without buffer pool 810 813 3 0.0 80986.2 1.0X
-Decompression 10000 times from level 3 without buffer pool 812 813 2 0.0 81183.1 1.0X
-Decompression 10000 times from level 1 with buffer pool 746 747 2 0.0 74568.7 1.1X
-Decompression 10000 times from level 2 with buffer pool 744 746 2 0.0 74414.5 1.1X
-Decompression 10000 times from level 3 with buffer pool 745 746 1 0.0 74538.6 1.1X
+Decompression 10000 times from level 1 without buffer pool 828 829 1 0.0 82822.6 1.0X
+Decompression 10000 times from level 2 without buffer pool 829 829 1 0.0 82900.7 1.0X
+Decompression 10000 times from level 3 without buffer pool 828 833 8 0.0 82784.4 1.0X
+Decompression 10000 times from level 1 with buffer pool 758 760 2 0.0 75756.5 1.1X
+Decompression 10000 times from level 2 with buffer pool 758 758 1 0.0 75772.3 1.1X
+Decompression 10000 times from level 3 with buffer pool 759 759 0 0.0 75852.7 1.1X
-OpenJDK 64-Bit Server VM 21.0.4+7-LTS on Linux 6.5.0-1025-azure
+OpenJDK 64-Bit Server VM 21.0.4+7-LTS on Linux 6.8.0-1014-azure
AMD EPYC 7763 64-Core Processor
Parallel Compression at level 3: Best Time(ms) Avg Time(ms) Stdev(ms) Rate(M/s) Per Row(ns) Relative
------------------------------------------------------------------------------------------------------------------------
-Parallel Compression with 0 workers 48 49 1 0.0 374256.1 1.0X
-Parallel Compression with 1 workers 34 36 3 0.0 267557.3 1.4X
-Parallel Compression with 2 workers 34 38 2 0.0 263684.3 1.4X
-Parallel Compression with 4 workers 37 39 2 0.0 289956.1 1.3X
-Parallel Compression with 8 workers 39 41 1 0.0 306975.2 1.2X
-Parallel Compression with 16 workers 44 45 1 0.0 340992.0 1.1X
+Parallel Compression with 0 workers 58 59 1 0.0 452489.9 1.0X
+Parallel Compression with 1 workers 42 45 4 0.0 330066.0 1.4X
+Parallel Compression with 2 workers 40 42 1 0.0 312560.3 1.4X
+Parallel Compression with 4 workers 40 42 2 0.0 308802.7 1.5X
+Parallel Compression with 8 workers 41 45 3 0.0 321331.3 1.4X
+Parallel Compression with 16 workers 44 45 1 0.0 343311.5 1.3X
-OpenJDK 64-Bit Server VM 21.0.4+7-LTS on Linux 6.5.0-1025-azure
+OpenJDK 64-Bit Server VM 21.0.4+7-LTS on Linux 6.8.0-1014-azure
AMD EPYC 7763 64-Core Processor
Parallel Compression at level 9: Best Time(ms) Avg Time(ms) Stdev(ms) Rate(M/s) Per Row(ns) Relative
------------------------------------------------------------------------------------------------------------------------
-Parallel Compression with 0 workers 156 158 1 0.0 1220760.5 1.0X
-Parallel Compression with 1 workers 191 192 2 0.0 1495168.2 0.8X
-Parallel Compression with 2 workers 111 117 5 0.0 864459.9 1.4X
-Parallel Compression with 4 workers 106 109 2 0.0 831025.5 1.5X
-Parallel Compression with 8 workers 112 115 2 0.0 875732.7 1.4X
-Parallel Compression with 16 workers 110 114 2 0.0 858160.9 1.4X
+Parallel Compression with 0 workers 158 160 2 0.0 1234257.6 1.0X
+Parallel Compression with 1 workers 193 194 1 0.0 1507686.4 0.8X
+Parallel Compression with 2 workers 113 127 11 0.0 881068.0 1.4X
+Parallel Compression with 4 workers 109 111 2 0.0 849241.3 1.5X
+Parallel Compression with 8 workers 111 115 3 0.0 869455.2 1.4X
+Parallel Compression with 16 workers 113 116 2 0.0 881832.5 1.4X
diff --git a/core/benchmarks/ZStandardBenchmark-results.txt b/core/benchmarks/ZStandardBenchmark-results.txt
index b230f825fecac..136f0333590cc 100644
--- a/core/benchmarks/ZStandardBenchmark-results.txt
+++ b/core/benchmarks/ZStandardBenchmark-results.txt
@@ -2,48 +2,48 @@
Benchmark ZStandardCompressionCodec
================================================================================================
-OpenJDK 64-Bit Server VM 17.0.12+7-LTS on Linux 6.5.0-1025-azure
+OpenJDK 64-Bit Server VM 17.0.12+7-LTS on Linux 6.8.0-1014-azure
AMD EPYC 7763 64-Core Processor
Benchmark ZStandardCompressionCodec: Best Time(ms) Avg Time(ms) Stdev(ms) Rate(M/s) Per Row(ns) Relative
--------------------------------------------------------------------------------------------------------------------------------------
-Compression 10000 times at level 1 without buffer pool 638 638 0 0.0 63765.0 1.0X
-Compression 10000 times at level 2 without buffer pool 675 676 1 0.0 67529.4 0.9X
-Compression 10000 times at level 3 without buffer pool 775 783 11 0.0 77531.6 0.8X
-Compression 10000 times at level 1 with buffer pool 572 573 1 0.0 57223.2 1.1X
-Compression 10000 times at level 2 with buffer pool 603 605 1 0.0 60323.7 1.1X
-Compression 10000 times at level 3 with buffer pool 720 727 6 0.0 71980.9 0.9X
+Compression 10000 times at level 1 without buffer pool 257 259 2 0.0 25704.2 1.0X
+Compression 10000 times at level 2 without buffer pool 674 676 2 0.0 67396.3 0.4X
+Compression 10000 times at level 3 without buffer pool 775 787 11 0.0 77497.9 0.3X
+Compression 10000 times at level 1 with buffer pool 573 574 0 0.0 57347.3 0.4X
+Compression 10000 times at level 2 with buffer pool 602 603 2 0.0 60162.8 0.4X
+Compression 10000 times at level 3 with buffer pool 722 725 3 0.0 72247.3 0.4X
-OpenJDK 64-Bit Server VM 17.0.12+7-LTS on Linux 6.5.0-1025-azure
+OpenJDK 64-Bit Server VM 17.0.12+7-LTS on Linux 6.8.0-1014-azure
AMD EPYC 7763 64-Core Processor
Benchmark ZStandardCompressionCodec: Best Time(ms) Avg Time(ms) Stdev(ms) Rate(M/s) Per Row(ns) Relative
------------------------------------------------------------------------------------------------------------------------------------------
-Decompression 10000 times from level 1 without buffer pool 584 585 1 0.0 58381.0 1.0X
-Decompression 10000 times from level 2 without buffer pool 585 585 0 0.0 58465.9 1.0X
-Decompression 10000 times from level 3 without buffer pool 585 586 1 0.0 58499.5 1.0X
-Decompression 10000 times from level 1 with buffer pool 534 534 0 0.0 53375.7 1.1X
-Decompression 10000 times from level 2 with buffer pool 533 533 0 0.0 53312.3 1.1X
-Decompression 10000 times from level 3 with buffer pool 533 533 1 0.0 53255.1 1.1X
+Decompression 10000 times from level 1 without buffer pool 176 177 1 0.1 17641.2 1.0X
+Decompression 10000 times from level 2 without buffer pool 176 178 1 0.1 17628.9 1.0X
+Decompression 10000 times from level 3 without buffer pool 175 176 0 0.1 17506.1 1.0X
+Decompression 10000 times from level 1 with buffer pool 151 152 1 0.1 15051.5 1.2X
+Decompression 10000 times from level 2 with buffer pool 150 151 1 0.1 14998.0 1.2X
+Decompression 10000 times from level 3 with buffer pool 150 151 0 0.1 15019.4 1.2X
-OpenJDK 64-Bit Server VM 17.0.12+7-LTS on Linux 6.5.0-1025-azure
+OpenJDK 64-Bit Server VM 17.0.12+7-LTS on Linux 6.8.0-1014-azure
AMD EPYC 7763 64-Core Processor
Parallel Compression at level 3: Best Time(ms) Avg Time(ms) Stdev(ms) Rate(M/s) Per Row(ns) Relative
------------------------------------------------------------------------------------------------------------------------
-Parallel Compression with 0 workers 46 48 1 0.0 360483.5 1.0X
-Parallel Compression with 1 workers 34 36 2 0.0 265816.1 1.4X
-Parallel Compression with 2 workers 33 36 2 0.0 254525.8 1.4X
-Parallel Compression with 4 workers 34 37 1 0.0 266270.8 1.4X
-Parallel Compression with 8 workers 37 39 1 0.0 289289.2 1.2X
-Parallel Compression with 16 workers 41 43 1 0.0 320243.3 1.1X
+Parallel Compression with 0 workers 57 57 0 0.0 444425.2 1.0X
+Parallel Compression with 1 workers 42 44 3 0.0 325107.6 1.4X
+Parallel Compression with 2 workers 38 39 2 0.0 294840.0 1.5X
+Parallel Compression with 4 workers 36 37 1 0.0 282143.1 1.6X
+Parallel Compression with 8 workers 39 40 1 0.0 303793.6 1.5X
+Parallel Compression with 16 workers 41 43 1 0.0 324165.5 1.4X
-OpenJDK 64-Bit Server VM 17.0.12+7-LTS on Linux 6.5.0-1025-azure
+OpenJDK 64-Bit Server VM 17.0.12+7-LTS on Linux 6.8.0-1014-azure
AMD EPYC 7763 64-Core Processor
Parallel Compression at level 9: Best Time(ms) Avg Time(ms) Stdev(ms) Rate(M/s) Per Row(ns) Relative
------------------------------------------------------------------------------------------------------------------------
-Parallel Compression with 0 workers 154 156 2 0.0 1205934.0 1.0X
-Parallel Compression with 1 workers 191 194 4 0.0 1495729.9 0.8X
-Parallel Compression with 2 workers 110 114 5 0.0 859158.9 1.4X
-Parallel Compression with 4 workers 105 108 3 0.0 822932.2 1.5X
-Parallel Compression with 8 workers 109 113 2 0.0 851560.0 1.4X
-Parallel Compression with 16 workers 111 115 2 0.0 870695.9 1.4X
+Parallel Compression with 0 workers 156 158 1 0.0 1220298.8 1.0X
+Parallel Compression with 1 workers 188 189 1 0.0 1467911.4 0.8X
+Parallel Compression with 2 workers 111 118 7 0.0 866985.2 1.4X
+Parallel Compression with 4 workers 106 109 2 0.0 827592.1 1.5X
+Parallel Compression with 8 workers 114 116 2 0.0 888419.5 1.4X
+Parallel Compression with 16 workers 111 115 2 0.0 868463.5 1.4X
diff --git a/core/pom.xml b/core/pom.xml
index 19f58940ed942..7805a3f37ae53 100644
--- a/core/pom.xml
+++ b/core/pom.xml
@@ -393,6 +393,16 @@
spark.shuffle.accurateBlockSkewedFactor
A shuffle block is considered as skewed and will be accurately recorded in
HighlyCompressedMapStatus if its size is larger than this factor multiplying
+ the median shuffle block size or spark.shuffle.accurateBlockThreshold. It is
+ recommended to set this parameter to be the same as
+ spark.sql.adaptive.skewJoin.skewedPartitionFactor. Set to -1.0 to disable this
+ feature by default.
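To make the recommendation above concrete, a minimal sketch (the factor value 5.0 is illustrative, not prescribed by this patch):

```scala
import org.apache.spark.SparkConf

// Keep the two skew factors aligned, as the description recommends.
val conf = new SparkConf()
  .set("spark.shuffle.accurateBlockSkewedFactor", "5.0")
  .set("spark.sql.adaptive.skewJoin.skewedPartitionFactor", "5.0")
```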
+ spark.shuffle.registration.timeout
spark.kubernetes.driver.volumes.[VolumeType].[VolumeName].annotation.[AnnotationName]
AnnotationName
as key having specified value, must conform with Kubernetes annotations format. For example,
+ spark.kubernetes.driver.volumes.persistentVolumeClaim.checkpointpvc.annotation.foo=bar.
+ spark.kubernetes.executor.volumes.[VolumeType].[VolumeName].mount.path
spark.kubernetes.executor.volumes.[VolumeType].[VolumeName].annotation.[AnnotationName]
AnnotationName
as key having specified value, must conform with Kubernetes annotations format. For example,
+ spark.kubernetes.executor.volumes.persistentVolumeClaim.checkpointpvc.annotation.foo=bar.
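A sketch of how the new annotation keys combine with the existing mount.path keys (volume name checkpointpvc, annotation foo=bar, and the mount path are all placeholders taken from the example above):

```scala
import org.apache.spark.SparkConf

val conf = new SparkConf()
  .set("spark.kubernetes.executor.volumes.persistentVolumeClaim.checkpointpvc.annotation.foo", "bar")
  .set("spark.kubernetes.executor.volumes.persistentVolumeClaim.checkpointpvc.mount.path", "/checkpoint")
```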
+ spark.kubernetes.local.dirs.tmpfs
false
+ /**
+ * .org.apache.spark.sql.execution.streaming.state.ListStateCall listStateCall = 2;
+ * @return Whether the listStateCall field is set.
+ */
+ boolean hasListStateCall();
+ /**
+ * .org.apache.spark.sql.execution.streaming.state.ListStateCall listStateCall = 2;
+ * @return The listStateCall.
+ */
+ org.apache.spark.sql.execution.streaming.state.StateMessage.ListStateCall getListStateCall();
+ /**
+ * .org.apache.spark.sql.execution.streaming.state.ListStateCall listStateCall = 2;
+ */
+ org.apache.spark.sql.execution.streaming.state.StateMessage.ListStateCallOrBuilder getListStateCallOrBuilder();
+
public org.apache.spark.sql.execution.streaming.state.StateMessage.StateVariableRequest.MethodCase getMethodCase();
}
/**
@@ -3510,6 +3525,7 @@ public enum MethodCase
implements com.google.protobuf.Internal.EnumLite,
com.google.protobuf.AbstractMessage.InternalOneOfEnum {
VALUESTATECALL(1),
+ LISTSTATECALL(2),
METHOD_NOT_SET(0);
private final int value;
private MethodCase(int value) {
@@ -3528,6 +3544,7 @@ public static MethodCase valueOf(int value) {
public static MethodCase forNumber(int value) {
switch (value) {
case 1: return VALUESTATECALL;
+ case 2: return LISTSTATECALL;
case 0: return METHOD_NOT_SET;
default: return null;
}
@@ -3574,6 +3591,37 @@ public org.apache.spark.sql.execution.streaming.state.StateMessage.ValueStateCal
return org.apache.spark.sql.execution.streaming.state.StateMessage.ValueStateCall.getDefaultInstance();
}
+ public static final int LISTSTATECALL_FIELD_NUMBER = 2;
+ /**
+ * .org.apache.spark.sql.execution.streaming.state.ListStateCall listStateCall = 2;
+ * @return Whether the listStateCall field is set.
+ */
+ @java.lang.Override
+ public boolean hasListStateCall() {
+ return methodCase_ == 2;
+ }
+ /**
+ * .org.apache.spark.sql.execution.streaming.state.ListStateCall listStateCall = 2;
+ * @return The listStateCall.
+ */
+ @java.lang.Override
+ public org.apache.spark.sql.execution.streaming.state.StateMessage.ListStateCall getListStateCall() {
+ if (methodCase_ == 2) {
+ return (org.apache.spark.sql.execution.streaming.state.StateMessage.ListStateCall) method_;
+ }
+ return org.apache.spark.sql.execution.streaming.state.StateMessage.ListStateCall.getDefaultInstance();
+ }
+ /**
+ * .org.apache.spark.sql.execution.streaming.state.ListStateCall listStateCall = 2;
+ */
+ @java.lang.Override
+ public org.apache.spark.sql.execution.streaming.state.StateMessage.ListStateCallOrBuilder getListStateCallOrBuilder() {
+ if (methodCase_ == 2) {
+ return (org.apache.spark.sql.execution.streaming.state.StateMessage.ListStateCall) method_;
+ }
+ return org.apache.spark.sql.execution.streaming.state.StateMessage.ListStateCall.getDefaultInstance();
+ }
+
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
@@ -3591,6 +3639,9 @@ public void writeTo(com.google.protobuf.CodedOutputStream output)
if (methodCase_ == 1) {
output.writeMessage(1, (org.apache.spark.sql.execution.streaming.state.StateMessage.ValueStateCall) method_);
}
+ if (methodCase_ == 2) {
+ output.writeMessage(2, (org.apache.spark.sql.execution.streaming.state.StateMessage.ListStateCall) method_);
+ }
getUnknownFields().writeTo(output);
}
@@ -3604,6 +3655,10 @@ public int getSerializedSize() {
size += com.google.protobuf.CodedOutputStream
.computeMessageSize(1, (org.apache.spark.sql.execution.streaming.state.StateMessage.ValueStateCall) method_);
}
+ if (methodCase_ == 2) {
+ size += com.google.protobuf.CodedOutputStream
+ .computeMessageSize(2, (org.apache.spark.sql.execution.streaming.state.StateMessage.ListStateCall) method_);
+ }
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
@@ -3625,6 +3680,10 @@ public boolean equals(final java.lang.Object obj) {
if (!getValueStateCall()
.equals(other.getValueStateCall())) return false;
break;
+ case 2:
+ if (!getListStateCall()
+ .equals(other.getListStateCall())) return false;
+ break;
case 0:
default:
}
@@ -3644,6 +3703,10 @@ public int hashCode() {
hash = (37 * hash) + VALUESTATECALL_FIELD_NUMBER;
hash = (53 * hash) + getValueStateCall().hashCode();
break;
+ case 2:
+ hash = (37 * hash) + LISTSTATECALL_FIELD_NUMBER;
+ hash = (53 * hash) + getListStateCall().hashCode();
+ break;
case 0:
default:
}
@@ -3778,6 +3841,9 @@ public Builder clear() {
if (valueStateCallBuilder_ != null) {
valueStateCallBuilder_.clear();
}
+ if (listStateCallBuilder_ != null) {
+ listStateCallBuilder_.clear();
+ }
methodCase_ = 0;
method_ = null;
return this;
@@ -3813,6 +3879,13 @@ public org.apache.spark.sql.execution.streaming.state.StateMessage.StateVariable
result.method_ = valueStateCallBuilder_.build();
}
}
+ if (methodCase_ == 2) {
+ if (listStateCallBuilder_ == null) {
+ result.method_ = method_;
+ } else {
+ result.method_ = listStateCallBuilder_.build();
+ }
+ }
result.methodCase_ = methodCase_;
onBuilt();
return result;
@@ -3867,6 +3940,10 @@ public Builder mergeFrom(org.apache.spark.sql.execution.streaming.state.StateMes
mergeValueStateCall(other.getValueStateCall());
break;
}
+ case LISTSTATECALL: {
+ mergeListStateCall(other.getListStateCall());
+ break;
+ }
case METHOD_NOT_SET: {
break;
}
@@ -3904,6 +3981,13 @@ public Builder mergeFrom(
methodCase_ = 1;
break;
} // case 10
+ case 18: {
+ input.readMessage(
+ getListStateCallFieldBuilder().getBuilder(),
+ extensionRegistry);
+ methodCase_ = 2;
+ break;
+ } // case 18
default: {
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
@@ -4076,6 +4160,148 @@ public org.apache.spark.sql.execution.streaming.state.StateMessage.ValueStateCal
onChanged();;
return valueStateCallBuilder_;
}
+
+ private com.google.protobuf.SingleFieldBuilderV3<
+ org.apache.spark.sql.execution.streaming.state.StateMessage.ListStateCall, org.apache.spark.sql.execution.streaming.state.StateMessage.ListStateCall.Builder, org.apache.spark.sql.execution.streaming.state.StateMessage.ListStateCallOrBuilder> listStateCallBuilder_;
+ /**
+ * .org.apache.spark.sql.execution.streaming.state.ListStateCall listStateCall = 2;
+ * @return Whether the listStateCall field is set.
+ */
+ @java.lang.Override
+ public boolean hasListStateCall() {
+ return methodCase_ == 2;
+ }
+ /**
+ * .org.apache.spark.sql.execution.streaming.state.ListStateCall listStateCall = 2;
+ * @return The listStateCall.
+ */
+ @java.lang.Override
+ public org.apache.spark.sql.execution.streaming.state.StateMessage.ListStateCall getListStateCall() {
+ if (listStateCallBuilder_ == null) {
+ if (methodCase_ == 2) {
+ return (org.apache.spark.sql.execution.streaming.state.StateMessage.ListStateCall) method_;
+ }
+ return org.apache.spark.sql.execution.streaming.state.StateMessage.ListStateCall.getDefaultInstance();
+ } else {
+ if (methodCase_ == 2) {
+ return listStateCallBuilder_.getMessage();
+ }
+ return org.apache.spark.sql.execution.streaming.state.StateMessage.ListStateCall.getDefaultInstance();
+ }
+ }
+ /**
+ * .org.apache.spark.sql.execution.streaming.state.ListStateCall listStateCall = 2;
+ */
+ public Builder setListStateCall(org.apache.spark.sql.execution.streaming.state.StateMessage.ListStateCall value) {
+ if (listStateCallBuilder_ == null) {
+ if (value == null) {
+ throw new NullPointerException();
+ }
+ method_ = value;
+ onChanged();
+ } else {
+ listStateCallBuilder_.setMessage(value);
+ }
+ methodCase_ = 2;
+ return this;
+ }
+ /**
+ * .org.apache.spark.sql.execution.streaming.state.ListStateCall listStateCall = 2;
+ */
+ public Builder setListStateCall(
+ org.apache.spark.sql.execution.streaming.state.StateMessage.ListStateCall.Builder builderForValue) {
+ if (listStateCallBuilder_ == null) {
+ method_ = builderForValue.build();
+ onChanged();
+ } else {
+ listStateCallBuilder_.setMessage(builderForValue.build());
+ }
+ methodCase_ = 2;
+ return this;
+ }
+ /**
+ * .org.apache.spark.sql.execution.streaming.state.ListStateCall listStateCall = 2;
+ */
+ public Builder mergeListStateCall(org.apache.spark.sql.execution.streaming.state.StateMessage.ListStateCall value) {
+ if (listStateCallBuilder_ == null) {
+ if (methodCase_ == 2 &&
+ method_ != org.apache.spark.sql.execution.streaming.state.StateMessage.ListStateCall.getDefaultInstance()) {
+ method_ = org.apache.spark.sql.execution.streaming.state.StateMessage.ListStateCall.newBuilder((org.apache.spark.sql.execution.streaming.state.StateMessage.ListStateCall) method_)
+ .mergeFrom(value).buildPartial();
+ } else {
+ method_ = value;
+ }
+ onChanged();
+ } else {
+ if (methodCase_ == 2) {
+ listStateCallBuilder_.mergeFrom(value);
+ } else {
+ listStateCallBuilder_.setMessage(value);
+ }
+ }
+ methodCase_ = 2;
+ return this;
+ }
+ /**
+ * .org.apache.spark.sql.execution.streaming.state.ListStateCall listStateCall = 2;
+ */
+ public Builder clearListStateCall() {
+ if (listStateCallBuilder_ == null) {
+ if (methodCase_ == 2) {
+ methodCase_ = 0;
+ method_ = null;
+ onChanged();
+ }
+ } else {
+ if (methodCase_ == 2) {
+ methodCase_ = 0;
+ method_ = null;
+ }
+ listStateCallBuilder_.clear();
+ }
+ return this;
+ }
+ /**
+ * .org.apache.spark.sql.execution.streaming.state.ListStateCall listStateCall = 2;
+ */
+ public org.apache.spark.sql.execution.streaming.state.StateMessage.ListStateCall.Builder getListStateCallBuilder() {
+ return getListStateCallFieldBuilder().getBuilder();
+ }
+ /**
+ * .org.apache.spark.sql.execution.streaming.state.ListStateCall listStateCall = 2;
+ */
+ @java.lang.Override
+ public org.apache.spark.sql.execution.streaming.state.StateMessage.ListStateCallOrBuilder getListStateCallOrBuilder() {
+ if ((methodCase_ == 2) && (listStateCallBuilder_ != null)) {
+ return listStateCallBuilder_.getMessageOrBuilder();
+ } else {
+ if (methodCase_ == 2) {
+ return (org.apache.spark.sql.execution.streaming.state.StateMessage.ListStateCall) method_;
+ }
+ return org.apache.spark.sql.execution.streaming.state.StateMessage.ListStateCall.getDefaultInstance();
+ }
+ }
+ /**
+ * .org.apache.spark.sql.execution.streaming.state.ListStateCall listStateCall = 2;
+ */
+ private com.google.protobuf.SingleFieldBuilderV3<
+ org.apache.spark.sql.execution.streaming.state.StateMessage.ListStateCall, org.apache.spark.sql.execution.streaming.state.StateMessage.ListStateCall.Builder, org.apache.spark.sql.execution.streaming.state.StateMessage.ListStateCallOrBuilder>
+ getListStateCallFieldBuilder() {
+ if (listStateCallBuilder_ == null) {
+ if (!(methodCase_ == 2)) {
+ method_ = org.apache.spark.sql.execution.streaming.state.StateMessage.ListStateCall.getDefaultInstance();
+ }
+ listStateCallBuilder_ = new com.google.protobuf.SingleFieldBuilderV3<
+ org.apache.spark.sql.execution.streaming.state.StateMessage.ListStateCall, org.apache.spark.sql.execution.streaming.state.StateMessage.ListStateCall.Builder, org.apache.spark.sql.execution.streaming.state.StateMessage.ListStateCallOrBuilder>(
+ (org.apache.spark.sql.execution.streaming.state.StateMessage.ListStateCall) method_,
+ getParentForChildren(),
+ isClean());
+ method_ = null;
+ }
+ methodCase_ = 2;
+ onChanged();;
+ return listStateCallBuilder_;
+ }
@java.lang.Override
public final Builder setUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
@@ -7482,37 +7708,135 @@ public org.apache.spark.sql.execution.streaming.state.StateMessage.ValueStateCal
}
- public interface SetImplicitKeyOrBuilder extends
- // @@protoc_insertion_point(interface_extends:org.apache.spark.sql.execution.streaming.state.SetImplicitKey)
+ public interface ListStateCallOrBuilder extends
+ // @@protoc_insertion_point(interface_extends:org.apache.spark.sql.execution.streaming.state.ListStateCall)
com.google.protobuf.MessageOrBuilder {
/**
- * bytes key = 1;
- * @return The key.
+ * string stateName = 1;
+ * @return The stateName.
*/
- com.google.protobuf.ByteString getKey();
+ java.lang.String getStateName();
+ /**
+ * string stateName = 1;
+ * @return The bytes for stateName.
+ */
+ com.google.protobuf.ByteString
+ getStateNameBytes();
+
+ /**
+ * .org.apache.spark.sql.execution.streaming.state.Exists exists = 2;
+ * @return Whether the exists field is set.
+ */
+ boolean hasExists();
+ /**
+ * .org.apache.spark.sql.execution.streaming.state.Exists exists = 2;
+ * @return The exists.
+ */
+ org.apache.spark.sql.execution.streaming.state.StateMessage.Exists getExists();
+ /**
+ * .org.apache.spark.sql.execution.streaming.state.Exists exists = 2;
+ */
+ org.apache.spark.sql.execution.streaming.state.StateMessage.ExistsOrBuilder getExistsOrBuilder();
+
+ /**
+ * .org.apache.spark.sql.execution.streaming.state.ListStateGet listStateGet = 3;
+ * @return Whether the listStateGet field is set.
+ */
+ boolean hasListStateGet();
+ /**
+ * .org.apache.spark.sql.execution.streaming.state.ListStateGet listStateGet = 3;
+ * @return The listStateGet.
+ */
+ org.apache.spark.sql.execution.streaming.state.StateMessage.ListStateGet getListStateGet();
+ /**
+ * .org.apache.spark.sql.execution.streaming.state.ListStateGet listStateGet = 3;
+ */
+ org.apache.spark.sql.execution.streaming.state.StateMessage.ListStateGetOrBuilder getListStateGetOrBuilder();
+
+ /**
+ * .org.apache.spark.sql.execution.streaming.state.ListStatePut listStatePut = 4;
+ * @return Whether the listStatePut field is set.
+ */
+ boolean hasListStatePut();
+ /**
+ * .org.apache.spark.sql.execution.streaming.state.ListStatePut listStatePut = 4;
+ * @return The listStatePut.
+ */
+ org.apache.spark.sql.execution.streaming.state.StateMessage.ListStatePut getListStatePut();
+ /**
+ * .org.apache.spark.sql.execution.streaming.state.ListStatePut listStatePut = 4;
+ */
+ org.apache.spark.sql.execution.streaming.state.StateMessage.ListStatePutOrBuilder getListStatePutOrBuilder();
+
+ /**
+ * .org.apache.spark.sql.execution.streaming.state.AppendValue appendValue = 5;
+ * @return Whether the appendValue field is set.
+ */
+ boolean hasAppendValue();
+ /**
+ * .org.apache.spark.sql.execution.streaming.state.AppendValue appendValue = 5;
+ * @return The appendValue.
+ */
+ org.apache.spark.sql.execution.streaming.state.StateMessage.AppendValue getAppendValue();
+ /**
+ * .org.apache.spark.sql.execution.streaming.state.AppendValue appendValue = 5;
+ */
+ org.apache.spark.sql.execution.streaming.state.StateMessage.AppendValueOrBuilder getAppendValueOrBuilder();
+
+ /**
+ * .org.apache.spark.sql.execution.streaming.state.AppendList appendList = 6;
+ * @return Whether the appendList field is set.
+ */
+ boolean hasAppendList();
+ /**
+ * .org.apache.spark.sql.execution.streaming.state.AppendList appendList = 6;
+ * @return The appendList.
+ */
+ org.apache.spark.sql.execution.streaming.state.StateMessage.AppendList getAppendList();
+ /**
+ * .org.apache.spark.sql.execution.streaming.state.AppendList appendList = 6;
+ */
+ org.apache.spark.sql.execution.streaming.state.StateMessage.AppendListOrBuilder getAppendListOrBuilder();
+
+ /**
+ * .org.apache.spark.sql.execution.streaming.state.Clear clear = 7;
+ * @return Whether the clear field is set.
+ */
+ boolean hasClear();
+ /**
+ * .org.apache.spark.sql.execution.streaming.state.Clear clear = 7;
+ * @return The clear.
+ */
+ org.apache.spark.sql.execution.streaming.state.StateMessage.Clear getClear();
+ /**
+ * .org.apache.spark.sql.execution.streaming.state.Clear clear = 7;
+ */
+ org.apache.spark.sql.execution.streaming.state.StateMessage.ClearOrBuilder getClearOrBuilder();
+
+ public org.apache.spark.sql.execution.streaming.state.StateMessage.ListStateCall.MethodCase getMethodCase();
}
/**
- * Protobuf type {@code org.apache.spark.sql.execution.streaming.state.SetImplicitKey}
+ * Protobuf type {@code org.apache.spark.sql.execution.streaming.state.ListStateCall}
*/
- public static final class SetImplicitKey extends
+ public static final class ListStateCall extends
com.google.protobuf.GeneratedMessageV3 implements
- // @@protoc_insertion_point(message_implements:org.apache.spark.sql.execution.streaming.state.SetImplicitKey)
- SetImplicitKeyOrBuilder {
+ // @@protoc_insertion_point(message_implements:org.apache.spark.sql.execution.streaming.state.ListStateCall)
+ ListStateCallOrBuilder {
private static final long serialVersionUID = 0L;
- // Use SetImplicitKey.newBuilder() to construct.
- private SetImplicitKey(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
+ // Use ListStateCall.newBuilder() to construct.
+ private ListStateCall(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
- private SetImplicitKey() {
- key_ = com.google.protobuf.ByteString.EMPTY;
+ private ListStateCall() {
+ stateName_ = "";
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(
UnusedPrivateParameter unused) {
- return new SetImplicitKey();
+ return new ListStateCall();
}
@java.lang.Override
@@ -7522,31 +7846,3583 @@ protected java.lang.Object newInstance(
}
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
- return org.apache.spark.sql.execution.streaming.state.StateMessage.internal_static_org_apache_spark_sql_execution_streaming_state_SetImplicitKey_descriptor;
+ return org.apache.spark.sql.execution.streaming.state.StateMessage.internal_static_org_apache_spark_sql_execution_streaming_state_ListStateCall_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
- return org.apache.spark.sql.execution.streaming.state.StateMessage.internal_static_org_apache_spark_sql_execution_streaming_state_SetImplicitKey_fieldAccessorTable
+ return org.apache.spark.sql.execution.streaming.state.StateMessage.internal_static_org_apache_spark_sql_execution_streaming_state_ListStateCall_fieldAccessorTable
.ensureFieldAccessorsInitialized(
- org.apache.spark.sql.execution.streaming.state.StateMessage.SetImplicitKey.class, org.apache.spark.sql.execution.streaming.state.StateMessage.SetImplicitKey.Builder.class);
+ org.apache.spark.sql.execution.streaming.state.StateMessage.ListStateCall.class, org.apache.spark.sql.execution.streaming.state.StateMessage.ListStateCall.Builder.class);
}
- public static final int KEY_FIELD_NUMBER = 1;
- private com.google.protobuf.ByteString key_;
- /**
- * bytes key = 1;
- * @return The key.
- */
- @java.lang.Override
- public com.google.protobuf.ByteString getKey() {
- return key_;
- }
+ private int methodCase_ = 0;
+ private java.lang.Object method_;
+ public enum MethodCase
+ implements com.google.protobuf.Internal.EnumLite,
+ com.google.protobuf.AbstractMessage.InternalOneOfEnum {
+ EXISTS(2),
+ LISTSTATEGET(3),
+ LISTSTATEPUT(4),
+ APPENDVALUE(5),
+ APPENDLIST(6),
+ CLEAR(7),
+ METHOD_NOT_SET(0);
+ private final int value;
+ private MethodCase(int value) {
+ this.value = value;
+ }
+ /**
+ * @param value The number of the enum to look for.
+ * @return The enum associated with the given number.
+ * @deprecated Use {@link #forNumber(int)} instead.
+ */
+ @java.lang.Deprecated
+ public static MethodCase valueOf(int value) {
+ return forNumber(value);
+ }
- private byte memoizedIsInitialized = -1;
- @java.lang.Override
- public final boolean isInitialized() {
+ public static MethodCase forNumber(int value) {
+ switch (value) {
+ case 2: return EXISTS;
+ case 3: return LISTSTATEGET;
+ case 4: return LISTSTATEPUT;
+ case 5: return APPENDVALUE;
+ case 6: return APPENDLIST;
+ case 7: return CLEAR;
+ case 0: return METHOD_NOT_SET;
+ default: return null;
+ }
+ }
+ public int getNumber() {
+ return this.value;
+ }
+ };
+
+ public MethodCase
+ getMethodCase() {
+ return MethodCase.forNumber(
+ methodCase_);
+ }
+
+ public static final int STATENAME_FIELD_NUMBER = 1;
+ private volatile java.lang.Object stateName_;
+ /**
+ * string stateName = 1;
+ * @return The stateName.
+ */
+ @java.lang.Override
+ public java.lang.String getStateName() {
+ java.lang.Object ref = stateName_;
+ if (ref instanceof java.lang.String) {
+ return (java.lang.String) ref;
+ } else {
+ com.google.protobuf.ByteString bs =
+ (com.google.protobuf.ByteString) ref;
+ java.lang.String s = bs.toStringUtf8();
+ stateName_ = s;
+ return s;
+ }
+ }
+ /**
+ * string stateName = 1;
+ * @return The bytes for stateName.
+ */
+ @java.lang.Override
+ public com.google.protobuf.ByteString
+ getStateNameBytes() {
+ java.lang.Object ref = stateName_;
+ if (ref instanceof java.lang.String) {
+ com.google.protobuf.ByteString b =
+ com.google.protobuf.ByteString.copyFromUtf8(
+ (java.lang.String) ref);
+ stateName_ = b;
+ return b;
+ } else {
+ return (com.google.protobuf.ByteString) ref;
+ }
+ }
+
+ public static final int EXISTS_FIELD_NUMBER = 2;
+ /**
+ * .org.apache.spark.sql.execution.streaming.state.Exists exists = 2;
+ * @return Whether the exists field is set.
+ */
+ @java.lang.Override
+ public boolean hasExists() {
+ return methodCase_ == 2;
+ }
+ /**
+ * .org.apache.spark.sql.execution.streaming.state.Exists exists = 2;
+ * @return The exists.
+ */
+ @java.lang.Override
+ public org.apache.spark.sql.execution.streaming.state.StateMessage.Exists getExists() {
+ if (methodCase_ == 2) {
+ return (org.apache.spark.sql.execution.streaming.state.StateMessage.Exists) method_;
+ }
+ return org.apache.spark.sql.execution.streaming.state.StateMessage.Exists.getDefaultInstance();
+ }
+ /**
+ * .org.apache.spark.sql.execution.streaming.state.Exists exists = 2;
+ */
+ @java.lang.Override
+ public org.apache.spark.sql.execution.streaming.state.StateMessage.ExistsOrBuilder getExistsOrBuilder() {
+ if (methodCase_ == 2) {
+ return (org.apache.spark.sql.execution.streaming.state.StateMessage.Exists) method_;
+ }
+ return org.apache.spark.sql.execution.streaming.state.StateMessage.Exists.getDefaultInstance();
+ }
+
+ public static final int LISTSTATEGET_FIELD_NUMBER = 3;
+ /**
+ * .org.apache.spark.sql.execution.streaming.state.ListStateGet listStateGet = 3;
+ * @return Whether the listStateGet field is set.
+ */
+ @java.lang.Override
+ public boolean hasListStateGet() {
+ return methodCase_ == 3;
+ }
+ /**
+ * .org.apache.spark.sql.execution.streaming.state.ListStateGet listStateGet = 3;
+ * @return The listStateGet.
+ */
+ @java.lang.Override
+ public org.apache.spark.sql.execution.streaming.state.StateMessage.ListStateGet getListStateGet() {
+ if (methodCase_ == 3) {
+ return (org.apache.spark.sql.execution.streaming.state.StateMessage.ListStateGet) method_;
+ }
+ return org.apache.spark.sql.execution.streaming.state.StateMessage.ListStateGet.getDefaultInstance();
+ }
+ /**
+ * .org.apache.spark.sql.execution.streaming.state.ListStateGet listStateGet = 3;
+ */
+ @java.lang.Override
+ public org.apache.spark.sql.execution.streaming.state.StateMessage.ListStateGetOrBuilder getListStateGetOrBuilder() {
+ if (methodCase_ == 3) {
+ return (org.apache.spark.sql.execution.streaming.state.StateMessage.ListStateGet) method_;
+ }
+ return org.apache.spark.sql.execution.streaming.state.StateMessage.ListStateGet.getDefaultInstance();
+ }
+
+ public static final int LISTSTATEPUT_FIELD_NUMBER = 4;
+ /**
+ * .org.apache.spark.sql.execution.streaming.state.ListStatePut listStatePut = 4;
+ * @return Whether the listStatePut field is set.
+ */
+ @java.lang.Override
+ public boolean hasListStatePut() {
+ return methodCase_ == 4;
+ }
+ /**
+ * .org.apache.spark.sql.execution.streaming.state.ListStatePut listStatePut = 4;
+ * @return The listStatePut.
+ */
+ @java.lang.Override
+ public org.apache.spark.sql.execution.streaming.state.StateMessage.ListStatePut getListStatePut() {
+ if (methodCase_ == 4) {
+ return (org.apache.spark.sql.execution.streaming.state.StateMessage.ListStatePut) method_;
+ }
+ return org.apache.spark.sql.execution.streaming.state.StateMessage.ListStatePut.getDefaultInstance();
+ }
+ /**
+ * .org.apache.spark.sql.execution.streaming.state.ListStatePut listStatePut = 4;
+ */
+ @java.lang.Override
+ public org.apache.spark.sql.execution.streaming.state.StateMessage.ListStatePutOrBuilder getListStatePutOrBuilder() {
+ if (methodCase_ == 4) {
+ return (org.apache.spark.sql.execution.streaming.state.StateMessage.ListStatePut) method_;
+ }
+ return org.apache.spark.sql.execution.streaming.state.StateMessage.ListStatePut.getDefaultInstance();
+ }
+
+ public static final int APPENDVALUE_FIELD_NUMBER = 5;
+ /**
+ * .org.apache.spark.sql.execution.streaming.state.AppendValue appendValue = 5;
+ * @return Whether the appendValue field is set.
+ */
+ @java.lang.Override
+ public boolean hasAppendValue() {
+ return methodCase_ == 5;
+ }
+ /**
+ * .org.apache.spark.sql.execution.streaming.state.AppendValue appendValue = 5;
+ * @return The appendValue.
+ */
+ @java.lang.Override
+ public org.apache.spark.sql.execution.streaming.state.StateMessage.AppendValue getAppendValue() {
+ if (methodCase_ == 5) {
+ return (org.apache.spark.sql.execution.streaming.state.StateMessage.AppendValue) method_;
+ }
+ return org.apache.spark.sql.execution.streaming.state.StateMessage.AppendValue.getDefaultInstance();
+ }
+ /**
+ * .org.apache.spark.sql.execution.streaming.state.AppendValue appendValue = 5;
+ */
+ @java.lang.Override
+ public org.apache.spark.sql.execution.streaming.state.StateMessage.AppendValueOrBuilder getAppendValueOrBuilder() {
+ if (methodCase_ == 5) {
+ return (org.apache.spark.sql.execution.streaming.state.StateMessage.AppendValue) method_;
+ }
+ return org.apache.spark.sql.execution.streaming.state.StateMessage.AppendValue.getDefaultInstance();
+ }
+
+ public static final int APPENDLIST_FIELD_NUMBER = 6;
+ /**
+ * .org.apache.spark.sql.execution.streaming.state.AppendList appendList = 6;
+ * @return Whether the appendList field is set.
+ */
+ @java.lang.Override
+ public boolean hasAppendList() {
+ return methodCase_ == 6;
+ }
+ /**
+ * .org.apache.spark.sql.execution.streaming.state.AppendList appendList = 6;
+ * @return The appendList.
+ */
+ @java.lang.Override
+ public org.apache.spark.sql.execution.streaming.state.StateMessage.AppendList getAppendList() {
+ if (methodCase_ == 6) {
+ return (org.apache.spark.sql.execution.streaming.state.StateMessage.AppendList) method_;
+ }
+ return org.apache.spark.sql.execution.streaming.state.StateMessage.AppendList.getDefaultInstance();
+ }
+ /**
+ * .org.apache.spark.sql.execution.streaming.state.AppendList appendList = 6;
+ */
+ @java.lang.Override
+ public org.apache.spark.sql.execution.streaming.state.StateMessage.AppendListOrBuilder getAppendListOrBuilder() {
+ if (methodCase_ == 6) {
+ return (org.apache.spark.sql.execution.streaming.state.StateMessage.AppendList) method_;
+ }
+ return org.apache.spark.sql.execution.streaming.state.StateMessage.AppendList.getDefaultInstance();
+ }
+
+ public static final int CLEAR_FIELD_NUMBER = 7;
+ /**
+ * .org.apache.spark.sql.execution.streaming.state.Clear clear = 7;
+ * @return Whether the clear field is set.
+ */
+ @java.lang.Override
+ public boolean hasClear() {
+ return methodCase_ == 7;
+ }
+ /**
+ * .org.apache.spark.sql.execution.streaming.state.Clear clear = 7;
+ * @return The clear.
+ */
+ @java.lang.Override
+ public org.apache.spark.sql.execution.streaming.state.StateMessage.Clear getClear() {
+ if (methodCase_ == 7) {
+ return (org.apache.spark.sql.execution.streaming.state.StateMessage.Clear) method_;
+ }
+ return org.apache.spark.sql.execution.streaming.state.StateMessage.Clear.getDefaultInstance();
+ }
+ /**
+ * .org.apache.spark.sql.execution.streaming.state.Clear clear = 7;
+ */
+ @java.lang.Override
+ public org.apache.spark.sql.execution.streaming.state.StateMessage.ClearOrBuilder getClearOrBuilder() {
+ if (methodCase_ == 7) {
+ return (org.apache.spark.sql.execution.streaming.state.StateMessage.Clear) method_;
+ }
+ return org.apache.spark.sql.execution.streaming.state.StateMessage.Clear.getDefaultInstance();
+ }
+
+ private byte memoizedIsInitialized = -1;
+ @java.lang.Override
+ public final boolean isInitialized() {
+ byte isInitialized = memoizedIsInitialized;
+ if (isInitialized == 1) return true;
+ if (isInitialized == 0) return false;
+
+ memoizedIsInitialized = 1;
+ return true;
+ }
+
+ @java.lang.Override
+ public void writeTo(com.google.protobuf.CodedOutputStream output)
+ throws java.io.IOException {
+ if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(stateName_)) {
+ com.google.protobuf.GeneratedMessageV3.writeString(output, 1, stateName_);
+ }
+ if (methodCase_ == 2) {
+ output.writeMessage(2, (org.apache.spark.sql.execution.streaming.state.StateMessage.Exists) method_);
+ }
+ if (methodCase_ == 3) {
+ output.writeMessage(3, (org.apache.spark.sql.execution.streaming.state.StateMessage.ListStateGet) method_);
+ }
+ if (methodCase_ == 4) {
+ output.writeMessage(4, (org.apache.spark.sql.execution.streaming.state.StateMessage.ListStatePut) method_);
+ }
+ if (methodCase_ == 5) {
+ output.writeMessage(5, (org.apache.spark.sql.execution.streaming.state.StateMessage.AppendValue) method_);
+ }
+ if (methodCase_ == 6) {
+ output.writeMessage(6, (org.apache.spark.sql.execution.streaming.state.StateMessage.AppendList) method_);
+ }
+ if (methodCase_ == 7) {
+ output.writeMessage(7, (org.apache.spark.sql.execution.streaming.state.StateMessage.Clear) method_);
+ }
+ getUnknownFields().writeTo(output);
+ }
+
+ @java.lang.Override
+ public int getSerializedSize() {
+ int size = memoizedSize;
+ if (size != -1) return size;
+
+ size = 0;
+ if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(stateName_)) {
+ size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, stateName_);
+ }
+ if (methodCase_ == 2) {
+ size += com.google.protobuf.CodedOutputStream
+ .computeMessageSize(2, (org.apache.spark.sql.execution.streaming.state.StateMessage.Exists) method_);
+ }
+ if (methodCase_ == 3) {
+ size += com.google.protobuf.CodedOutputStream
+ .computeMessageSize(3, (org.apache.spark.sql.execution.streaming.state.StateMessage.ListStateGet) method_);
+ }
+ if (methodCase_ == 4) {
+ size += com.google.protobuf.CodedOutputStream
+ .computeMessageSize(4, (org.apache.spark.sql.execution.streaming.state.StateMessage.ListStatePut) method_);
+ }
+ if (methodCase_ == 5) {
+ size += com.google.protobuf.CodedOutputStream
+ .computeMessageSize(5, (org.apache.spark.sql.execution.streaming.state.StateMessage.AppendValue) method_);
+ }
+ if (methodCase_ == 6) {
+ size += com.google.protobuf.CodedOutputStream
+ .computeMessageSize(6, (org.apache.spark.sql.execution.streaming.state.StateMessage.AppendList) method_);
+ }
+ if (methodCase_ == 7) {
+ size += com.google.protobuf.CodedOutputStream
+ .computeMessageSize(7, (org.apache.spark.sql.execution.streaming.state.StateMessage.Clear) method_);
+ }
+ size += getUnknownFields().getSerializedSize();
+ memoizedSize = size;
+ return size;
+ }
+
+ @java.lang.Override
+ public boolean equals(final java.lang.Object obj) {
+ if (obj == this) {
+ return true;
+ }
+ if (!(obj instanceof org.apache.spark.sql.execution.streaming.state.StateMessage.ListStateCall)) {
+ return super.equals(obj);
+ }
+ org.apache.spark.sql.execution.streaming.state.StateMessage.ListStateCall other = (org.apache.spark.sql.execution.streaming.state.StateMessage.ListStateCall) obj;
+
+ if (!getStateName()
+ .equals(other.getStateName())) return false;
+ if (!getMethodCase().equals(other.getMethodCase())) return false;
+ switch (methodCase_) {
+ case 2:
+ if (!getExists()
+ .equals(other.getExists())) return false;
+ break;
+ case 3:
+ if (!getListStateGet()
+ .equals(other.getListStateGet())) return false;
+ break;
+ case 4:
+ if (!getListStatePut()
+ .equals(other.getListStatePut())) return false;
+ break;
+ case 5:
+ if (!getAppendValue()
+ .equals(other.getAppendValue())) return false;
+ break;
+ case 6:
+ if (!getAppendList()
+ .equals(other.getAppendList())) return false;
+ break;
+ case 7:
+ if (!getClear()
+ .equals(other.getClear())) return false;
+ break;
+ case 0:
+ default:
+ }
+ if (!getUnknownFields().equals(other.getUnknownFields())) return false;
+ return true;
+ }
+
+ @java.lang.Override
+ public int hashCode() {
+ if (memoizedHashCode != 0) {
+ return memoizedHashCode;
+ }
+ int hash = 41;
+ hash = (19 * hash) + getDescriptor().hashCode();
+ hash = (37 * hash) + STATENAME_FIELD_NUMBER;
+ hash = (53 * hash) + getStateName().hashCode();
+ switch (methodCase_) {
+ case 2:
+ hash = (37 * hash) + EXISTS_FIELD_NUMBER;
+ hash = (53 * hash) + getExists().hashCode();
+ break;
+ case 3:
+ hash = (37 * hash) + LISTSTATEGET_FIELD_NUMBER;
+ hash = (53 * hash) + getListStateGet().hashCode();
+ break;
+ case 4:
+ hash = (37 * hash) + LISTSTATEPUT_FIELD_NUMBER;
+ hash = (53 * hash) + getListStatePut().hashCode();
+ break;
+ case 5:
+ hash = (37 * hash) + APPENDVALUE_FIELD_NUMBER;
+ hash = (53 * hash) + getAppendValue().hashCode();
+ break;
+ case 6:
+ hash = (37 * hash) + APPENDLIST_FIELD_NUMBER;
+ hash = (53 * hash) + getAppendList().hashCode();
+ break;
+ case 7:
+ hash = (37 * hash) + CLEAR_FIELD_NUMBER;
+ hash = (53 * hash) + getClear().hashCode();
+ break;
+ case 0:
+ default:
+ }
+ hash = (29 * hash) + getUnknownFields().hashCode();
+ memoizedHashCode = hash;
+ return hash;
+ }
+
+ public static org.apache.spark.sql.execution.streaming.state.StateMessage.ListStateCall parseFrom(
+ java.nio.ByteBuffer data)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return PARSER.parseFrom(data);
+ }
+ public static org.apache.spark.sql.execution.streaming.state.StateMessage.ListStateCall parseFrom(
+ java.nio.ByteBuffer data,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return PARSER.parseFrom(data, extensionRegistry);
+ }
+ public static org.apache.spark.sql.execution.streaming.state.StateMessage.ListStateCall parseFrom(
+ com.google.protobuf.ByteString data)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return PARSER.parseFrom(data);
+ }
+ public static org.apache.spark.sql.execution.streaming.state.StateMessage.ListStateCall parseFrom(
+ com.google.protobuf.ByteString data,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return PARSER.parseFrom(data, extensionRegistry);
+ }
+ public static org.apache.spark.sql.execution.streaming.state.StateMessage.ListStateCall parseFrom(byte[] data)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return PARSER.parseFrom(data);
+ }
+ public static org.apache.spark.sql.execution.streaming.state.StateMessage.ListStateCall parseFrom(
+ byte[] data,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return PARSER.parseFrom(data, extensionRegistry);
+ }
+ public static org.apache.spark.sql.execution.streaming.state.StateMessage.ListStateCall parseFrom(java.io.InputStream input)
+ throws java.io.IOException {
+ return com.google.protobuf.GeneratedMessageV3
+ .parseWithIOException(PARSER, input);
+ }
+ public static org.apache.spark.sql.execution.streaming.state.StateMessage.ListStateCall parseFrom(
+ java.io.InputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ return com.google.protobuf.GeneratedMessageV3
+ .parseWithIOException(PARSER, input, extensionRegistry);
+ }
+ public static org.apache.spark.sql.execution.streaming.state.StateMessage.ListStateCall parseDelimitedFrom(java.io.InputStream input)
+ throws java.io.IOException {
+ return com.google.protobuf.GeneratedMessageV3
+ .parseDelimitedWithIOException(PARSER, input);
+ }
+ public static org.apache.spark.sql.execution.streaming.state.StateMessage.ListStateCall parseDelimitedFrom(
+ java.io.InputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ return com.google.protobuf.GeneratedMessageV3
+ .parseDelimitedWithIOException(PARSER, input, extensionRegistry);
+ }
+ public static org.apache.spark.sql.execution.streaming.state.StateMessage.ListStateCall parseFrom(
+ com.google.protobuf.CodedInputStream input)
+ throws java.io.IOException {
+ return com.google.protobuf.GeneratedMessageV3
+ .parseWithIOException(PARSER, input);
+ }
+ public static org.apache.spark.sql.execution.streaming.state.StateMessage.ListStateCall parseFrom(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ return com.google.protobuf.GeneratedMessageV3
+ .parseWithIOException(PARSER, input, extensionRegistry);
+ }
+
+ @java.lang.Override
+ public Builder newBuilderForType() { return newBuilder(); }
+ public static Builder newBuilder() {
+ return DEFAULT_INSTANCE.toBuilder();
+ }
+ public static Builder newBuilder(org.apache.spark.sql.execution.streaming.state.StateMessage.ListStateCall prototype) {
+ return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
+ }
+ @java.lang.Override
+ public Builder toBuilder() {
+ return this == DEFAULT_INSTANCE
+ ? new Builder() : new Builder().mergeFrom(this);
+ }
+
+ @java.lang.Override
+ protected Builder newBuilderForType(
+ com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
+ Builder builder = new Builder(parent);
+ return builder;
+ }
+ /**
+ * Protobuf type {@code org.apache.spark.sql.execution.streaming.state.ListStateCall}
+ */
+ public static final class Builder extends
+ com.google.protobuf.GeneratedMessageV3.Builder<Builder>
+ /**
+ * string stateName = 1;
+ * @return The stateName.
+ */
+ public java.lang.String getStateName() {
+ java.lang.Object ref = stateName_;
+ if (!(ref instanceof java.lang.String)) {
+ com.google.protobuf.ByteString bs =
+ (com.google.protobuf.ByteString) ref;
+ java.lang.String s = bs.toStringUtf8();
+ stateName_ = s;
+ return s;
+ } else {
+ return (java.lang.String) ref;
+ }
+ }
+ /**
+ * string stateName = 1;
+ * @return The bytes for stateName.
+ */
+ public com.google.protobuf.ByteString
+ getStateNameBytes() {
+ java.lang.Object ref = stateName_;
+ if (ref instanceof String) {
+ com.google.protobuf.ByteString b =
+ com.google.protobuf.ByteString.copyFromUtf8(
+ (java.lang.String) ref);
+ stateName_ = b;
+ return b;
+ } else {
+ return (com.google.protobuf.ByteString) ref;
+ }
+ }
+ /**
+ * string stateName = 1;
+ * @param value The stateName to set.
+ * @return This builder for chaining.
+ */
+ public Builder setStateName(
+ java.lang.String value) {
+ if (value == null) {
+ throw new NullPointerException();
+ }
+
+ stateName_ = value;
+ onChanged();
+ return this;
+ }
+ /**
+ * string stateName = 1;
+ * @return This builder for chaining.
+ */
+ public Builder clearStateName() {
+
+ stateName_ = getDefaultInstance().getStateName();
+ onChanged();
+ return this;
+ }
+ /**
+ * string stateName = 1;
+ * @param value The bytes for stateName to set.
+ * @return This builder for chaining.
+ */
+ public Builder setStateNameBytes(
+ com.google.protobuf.ByteString value) {
+ if (value == null) {
+ throw new NullPointerException();
+ }
+ checkByteStringIsUtf8(value);
+
+ stateName_ = value;
+ onChanged();
+ return this;
+ }
+
+ private com.google.protobuf.SingleFieldBuilderV3<
+ org.apache.spark.sql.execution.streaming.state.StateMessage.Exists, org.apache.spark.sql.execution.streaming.state.StateMessage.Exists.Builder, org.apache.spark.sql.execution.streaming.state.StateMessage.ExistsOrBuilder> existsBuilder_;
+ /**
+ * .org.apache.spark.sql.execution.streaming.state.Exists exists = 2;
+ * @return Whether the exists field is set.
+ */
+ @java.lang.Override
+ public boolean hasExists() {
+ return methodCase_ == 2;
+ }
+ /**
+ * .org.apache.spark.sql.execution.streaming.state.Exists exists = 2;
+ * @return The exists.
+ */
+ @java.lang.Override
+ public org.apache.spark.sql.execution.streaming.state.StateMessage.Exists getExists() {
+ if (existsBuilder_ == null) {
+ if (methodCase_ == 2) {
+ return (org.apache.spark.sql.execution.streaming.state.StateMessage.Exists) method_;
+ }
+ return org.apache.spark.sql.execution.streaming.state.StateMessage.Exists.getDefaultInstance();
+ } else {
+ if (methodCase_ == 2) {
+ return existsBuilder_.getMessage();
+ }
+ return org.apache.spark.sql.execution.streaming.state.StateMessage.Exists.getDefaultInstance();
+ }
+ }
+ /**
+ * .org.apache.spark.sql.execution.streaming.state.Exists exists = 2;
+ */
+ public Builder setExists(org.apache.spark.sql.execution.streaming.state.StateMessage.Exists value) {
+ if (existsBuilder_ == null) {
+ if (value == null) {
+ throw new NullPointerException();
+ }
+ method_ = value;
+ onChanged();
+ } else {
+ existsBuilder_.setMessage(value);
+ }
+ methodCase_ = 2;
+ return this;
+ }
+ /**
+ * .org.apache.spark.sql.execution.streaming.state.Exists exists = 2;
+ */
+ public Builder setExists(
+ org.apache.spark.sql.execution.streaming.state.StateMessage.Exists.Builder builderForValue) {
+ if (existsBuilder_ == null) {
+ method_ = builderForValue.build();
+ onChanged();
+ } else {
+ existsBuilder_.setMessage(builderForValue.build());
+ }
+ methodCase_ = 2;
+ return this;
+ }
+ /**
+ * .org.apache.spark.sql.execution.streaming.state.Exists exists = 2;
+ */
+ public Builder mergeExists(org.apache.spark.sql.execution.streaming.state.StateMessage.Exists value) {
+ if (existsBuilder_ == null) {
+ if (methodCase_ == 2 &&
+ method_ != org.apache.spark.sql.execution.streaming.state.StateMessage.Exists.getDefaultInstance()) {
+ method_ = org.apache.spark.sql.execution.streaming.state.StateMessage.Exists.newBuilder((org.apache.spark.sql.execution.streaming.state.StateMessage.Exists) method_)
+ .mergeFrom(value).buildPartial();
+ } else {
+ method_ = value;
+ }
+ onChanged();
+ } else {
+ if (methodCase_ == 2) {
+ existsBuilder_.mergeFrom(value);
+ } else {
+ existsBuilder_.setMessage(value);
+ }
+ }
+ methodCase_ = 2;
+ return this;
+ }
+ /**
+ * .org.apache.spark.sql.execution.streaming.state.Exists exists = 2;
+ */
+ public Builder clearExists() {
+ if (existsBuilder_ == null) {
+ if (methodCase_ == 2) {
+ methodCase_ = 0;
+ method_ = null;
+ onChanged();
+ }
+ } else {
+ if (methodCase_ == 2) {
+ methodCase_ = 0;
+ method_ = null;
+ }
+ existsBuilder_.clear();
+ }
+ return this;
+ }
+ /**
+ * .org.apache.spark.sql.execution.streaming.state.Exists exists = 2;
+ */
+ public org.apache.spark.sql.execution.streaming.state.StateMessage.Exists.Builder getExistsBuilder() {
+ return getExistsFieldBuilder().getBuilder();
+ }
+ /**
+ * .org.apache.spark.sql.execution.streaming.state.Exists exists = 2;
+ */
+ @java.lang.Override
+ public org.apache.spark.sql.execution.streaming.state.StateMessage.ExistsOrBuilder getExistsOrBuilder() {
+ if ((methodCase_ == 2) && (existsBuilder_ != null)) {
+ return existsBuilder_.getMessageOrBuilder();
+ } else {
+ if (methodCase_ == 2) {
+ return (org.apache.spark.sql.execution.streaming.state.StateMessage.Exists) method_;
+ }
+ return org.apache.spark.sql.execution.streaming.state.StateMessage.Exists.getDefaultInstance();
+ }
+ }
+ /**
+ * .org.apache.spark.sql.execution.streaming.state.Exists exists = 2;
+ */
+ private com.google.protobuf.SingleFieldBuilderV3<
+ org.apache.spark.sql.execution.streaming.state.StateMessage.Exists, org.apache.spark.sql.execution.streaming.state.StateMessage.Exists.Builder, org.apache.spark.sql.execution.streaming.state.StateMessage.ExistsOrBuilder>
+ getExistsFieldBuilder() {
+ if (existsBuilder_ == null) {
+ if (!(methodCase_ == 2)) {
+ method_ = org.apache.spark.sql.execution.streaming.state.StateMessage.Exists.getDefaultInstance();
+ }
+ existsBuilder_ = new com.google.protobuf.SingleFieldBuilderV3<
+ org.apache.spark.sql.execution.streaming.state.StateMessage.Exists, org.apache.spark.sql.execution.streaming.state.StateMessage.Exists.Builder, org.apache.spark.sql.execution.streaming.state.StateMessage.ExistsOrBuilder>(
+ (org.apache.spark.sql.execution.streaming.state.StateMessage.Exists) method_,
+ getParentForChildren(),
+ isClean());
+ method_ = null;
+ }
+ methodCase_ = 2;
+ onChanged();;
+ return existsBuilder_;
+ }
+
+ private com.google.protobuf.SingleFieldBuilderV3<
+ org.apache.spark.sql.execution.streaming.state.StateMessage.ListStateGet, org.apache.spark.sql.execution.streaming.state.StateMessage.ListStateGet.Builder, org.apache.spark.sql.execution.streaming.state.StateMessage.ListStateGetOrBuilder> listStateGetBuilder_;
+ /**
+ * .org.apache.spark.sql.execution.streaming.state.ListStateGet listStateGet = 3;
+ * @return Whether the listStateGet field is set.
+ */
+ @java.lang.Override
+ public boolean hasListStateGet() {
+ return methodCase_ == 3;
+ }
+ /**
+ * .org.apache.spark.sql.execution.streaming.state.ListStateGet listStateGet = 3;
+ * @return The listStateGet.
+ */
+ @java.lang.Override
+ public org.apache.spark.sql.execution.streaming.state.StateMessage.ListStateGet getListStateGet() {
+ if (listStateGetBuilder_ == null) {
+ if (methodCase_ == 3) {
+ return (org.apache.spark.sql.execution.streaming.state.StateMessage.ListStateGet) method_;
+ }
+ return org.apache.spark.sql.execution.streaming.state.StateMessage.ListStateGet.getDefaultInstance();
+ } else {
+ if (methodCase_ == 3) {
+ return listStateGetBuilder_.getMessage();
+ }
+ return org.apache.spark.sql.execution.streaming.state.StateMessage.ListStateGet.getDefaultInstance();
+ }
+ }
+ /**
+ * .org.apache.spark.sql.execution.streaming.state.ListStateGet listStateGet = 3;
+ */
+ public Builder setListStateGet(org.apache.spark.sql.execution.streaming.state.StateMessage.ListStateGet value) {
+ if (listStateGetBuilder_ == null) {
+ if (value == null) {
+ throw new NullPointerException();
+ }
+ method_ = value;
+ onChanged();
+ } else {
+ listStateGetBuilder_.setMessage(value);
+ }
+ methodCase_ = 3;
+ return this;
+ }
+ /**
+ * .org.apache.spark.sql.execution.streaming.state.ListStateGet listStateGet = 3;
+ */
+ public Builder setListStateGet(
+ org.apache.spark.sql.execution.streaming.state.StateMessage.ListStateGet.Builder builderForValue) {
+ if (listStateGetBuilder_ == null) {
+ method_ = builderForValue.build();
+ onChanged();
+ } else {
+ listStateGetBuilder_.setMessage(builderForValue.build());
+ }
+ methodCase_ = 3;
+ return this;
+ }
+ /**
+ * .org.apache.spark.sql.execution.streaming.state.ListStateGet listStateGet = 3;
+ */
+ public Builder mergeListStateGet(org.apache.spark.sql.execution.streaming.state.StateMessage.ListStateGet value) {
+ if (listStateGetBuilder_ == null) {
+ if (methodCase_ == 3 &&
+ method_ != org.apache.spark.sql.execution.streaming.state.StateMessage.ListStateGet.getDefaultInstance()) {
+ method_ = org.apache.spark.sql.execution.streaming.state.StateMessage.ListStateGet.newBuilder((org.apache.spark.sql.execution.streaming.state.StateMessage.ListStateGet) method_)
+ .mergeFrom(value).buildPartial();
+ } else {
+ method_ = value;
+ }
+ onChanged();
+ } else {
+ if (methodCase_ == 3) {
+ listStateGetBuilder_.mergeFrom(value);
+ } else {
+ listStateGetBuilder_.setMessage(value);
+ }
+ }
+ methodCase_ = 3;
+ return this;
+ }
+ /**
+ * .org.apache.spark.sql.execution.streaming.state.ListStateGet listStateGet = 3;
+ */
+ public Builder clearListStateGet() {
+ if (listStateGetBuilder_ == null) {
+ if (methodCase_ == 3) {
+ methodCase_ = 0;
+ method_ = null;
+ onChanged();
+ }
+ } else {
+ if (methodCase_ == 3) {
+ methodCase_ = 0;
+ method_ = null;
+ }
+ listStateGetBuilder_.clear();
+ }
+ return this;
+ }
+ /**
+ * .org.apache.spark.sql.execution.streaming.state.ListStateGet listStateGet = 3;
+ */
+ public org.apache.spark.sql.execution.streaming.state.StateMessage.ListStateGet.Builder getListStateGetBuilder() {
+ return getListStateGetFieldBuilder().getBuilder();
+ }
+ /**
+ * .org.apache.spark.sql.execution.streaming.state.ListStateGet listStateGet = 3;
+ */
+ @java.lang.Override
+ public org.apache.spark.sql.execution.streaming.state.StateMessage.ListStateGetOrBuilder getListStateGetOrBuilder() {
+ if ((methodCase_ == 3) && (listStateGetBuilder_ != null)) {
+ return listStateGetBuilder_.getMessageOrBuilder();
+ } else {
+ if (methodCase_ == 3) {
+ return (org.apache.spark.sql.execution.streaming.state.StateMessage.ListStateGet) method_;
+ }
+ return org.apache.spark.sql.execution.streaming.state.StateMessage.ListStateGet.getDefaultInstance();
+ }
+ }
+ /**
+ * .org.apache.spark.sql.execution.streaming.state.ListStateGet listStateGet = 3;
+ */
+ private com.google.protobuf.SingleFieldBuilderV3<
+ org.apache.spark.sql.execution.streaming.state.StateMessage.ListStateGet, org.apache.spark.sql.execution.streaming.state.StateMessage.ListStateGet.Builder, org.apache.spark.sql.execution.streaming.state.StateMessage.ListStateGetOrBuilder>
+ getListStateGetFieldBuilder() {
+ if (listStateGetBuilder_ == null) {
+ if (!(methodCase_ == 3)) {
+ method_ = org.apache.spark.sql.execution.streaming.state.StateMessage.ListStateGet.getDefaultInstance();
+ }
+ listStateGetBuilder_ = new com.google.protobuf.SingleFieldBuilderV3<
+ org.apache.spark.sql.execution.streaming.state.StateMessage.ListStateGet, org.apache.spark.sql.execution.streaming.state.StateMessage.ListStateGet.Builder, org.apache.spark.sql.execution.streaming.state.StateMessage.ListStateGetOrBuilder>(
+ (org.apache.spark.sql.execution.streaming.state.StateMessage.ListStateGet) method_,
+ getParentForChildren(),
+ isClean());
+ method_ = null;
+ }
+ methodCase_ = 3;
+ onChanged();;
+ return listStateGetBuilder_;
+ }
+
+ private com.google.protobuf.SingleFieldBuilderV3<
+ org.apache.spark.sql.execution.streaming.state.StateMessage.ListStatePut, org.apache.spark.sql.execution.streaming.state.StateMessage.ListStatePut.Builder, org.apache.spark.sql.execution.streaming.state.StateMessage.ListStatePutOrBuilder> listStatePutBuilder_;
+ /**
+ * .org.apache.spark.sql.execution.streaming.state.ListStatePut listStatePut = 4;
+ * @return Whether the listStatePut field is set.
+ */
+ @java.lang.Override
+ public boolean hasListStatePut() {
+ return methodCase_ == 4;
+ }
+ /**
+ * .org.apache.spark.sql.execution.streaming.state.ListStatePut listStatePut = 4;
+ * @return The listStatePut.
+ */
+ @java.lang.Override
+ public org.apache.spark.sql.execution.streaming.state.StateMessage.ListStatePut getListStatePut() {
+ if (listStatePutBuilder_ == null) {
+ if (methodCase_ == 4) {
+ return (org.apache.spark.sql.execution.streaming.state.StateMessage.ListStatePut) method_;
+ }
+ return org.apache.spark.sql.execution.streaming.state.StateMessage.ListStatePut.getDefaultInstance();
+ } else {
+ if (methodCase_ == 4) {
+ return listStatePutBuilder_.getMessage();
+ }
+ return org.apache.spark.sql.execution.streaming.state.StateMessage.ListStatePut.getDefaultInstance();
+ }
+ }
+ /**
+ * .org.apache.spark.sql.execution.streaming.state.ListStatePut listStatePut = 4;
+ */
+ public Builder setListStatePut(org.apache.spark.sql.execution.streaming.state.StateMessage.ListStatePut value) {
+ if (listStatePutBuilder_ == null) {
+ if (value == null) {
+ throw new NullPointerException();
+ }
+ method_ = value;
+ onChanged();
+ } else {
+ listStatePutBuilder_.setMessage(value);
+ }
+ methodCase_ = 4;
+ return this;
+ }
+ /**
+ * .org.apache.spark.sql.execution.streaming.state.ListStatePut listStatePut = 4;
+ */
+ public Builder setListStatePut(
+ org.apache.spark.sql.execution.streaming.state.StateMessage.ListStatePut.Builder builderForValue) {
+ if (listStatePutBuilder_ == null) {
+ method_ = builderForValue.build();
+ onChanged();
+ } else {
+ listStatePutBuilder_.setMessage(builderForValue.build());
+ }
+ methodCase_ = 4;
+ return this;
+ }
+ /**
+ * .org.apache.spark.sql.execution.streaming.state.ListStatePut listStatePut = 4;
+ */
+ public Builder mergeListStatePut(org.apache.spark.sql.execution.streaming.state.StateMessage.ListStatePut value) {
+ if (listStatePutBuilder_ == null) {
+ if (methodCase_ == 4 &&
+ method_ != org.apache.spark.sql.execution.streaming.state.StateMessage.ListStatePut.getDefaultInstance()) {
+ method_ = org.apache.spark.sql.execution.streaming.state.StateMessage.ListStatePut.newBuilder((org.apache.spark.sql.execution.streaming.state.StateMessage.ListStatePut) method_)
+ .mergeFrom(value).buildPartial();
+ } else {
+ method_ = value;
+ }
+ onChanged();
+ } else {
+ if (methodCase_ == 4) {
+ listStatePutBuilder_.mergeFrom(value);
+ } else {
+ listStatePutBuilder_.setMessage(value);
+ }
+ }
+ methodCase_ = 4;
+ return this;
+ }
+ /**
+ * .org.apache.spark.sql.execution.streaming.state.ListStatePut listStatePut = 4;
+ */
+ public Builder clearListStatePut() {
+ if (listStatePutBuilder_ == null) {
+ if (methodCase_ == 4) {
+ methodCase_ = 0;
+ method_ = null;
+ onChanged();
+ }
+ } else {
+ if (methodCase_ == 4) {
+ methodCase_ = 0;
+ method_ = null;
+ }
+ listStatePutBuilder_.clear();
+ }
+ return this;
+ }
+ /**
+ * .org.apache.spark.sql.execution.streaming.state.ListStatePut listStatePut = 4;
+ */
+ public org.apache.spark.sql.execution.streaming.state.StateMessage.ListStatePut.Builder getListStatePutBuilder() {
+ return getListStatePutFieldBuilder().getBuilder();
+ }
+ /**
+ * .org.apache.spark.sql.execution.streaming.state.ListStatePut listStatePut = 4;
+ */
+ @java.lang.Override
+ public org.apache.spark.sql.execution.streaming.state.StateMessage.ListStatePutOrBuilder getListStatePutOrBuilder() {
+ if ((methodCase_ == 4) && (listStatePutBuilder_ != null)) {
+ return listStatePutBuilder_.getMessageOrBuilder();
+ } else {
+ if (methodCase_ == 4) {
+ return (org.apache.spark.sql.execution.streaming.state.StateMessage.ListStatePut) method_;
+ }
+ return org.apache.spark.sql.execution.streaming.state.StateMessage.ListStatePut.getDefaultInstance();
+ }
+ }
+ /**
+ * .org.apache.spark.sql.execution.streaming.state.ListStatePut listStatePut = 4;
+ */
+ private com.google.protobuf.SingleFieldBuilderV3<
+ org.apache.spark.sql.execution.streaming.state.StateMessage.ListStatePut, org.apache.spark.sql.execution.streaming.state.StateMessage.ListStatePut.Builder, org.apache.spark.sql.execution.streaming.state.StateMessage.ListStatePutOrBuilder>
+ getListStatePutFieldBuilder() {
+ if (listStatePutBuilder_ == null) {
+ if (!(methodCase_ == 4)) {
+ method_ = org.apache.spark.sql.execution.streaming.state.StateMessage.ListStatePut.getDefaultInstance();
+ }
+ listStatePutBuilder_ = new com.google.protobuf.SingleFieldBuilderV3<
+ org.apache.spark.sql.execution.streaming.state.StateMessage.ListStatePut, org.apache.spark.sql.execution.streaming.state.StateMessage.ListStatePut.Builder, org.apache.spark.sql.execution.streaming.state.StateMessage.ListStatePutOrBuilder>(
+ (org.apache.spark.sql.execution.streaming.state.StateMessage.ListStatePut) method_,
+ getParentForChildren(),
+ isClean());
+ method_ = null;
+ }
+ methodCase_ = 4;
+ onChanged();;
+ return listStatePutBuilder_;
+ }
+
+ private com.google.protobuf.SingleFieldBuilderV3<
+ org.apache.spark.sql.execution.streaming.state.StateMessage.AppendValue, org.apache.spark.sql.execution.streaming.state.StateMessage.AppendValue.Builder, org.apache.spark.sql.execution.streaming.state.StateMessage.AppendValueOrBuilder> appendValueBuilder_;
+ /**
+ * .org.apache.spark.sql.execution.streaming.state.AppendValue appendValue = 5;
+ * @return Whether the appendValue field is set.
+ */
+ @java.lang.Override
+ public boolean hasAppendValue() {
+ return methodCase_ == 5;
+ }
+ /**
+ * .org.apache.spark.sql.execution.streaming.state.AppendValue appendValue = 5;
+ * @return The appendValue.
+ */
+ @java.lang.Override
+ public org.apache.spark.sql.execution.streaming.state.StateMessage.AppendValue getAppendValue() {
+ if (appendValueBuilder_ == null) {
+ if (methodCase_ == 5) {
+ return (org.apache.spark.sql.execution.streaming.state.StateMessage.AppendValue) method_;
+ }
+ return org.apache.spark.sql.execution.streaming.state.StateMessage.AppendValue.getDefaultInstance();
+ } else {
+ if (methodCase_ == 5) {
+ return appendValueBuilder_.getMessage();
+ }
+ return org.apache.spark.sql.execution.streaming.state.StateMessage.AppendValue.getDefaultInstance();
+ }
+ }
+ /**
+ * .org.apache.spark.sql.execution.streaming.state.AppendValue appendValue = 5;
+ */
+ public Builder setAppendValue(org.apache.spark.sql.execution.streaming.state.StateMessage.AppendValue value) {
+ if (appendValueBuilder_ == null) {
+ if (value == null) {
+ throw new NullPointerException();
+ }
+ method_ = value;
+ onChanged();
+ } else {
+ appendValueBuilder_.setMessage(value);
+ }
+ methodCase_ = 5;
+ return this;
+ }
+ /**
+ * .org.apache.spark.sql.execution.streaming.state.AppendValue appendValue = 5;
+ */
+ public Builder setAppendValue(
+ org.apache.spark.sql.execution.streaming.state.StateMessage.AppendValue.Builder builderForValue) {
+ if (appendValueBuilder_ == null) {
+ method_ = builderForValue.build();
+ onChanged();
+ } else {
+ appendValueBuilder_.setMessage(builderForValue.build());
+ }
+ methodCase_ = 5;
+ return this;
+ }
+ /**
+ * .org.apache.spark.sql.execution.streaming.state.AppendValue appendValue = 5;
+ */
+ public Builder mergeAppendValue(org.apache.spark.sql.execution.streaming.state.StateMessage.AppendValue value) {
+ if (appendValueBuilder_ == null) {
+ if (methodCase_ == 5 &&
+ method_ != org.apache.spark.sql.execution.streaming.state.StateMessage.AppendValue.getDefaultInstance()) {
+ method_ = org.apache.spark.sql.execution.streaming.state.StateMessage.AppendValue.newBuilder((org.apache.spark.sql.execution.streaming.state.StateMessage.AppendValue) method_)
+ .mergeFrom(value).buildPartial();
+ } else {
+ method_ = value;
+ }
+ onChanged();
+ } else {
+ if (methodCase_ == 5) {
+ appendValueBuilder_.mergeFrom(value);
+ } else {
+ appendValueBuilder_.setMessage(value);
+ }
+ }
+ methodCase_ = 5;
+ return this;
+ }
+ /**
+ * .org.apache.spark.sql.execution.streaming.state.AppendValue appendValue = 5;
+ */
+ public Builder clearAppendValue() {
+ if (appendValueBuilder_ == null) {
+ if (methodCase_ == 5) {
+ methodCase_ = 0;
+ method_ = null;
+ onChanged();
+ }
+ } else {
+ if (methodCase_ == 5) {
+ methodCase_ = 0;
+ method_ = null;
+ }
+ appendValueBuilder_.clear();
+ }
+ return this;
+ }
+ /**
+ * .org.apache.spark.sql.execution.streaming.state.AppendValue appendValue = 5;
+ */
+ public org.apache.spark.sql.execution.streaming.state.StateMessage.AppendValue.Builder getAppendValueBuilder() {
+ return getAppendValueFieldBuilder().getBuilder();
+ }
+ /**
+ * .org.apache.spark.sql.execution.streaming.state.AppendValue appendValue = 5;
+ */
+ @java.lang.Override
+ public org.apache.spark.sql.execution.streaming.state.StateMessage.AppendValueOrBuilder getAppendValueOrBuilder() {
+ if ((methodCase_ == 5) && (appendValueBuilder_ != null)) {
+ return appendValueBuilder_.getMessageOrBuilder();
+ } else {
+ if (methodCase_ == 5) {
+ return (org.apache.spark.sql.execution.streaming.state.StateMessage.AppendValue) method_;
+ }
+ return org.apache.spark.sql.execution.streaming.state.StateMessage.AppendValue.getDefaultInstance();
+ }
+ }
+ /**
+ * .org.apache.spark.sql.execution.streaming.state.AppendValue appendValue = 5;
+ */
+ private com.google.protobuf.SingleFieldBuilderV3<
+ org.apache.spark.sql.execution.streaming.state.StateMessage.AppendValue, org.apache.spark.sql.execution.streaming.state.StateMessage.AppendValue.Builder, org.apache.spark.sql.execution.streaming.state.StateMessage.AppendValueOrBuilder>
+ getAppendValueFieldBuilder() {
+ if (appendValueBuilder_ == null) {
+ if (!(methodCase_ == 5)) {
+ method_ = org.apache.spark.sql.execution.streaming.state.StateMessage.AppendValue.getDefaultInstance();
+ }
+ appendValueBuilder_ = new com.google.protobuf.SingleFieldBuilderV3<
+ org.apache.spark.sql.execution.streaming.state.StateMessage.AppendValue, org.apache.spark.sql.execution.streaming.state.StateMessage.AppendValue.Builder, org.apache.spark.sql.execution.streaming.state.StateMessage.AppendValueOrBuilder>(
+ (org.apache.spark.sql.execution.streaming.state.StateMessage.AppendValue) method_,
+ getParentForChildren(),
+ isClean());
+ method_ = null;
+ }
+ methodCase_ = 5;
+ onChanged();;
+ return appendValueBuilder_;
+ }
+
+ private com.google.protobuf.SingleFieldBuilderV3<
+ org.apache.spark.sql.execution.streaming.state.StateMessage.AppendList, org.apache.spark.sql.execution.streaming.state.StateMessage.AppendList.Builder, org.apache.spark.sql.execution.streaming.state.StateMessage.AppendListOrBuilder> appendListBuilder_;
+ /**
+ * .org.apache.spark.sql.execution.streaming.state.AppendList appendList = 6;
+ * @return Whether the appendList field is set.
+ */
+ @java.lang.Override
+ public boolean hasAppendList() {
+ return methodCase_ == 6;
+ }
+ /**
+ * .org.apache.spark.sql.execution.streaming.state.AppendList appendList = 6;
+ * @return The appendList.
+ */
+ @java.lang.Override
+ public org.apache.spark.sql.execution.streaming.state.StateMessage.AppendList getAppendList() {
+ if (appendListBuilder_ == null) {
+ if (methodCase_ == 6) {
+ return (org.apache.spark.sql.execution.streaming.state.StateMessage.AppendList) method_;
+ }
+ return org.apache.spark.sql.execution.streaming.state.StateMessage.AppendList.getDefaultInstance();
+ } else {
+ if (methodCase_ == 6) {
+ return appendListBuilder_.getMessage();
+ }
+ return org.apache.spark.sql.execution.streaming.state.StateMessage.AppendList.getDefaultInstance();
+ }
+ }
+ /**
+ * .org.apache.spark.sql.execution.streaming.state.AppendList appendList = 6;
+ */
+ public Builder setAppendList(org.apache.spark.sql.execution.streaming.state.StateMessage.AppendList value) {
+ if (appendListBuilder_ == null) {
+ if (value == null) {
+ throw new NullPointerException();
+ }
+ method_ = value;
+ onChanged();
+ } else {
+ appendListBuilder_.setMessage(value);
+ }
+ methodCase_ = 6;
+ return this;
+ }
+ /**
+ * .org.apache.spark.sql.execution.streaming.state.AppendList appendList = 6;
+ */
+ public Builder setAppendList(
+ org.apache.spark.sql.execution.streaming.state.StateMessage.AppendList.Builder builderForValue) {
+ if (appendListBuilder_ == null) {
+ method_ = builderForValue.build();
+ onChanged();
+ } else {
+ appendListBuilder_.setMessage(builderForValue.build());
+ }
+ methodCase_ = 6;
+ return this;
+ }
+ /**
+ * .org.apache.spark.sql.execution.streaming.state.AppendList appendList = 6;
+ */
+ public Builder mergeAppendList(org.apache.spark.sql.execution.streaming.state.StateMessage.AppendList value) {
+ if (appendListBuilder_ == null) {
+ if (methodCase_ == 6 &&
+ method_ != org.apache.spark.sql.execution.streaming.state.StateMessage.AppendList.getDefaultInstance()) {
+ method_ = org.apache.spark.sql.execution.streaming.state.StateMessage.AppendList.newBuilder((org.apache.spark.sql.execution.streaming.state.StateMessage.AppendList) method_)
+ .mergeFrom(value).buildPartial();
+ } else {
+ method_ = value;
+ }
+ onChanged();
+ } else {
+ if (methodCase_ == 6) {
+ appendListBuilder_.mergeFrom(value);
+ } else {
+ appendListBuilder_.setMessage(value);
+ }
+ }
+ methodCase_ = 6;
+ return this;
+ }
+ /**
+ * .org.apache.spark.sql.execution.streaming.state.AppendList appendList = 6;
+ */
+ public Builder clearAppendList() {
+ if (appendListBuilder_ == null) {
+ if (methodCase_ == 6) {
+ methodCase_ = 0;
+ method_ = null;
+ onChanged();
+ }
+ } else {
+ if (methodCase_ == 6) {
+ methodCase_ = 0;
+ method_ = null;
+ }
+ appendListBuilder_.clear();
+ }
+ return this;
+ }
+ /**
+ * .org.apache.spark.sql.execution.streaming.state.AppendList appendList = 6;
+ */
+ public org.apache.spark.sql.execution.streaming.state.StateMessage.AppendList.Builder getAppendListBuilder() {
+ return getAppendListFieldBuilder().getBuilder();
+ }
+ /**
+ * .org.apache.spark.sql.execution.streaming.state.AppendList appendList = 6;
+ */
+ @java.lang.Override
+ public org.apache.spark.sql.execution.streaming.state.StateMessage.AppendListOrBuilder getAppendListOrBuilder() {
+ if ((methodCase_ == 6) && (appendListBuilder_ != null)) {
+ return appendListBuilder_.getMessageOrBuilder();
+ } else {
+ if (methodCase_ == 6) {
+ return (org.apache.spark.sql.execution.streaming.state.StateMessage.AppendList) method_;
+ }
+ return org.apache.spark.sql.execution.streaming.state.StateMessage.AppendList.getDefaultInstance();
+ }
+ }
+ /**
+ * .org.apache.spark.sql.execution.streaming.state.AppendList appendList = 6;
+ */
+ private com.google.protobuf.SingleFieldBuilderV3<
+ org.apache.spark.sql.execution.streaming.state.StateMessage.AppendList, org.apache.spark.sql.execution.streaming.state.StateMessage.AppendList.Builder, org.apache.spark.sql.execution.streaming.state.StateMessage.AppendListOrBuilder>
+ getAppendListFieldBuilder() {
+ if (appendListBuilder_ == null) {
+ if (!(methodCase_ == 6)) {
+ method_ = org.apache.spark.sql.execution.streaming.state.StateMessage.AppendList.getDefaultInstance();
+ }
+ appendListBuilder_ = new com.google.protobuf.SingleFieldBuilderV3<
+ org.apache.spark.sql.execution.streaming.state.StateMessage.AppendList, org.apache.spark.sql.execution.streaming.state.StateMessage.AppendList.Builder, org.apache.spark.sql.execution.streaming.state.StateMessage.AppendListOrBuilder>(
+ (org.apache.spark.sql.execution.streaming.state.StateMessage.AppendList) method_,
+ getParentForChildren(),
+ isClean());
+ method_ = null;
+ }
+ methodCase_ = 6;
+ onChanged();;
+ return appendListBuilder_;
+ }
+
+ private com.google.protobuf.SingleFieldBuilderV3<
+ org.apache.spark.sql.execution.streaming.state.StateMessage.Clear, org.apache.spark.sql.execution.streaming.state.StateMessage.Clear.Builder, org.apache.spark.sql.execution.streaming.state.StateMessage.ClearOrBuilder> clearBuilder_;
+ /**
+ * .org.apache.spark.sql.execution.streaming.state.Clear clear = 7;
+ * @return Whether the clear field is set.
+ */
+ @java.lang.Override
+ public boolean hasClear() {
+ return methodCase_ == 7;
+ }
+ /**
+ * .org.apache.spark.sql.execution.streaming.state.Clear clear = 7;
+ * @return The clear.
+ */
+ @java.lang.Override
+ public org.apache.spark.sql.execution.streaming.state.StateMessage.Clear getClear() {
+ if (clearBuilder_ == null) {
+ if (methodCase_ == 7) {
+ return (org.apache.spark.sql.execution.streaming.state.StateMessage.Clear) method_;
+ }
+ return org.apache.spark.sql.execution.streaming.state.StateMessage.Clear.getDefaultInstance();
+ } else {
+ if (methodCase_ == 7) {
+ return clearBuilder_.getMessage();
+ }
+ return org.apache.spark.sql.execution.streaming.state.StateMessage.Clear.getDefaultInstance();
+ }
+ }
+ /**
+ * .org.apache.spark.sql.execution.streaming.state.Clear clear = 7;
+ */
+ public Builder setClear(org.apache.spark.sql.execution.streaming.state.StateMessage.Clear value) {
+ if (clearBuilder_ == null) {
+ if (value == null) {
+ throw new NullPointerException();
+ }
+ method_ = value;
+ onChanged();
+ } else {
+ clearBuilder_.setMessage(value);
+ }
+ methodCase_ = 7;
+ return this;
+ }
+ /**
+ * .org.apache.spark.sql.execution.streaming.state.Clear clear = 7;
+ */
+ public Builder setClear(
+ org.apache.spark.sql.execution.streaming.state.StateMessage.Clear.Builder builderForValue) {
+ if (clearBuilder_ == null) {
+ method_ = builderForValue.build();
+ onChanged();
+ } else {
+ clearBuilder_.setMessage(builderForValue.build());
+ }
+ methodCase_ = 7;
+ return this;
+ }
+ /**
+ * .org.apache.spark.sql.execution.streaming.state.Clear clear = 7;
+ */
+ public Builder mergeClear(org.apache.spark.sql.execution.streaming.state.StateMessage.Clear value) {
+ if (clearBuilder_ == null) {
+ if (methodCase_ == 7 &&
+ method_ != org.apache.spark.sql.execution.streaming.state.StateMessage.Clear.getDefaultInstance()) {
+ method_ = org.apache.spark.sql.execution.streaming.state.StateMessage.Clear.newBuilder((org.apache.spark.sql.execution.streaming.state.StateMessage.Clear) method_)
+ .mergeFrom(value).buildPartial();
+ } else {
+ method_ = value;
+ }
+ onChanged();
+ } else {
+ if (methodCase_ == 7) {
+ clearBuilder_.mergeFrom(value);
+ } else {
+ clearBuilder_.setMessage(value);
+ }
+ }
+ methodCase_ = 7;
+ return this;
+ }
+ /**
+ * .org.apache.spark.sql.execution.streaming.state.Clear clear = 7;
+ */
+ public Builder clearClear() {
+ if (clearBuilder_ == null) {
+ if (methodCase_ == 7) {
+ methodCase_ = 0;
+ method_ = null;
+ onChanged();
+ }
+ } else {
+ if (methodCase_ == 7) {
+ methodCase_ = 0;
+ method_ = null;
+ }
+ clearBuilder_.clear();
+ }
+ return this;
+ }
+ /**
+ * .org.apache.spark.sql.execution.streaming.state.Clear clear = 7;
+ */
+ public org.apache.spark.sql.execution.streaming.state.StateMessage.Clear.Builder getClearBuilder() {
+ return getClearFieldBuilder().getBuilder();
+ }
+ /**
+ * .org.apache.spark.sql.execution.streaming.state.Clear clear = 7;
+ */
+ @java.lang.Override
+ public org.apache.spark.sql.execution.streaming.state.StateMessage.ClearOrBuilder getClearOrBuilder() {
+ if ((methodCase_ == 7) && (clearBuilder_ != null)) {
+ return clearBuilder_.getMessageOrBuilder();
+ } else {
+ if (methodCase_ == 7) {
+ return (org.apache.spark.sql.execution.streaming.state.StateMessage.Clear) method_;
+ }
+ return org.apache.spark.sql.execution.streaming.state.StateMessage.Clear.getDefaultInstance();
+ }
+ }
+ /**
+ * .org.apache.spark.sql.execution.streaming.state.Clear clear = 7;
+ */
+ private com.google.protobuf.SingleFieldBuilderV3<
+ org.apache.spark.sql.execution.streaming.state.StateMessage.Clear, org.apache.spark.sql.execution.streaming.state.StateMessage.Clear.Builder, org.apache.spark.sql.execution.streaming.state.StateMessage.ClearOrBuilder>
+ getClearFieldBuilder() {
+ if (clearBuilder_ == null) {
+ if (!(methodCase_ == 7)) {
+ method_ = org.apache.spark.sql.execution.streaming.state.StateMessage.Clear.getDefaultInstance();
+ }
+ clearBuilder_ = new com.google.protobuf.SingleFieldBuilderV3<
+ org.apache.spark.sql.execution.streaming.state.StateMessage.Clear, org.apache.spark.sql.execution.streaming.state.StateMessage.Clear.Builder, org.apache.spark.sql.execution.streaming.state.StateMessage.ClearOrBuilder>(
+ (org.apache.spark.sql.execution.streaming.state.StateMessage.Clear) method_,
+ getParentForChildren(),
+ isClean());
+ method_ = null;
+ }
+ methodCase_ = 7;
+ onChanged();;
+ return clearBuilder_;
+ }
+ @java.lang.Override
+ public final Builder setUnknownFields(
+ final com.google.protobuf.UnknownFieldSet unknownFields) {
+ return super.setUnknownFields(unknownFields);
+ }
+
+ @java.lang.Override
+ public final Builder mergeUnknownFields(
+ final com.google.protobuf.UnknownFieldSet unknownFields) {
+ return super.mergeUnknownFields(unknownFields);
+ }
+
+
+ // @@protoc_insertion_point(builder_scope:org.apache.spark.sql.execution.streaming.state.ListStateCall)
+ }
+
+ // @@protoc_insertion_point(class_scope:org.apache.spark.sql.execution.streaming.state.ListStateCall)
+ private static final org.apache.spark.sql.execution.streaming.state.StateMessage.ListStateCall DEFAULT_INSTANCE;
+ static {
+ DEFAULT_INSTANCE = new org.apache.spark.sql.execution.streaming.state.StateMessage.ListStateCall();
+ }
+
+ public static org.apache.spark.sql.execution.streaming.state.StateMessage.ListStateCall getDefaultInstance() {
+ return DEFAULT_INSTANCE;
+ }
+
+    private static final com.google.protobuf.Parser<ListStateCall>
+        PARSER = new com.google.protobuf.AbstractParser<ListStateCall>() {
+      // ... generated parser body elided ...
+    };
+
+    // ... parser accessor methods elided ...
+  }
+
+  public interface SetImplicitKeyOrBuilder extends
+      // @@protoc_insertion_point(interface_extends:org.apache.spark.sql.execution.streaming.state.SetImplicitKey)
+      com.google.protobuf.MessageOrBuilder {
+
+    /**
+     * bytes key = 1;
+     * @return The key.
+     */
+    com.google.protobuf.ByteString getKey();
+  }
+ /**
+ * Protobuf type {@code org.apache.spark.sql.execution.streaming.state.SetImplicitKey}
+ */
+ public static final class SetImplicitKey extends
+ com.google.protobuf.GeneratedMessageV3 implements
+ // @@protoc_insertion_point(message_implements:org.apache.spark.sql.execution.streaming.state.SetImplicitKey)
+ SetImplicitKeyOrBuilder {
+ private static final long serialVersionUID = 0L;
+ // Use SetImplicitKey.newBuilder() to construct.
+    private SetImplicitKey(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
+ super(builder);
+ }
+ private SetImplicitKey() {
+ key_ = com.google.protobuf.ByteString.EMPTY;
+ }
+
+ @java.lang.Override
+ @SuppressWarnings({"unused"})
+ protected java.lang.Object newInstance(
+ UnusedPrivateParameter unused) {
+ return new SetImplicitKey();
+ }
+
+ @java.lang.Override
+ public final com.google.protobuf.UnknownFieldSet
+ getUnknownFields() {
+ return this.unknownFields;
+ }
+ public static final com.google.protobuf.Descriptors.Descriptor
+ getDescriptor() {
+ return org.apache.spark.sql.execution.streaming.state.StateMessage.internal_static_org_apache_spark_sql_execution_streaming_state_SetImplicitKey_descriptor;
+ }
+
+ @java.lang.Override
+ protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
+ internalGetFieldAccessorTable() {
+ return org.apache.spark.sql.execution.streaming.state.StateMessage.internal_static_org_apache_spark_sql_execution_streaming_state_SetImplicitKey_fieldAccessorTable
+ .ensureFieldAccessorsInitialized(
+ org.apache.spark.sql.execution.streaming.state.StateMessage.SetImplicitKey.class, org.apache.spark.sql.execution.streaming.state.StateMessage.SetImplicitKey.Builder.class);
+ }
+
+ public static final int KEY_FIELD_NUMBER = 1;
+ private com.google.protobuf.ByteString key_;
+ /**
+ * bytes key = 1;
+ * @return The key.
+ */
+ @java.lang.Override
+ public com.google.protobuf.ByteString getKey() {
+ return key_;
+ }
+
+ private byte memoizedIsInitialized = -1;
+ @java.lang.Override
+ public final boolean isInitialized() {
+ byte isInitialized = memoizedIsInitialized;
+ if (isInitialized == 1) return true;
+ if (isInitialized == 0) return false;
+
+ memoizedIsInitialized = 1;
+ return true;
+ }
+
+ @java.lang.Override
+ public void writeTo(com.google.protobuf.CodedOutputStream output)
+ throws java.io.IOException {
+ if (!key_.isEmpty()) {
+ output.writeBytes(1, key_);
+ }
+ getUnknownFields().writeTo(output);
+ }
+
+ @java.lang.Override
+ public int getSerializedSize() {
+ int size = memoizedSize;
+ if (size != -1) return size;
+
+ size = 0;
+ if (!key_.isEmpty()) {
+ size += com.google.protobuf.CodedOutputStream
+ .computeBytesSize(1, key_);
+ }
+ size += getUnknownFields().getSerializedSize();
+ memoizedSize = size;
+ return size;
+ }
+
+ @java.lang.Override
+ public boolean equals(final java.lang.Object obj) {
+ if (obj == this) {
+ return true;
+ }
+ if (!(obj instanceof org.apache.spark.sql.execution.streaming.state.StateMessage.SetImplicitKey)) {
+ return super.equals(obj);
+ }
+ org.apache.spark.sql.execution.streaming.state.StateMessage.SetImplicitKey other = (org.apache.spark.sql.execution.streaming.state.StateMessage.SetImplicitKey) obj;
+
+ if (!getKey()
+ .equals(other.getKey())) return false;
+ if (!getUnknownFields().equals(other.getUnknownFields())) return false;
+ return true;
+ }
+
+ @java.lang.Override
+ public int hashCode() {
+ if (memoizedHashCode != 0) {
+ return memoizedHashCode;
+ }
+ int hash = 41;
+ hash = (19 * hash) + getDescriptor().hashCode();
+ hash = (37 * hash) + KEY_FIELD_NUMBER;
+ hash = (53 * hash) + getKey().hashCode();
+ hash = (29 * hash) + getUnknownFields().hashCode();
+ memoizedHashCode = hash;
+ return hash;
+ }
+
+ public static org.apache.spark.sql.execution.streaming.state.StateMessage.SetImplicitKey parseFrom(
+ java.nio.ByteBuffer data)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return PARSER.parseFrom(data);
+ }
+ public static org.apache.spark.sql.execution.streaming.state.StateMessage.SetImplicitKey parseFrom(
+ java.nio.ByteBuffer data,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return PARSER.parseFrom(data, extensionRegistry);
+ }
+ public static org.apache.spark.sql.execution.streaming.state.StateMessage.SetImplicitKey parseFrom(
+ com.google.protobuf.ByteString data)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return PARSER.parseFrom(data);
+ }
+ public static org.apache.spark.sql.execution.streaming.state.StateMessage.SetImplicitKey parseFrom(
+ com.google.protobuf.ByteString data,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return PARSER.parseFrom(data, extensionRegistry);
+ }
+ public static org.apache.spark.sql.execution.streaming.state.StateMessage.SetImplicitKey parseFrom(byte[] data)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return PARSER.parseFrom(data);
+ }
+ public static org.apache.spark.sql.execution.streaming.state.StateMessage.SetImplicitKey parseFrom(
+ byte[] data,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return PARSER.parseFrom(data, extensionRegistry);
+ }
+ public static org.apache.spark.sql.execution.streaming.state.StateMessage.SetImplicitKey parseFrom(java.io.InputStream input)
+ throws java.io.IOException {
+ return com.google.protobuf.GeneratedMessageV3
+ .parseWithIOException(PARSER, input);
+ }
+ public static org.apache.spark.sql.execution.streaming.state.StateMessage.SetImplicitKey parseFrom(
+ java.io.InputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ return com.google.protobuf.GeneratedMessageV3
+ .parseWithIOException(PARSER, input, extensionRegistry);
+ }
+ public static org.apache.spark.sql.execution.streaming.state.StateMessage.SetImplicitKey parseDelimitedFrom(java.io.InputStream input)
+ throws java.io.IOException {
+ return com.google.protobuf.GeneratedMessageV3
+ .parseDelimitedWithIOException(PARSER, input);
+ }
+ public static org.apache.spark.sql.execution.streaming.state.StateMessage.SetImplicitKey parseDelimitedFrom(
+ java.io.InputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ return com.google.protobuf.GeneratedMessageV3
+ .parseDelimitedWithIOException(PARSER, input, extensionRegistry);
+ }
+ public static org.apache.spark.sql.execution.streaming.state.StateMessage.SetImplicitKey parseFrom(
+ com.google.protobuf.CodedInputStream input)
+ throws java.io.IOException {
+ return com.google.protobuf.GeneratedMessageV3
+ .parseWithIOException(PARSER, input);
+ }
+ public static org.apache.spark.sql.execution.streaming.state.StateMessage.SetImplicitKey parseFrom(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ return com.google.protobuf.GeneratedMessageV3
+ .parseWithIOException(PARSER, input, extensionRegistry);
+ }
+
+ @java.lang.Override
+ public Builder newBuilderForType() { return newBuilder(); }
+ public static Builder newBuilder() {
+ return DEFAULT_INSTANCE.toBuilder();
+ }
+ public static Builder newBuilder(org.apache.spark.sql.execution.streaming.state.StateMessage.SetImplicitKey prototype) {
+ return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
+ }
+ @java.lang.Override
+ public Builder toBuilder() {
+ return this == DEFAULT_INSTANCE
+ ? new Builder() : new Builder().mergeFrom(this);
+ }
+
+ @java.lang.Override
+ protected Builder newBuilderForType(
+ com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
+ Builder builder = new Builder(parent);
+ return builder;
+ }
+ /**
+ * Protobuf type {@code org.apache.spark.sql.execution.streaming.state.SetImplicitKey}
+ */
+    public static final class Builder extends
+        com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements
+        // @@protoc_insertion_point(builder_implements:org.apache.spark.sql.execution.streaming.state.SetImplicitKey)
+        org.apache.spark.sql.execution.streaming.state.StateMessage.SetImplicitKeyOrBuilder {
+      // ... generated Builder boilerplate elided ...
+      private com.google.protobuf.ByteString key_ = com.google.protobuf.ByteString.EMPTY;
+      /**
+       * bytes key = 1;
+       * @return The key.
+       */
+ @java.lang.Override
+ public com.google.protobuf.ByteString getKey() {
+ return key_;
+ }
+ /**
+ * bytes key = 1;
+ * @param value The key to set.
+ * @return This builder for chaining.
+ */
+ public Builder setKey(com.google.protobuf.ByteString value) {
+ if (value == null) {
+ throw new NullPointerException();
+ }
+
+ key_ = value;
+ onChanged();
+ return this;
+ }
+ /**
+ * bytes key = 1;
+ * @return This builder for chaining.
+ */
+ public Builder clearKey() {
+
+ key_ = getDefaultInstance().getKey();
+ onChanged();
+ return this;
+ }
+ @java.lang.Override
+ public final Builder setUnknownFields(
+ final com.google.protobuf.UnknownFieldSet unknownFields) {
+ return super.setUnknownFields(unknownFields);
+ }
+
+ @java.lang.Override
+ public final Builder mergeUnknownFields(
+ final com.google.protobuf.UnknownFieldSet unknownFields) {
+ return super.mergeUnknownFields(unknownFields);
+ }
+
+
+ // @@protoc_insertion_point(builder_scope:org.apache.spark.sql.execution.streaming.state.SetImplicitKey)
+ }
+
+ // @@protoc_insertion_point(class_scope:org.apache.spark.sql.execution.streaming.state.SetImplicitKey)
+ private static final org.apache.spark.sql.execution.streaming.state.StateMessage.SetImplicitKey DEFAULT_INSTANCE;
+ static {
+ DEFAULT_INSTANCE = new org.apache.spark.sql.execution.streaming.state.StateMessage.SetImplicitKey();
+ }
+
+ public static org.apache.spark.sql.execution.streaming.state.StateMessage.SetImplicitKey getDefaultInstance() {
+ return DEFAULT_INSTANCE;
+ }
+
+    private static final com.google.protobuf.Parser<SetImplicitKey>
+        PARSER = new com.google.protobuf.AbstractParser<SetImplicitKey>() {
+      // ... generated parser body elided ...
+    };
+
+    // ... parser accessor methods elided ...
+  }
+
+  public interface ValueStateUpdateOrBuilder extends
+      // @@protoc_insertion_point(interface_extends:org.apache.spark.sql.execution.streaming.state.ValueStateUpdate)
+      com.google.protobuf.MessageOrBuilder {
+
+    /**
+     * bytes value = 1;
+     * @return The value.
+     */
+    com.google.protobuf.ByteString getValue();
+  }
+ /**
+ * Protobuf type {@code org.apache.spark.sql.execution.streaming.state.ValueStateUpdate}
+ */
+ public static final class ValueStateUpdate extends
+ com.google.protobuf.GeneratedMessageV3 implements
+ // @@protoc_insertion_point(message_implements:org.apache.spark.sql.execution.streaming.state.ValueStateUpdate)
+ ValueStateUpdateOrBuilder {
+ private static final long serialVersionUID = 0L;
+ // Use ValueStateUpdate.newBuilder() to construct.
+    private ValueStateUpdate(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
+ super(builder);
+ }
+ private ValueStateUpdate() {
+ value_ = com.google.protobuf.ByteString.EMPTY;
+ }
+
+ @java.lang.Override
+ @SuppressWarnings({"unused"})
+ protected java.lang.Object newInstance(
+ UnusedPrivateParameter unused) {
+ return new ValueStateUpdate();
+ }
+
+ @java.lang.Override
+ public final com.google.protobuf.UnknownFieldSet
+ getUnknownFields() {
+ return this.unknownFields;
+ }
+ public static final com.google.protobuf.Descriptors.Descriptor
+ getDescriptor() {
+ return org.apache.spark.sql.execution.streaming.state.StateMessage.internal_static_org_apache_spark_sql_execution_streaming_state_ValueStateUpdate_descriptor;
+ }
+
+ @java.lang.Override
+ protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
+ internalGetFieldAccessorTable() {
+ return org.apache.spark.sql.execution.streaming.state.StateMessage.internal_static_org_apache_spark_sql_execution_streaming_state_ValueStateUpdate_fieldAccessorTable
+ .ensureFieldAccessorsInitialized(
+ org.apache.spark.sql.execution.streaming.state.StateMessage.ValueStateUpdate.class, org.apache.spark.sql.execution.streaming.state.StateMessage.ValueStateUpdate.Builder.class);
+ }
+
+ public static final int VALUE_FIELD_NUMBER = 1;
+ private com.google.protobuf.ByteString value_;
+ /**
+ * bytes value = 1;
+ * @return The value.
+ */
+ @java.lang.Override
+ public com.google.protobuf.ByteString getValue() {
+ return value_;
+ }
+
+ private byte memoizedIsInitialized = -1;
+ @java.lang.Override
+ public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
@@ -7558,8 +11434,8 @@ public final boolean isInitialized() {
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output)
throws java.io.IOException {
- if (!key_.isEmpty()) {
- output.writeBytes(1, key_);
+ if (!value_.isEmpty()) {
+ output.writeBytes(1, value_);
}
getUnknownFields().writeTo(output);
}
@@ -7570,9 +11446,9 @@ public int getSerializedSize() {
if (size != -1) return size;
size = 0;
- if (!key_.isEmpty()) {
+ if (!value_.isEmpty()) {
size += com.google.protobuf.CodedOutputStream
- .computeBytesSize(1, key_);
+ .computeBytesSize(1, value_);
}
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
@@ -7584,13 +11460,13 @@ public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
- if (!(obj instanceof org.apache.spark.sql.execution.streaming.state.StateMessage.SetImplicitKey)) {
+ if (!(obj instanceof org.apache.spark.sql.execution.streaming.state.StateMessage.ValueStateUpdate)) {
return super.equals(obj);
}
- org.apache.spark.sql.execution.streaming.state.StateMessage.SetImplicitKey other = (org.apache.spark.sql.execution.streaming.state.StateMessage.SetImplicitKey) obj;
+ org.apache.spark.sql.execution.streaming.state.StateMessage.ValueStateUpdate other = (org.apache.spark.sql.execution.streaming.state.StateMessage.ValueStateUpdate) obj;
- if (!getKey()
- .equals(other.getKey())) return false;
+ if (!getValue()
+ .equals(other.getValue())) return false;
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
@@ -7602,76 +11478,76 @@ public int hashCode() {
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
- hash = (37 * hash) + KEY_FIELD_NUMBER;
- hash = (53 * hash) + getKey().hashCode();
+ hash = (37 * hash) + VALUE_FIELD_NUMBER;
+ hash = (53 * hash) + getValue().hashCode();
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
- public static org.apache.spark.sql.execution.streaming.state.StateMessage.SetImplicitKey parseFrom(
+ public static org.apache.spark.sql.execution.streaming.state.StateMessage.ValueStateUpdate parseFrom(
java.nio.ByteBuffer data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
- public static org.apache.spark.sql.execution.streaming.state.StateMessage.SetImplicitKey parseFrom(
+ public static org.apache.spark.sql.execution.streaming.state.StateMessage.ValueStateUpdate parseFrom(
java.nio.ByteBuffer data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
- public static org.apache.spark.sql.execution.streaming.state.StateMessage.SetImplicitKey parseFrom(
+ public static org.apache.spark.sql.execution.streaming.state.StateMessage.ValueStateUpdate parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
- public static org.apache.spark.sql.execution.streaming.state.StateMessage.SetImplicitKey parseFrom(
+ public static org.apache.spark.sql.execution.streaming.state.StateMessage.ValueStateUpdate parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
- public static org.apache.spark.sql.execution.streaming.state.StateMessage.SetImplicitKey parseFrom(byte[] data)
+ public static org.apache.spark.sql.execution.streaming.state.StateMessage.ValueStateUpdate parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
- public static org.apache.spark.sql.execution.streaming.state.StateMessage.SetImplicitKey parseFrom(
+ public static org.apache.spark.sql.execution.streaming.state.StateMessage.ValueStateUpdate parseFrom(
byte[] data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
- public static org.apache.spark.sql.execution.streaming.state.StateMessage.SetImplicitKey parseFrom(java.io.InputStream input)
+ public static org.apache.spark.sql.execution.streaming.state.StateMessage.ValueStateUpdate parseFrom(java.io.InputStream input)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
- public static org.apache.spark.sql.execution.streaming.state.StateMessage.SetImplicitKey parseFrom(
+ public static org.apache.spark.sql.execution.streaming.state.StateMessage.ValueStateUpdate parseFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
- public static org.apache.spark.sql.execution.streaming.state.StateMessage.SetImplicitKey parseDelimitedFrom(java.io.InputStream input)
+ public static org.apache.spark.sql.execution.streaming.state.StateMessage.ValueStateUpdate parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input);
}
- public static org.apache.spark.sql.execution.streaming.state.StateMessage.SetImplicitKey parseDelimitedFrom(
+ public static org.apache.spark.sql.execution.streaming.state.StateMessage.ValueStateUpdate parseDelimitedFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input, extensionRegistry);
}
- public static org.apache.spark.sql.execution.streaming.state.StateMessage.SetImplicitKey parseFrom(
+ public static org.apache.spark.sql.execution.streaming.state.StateMessage.ValueStateUpdate parseFrom(
com.google.protobuf.CodedInputStream input)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
- public static org.apache.spark.sql.execution.streaming.state.StateMessage.SetImplicitKey parseFrom(
+ public static org.apache.spark.sql.execution.streaming.state.StateMessage.ValueStateUpdate parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
@@ -7684,7 +11560,7 @@ public static org.apache.spark.sql.execution.streaming.state.StateMessage.SetImp
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
- public static Builder newBuilder(org.apache.spark.sql.execution.streaming.state.StateMessage.SetImplicitKey prototype) {
+ public static Builder newBuilder(org.apache.spark.sql.execution.streaming.state.StateMessage.ValueStateUpdate prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
@@ -7700,26 +11576,26 @@ protected Builder newBuilderForType(
return builder;
}
/**
- * Protobuf type {@code org.apache.spark.sql.execution.streaming.state.SetImplicitKey}
+ * Protobuf type {@code org.apache.spark.sql.execution.streaming.state.ValueStateUpdate}
*/
    public static final class Builder extends
        com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements
    // ... unchanged context lines elided ...
    /**
-     * bytes key = 1;
- * @return The key.
+ * bytes value = 1;
+ * @return The value.
*/
@java.lang.Override
- public com.google.protobuf.ByteString getKey() {
- return key_;
+ public com.google.protobuf.ByteString getValue() {
+ return value_;
}
/**
- * bytes key = 1;
- * @param value The key to set.
+ * bytes value = 1;
+ * @param value The value to set.
* @return This builder for chaining.
*/
- public Builder setKey(com.google.protobuf.ByteString value) {
+ public Builder setValue(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
- key_ = value;
+ value_ = value;
onChanged();
return this;
}
/**
- * bytes key = 1;
+ * bytes value = 1;
* @return This builder for chaining.
*/
- public Builder clearKey() {
+ public Builder clearValue() {
- key_ = getDefaultInstance().getKey();
+ value_ = getDefaultInstance().getValue();
onChanged();
return this;
}
@@ -7905,23 +11781,23 @@ public final Builder mergeUnknownFields(
}
- // @@protoc_insertion_point(builder_scope:org.apache.spark.sql.execution.streaming.state.SetImplicitKey)
+ // @@protoc_insertion_point(builder_scope:org.apache.spark.sql.execution.streaming.state.ValueStateUpdate)
}
- // @@protoc_insertion_point(class_scope:org.apache.spark.sql.execution.streaming.state.SetImplicitKey)
- private static final org.apache.spark.sql.execution.streaming.state.StateMessage.SetImplicitKey DEFAULT_INSTANCE;
+ // @@protoc_insertion_point(class_scope:org.apache.spark.sql.execution.streaming.state.ValueStateUpdate)
+ private static final org.apache.spark.sql.execution.streaming.state.StateMessage.ValueStateUpdate DEFAULT_INSTANCE;
static {
- DEFAULT_INSTANCE = new org.apache.spark.sql.execution.streaming.state.StateMessage.SetImplicitKey();
+ DEFAULT_INSTANCE = new org.apache.spark.sql.execution.streaming.state.StateMessage.ValueStateUpdate();
}
- public static org.apache.spark.sql.execution.streaming.state.StateMessage.SetImplicitKey getDefaultInstance() {
+ public static org.apache.spark.sql.execution.streaming.state.StateMessage.ValueStateUpdate getDefaultInstance() {
return DEFAULT_INSTANCE;
}
-  private static final com.google.protobuf.Parser<SetImplicitKey>
+  private static final com.google.protobuf.Parser<ValueStateUpdate>
// ... intervening diff lines elided ...
+  /**
+   * string iteratorId = 1;
+   * @return The iteratorId.
+ */
+ java.lang.String getIteratorId();
+ /**
+ * string iteratorId = 1;
+ * @return The bytes for iteratorId.
+ */
+ com.google.protobuf.ByteString
+ getIteratorIdBytes();
}
/**
- * Protobuf type {@code org.apache.spark.sql.execution.streaming.state.Exists}
+ * Protobuf type {@code org.apache.spark.sql.execution.streaming.state.ListStateGet}
*/
- public static final class Exists extends
+ public static final class ListStateGet extends
com.google.protobuf.GeneratedMessageV3 implements
- // @@protoc_insertion_point(message_implements:org.apache.spark.sql.execution.streaming.state.Exists)
- ExistsOrBuilder {
+ // @@protoc_insertion_point(message_implements:org.apache.spark.sql.execution.streaming.state.ListStateGet)
+ ListStateGetOrBuilder {
private static final long serialVersionUID = 0L;
- // Use Exists.newBuilder() to construct.
-  private Exists(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
+ // Use ListStateGet.newBuilder() to construct.
+  private ListStateGet(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
- private Exists() {
+ private ListStateGet() {
+ iteratorId_ = "";
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(
UnusedPrivateParameter unused) {
- return new Exists();
+ return new ListStateGet();
}
@java.lang.Override
@@ -8389,15 +12278,53 @@ protected java.lang.Object newInstance(
}
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
- return org.apache.spark.sql.execution.streaming.state.StateMessage.internal_static_org_apache_spark_sql_execution_streaming_state_Exists_descriptor;
+ return org.apache.spark.sql.execution.streaming.state.StateMessage.internal_static_org_apache_spark_sql_execution_streaming_state_ListStateGet_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
- return org.apache.spark.sql.execution.streaming.state.StateMessage.internal_static_org_apache_spark_sql_execution_streaming_state_Exists_fieldAccessorTable
+ return org.apache.spark.sql.execution.streaming.state.StateMessage.internal_static_org_apache_spark_sql_execution_streaming_state_ListStateGet_fieldAccessorTable
.ensureFieldAccessorsInitialized(
- org.apache.spark.sql.execution.streaming.state.StateMessage.Exists.class, org.apache.spark.sql.execution.streaming.state.StateMessage.Exists.Builder.class);
+ org.apache.spark.sql.execution.streaming.state.StateMessage.ListStateGet.class, org.apache.spark.sql.execution.streaming.state.StateMessage.ListStateGet.Builder.class);
+ }
+
+ public static final int ITERATORID_FIELD_NUMBER = 1;
+ private volatile java.lang.Object iteratorId_;
+ /**
+ * string iteratorId = 1;
+ * @return The iteratorId.
+ */
+ @java.lang.Override
+ public java.lang.String getIteratorId() {
+ java.lang.Object ref = iteratorId_;
+ if (ref instanceof java.lang.String) {
+ return (java.lang.String) ref;
+ } else {
+ com.google.protobuf.ByteString bs =
+ (com.google.protobuf.ByteString) ref;
+ java.lang.String s = bs.toStringUtf8();
+ iteratorId_ = s;
+ return s;
+ }
+ }
+ /**
+ * string iteratorId = 1;
+ * @return The bytes for iteratorId.
+ */
+ @java.lang.Override
+ public com.google.protobuf.ByteString
+ getIteratorIdBytes() {
+ java.lang.Object ref = iteratorId_;
+ if (ref instanceof java.lang.String) {
+ com.google.protobuf.ByteString b =
+ com.google.protobuf.ByteString.copyFromUtf8(
+ (java.lang.String) ref);
+ iteratorId_ = b;
+ return b;
+ } else {
+ return (com.google.protobuf.ByteString) ref;
+ }
}
private byte memoizedIsInitialized = -1;
@@ -8414,6 +12341,9 @@ public final boolean isInitialized() {
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output)
throws java.io.IOException {
+ if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(iteratorId_)) {
+ com.google.protobuf.GeneratedMessageV3.writeString(output, 1, iteratorId_);
+ }
getUnknownFields().writeTo(output);
}
@@ -8423,6 +12353,9 @@ public int getSerializedSize() {
if (size != -1) return size;
size = 0;
+ if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(iteratorId_)) {
+ size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, iteratorId_);
+ }
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
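
`writeTo()` and `getSerializedSize()` above follow proto3's default-skipping rule: the field goes on the wire only when it differs from the default empty string, so a `ListStateGet` with an unset `iteratorId` serializes to zero bytes. A quick illustrative check (assumes the generated classes are on the classpath):

import org.apache.spark.sql.execution.streaming.state.StateMessage.ListStateGet;

public final class DefaultSizeExample {
  public static void main(String[] args) {
    // Default instance: iteratorId is "", so nothing is written.
    System.out.println(ListStateGet.getDefaultInstance().getSerializedSize()); // 0

    // Non-default value: 1 tag byte + 1 length byte + 6 bytes of UTF-8.
    ListStateGet msg = ListStateGet.newBuilder().setIteratorId("iter-1").build();
    System.out.println(msg.getSerializedSize()); // 8
  }
}
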
@@ -8433,11 +12366,13 @@ public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
- if (!(obj instanceof org.apache.spark.sql.execution.streaming.state.StateMessage.Exists)) {
+ if (!(obj instanceof org.apache.spark.sql.execution.streaming.state.StateMessage.ListStateGet)) {
return super.equals(obj);
}
- org.apache.spark.sql.execution.streaming.state.StateMessage.Exists other = (org.apache.spark.sql.execution.streaming.state.StateMessage.Exists) obj;
+ org.apache.spark.sql.execution.streaming.state.StateMessage.ListStateGet other = (org.apache.spark.sql.execution.streaming.state.StateMessage.ListStateGet) obj;
+ if (!getIteratorId()
+ .equals(other.getIteratorId())) return false;
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
@@ -8449,74 +12384,76 @@ public int hashCode() {
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
+ hash = (37 * hash) + ITERATORID_FIELD_NUMBER;
+ hash = (53 * hash) + getIteratorId().hashCode();
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
- public static org.apache.spark.sql.execution.streaming.state.StateMessage.Exists parseFrom(
+ public static org.apache.spark.sql.execution.streaming.state.StateMessage.ListStateGet parseFrom(
java.nio.ByteBuffer data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
- public static org.apache.spark.sql.execution.streaming.state.StateMessage.Exists parseFrom(
+ public static org.apache.spark.sql.execution.streaming.state.StateMessage.ListStateGet parseFrom(
java.nio.ByteBuffer data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
- public static org.apache.spark.sql.execution.streaming.state.StateMessage.Exists parseFrom(
+ public static org.apache.spark.sql.execution.streaming.state.StateMessage.ListStateGet parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
- public static org.apache.spark.sql.execution.streaming.state.StateMessage.Exists parseFrom(
+ public static org.apache.spark.sql.execution.streaming.state.StateMessage.ListStateGet parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
- public static org.apache.spark.sql.execution.streaming.state.StateMessage.Exists parseFrom(byte[] data)
+ public static org.apache.spark.sql.execution.streaming.state.StateMessage.ListStateGet parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
- public static org.apache.spark.sql.execution.streaming.state.StateMessage.Exists parseFrom(
+ public static org.apache.spark.sql.execution.streaming.state.StateMessage.ListStateGet parseFrom(
byte[] data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
- public static org.apache.spark.sql.execution.streaming.state.StateMessage.Exists parseFrom(java.io.InputStream input)
+ public static org.apache.spark.sql.execution.streaming.state.StateMessage.ListStateGet parseFrom(java.io.InputStream input)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
- public static org.apache.spark.sql.execution.streaming.state.StateMessage.Exists parseFrom(
+ public static org.apache.spark.sql.execution.streaming.state.StateMessage.ListStateGet parseFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
- public static org.apache.spark.sql.execution.streaming.state.StateMessage.Exists parseDelimitedFrom(java.io.InputStream input)
+ public static org.apache.spark.sql.execution.streaming.state.StateMessage.ListStateGet parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input);
}
- public static org.apache.spark.sql.execution.streaming.state.StateMessage.Exists parseDelimitedFrom(
+ public static org.apache.spark.sql.execution.streaming.state.StateMessage.ListStateGet parseDelimitedFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input, extensionRegistry);
}
- public static org.apache.spark.sql.execution.streaming.state.StateMessage.Exists parseFrom(
+ public static org.apache.spark.sql.execution.streaming.state.StateMessage.ListStateGet parseFrom(
com.google.protobuf.CodedInputStream input)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
- public static org.apache.spark.sql.execution.streaming.state.StateMessage.Exists parseFrom(
+ public static org.apache.spark.sql.execution.streaming.state.StateMessage.ListStateGet parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
@@ -8529,7 +12466,7 @@ public static org.apache.spark.sql.execution.streaming.state.StateMessage.Exists
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
- public static Builder newBuilder(org.apache.spark.sql.execution.streaming.state.StateMessage.Exists prototype) {
+ public static Builder newBuilder(org.apache.spark.sql.execution.streaming.state.StateMessage.ListStateGet prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
@@ -8545,26 +12482,26 @@ protected Builder newBuilderForType(
return builder;
}
/**
- * Protobuf type {@code org.apache.spark.sql.execution.streaming.state.Exists}
+ * Protobuf type {@code org.apache.spark.sql.execution.streaming.state.ListStateGet}
*/
public static final class Builder extends
com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements
- // @@protoc_insertion_point(builder_implements:org.apache.spark.sql.execution.streaming.state.Exists)
- org.apache.spark.sql.execution.streaming.state.StateMessage.ExistsOrBuilder {
+ // @@protoc_insertion_point(builder_implements:org.apache.spark.sql.execution.streaming.state.ListStateGet)
+ org.apache.spark.sql.execution.streaming.state.StateMessage.ListStateGetOrBuilder {
+ /**
+ * <code>string iteratorId = 1;</code>
+ * @return The iteratorId.
+ */
+ public java.lang.String getIteratorId() {
+ java.lang.Object ref = iteratorId_;
+ if (!(ref instanceof java.lang.String)) {
+ com.google.protobuf.ByteString bs =
+ (com.google.protobuf.ByteString) ref;
+ java.lang.String s = bs.toStringUtf8();
+ iteratorId_ = s;
+ return s;
+ } else {
+ return (java.lang.String) ref;
+ }
+ }
+ /**
+ * <code>string iteratorId = 1;</code>
+ * @return The bytes for iteratorId.
+ */
+ public com.google.protobuf.ByteString
+ getIteratorIdBytes() {
+ java.lang.Object ref = iteratorId_;
+ if (ref instanceof String) {
+ com.google.protobuf.ByteString b =
+ com.google.protobuf.ByteString.copyFromUtf8(
+ (java.lang.String) ref);
+ iteratorId_ = b;
+ return b;
+ } else {
+ return (com.google.protobuf.ByteString) ref;
+ }
+ }
+ /**
+ * <code>string iteratorId = 1;</code>
+ * @param value The iteratorId to set.
+ * @return This builder for chaining.
+ */
+ public Builder setIteratorId(
+ java.lang.String value) {
+ if (value == null) {
+ throw new NullPointerException();
+ }
+
+ iteratorId_ = value;
+ onChanged();
+ return this;
+ }
+ /**
+ * <code>string iteratorId = 1;</code>
+ * @return This builder for chaining.
+ */
+ public Builder clearIteratorId() {
+
+ iteratorId_ = getDefaultInstance().getIteratorId();
+ onChanged();
+ return this;
+ }
+ /**
+ * <code>string iteratorId = 1;</code>
+ * @param value The bytes for iteratorId to set.
+ * @return This builder for chaining.
+ */
+ public Builder setIteratorIdBytes(
+ com.google.protobuf.ByteString value) {
+ if (value == null) {
+ throw new NullPointerException();
+ }
+ checkByteStringIsUtf8(value);
+
+ iteratorId_ = value;
+ onChanged();
+ return this;
+ }
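
The three mutators above follow the generated-builder contract: both setters reject null, `setIteratorIdBytes()` additionally validates UTF-8 via `checkByteStringIsUtf8()`, and `clearIteratorId()` resets the field to the default instance's value (the empty string in proto3). Illustrative usage, under the same classpath assumption as the earlier sketches:

import com.google.protobuf.ByteString;
import org.apache.spark.sql.execution.streaming.state.StateMessage.ListStateGet;

public final class BuilderMutatorExample {
  public static void main(String[] args) {
    ListStateGet.Builder b = ListStateGet.newBuilder();
    b.setIteratorId("iter-7");                                // plain String setter
    b.setIteratorIdBytes(ByteString.copyFromUtf8("iter-7"));  // equivalent, pre-encoded
    b.clearIteratorId();                                      // back to "" (proto3 default)
    System.out.println(b.build().getIteratorId().isEmpty());  // true
    // b.setIteratorId(null);  // would throw NullPointerException
  }
}
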
@java.lang.Override
public final Builder setUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
@@ -8705,23 +12730,23 @@ public final Builder mergeUnknownFields(
}
- // @@protoc_insertion_point(builder_scope:org.apache.spark.sql.execution.streaming.state.Exists)
+ // @@protoc_insertion_point(builder_scope:org.apache.spark.sql.execution.streaming.state.ListStateGet)
}
- // @@protoc_insertion_point(class_scope:org.apache.spark.sql.execution.streaming.state.Exists)
- private static final org.apache.spark.sql.execution.streaming.state.StateMessage.Exists DEFAULT_INSTANCE;
+ // @@protoc_insertion_point(class_scope:org.apache.spark.sql.execution.streaming.state.ListStateGet)
+ private static final org.apache.spark.sql.execution.streaming.state.StateMessage.ListStateGet DEFAULT_INSTANCE;
static {
- DEFAULT_INSTANCE = new org.apache.spark.sql.execution.streaming.state.StateMessage.Exists();
+ DEFAULT_INSTANCE = new org.apache.spark.sql.execution.streaming.state.StateMessage.ListStateGet();
}
- public static org.apache.spark.sql.execution.streaming.state.StateMessage.Exists getDefaultInstance() {
+ public static org.apache.spark.sql.execution.streaming.state.StateMessage.ListStateGet getDefaultInstance() {
return DEFAULT_INSTANCE;
}
- private static final com.google.protobuf.Parser<Exists>