diff --git a/sdks/java/io/jdbc/src/main/java/org/apache/beam/sdk/io/jdbc/JdbcIO.java b/sdks/java/io/jdbc/src/main/java/org/apache/beam/sdk/io/jdbc/JdbcIO.java
index 1e48af9180e8f..72d0ef5ff6ae3 100644
--- a/sdks/java/io/jdbc/src/main/java/org/apache/beam/sdk/io/jdbc/JdbcIO.java
+++ b/sdks/java/io/jdbc/src/main/java/org/apache/beam/sdk/io/jdbc/JdbcIO.java
@@ -44,8 +44,6 @@
import java.util.stream.Collectors;
import java.util.stream.IntStream;
import javax.sql.DataSource;
-import org.apache.beam.sdk.annotations.Experimental;
-import org.apache.beam.sdk.annotations.Experimental.Kind;
import org.apache.beam.sdk.coders.CannotProvideCoderException;
import org.apache.beam.sdk.coders.Coder;
import org.apache.beam.sdk.coders.CoderRegistry;
@@ -305,7 +303,6 @@
* Consider using MERGE ("upsert")
* statements supported by your database instead.
*/
-@Experimental(Kind.SOURCE_SINK)
@SuppressWarnings({
"rawtypes", // TODO(https://github.com/apache/beam/issues/20447)
"nullness" // TODO(https://github.com/apache/beam/issues/20497)
@@ -327,7 +324,6 @@ public static Read read() {
}
/** Read Beam {@link Row}s from a JDBC data source. */
- @Experimental(Kind.SCHEMAS)
public static ReadRows readRows() {
return new AutoValue_JdbcIO_ReadRows.Builder()
.setFetchSize(DEFAULT_FETCH_SIZE)
@@ -594,7 +590,6 @@ public interface StatementPreparator extends Serializable {
/** Implementation of {@link #readRows()}. */
@AutoValue
- @Experimental(Kind.SCHEMAS)
public abstract static class ReadRows extends PTransform<PBegin, PCollection<Row>> {
abstract @Nullable SerializableFunction<Void, DataSource> getDataSourceProviderFn();