diff --git a/parquet-hadoop/src/main/java/org/apache/parquet/hadoop/ParquetWriter.java b/parquet-hadoop/src/main/java/org/apache/parquet/hadoop/ParquetWriter.java
index a76b843e70..5c7fcaa222 100644
--- a/parquet-hadoop/src/main/java/org/apache/parquet/hadoop/ParquetWriter.java
+++ b/parquet-hadoop/src/main/java/org/apache/parquet/hadoop/ParquetWriter.java
@@ -495,7 +495,9 @@ protected Builder(OutputFile path) {
     /**
      * @param conf a configuration
      * @return an appropriate WriteSupport for the object model.
+     * @deprecated Use {@link #getWriteSupport(ParquetConfiguration)} instead
      */
+    @Deprecated
     protected abstract WriteSupport<T> getWriteSupport(Configuration conf);
 
     /**
@@ -503,8 +505,7 @@ protected Builder(OutputFile path) {
      * @return an appropriate WriteSupport for the object model.
      */
     protected WriteSupport<T> getWriteSupport(ParquetConfiguration conf) {
-      throw new UnsupportedOperationException(
-          "Override ParquetWriter$Builder#getWriteSupport(ParquetConfiguration)");
+      return getWriteSupport(ConfigurationUtil.createHadoopConfiguration(conf));
     }
 
     /**
diff --git a/parquet-hadoop/src/main/java/org/apache/parquet/hadoop/api/InitContext.java b/parquet-hadoop/src/main/java/org/apache/parquet/hadoop/api/InitContext.java
index 06efbcf6a6..a9ee43f7f6 100644
--- a/parquet-hadoop/src/main/java/org/apache/parquet/hadoop/api/InitContext.java
+++ b/parquet-hadoop/src/main/java/org/apache/parquet/hadoop/api/InitContext.java
@@ -79,7 +79,9 @@ public Map<String, String> getMergedKeyValueMetaData() {
 
   /**
    * @return the configuration for this job
+   * @deprecated Use {@link #getParquetConfiguration()} instead
    */
+  @Deprecated
   public Configuration getConfiguration() {
     return ConfigurationUtil.createHadoopConfiguration(configuration);
   }
diff --git a/parquet-hadoop/src/main/java/org/apache/parquet/hadoop/api/ReadSupport.java b/parquet-hadoop/src/main/java/org/apache/parquet/hadoop/api/ReadSupport.java
index 904def186b..1366b60600 100644
--- a/parquet-hadoop/src/main/java/org/apache/parquet/hadoop/api/ReadSupport.java
+++ b/parquet-hadoop/src/main/java/org/apache/parquet/hadoop/api/ReadSupport.java
@@ -21,6 +21,7 @@
 import java.util.Map;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.parquet.conf.ParquetConfiguration;
+import org.apache.parquet.hadoop.util.ConfigurationUtil;
 import org.apache.parquet.io.api.RecordMaterializer;
 import org.apache.parquet.schema.MessageType;
 import org.apache.parquet.schema.MessageTypeParser;
@@ -76,12 +77,12 @@ public ReadContext init(Configuration configuration, Map<String, String> keyValu
    * @param keyValueMetaData the app specific metadata from the file
    * @param fileSchema       the schema of the file
    * @return the readContext that defines how to read the file
-   * @deprecated override {@link ReadSupport#init(InitContext)} instead
+   * @deprecated override {@link #init(InitContext)} instead
    */
   @Deprecated
   public ReadContext init(
       ParquetConfiguration configuration, Map<String, String> keyValueMetaData, MessageType fileSchema) {
-    throw new UnsupportedOperationException("Override ReadSupport.init(InitContext)");
+    return init(ConfigurationUtil.createHadoopConfiguration(configuration), keyValueMetaData, fileSchema);
   }
 
   /**
@@ -103,7 +104,9 @@ public ReadContext init(InitContext context) {
    * @param fileSchema       the schema of the file
    * @param readContext      returned by the init method
    * @return the recordMaterializer that will materialize the records
+   * @deprecated override {@link #prepareForRead(ParquetConfiguration, Map, MessageType, ReadContext)} instead
    */
+  @Deprecated
   public abstract RecordMaterializer<T> prepareForRead(
       Configuration configuration,
       Map<String, String> keyValueMetaData,
@@ -125,8 +128,8 @@ public RecordMaterializer<T> prepareForRead(
       Map<String, String> keyValueMetaData,
       MessageType fileSchema,
       ReadContext readContext) {
-    throw new UnsupportedOperationException(
-        "Override ReadSupport.prepareForRead(ParquetConfiguration, Map<String, String>, MessageType, ReadContext)");
+    return prepareForRead(
+        ConfigurationUtil.createHadoopConfiguration(configuration), keyValueMetaData, fileSchema, readContext);
   }
 
   /**
diff --git a/parquet-hadoop/src/main/java/org/apache/parquet/hadoop/api/WriteSupport.java b/parquet-hadoop/src/main/java/org/apache/parquet/hadoop/api/WriteSupport.java
index 3a0b35f4f6..140206867d 100644
--- a/parquet-hadoop/src/main/java/org/apache/parquet/hadoop/api/WriteSupport.java
+++ b/parquet-hadoop/src/main/java/org/apache/parquet/hadoop/api/WriteSupport.java
@@ -24,6 +24,7 @@
 import java.util.Objects;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.parquet.conf.ParquetConfiguration;
+import org.apache.parquet.hadoop.util.ConfigurationUtil;
 import org.apache.parquet.io.api.RecordConsumer;
 import org.apache.parquet.schema.MessageType;
 
@@ -99,7 +100,9 @@ public Map<String, String> getExtraMetaData() {
    *
    * @param configuration the job's configuration
    * @return the information needed to write the file
+   * @deprecated override {@link #init(ParquetConfiguration)} instead
    */
+  @Deprecated
   public abstract WriteContext init(Configuration configuration);
 
   /**
@@ -109,7 +112,7 @@ public Map<String, String> getExtraMetaData() {
    * @return the information needed to write the file
    */
   public WriteContext init(ParquetConfiguration configuration) {
-    throw new UnsupportedOperationException("Override WriteSupport#init(ParquetConfiguration)");
+    return init(ConfigurationUtil.createHadoopConfiguration(configuration));
   }
 
   /**