Skip to content

Commit

Permalink
Fix read/write Hadoop methods: resolve the FileSystem from the target Path instead of the default filesystem
Browse files Browse the repository at this point in the history
Signed-off-by: Grigory Pomadchin <gr.pomadchin@gmail.com>
  • Loading branch information
pomadchin committed Apr 3, 2017
1 parent db86ac5 commit c7547ff
Showing 1 changed file with 4 additions and 4 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -19,11 +19,11 @@ package geotrellis.spark.io.hadoop
import geotrellis.util.MethodExtensions

import org.apache.hadoop.conf.Configuration
import org.apache.hadoop.fs.{FileSystem, Path}
import org.apache.hadoop.fs.Path
import org.apache.hadoop.io.compress.CompressionCodecFactory
import org.apache.spark.SparkContext
import java.io.{DataInputStream, DataOutputStream}

import java.io.{DataInputStream, DataOutputStream}

trait HadoopRasterMethods[T] extends MethodExtensions[T] {
def write(path: Path)(implicit sc: SparkContext): Unit = write(path, sc.hadoopConfiguration)
Expand All @@ -32,7 +32,7 @@ trait HadoopRasterMethods[T] extends MethodExtensions[T] {

object HadoopRasterMethods {
def write(path: Path, conf: Configuration)(dosWrite: DataOutputStream => Unit): Unit = {
val fs = FileSystem.get(conf)
val fs = path.getFileSystem(conf)

val os = {
val factory = new CompressionCodecFactory(conf)
Expand All @@ -58,7 +58,7 @@ object HadoopRasterMethods {
}

def read[T](path: Path, conf: Configuration)(disRead: DataInputStream => T): T = {
val fs = FileSystem.get(conf)
val fs = path.getFileSystem(conf)

val is = {
val factory = new CompressionCodecFactory(conf)
Expand Down

0 comments on commit c7547ff

Please sign in to comment.