From afb56a27b61a81d17a16405c95872eddff7e0bd1 Mon Sep 17 00:00:00 2001
From: sharkd
Date: Wed, 13 Jul 2016 07:59:26 +0800
Subject: [PATCH 1/3] rebase apache/master

---
 .../spark/deploy/yarn/YarnClusterSuite.scala | 19 +++++++++++++++++++
 1 file changed, 19 insertions(+)

diff --git a/yarn/src/test/scala/org/apache/spark/deploy/yarn/YarnClusterSuite.scala b/yarn/src/test/scala/org/apache/spark/deploy/yarn/YarnClusterSuite.scala
index 874e3045b405..c8cd857d61e4 100644
--- a/yarn/src/test/scala/org/apache/spark/deploy/yarn/YarnClusterSuite.scala
+++ b/yarn/src/test/scala/org/apache/spark/deploy/yarn/YarnClusterSuite.scala
@@ -190,8 +190,13 @@ class YarnClusterSuite extends BaseYarnClusterSuite {
     val result = File.createTempFile("result", null, tempDir)
     val finalState = runSpark(false,
       mainClassName(YarnClusterDriverUseSparkHadoopUtilConf.getClass),
+<<<<<<< 7f968867ff61c6b1a007874ee7e3a7421d94d373
       appArgs = Seq("key=value", result.getAbsolutePath()),
       extraConf = Map("spark.hadoop.key" -> "value"))
+=======
+      appArgs = Seq("spark.hadoop.key=value", result.getAbsolutePath()),
+      extraConf = Map(("spark.hadoop.key", "value")))
+>>>>>>> add a unit test
     checkResult(finalState, result)
   }
 
@@ -294,9 +299,15 @@ private object YarnClusterDriverUseSparkHadoopUtilConf extends Logging with Matc
       // scalastyle:off println
       System.err.println(
         s"""
+<<<<<<< 7f968867ff61c6b1a007874ee7e3a7421d94d373
         |Invalid command line: ${args.mkString(" ")}
         |
         |Usage: YarnClusterDriverUseSparkHadoopUtilConf [hadoopConfKey=value] [result file]
+=======
+        |Invalid command line: ${args.mkString(" ")}
+        |
+        |Usage: YarnClusterDriverUseSparkHadoopUtilConf [propertyKey=value] [result file]
+>>>>>>> add a unit test
         """.stripMargin)
       // scalastyle:on println
       System.exit(1)
@@ -306,11 +317,19 @@ private object YarnClusterDriverUseSparkHadoopUtilConf extends Logging with Matc
       .set("spark.extraListeners", classOf[SaveExecutorInfo].getName)
       .setAppName("yarn test using SparkHadoopUtil's conf"))
 
+<<<<<<< 7f968867ff61c6b1a007874ee7e3a7421d94d373
     val kv = args(0).split("=")
     val status = new File(args(1))
     var result = "failure"
     try {
       SparkHadoopUtil.get.conf.get(kv(0)) should be (kv(1))
+=======
+    val propertyKeyValue = args(0).split("=")
+    val status = new File(args(1))
+    var result = "failure"
+    try {
+      SparkHadoopUtil.get.conf.get(propertyKeyValue(0)) should be (propertyKeyValue(1))
+>>>>>>> add a unit test
       result = "success"
     } finally {
       Files.write(result, status, StandardCharsets.UTF_8)

From 995d606243a95965cb0be28cf7006883400e09ac Mon Sep 17 00:00:00 2001
From: sharkd
Date: Tue, 12 Jul 2016 00:49:56 +0800
Subject: [PATCH 2/3] fix style

---
 .../spark/deploy/yarn/YarnClusterSuite.scala | 19 -------------------
 1 file changed, 19 deletions(-)

diff --git a/yarn/src/test/scala/org/apache/spark/deploy/yarn/YarnClusterSuite.scala b/yarn/src/test/scala/org/apache/spark/deploy/yarn/YarnClusterSuite.scala
index c8cd857d61e4..874e3045b405 100644
--- a/yarn/src/test/scala/org/apache/spark/deploy/yarn/YarnClusterSuite.scala
+++ b/yarn/src/test/scala/org/apache/spark/deploy/yarn/YarnClusterSuite.scala
@@ -190,13 +190,8 @@ class YarnClusterSuite extends BaseYarnClusterSuite {
     val result = File.createTempFile("result", null, tempDir)
     val finalState = runSpark(false,
       mainClassName(YarnClusterDriverUseSparkHadoopUtilConf.getClass),
-<<<<<<< 7f968867ff61c6b1a007874ee7e3a7421d94d373
       appArgs = Seq("key=value", result.getAbsolutePath()),
       extraConf = Map("spark.hadoop.key" -> "value"))
-=======
-      appArgs = Seq("spark.hadoop.key=value", result.getAbsolutePath()),
-      extraConf = Map(("spark.hadoop.key", "value")))
->>>>>>> add a unit test
     checkResult(finalState, result)
   }
 
@@ -299,15 +294,9 @@ private object YarnClusterDriverUseSparkHadoopUtilConf extends Logging with Matc
       // scalastyle:off println
       System.err.println(
         s"""
-<<<<<<< 7f968867ff61c6b1a007874ee7e3a7421d94d373
         |Invalid command line: ${args.mkString(" ")}
         |
         |Usage: YarnClusterDriverUseSparkHadoopUtilConf [hadoopConfKey=value] [result file]
-=======
-        |Invalid command line: ${args.mkString(" ")}
-        |
-        |Usage: YarnClusterDriverUseSparkHadoopUtilConf [propertyKey=value] [result file]
->>>>>>> add a unit test
         """.stripMargin)
       // scalastyle:on println
       System.exit(1)
@@ -317,19 +306,11 @@ private object YarnClusterDriverUseSparkHadoopUtilConf extends Logging with Matc
       .set("spark.extraListeners", classOf[SaveExecutorInfo].getName)
       .setAppName("yarn test using SparkHadoopUtil's conf"))
 
-<<<<<<< 7f968867ff61c6b1a007874ee7e3a7421d94d373
     val kv = args(0).split("=")
     val status = new File(args(1))
     var result = "failure"
     try {
       SparkHadoopUtil.get.conf.get(kv(0)) should be (kv(1))
-=======
-    val propertyKeyValue = args(0).split("=")
-    val status = new File(args(1))
-    var result = "failure"
-    try {
-      SparkHadoopUtil.get.conf.get(propertyKeyValue(0)) should be (propertyKeyValue(1))
->>>>>>> add a unit test
       result = "success"
     } finally {
       Files.write(result, status, StandardCharsets.UTF_8)

From 816979bc5e834aebd23e485bc6251640573fb0a4 Mon Sep 17 00:00:00 2001
From: sharkd
Date: Wed, 13 Jul 2016 07:14:02 +0800
Subject: [PATCH 3/3] fix code error in yarn-cluster unit test

---
 .../scala/org/apache/spark/deploy/yarn/YarnClusterSuite.scala | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/yarn/src/test/scala/org/apache/spark/deploy/yarn/YarnClusterSuite.scala b/yarn/src/test/scala/org/apache/spark/deploy/yarn/YarnClusterSuite.scala
index 874e3045b405..1ccd7e5993f5 100644
--- a/yarn/src/test/scala/org/apache/spark/deploy/yarn/YarnClusterSuite.scala
+++ b/yarn/src/test/scala/org/apache/spark/deploy/yarn/YarnClusterSuite.scala
@@ -97,7 +97,7 @@ class YarnClusterSuite extends BaseYarnClusterSuite {
   }
 
   test("run Spark in yarn-cluster mode with different configurations") {
-    testBasicYarnApp(true,
+    testBasicYarnApp(false,
       Map(
         "spark.driver.memory" -> "512m",
         "spark.driver.cores" -> "1",