From 0ccb4ae38b797c10dfea45d762932bb07d1d9d53 Mon Sep 17 00:00:00 2001 From: Liang-Chi Hsieh Date: Tue, 27 Oct 2015 01:33:17 +0800 Subject: [PATCH 1/8] Keep full stack trace in captured exception. --- python/pyspark/sql/tests.py | 6 ++++++ python/pyspark/sql/utils.py | 5 +++-- 2 files changed, 9 insertions(+), 2 deletions(-) diff --git a/python/pyspark/sql/tests.py b/python/pyspark/sql/tests.py index 6356d4bd6669..d59215fc0b11 100644 --- a/python/pyspark/sql/tests.py +++ b/python/pyspark/sql/tests.py @@ -1079,6 +1079,12 @@ def test_capture_illegalargument_exception(self): df = self.sqlCtx.createDataFrame([(1, 2)], ["a", "b"]) self.assertRaisesRegexp(IllegalArgumentException, "1024 is not in the permitted values", lambda: df.select(sha2(df.a, 1024)).collect()) + try: + df.select(sha2(df.a, 1024)).collect() + except IllegalArgumentException as e: + self.assertRegexpMatches(e.args[0], "1024 is not in the permitted values") + self.assertRegexpMatches(e.args[1], + "org.apache.spark.sql.functions") def test_with_column_with_existing_name(self): keys = self.df.withColumn("key", self.df.key).select("key").collect() diff --git a/python/pyspark/sql/utils.py b/python/pyspark/sql/utils.py index 0f795ca35b38..342048cc767f 100644 --- a/python/pyspark/sql/utils.py +++ b/python/pyspark/sql/utils.py @@ -36,10 +36,11 @@ def deco(*a, **kw): return f(*a, **kw) except py4j.protocol.Py4JJavaError as e: s = e.java_exception.toString() + stackTrace = '\n'.join(map(lambda x: x.toString(), e.java_exception.getStackTrace())) if s.startswith('org.apache.spark.sql.AnalysisException: '): - raise AnalysisException(s.split(': ', 1)[1]) + raise AnalysisException(s.split(': ', 1)[1], stackTrace) if s.startswith('java.lang.IllegalArgumentException: '): - raise IllegalArgumentException(s.split(': ', 1)[1]) + raise IllegalArgumentException(s.split(': ', 1)[1], stackTrace) raise return deco From 3769f01d66394c5b72612ef869687589e01c7279 Mon Sep 17 00:00:00 2001 From: Liang-Chi Hsieh Date: Tue, 27 Oct 
2015 10:52:46 +0800 Subject: [PATCH 2/8] Add custom exception class. --- python/pyspark/sql/tests.py | 5 +++-- python/pyspark/sql/utils.py | 11 +++++++++-- 2 files changed, 12 insertions(+), 4 deletions(-) diff --git a/python/pyspark/sql/tests.py b/python/pyspark/sql/tests.py index d59215fc0b11..e948b6570c7a 100644 --- a/python/pyspark/sql/tests.py +++ b/python/pyspark/sql/tests.py @@ -1082,8 +1082,9 @@ def test_capture_illegalargument_exception(self): try: df.select(sha2(df.a, 1024)).collect() except IllegalArgumentException as e: - self.assertRegexpMatches(e.args[0], "1024 is not in the permitted values") - self.assertRegexpMatches(e.args[1], + self.assertEqual(repr(e), "IllegalArgumentException()") + self.assertRegexpMatches(e.desc, "1024 is not in the permitted values") + self.assertRegexpMatches(e.stackTrace, "org.apache.spark.sql.functions") def test_with_column_with_existing_name(self): diff --git a/python/pyspark/sql/utils.py b/python/pyspark/sql/utils.py index 342048cc767f..839201d4a4ee 100644 --- a/python/pyspark/sql/utils.py +++ b/python/pyspark/sql/utils.py @@ -17,14 +17,21 @@ import py4j +class CapturedException(Exception): + def __init__(self, desc, stackTrace): + self.desc = desc + self.stackTrace = stackTrace + def __str__(self): + return repr(self.desc) -class AnalysisException(Exception): + +class AnalysisException(CapturedException): """ Failed to analyze a SQL query plan. """ -class IllegalArgumentException(Exception): +class IllegalArgumentException(CapturedException): """ Passed an illegal or inappropriate argument. """ From 18c169581bf69f896dae2ec584c110296436d655 Mon Sep 17 00:00:00 2001 From: Liang-Chi Hsieh Date: Tue, 27 Oct 2015 11:26:37 +0800 Subject: [PATCH 3/8] Fix python style. 
--- python/pyspark/sql/utils.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/python/pyspark/sql/utils.py b/python/pyspark/sql/utils.py index 839201d4a4ee..5d79715cb9d5 100644 --- a/python/pyspark/sql/utils.py +++ b/python/pyspark/sql/utils.py @@ -17,10 +17,12 @@ import py4j + class CapturedException(Exception): def __init__(self, desc, stackTrace): self.desc = desc self.stackTrace = stackTrace + def __str__(self): return repr(self.desc) From 8f694803194951391663b1d120b90996bb181019 Mon Sep 17 00:00:00 2001 From: Liang-Chi Hsieh Date: Tue, 27 Oct 2015 11:57:49 +0800 Subject: [PATCH 4/8] Fix it. --- python/pyspark/sql/tests.py | 1 - 1 file changed, 1 deletion(-) diff --git a/python/pyspark/sql/tests.py b/python/pyspark/sql/tests.py index e948b6570c7a..4c03a0d4ffe9 100644 --- a/python/pyspark/sql/tests.py +++ b/python/pyspark/sql/tests.py @@ -1082,7 +1082,6 @@ def test_capture_illegalargument_exception(self): try: df.select(sha2(df.a, 1024)).collect() except IllegalArgumentException as e: - self.assertEqual(repr(e), "IllegalArgumentException()") self.assertRegexpMatches(e.desc, "1024 is not in the permitted values") self.assertRegexpMatches(e.stackTrace, "org.apache.spark.sql.functions") From 11d6f097d12520f43fd5618ce573eebe3393ff50 Mon Sep 17 00:00:00 2001 From: Liang-Chi Hsieh Date: Wed, 28 Oct 2015 16:26:40 +0800 Subject: [PATCH 5/8] Make it look like a Java stacktrace. 
--- python/pyspark/sql/utils.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/python/pyspark/sql/utils.py b/python/pyspark/sql/utils.py index 5d79715cb9d5..b64233fa757f 100644 --- a/python/pyspark/sql/utils.py +++ b/python/pyspark/sql/utils.py @@ -45,7 +45,7 @@ def deco(*a, **kw): return f(*a, **kw) except py4j.protocol.Py4JJavaError as e: s = e.java_exception.toString() - stackTrace = '\n'.join(map(lambda x: x.toString(), e.java_exception.getStackTrace())) + stackTrace = '\n\t at'.join(map(lambda x: x.toString(), e.java_exception.getStackTrace())) if s.startswith('org.apache.spark.sql.AnalysisException: '): raise AnalysisException(s.split(': ', 1)[1], stackTrace) if s.startswith('java.lang.IllegalArgumentException: '): From f4cae6ce8ad1130cdefe9760e18e0a587fdb0e4d Mon Sep 17 00:00:00 2001 From: Liang-Chi Hsieh Date: Wed, 28 Oct 2015 16:42:17 +0800 Subject: [PATCH 6/8] Fix python style. --- python/pyspark/sql/utils.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/python/pyspark/sql/utils.py b/python/pyspark/sql/utils.py index b64233fa757f..dc0138e15890 100644 --- a/python/pyspark/sql/utils.py +++ b/python/pyspark/sql/utils.py @@ -45,7 +45,8 @@ def deco(*a, **kw): return f(*a, **kw) except py4j.protocol.Py4JJavaError as e: s = e.java_exception.toString() - stackTrace = '\n\t at'.join(map(lambda x: x.toString(), e.java_exception.getStackTrace())) + stackTrace = '\n\t at'.join(map(lambda x: x.toString(), + e.java_exception.getStackTrace())) if s.startswith('org.apache.spark.sql.AnalysisException: '): raise AnalysisException(s.split(': ', 1)[1], stackTrace) if s.startswith('java.lang.IllegalArgumentException: '): From 8d48e135d832c467d8dde98a1cdac9c475645b81 Mon Sep 17 00:00:00 2001 From: Liang-Chi Hsieh Date: Thu, 29 Oct 2015 03:30:56 +0800 Subject: [PATCH 7/8] Add a space. 
--- python/pyspark/sql/utils.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/python/pyspark/sql/utils.py b/python/pyspark/sql/utils.py index dc0138e15890..fcfe0237a3d4 100644 --- a/python/pyspark/sql/utils.py +++ b/python/pyspark/sql/utils.py @@ -45,7 +45,7 @@ def deco(*a, **kw): return f(*a, **kw) except py4j.protocol.Py4JJavaError as e: s = e.java_exception.toString() - stackTrace = '\n\t at'.join(map(lambda x: x.toString(), + stackTrace = '\n\t at '.join(map(lambda x: x.toString(), e.java_exception.getStackTrace())) if s.startswith('org.apache.spark.sql.AnalysisException: '): raise AnalysisException(s.split(': ', 1)[1], stackTrace) From 8535f06192bb64e4bf1e62bcfefeb7b49627c4c5 Mon Sep 17 00:00:00 2001 From: Liang-Chi Hsieh Date: Thu, 29 Oct 2015 06:57:19 +0800 Subject: [PATCH 8/8] Fix python style. --- python/pyspark/sql/utils.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/python/pyspark/sql/utils.py b/python/pyspark/sql/utils.py index fcfe0237a3d4..c4fda8bd3b89 100644 --- a/python/pyspark/sql/utils.py +++ b/python/pyspark/sql/utils.py @@ -46,7 +46,7 @@ def deco(*a, **kw): except py4j.protocol.Py4JJavaError as e: s = e.java_exception.toString() stackTrace = '\n\t at '.join(map(lambda x: x.toString(), - e.java_exception.getStackTrace())) + e.java_exception.getStackTrace())) if s.startswith('org.apache.spark.sql.AnalysisException: '): raise AnalysisException(s.split(': ', 1)[1], stackTrace) if s.startswith('java.lang.IllegalArgumentException: '):