@@ -68,7 +68,7 @@ class SparkContext(object):
 
     def __init__(self, master=None, appName=None, sparkHome=None, pyFiles=None,
                  environment=None, batchSize=0, serializer=PickleSerializer(), conf=None,
-                 gateway=None):
+                 gateway=None, jsc=None):
         """
         Create a new SparkContext. At least the master and app name should be set,
         either through the named parameters here or through C{conf}.
@@ -104,14 +104,14 @@ def __init__(self, master=None, appName=None, sparkHome=None, pyFiles=None,
         SparkContext._ensure_initialized(self, gateway=gateway)
         try:
             self._do_init(master, appName, sparkHome, pyFiles, environment, batchSize, serializer,
-                          conf)
+                          conf, jsc)
         except:
             # If an error occurs, clean up in order to allow future SparkContext creation:
             self.stop()
             raise
 
     def _do_init(self, master, appName, sparkHome, pyFiles, environment, batchSize, serializer,
-                 conf):
+                 conf, jsc):
         self.environment = environment or {}
         self._conf = conf or SparkConf(_jvm=self._jvm)
         self._batchSize = batchSize  # -1 represents an unlimited batch size
@@ -154,7 +154,7 @@ def _do_init(self, master, appName, sparkHome, pyFiles, environment, batchSize,
             self.environment[varName] = v
 
         # Create the Java SparkContext through Py4J
-        self._jsc = self._initialize_context(self._conf._jconf)
+        self._jsc = jsc or self._initialize_context(self._conf._jconf)
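A minimal usage sketch of what the new `jsc` parameter enables: handing PySpark a JavaSparkContext that already exists in the JVM, so that `_do_init` wraps it (`self._jsc = jsc or self._initialize_context(...)`) instead of creating a second Java context. The snippet below is an assumption-laden illustration, not part of this patch: it launches the Py4J gateway by hand and builds the Java-side context through Spark's Java API classes.

    from pyspark import SparkContext
    from pyspark.java_gateway import launch_gateway

    # Launch the Py4J gateway ourselves and create the Java-side context first.
    gateway = launch_gateway()
    jconf = (gateway.jvm.org.apache.spark.SparkConf()
             .setMaster("local")
             .setAppName("preexisting-jsc"))
    java_sc = gateway.jvm.org.apache.spark.api.java.JavaSparkContext(jconf)

    # Because `jsc` is supplied (and truthy), _do_init assigns it directly and
    # skips _initialize_context, so no second Java SparkContext is created.
    sc = SparkContext(master="local", appName="preexisting-jsc",
                      gateway=gateway, jsc=java_sc)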