 from .fun_args import get_arg_ctx
 from .introspect import introspect, _accepted_packages
 from ._lru_store import LRUCacheStore, default_cache_size
+
 from .store import LocalFileStore, Store
 from .structures import (
     DDSPath,
-    KSException,
+    DDSException,
     EvalContext,
     PyHash,
     ProcessingStage,
@@ -27,6 +28,7 @@
     FunctionInteractionsUtils,
     FunctionIndirectInteractionUtils,
 )
+from . import _options
 
 _Out = TypeVar("_Out")
 _In = TypeVar("_In")
@@ -64,7 +66,7 @@ def load(path: Union[str, DDSPath, pathlib.Path]) -> Any:
     path_ = DDSPathUtils.create(path)
     key = _store.fetch_paths([path_]).get(path_)
     if key is None:
-        raise KSException(f"The store {_store} did not return path {path_}")
+        raise DDSException(f"The store {_store} did not return path {path_}")
     else:
         return _store.fetch_blob(key)
 
@@ -85,7 +87,7 @@ def set_store(
     global _store
     if isinstance(store, Store):
         if cache_objects is not None:
-            raise KSException(
+            raise DDSException(
                 f"Cannot provide a caching option and a store object of type 'Store' at the same time"
             )
         # Directly setting the store
@@ -99,12 +101,12 @@ def set_store(
         _store = LocalFileStore(internal_dir, data_dir)
     elif store == "dbfs":
         if data_dir is None:
-            raise KSException("Missing data_dir argument")
+            raise DDSException("Missing data_dir argument")
         if internal_dir is None:
-            raise KSException("Missing internal_dir argument")
+            raise DDSException("Missing internal_dir argument")
         dbutils = dbutils or _fetch_ipython_vars().get("dbutils")
         if dbutils is None:
-            raise KSException(
+            raise DDSException(
                 "Missing dbutils objects from input or from arguments."
                 " You must be using a databricks notebook to use the DBFS store"
             )
@@ -117,13 +119,13 @@ def set_store(
             DBFSURI.parse(internal_dir), DBFSURI.parse(data_dir), dbutils, commit_type_
         )
     else:
-        raise KSException(f"Unknown store {store}")
+        raise DDSException(f"Unknown store {store}")
 
     if cache_objects is not None:
         num_objects: Optional[int] = None
 
         if not isinstance(cache_objects, (int, bool)):
-            raise KSException(
+            raise DDSException(
                 f"cached_object should be int or bool, received type {type(cache_objects)}"
             )
         if isinstance(cache_objects, bool) and cache_objects:
@@ -148,16 +150,16 @@ def check(s: Union[str, ProcessingStage], cur: ProcessingStage) -> ProcessingSta
     if isinstance(s, str):
         s = s.upper()
         if s not in dir(ProcessingStage):
-            raise KSException(
+            raise DDSException(
                 f"{s} is not a valid stage name. Valid names are {dir(ProcessingStage)}"
             )
         x = ProcessingStage[s]
     elif isinstance(s, ProcessingStage):
         x = s
     else:
-        raise KSException(f"Not a valid type: {s} {type(s)}")
+        raise DDSException(f"Not a valid type: {s} {type(s)}")
     if x != cur:
-        raise KSException(
+        raise DDSException(
             f"Wrong order for the stage name, expected {cur} but got {x}"
         )
     return cur
@@ -182,15 +184,15 @@ def _eval(
 
     stages = _parse_stages(dds_stages)
 
-    extra_debug = dds_extra_debug or False
+    extra_debug = dds_extra_debug or _options._dds_extra_debug
 
     if not _eval_ctx:
         # Not in an evaluation context, create one and introspect
         return _eval_new_ctx(fun, path, args, kwargs, export_graph, extra_debug, stages)
     else:
         if not path:
-            raise KSException(
-                "Already in eval() context. Nested eval contexts are not supported"
+            raise DDSException(
+                "Already in dds.eval() context. Nested eval contexts are not supported"
             )
         key = None if path is None else _eval_ctx.requested_paths[path]
         t = _time()
@@ -302,7 +304,7 @@ def _eval_new_ctx(
 
     _logger.debug(f"Interaction tree:")
     FunctionInteractionsUtils.pprint_tree(
-        inters, present_blobs, printer=lambda s: _logger.debug(s)
+        inters, present_blobs, printer=_logger.debug
     )
     if export_graph is not None:
         # Attempt to run the export module:
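
For calling code, the practical effect of this commit is the rename of KSException to DDSException. Below is a minimal sketch of the updated error handling, assuming the package is importable as dds, that load is exposed at the package level, and that DDSException can be imported from dds.structures as in the diff above; the path in the example is hypothetical.

import dds
from dds.structures import DDSException  # formerly KSException

try:
    # load() raises DDSException when the store does not know the requested path.
    obj = dds.load("/path/that_may_not_exist")
except DDSException as err:
    print(f"dds error: {err}")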