Merge pull request #153 from Prodiguer/robust2ways
Two bug fixes to prevent crashes
AtefBN authored Sep 25, 2020
2 parents baad853 + 8a2f511 commit 5969d1b
Showing 3 changed files with 12 additions and 4 deletions.
8 changes: 7 additions & 1 deletion sdt/bin/sdenqueue.py
@@ -139,7 +139,13 @@ def add_file(f):
     f.status=sdconst.TRANSFER_STATUS_WAITING
     f.crea_date=sdtime.now()
 
-    sdfiledao.add_file(f,commit=False)
+    try:
+        sdfiledao.add_file(f,commit=False)
+    except Exception as e:
+        sdlog.error("SDENQUEU-005","Failed to create transfer (local_path=%s,url=%s)"%(f.get_full_local_path(),f.url))
+        sdlog.error("SDENQUEU-006","Exception was %s"%e)
+        sdlog.error("SDENQUEU-007","This file was found from the search url %s"
+                    % getattr(f, 'search_url', '(unknown)') )
 
 def add_dataset(f):
     """
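A minimal standalone sketch of the pattern this hunk introduces (hypothetical names and records, not Synda code): the per-file insert is wrapped in try/except, so a record that the database rejects is logged together with its local path, download URL and originating search url, and the enqueue run continues instead of crashing.

import logging

def enqueue(files, insert):
    """Insert each file record; log and skip records that fail instead of aborting the run."""
    for f in files:
        try:
            insert(f)
        except Exception as e:
            logging.error("failed to create transfer (local_path=%s, url=%s)",
                          f.get("local_path"), f.get("url"))
            logging.error("exception was %s", e)
            logging.error("this file was found from the search url %s",
                          f.get("search_url", "(unknown)"))

# usage: the duplicate record makes insert() raise, but the run still completes
seen = set()
def insert(f):
    if f["local_path"] in seen:
        raise ValueError("duplicate local_path %s" % f["local_path"])
    seen.add(f["local_path"])

enqueue([{"local_path": "a.nc", "url": "http://host1/a.nc"},
         {"local_path": "a.nc", "url": "http://host2/a.nc",
          "search_url": "http://example.org/esg-search/search?type=File"}],
        insert)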
6 changes: 3 additions & 3 deletions sdt/bin/sdjson.py
@@ -25,7 +25,7 @@ def parse_parameters(buffer):
         raise
 
     params={}
-    footer_node=xmldoc["facet_counts"]
+    footer_node=xmldoc.get("facet_counts",{"facet_fields":{}})
     fields_node=footer_node["facet_fields"]
     for facet_name,li in fields_node.iteritems():
         items=[]
@@ -74,10 +74,10 @@ def parse_metadata(buffer):
 
     # retrieve header & footer (those nodes always exist)
     header_node=xmldoc["responseHeader"]
-    footer_node=xmldoc["facet_counts"]
+    # not used: footer_node=xmldoc["facet_counts"]
 
     # parse footer
-    fields_node=footer_node["facet_fields"]
+    # not used: fields_node=footer_node["facet_fields"]
 
     # --- parse body node --- #
 
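A short sketch of why .get() with a default matters in parse_parameters (hypothetical sample payloads, not Synda code): a search response without a facet_counts node used to raise KeyError; with the default, the facet loop simply sees an empty facet_fields dictionary.

import json

def parse_facets(buffer):
    doc = json.loads(buffer)
    # .get() with a default keeps a facet-less response from raising KeyError
    footer_node = doc.get("facet_counts", {"facet_fields": {}})
    return footer_node["facet_fields"]

print(parse_facets('{"facet_counts": {"facet_fields": {"project": ["CMIP5", 42]}}}'))
print(parse_facets('{"response": {"numFound": 0}}'))  # no facet_counts: prints {}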
2 changes: 2 additions & 0 deletions sdt/bin/sdnetutils.py
@@ -70,6 +70,8 @@ def call_web_service(url,timeout=sdconst.SEARCH_API_HTTP_TIMEOUT,lowmem=False):
         raise SDException('SDNETUTI-008','Network error (see log for details)') # we raise a new exception 'network error' here, because most of the time, 'xml parsing error' is due to an 'network error'.
 
     sdlog.debug("SDNETUTI-044","files-count=%d"%len(di.get('files')))
+    for difile in di['files']:
+        difile['search_url'] = url
 
     return sdtypes.Response(call_duration=elapsed_time,lowmem=lowmem,**di) # RAM storage is ok here as one response is limited by SEARCH_API_CHUNKSIZE
 
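The other half of the fix in one sketch (hypothetical helper, not the real sdnetutils API): every file dictionary in a search response is tagged with the URL of the query that produced it, which is the value the SDENQUEU-007 log line above reads back through getattr(f, 'search_url', '(unknown)').

def tag_with_search_url(files, url):
    """Attach the originating search URL to each file record so later errors can cite it."""
    for f in files:
        f["search_url"] = url
    return files

files = [{"url": "http://data.example.org/a.nc"}, {"url": "http://data.example.org/b.nc"}]
tag_with_search_url(files, "http://example.org/esg-search/search?type=File")
assert all(f["search_url"] == "http://example.org/esg-search/search?type=File" for f in files)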
