This repository has been archived by the owner on Sep 18, 2023. It is now read-only.

[DNM] update sparklog #951

Merged 3 commits on Jun 3, 2022
13 changes: 8 additions & 5 deletions tools/sparklog.ipynb
@@ -305,9 +305,11 @@
 " self.speculativetask = task.where(\"speculative = 'true'\").count()\n",
 " self.speculativekilledtask = task.where(\"speculative = true and killed='true'\").count()\n",
 " self.speculativestage = task.where(\"speculative = true and killed='true'\").select(\"`Stage ID`\").distinct().count()\n",
-"\n",
 " \n",
-" taskjob=task.where(\"Failed<>'true' and killed<>'true'\").\\\n",
+" validtsk = task.where(\"Event = 'SparkListenerTaskEnd' and (Failed<>'true' or killed<>'true')\").select(\"`Task ID`\")\n",
+" task=task.join(validtsk,on='Task ID',how='inner')\n",
+" \n",
+" taskjob=task.\\\n",
 " select(\"Host\",\"`Event`\",\"`Launch Time`\",\"`Executor ID`\",\"`Task ID`\",\"`Finish Time`\",\n",
 " \"`Stage ID`\",\"`Input Metrics`.`Bytes Read`\",\"`Disk Bytes Spilled`\",\"`Memory Bytes Spilled`\",\"`Shuffle Read Metrics`.`Local Bytes Read`\",\"`Shuffle Read Metrics`.`Remote Bytes Read`\",\n",
 " \"`Shuffle Write Metrics`.`Shuffle Bytes Written`\",\"`Executor Deserialize Time`\",\"`Shuffle Read Metrics`.`Fetch Wait Time`\",\"`Executor Run Time`\",\"`Shuffle Write Metrics`.`Shuffle Write Time`\",\n",
@@ -941,7 +943,7 @@
 " else:\n",
 " t=len(tids)\n",
 " tids[t]=[tsk,stime]\n",
-" #print(\"task {:d} tid is {:s}.{:d}\".format(tsk,pid,t))\n",
+" #print(f\"task {tsk} tid is {pid}.{t}\")\n",
 " coretrack[pid]=tids\n",
 "\n",
 " if l['Event']=='SparkListenerTaskEnd':\n",
@@ -1636,6 +1638,7 @@
 "\n",
 " value=accid2stageid[m[\"accumulatorId\"]][1]\n",
 " stdev_value=accid2stageid[m[\"accumulatorId\"]][2]\n",
+" stdev_value=0 if stdev_value is None else stdev_value\n",
 " if m[\"metricType\"] in ['nsTiming','timing']:\n",
 " totaltime=value/1000 if m[\"metricType\"] == 'timing' else value/1000000000\n",
 " stdev_value=stdev_value/1000 if m[\"metricType\"] == 'timing' else stdev_value/1000000000\n",
@@ -2072,15 +2075,15 @@
 " fig, ax = plt.subplots(figsize=(30,8))\n",
 " ax.set_title('input size')\n",
 " ax.errorbar(x=dfx.index,y=dfx['input'], yerr=dfx['input_err'], fmt='ok', ecolor='red', lw=3)\n",
-" ax.errorbar(x=dfx.index,y=dfx['input'],yerr=[dfx['input_max'],dfx['input_min']],\n",
+" ax.errorbar(x=dfx.index,y=dfx['input'],yerr=[dfx['input_min'],dfx['input_max']],\n",
 " fmt='.k', ecolor='gray', lw=1)\n",
 " self.print_real_queryid(ax,dfx)\n",
 " \n",
 " fig, ax = plt.subplots(figsize=(30,8))\n",
 " ax.set_title('stage time')\n",
 "\n",
 " ax.errorbar(x=dfx.index,y=dfx['elapsed'], yerr=dfx['elapsedtime_err'], fmt='ok', ecolor='red', lw=5)\n",
-" ax.errorbar(x=dfx.index,y=dfx['elapsed'],yerr=[dfx['elapsed_max'],dfx['elapsed_min']],\n",
+" ax.errorbar(x=dfx.index,y=dfx['elapsed'],yerr=[dfx['elapsed_min'],dfx['elapsed_max']],\n",
 " fmt='.k', ecolor='gray', lw=1)\n",
 "\n",
 " self.print_real_queryid(ax,dfx)\n",