diff --git a/atmo/jobs/models.py b/atmo/jobs/models.py index ed9a4cdc..f7286b04 100644 --- a/atmo/jobs/models.py +++ b/atmo/jobs/models.py @@ -1,5 +1,6 @@ -from datetime import datetime, timedelta +from datetime import timedelta +from django.core.urlresolvers import reverse from django.db import models from django.contrib.auth.models import User from django.utils import timezone @@ -77,7 +78,7 @@ def is_expired(self, at_time=None): return False # job isn't even running at the moment if at_time is None: at_time = timezone.now() - if self.last_run_date + timedelta(hours=self.job_timeout) >= at_time: + if self.last_run_date and self.last_run_date + timedelta(hours=self.job_timeout) >= at_time: return True # current job run expired return False @@ -134,10 +135,12 @@ def delete(self, *args, **kwargs): @classmethod def step_all(cls): """Run all the scheduled tasks that are supposed to run.""" - now = datetime.now() for spark_join in cls.objects.all(): - if spark_join.should_run(now): + if spark_join.should_run(): spark_join.run() spark_join.save() - if spark_join.is_expired(now): + if spark_join.is_expired(): spark_join.delete() + + def get_absolute_url(self): + return reverse('jobs-detail', kwargs={'id': self.id}) diff --git a/atmo/jobs/urls.py b/atmo/jobs/urls.py index ca612753..c5c3e89b 100644 --- a/atmo/jobs/urls.py +++ b/atmo/jobs/urls.py @@ -5,4 +5,5 @@ url(r'^new/', views.new_spark_job, name='jobs-new'), url(r'^edit/', views.edit_spark_job, name='jobs-edit'), url(r'^delete/', views.delete_spark_job, name='jobs-delete'), + url(r'^(?P<id>[0-9]+)/$', views.detail_spark_job, name='jobs-detail'), ] diff --git a/atmo/jobs/views.py b/atmo/jobs/views.py index 7b0bb7eb..4a7bc94a 100644 --- a/atmo/jobs/views.py +++ b/atmo/jobs/views.py @@ -2,10 +2,11 @@ from django.views.decorators.http import require_POST from django.contrib.auth.decorators import login_required from django.http import HttpResponseBadRequest -from django.shortcuts import redirect +from django.shortcuts import 
redirect, get_object_or_404, render from session_csrf import anonymous_csrf +from .models import SparkJob from . import forms @@ -43,3 +44,9 @@ def delete_spark_job(request): return HttpResponseBadRequest(form.errors.as_json(escape_html=True)) form.save() # this will also delete the job for us return redirect("/") + + +@login_required +def detail_spark_job(request, id): + job = get_object_or_404(SparkJob, created_by=request.user, pk=id) + return render(request, 'atmo/detail-spark-job.html', context={'job': job}) diff --git a/atmo/templates/atmo/dashboard.html b/atmo/templates/atmo/dashboard.html index d684ba22..b42371e2 100644 --- a/atmo/templates/atmo/dashboard.html +++ b/atmo/templates/atmo/dashboard.html @@ -25,7 +25,7 @@

Launch a Spark Cluster

{% for cluster in active_clusters %} {{ cluster.id }} - {{ cluster.identifier }} + {{ cluster.identifier }} {{ cluster.size }} {{ cluster.start_date }} {{ cluster.most_recent_status }} @@ -60,7 +60,7 @@

Schedule a Spark Job

{% for spark_job in user_spark_jobs %} {{ spark_job.id }} - {{ spark_job.identifier }} + {{ spark_job.identifier }} {{ spark_job.result_visibility }} {{ spark_job.size }} {{ spark_job.interval_in_hours }} diff --git a/atmo/templates/atmo/detail-cluster.html b/atmo/templates/atmo/detail-cluster.html index c6c1c1fb..ec90e396 100644 --- a/atmo/templates/atmo/detail-cluster.html +++ b/atmo/templates/atmo/detail-cluster.html @@ -1,5 +1,8 @@ {% extends "atmo/base.html" %} {% block content %} + +{% block head_title %}Cluster {{ cluster.identifier }}{% endblock %} +

Cluster {{ cluster.identifier }}

Summary:

@@ -27,7 +30,7 @@

Cluster {{ cluster.identifier }}

{% if cluster.is_active %}
Terminates at:
-
{{cluster.end_date}}
+
{{cluster.end_date|date:"SHORT_DATETIME_FORMAT" }}

diff --git a/atmo/templates/atmo/detail-spark-job.html b/atmo/templates/atmo/detail-spark-job.html new file mode 100644 index 00000000..20f6ce64 --- /dev/null +++ b/atmo/templates/atmo/detail-spark-job.html @@ -0,0 +1,50 @@ +{% extends "atmo/base.html" %} + +{% block head_title %}Spark job {{ job.identifier }}{% endblock %} + +{% block content %} +

Spark Job {{ job.identifier }}

+

Summary:

+
+
Notebook S3 key:
+
{{ job.notebook_s3_key }}
+
+
+
Result visibility:
+
{{ job.result_visibility }}
+
+
+
Size:
+
{{ job.size }}
+
+
+
Interval in hours:
+
{{ job.interval_in_hours }}
+
+
+
Timeout:
+
{{ job.job_timeout }}
+
+
+
Start date:
+
{{ job.start_date|date:"SHORT_DATETIME_FORMAT" }}
+
+
+
End date:
+
{{ job.end_date|date:"SHORT_DATETIME_FORMAT" }}
+
+
+
Is enabled:
+
+ {% if job.is_enabled %} + + {% else %} + + {% endif %} +
+
+
+
Last run date:
+
{{ job.last_run_date|timesince }}
+
+{% endblock content %} diff --git a/atmo/utils/scheduling.py b/atmo/utils/scheduling.py index 634bd270..7c87895d 100644 --- a/atmo/utils/scheduling.py +++ b/atmo/utils/scheduling.py @@ -72,7 +72,7 @@ def spark_job_run(user_email, identifier, notebook_uri, result_is_public, size, 'Path': 's3://{}/bootstrap/telemetry.sh'.format( settings.AWS_CONFIG['SPARK_EMR_BUCKET'] ), - 'Args': ['--timeout', job_timeout] + 'Args': ['--timeout', str(job_timeout)] } }], Tags=[