diff --git a/qiita_db/handlers/plugin.py b/qiita_db/handlers/plugin.py index 5850df51c..ec8a8f514 100644 --- a/qiita_db/handlers/plugin.py +++ b/qiita_db/handlers/plugin.py @@ -104,12 +104,14 @@ def post(self, name, version): if outputs: outputs = loads(outputs) dflt_param_set = loads(self.get_argument('default_parameter_sets')) + analysis_only = self.get_argument('analysis_only', False) parameters = req_params parameters.update(opt_params) cmd = qdb.software.Command.create( - plugin, cmd_name, cmd_desc, parameters, outputs) + plugin, cmd_name, cmd_desc, parameters, outputs, + analysis_only=analysis_only) if dflt_param_set is not None: for name, vals in dflt_param_set.items(): diff --git a/qiita_db/handlers/tests/test_plugin.py b/qiita_db/handlers/tests/test_plugin.py index 036a58f32..3fc89a8a4 100644 --- a/qiita_db/handlers/tests/test_plugin.py +++ b/qiita_db/handlers/tests/test_plugin.py @@ -88,6 +88,25 @@ def test_post(self): self.assertEqual(obs.code, 200) obs = _get_command('QIIME', '1.9.1', 'New Command') self.assertEqual(obs.name, 'New Command') + self.assertFalse(obs.analysis_only) + + # Create a new command that is analysis only + data = { + 'name': 'New analysis command', + 'description': 'Analysis command added for testing', + 'required_parameters': dumps( + {'in_data': ['artifact:["BIOM"]', None]}), + 'optional_parameters': dumps({'param1': ['string', 'default']}), + 'outputs': dumps({'outtable': 'BIOM'}), + 'default_parameter_sets': dumps({'dflt1': {'param1': 'test'}}), + 'analysis_only': True + } + obs = self.post('/qiita_db/plugins/QIIME/1.9.1/commands/', data=data, + headers=self.header) + self.assertEqual(obs.code, 200) + obs = _get_command('QIIME', '1.9.1', 'New analysis command') + self.assertEqual(obs.name, 'New analysis command') + self.assertTrue(obs.analysis_only) class CommandHandlerTests(OauthTestingBase): diff --git a/qiita_db/private.py b/qiita_db/private.py new file mode 100644 index 000000000..78a286f51 --- /dev/null +++ b/qiita_db/private.py @@ -0,0 +1,74 @@ +# ----------------------------------------------------------------------------- +# Copyright (c) 2014--, The Qiita Development Team. +# +# Distributed under the terms of the BSD 3-clause License. +# +# The full license is in the file LICENSE, distributed with this software. 
+# -----------------------------------------------------------------------------
+
+from json import dumps
+from sys import exc_info
+from time import sleep
+import traceback
+
+import qiita_db as qdb
+
+
+def build_analysis_files(job):
+    """Builds the files for an analysis
+
+    Parameters
+    ----------
+    job : qiita_db.processing_job.ProcessingJob
+        The processing job with the information for building the files
+    """
+    with qdb.sql_connection.TRN:
+        params = job.parameters.values
+        analysis_id = params['analysis']
+        merge_duplicated_sample_ids = params['merge_dup_sample_ids']
+        analysis = qdb.analysis.Analysis(analysis_id)
+        biom_files = analysis.build_files(merge_duplicated_sample_ids)
+
+        cmd = qdb.software.Command.get_validator('BIOM')
+        val_jobs = []
+        for dtype, biom_fp in biom_files:
+            validate_params = qdb.software.Parameters.load(
+                cmd, values_dict={'files': dumps({'biom': [biom_fp]}),
+                                  'artifact_type': 'BIOM',
+                                  'provenance': dumps({'job': job.id,
+                                                       'data_type': dtype}),
+                                  'analysis': analysis_id})
+            val_jobs.append(qdb.processing_job.ProcessingJob.create(
+                analysis.owner, validate_params))
+
+        job._set_validator_jobs(val_jobs)
+
+        for j in val_jobs:
+            j.submit()
+            sleep(1)
+
+
+TASK_DICT = {'build_analysis_files': build_analysis_files}
+
+
+def private_task(job_id):
+    """Completes a Qiita private task
+
+    Parameters
+    ----------
+    job_id : str
+        The job id
+    """
+    if job_id == 'register':
+        # We don't need to do anything here if Qiita is registering plugins
+        return
+
+    job = qdb.processing_job.ProcessingJob(job_id)
+    job.update_heartbeat_state()
+    task_name = job.command.name
+
+    try:
+        TASK_DICT[task_name](job)
+    except Exception:
+        job.complete(False, error="Error executing private task: %s"
+                     % traceback.format_exception(*exc_info()))
diff --git a/qiita_db/processing_job.py b/qiita_db/processing_job.py
index 47d404907..66e176feb 100644
--- a/qiita_db/processing_job.py
+++ b/qiita_db/processing_job.py
@@ -504,7 +504,8 @@ def _complete_artifact_definition(self, artifact_data):
        else:
            # The artifact is uploaded by the user or is the initial
            # artifact of an analysis
-           if job_params['analysis'] is not None:
+           if ('analysis' in job_params and
+                   job_params['analysis'] is not None):
                pt = None
                an = qdb.analysis.Analysis(job_params['analysis'])
                sql = """SELECT data_type
@@ -567,11 +568,21 @@ def _complete_artifact_transformation(self, artifacts_data):
            templates = set()
            for artifact in self.input_artifacts:
                templates.update(pt.id for pt in artifact.prep_templates)
+           template = None
+           analysis = None
            if len(templates) > 1:
                raise qdb.exceptions.QiitaDBError(
                    "Currently only single prep template "
                    "is allowed, found %d" % len(templates))
-           template = templates.pop()
+           elif len(templates) == 1:
+               template = templates.pop()
+           else:
+               # In this case we have 0 templates. What this means is that
+               # this artifact is being generated in the analysis pipeline.
+               # All the artifacts included in the analysis pipeline
+               # belong to the same analysis, so we can just ask the
+               # first artifact for the analysis that it belongs to
+               analysis = self.input_artifacts[0].analysis.id

            # Once the validate job completes, it needs to know if it has
            # been generated from a command (and how) or if it has been
@@ -592,6 +603,7 @@
                cmd, values_dict={'files': dumps(filepaths),
                                  'artifact_type': atype,
                                  'template': template,
+                                 'analysis': analysis,
                                  'provenance': dumps(provenance)})
            validator_jobs.append(
                ProcessingJob.create(self.user, validate_params))
@@ -1196,7 +1208,16 @@ def _raise_if_not_in_construction(self):
                     WHERE processing_job_workflow_id = %s"""
            qdb.sql_connection.TRN.add(sql, [self.id])
            res = qdb.sql_connection.TRN.execute_fetchflatten()
-           if len(res) != 1 or res[0] != 'in_construction':
+           # If the above SQL query returns a single element and the value
+           # is different from 'in_construction', it means that all the jobs
+           # in the workflow are in the same status and it is not
+           # 'in_construction', hence raise the error. If the above SQL query
+           # returns more than one value (len(res) > 1), it means that the
+           # workflow is no longer in construction because some jobs have
+           # been submitted for processing. Note that if the above query
+           # doesn't return any value, no jobs are in the workflow, which
+           # means that the workflow is in construction.
+           if (len(res) == 1 and res[0] != 'in_construction') or len(res) > 1:
                # The workflow is no longer in construction, raise an error
                raise qdb.exceptions.QiitaDBOperationNotPermittedError(
                    "Workflow not in construction")
diff --git a/qiita_db/software.py b/qiita_db/software.py
index 7335e6100..7c0bf1b79 100644
--- a/qiita_db/software.py
+++ b/qiita_db/software.py
@@ -44,7 +44,8 @@ class Command(qdb.base.QiitaObject):
    _table = "software_command"

    @classmethod
-   def get_commands_by_input_type(cls, artifact_types, active_only=True):
+   def get_commands_by_input_type(cls, artifact_types, active_only=True,
+                                  exclude_analysis=True):
        """Returns the commands that can process the given artifact types

        Parameters
        ----------
@@ -70,6 +71,8 @@ def get_commands_by_input_type(cls, artifact_types, active_only=True):
                     WHERE artifact_type IN %s"""
            if active_only:
                sql += " AND active = True"
+           if exclude_analysis:
+               sql += " AND is_analysis = False"
            qdb.sql_connection.TRN.add(sql, [tuple(artifact_types)])
            for c_id in qdb.sql_connection.TRN.execute_fetchflatten():
                yield cls(c_id)
@@ -191,7 +194,8 @@ def exists(cls, software, name):
            return qdb.sql_connection.TRN.execute_fetchlast()

    @classmethod
-   def create(cls, software, name, description, parameters, outputs=None):
+   def create(cls, software, name, description, parameters, outputs=None,
+              analysis_only=False):
        r"""Creates a new command in the system

        The supported types for the parameters are:
@@ -222,6 +226,9 @@ def create(cls, software, name, description, parameters, outputs=None):
        outputs : dict, optional
            The description of the outputs that this command generated.
            The format is: {output_name: artifact_type}
+       analysis_only : bool, optional
+           If True, the command will only be available in the analysis
+           pipeline. Default: False.
Returns ------- @@ -297,10 +304,10 @@ def create(cls, software, name, description, parameters, outputs=None): % (software.id, name)) # Add the command to the DB sql = """INSERT INTO qiita.software_command - (name, software_id, description) - VALUES (%s, %s, %s) + (name, software_id, description, is_analysis) + VALUES (%s, %s, %s, %s) RETURNING command_id""" - sql_params = [name, software.id, description] + sql_params = [name, software.id, description, analysis_only] qdb.sql_connection.TRN.add(sql, sql_params) c_id = qdb.sql_connection.TRN.execute_fetchlast() @@ -508,6 +515,22 @@ def activate(self): qdb.sql_connection.TRN.add(sql, [True, self.id]) return qdb.sql_connection.TRN.execute() + @property + def analysis_only(self): + """Returns if the command is an analysis-only command + + Returns + ------- + bool + Whether the command is analysis only or not + """ + with qdb.sql_connection.TRN: + sql = """SELECT is_analysis + FROM qiita.software_command + WHERE command_id = %s""" + qdb.sql_connection.TRN.add(sql, [self.id]) + return qdb.sql_connection.TRN.execute_fetchlast() + class Software(qdb.base.QiitaObject): r"""A software package available in the system diff --git a/qiita_db/support_files/patches/52.sql b/qiita_db/support_files/patches/52.sql index a484d5c24..24c0904d8 100644 --- a/qiita_db/support_files/patches/52.sql +++ b/qiita_db/support_files/patches/52.sql @@ -49,6 +49,11 @@ ALTER TABLE qiita.analysis ADD logging_id bigint ; CREATE INDEX idx_analysis_0 ON qiita.analysis ( logging_id ) ; ALTER TABLE qiita.analysis ADD CONSTRAINT fk_analysis_logging FOREIGN KEY ( logging_id ) REFERENCES qiita.logging( logging_id ) ; +-- Alter the software command table to differentiate between commands that +-- apply to the analysis pipeline or commands that apply on the study +-- processing pipeline +ALTER TABLE qiita.software_command ADD is_analysis bool DEFAULT 'False' NOT NULL; + -- We can handle some of the special cases here, so we simplify the work in the -- python patch @@ -102,7 +107,7 @@ DECLARE baf_cmd_id bigint; BEGIN INSERT INTO qiita.software (name, version, description, environment_script, start_script, software_type_id, active) - VALUES ('Qiita', 'alpha', 'Internal Qiita jobs', 'source activate qiita', 'qiita-private-2', 3, True) + VALUES ('Qiita', 'alpha', 'Internal Qiita jobs', 'source activate qiita', 'qiita-private-plugin', 3, True) RETURNING software_id INTO qiita_sw_id; INSERT INTO qiita.software_command (software_id, name, description) diff --git a/qiita_db/support_files/patches/python_patches/52.py b/qiita_db/support_files/patches/python_patches/52.py index 9d462d5c9..973895282 100644 --- a/qiita_db/support_files/patches/python_patches/52.py +++ b/qiita_db/support_files/patches/python_patches/52.py @@ -94,6 +94,7 @@ def create_non_rarefied_biom_artifact(analysis, biom_data, rarefied_table): # Note that we are sure that the biom table exists for sure, so # no need to check if biom_fp is undefined biom_table = load_table(biom_fp) + samples = set(samples).intersection(biom_table.ids()) biom_table.filter(samples, axis='sample', inplace=True) new_table = new_table.merge(biom_table) ids_map.update({sid: "%d.%s" % (a_id, sid) @@ -498,8 +499,9 @@ def transfer_job(analysis, command_id, params, input_artifact_id, job_data, qiime_id = TRN.execute_fetchlast() # Step 2: Insert the new commands in the software_command table - sql = """INSERT INTO qiita.software_command (software_id, name, description) - VALUES (%s, %s, %s) + sql = """INSERT INTO qiita.software_command + (software_id, name, 
description, is_analysis)
+            VALUES (%s, %s, %s, TRUE)
             RETURNING command_id"""
    TRN.add(sql, [qiime_id, 'Summarize Taxa', 'Plots taxonomy summaries at '
                  'different taxonomy levels'])
@@ -606,7 +608,7 @@ def transfer_job(analysis, command_id, params, input_artifact_id, job_data,
        [sum_taxa_cmd_id, 'Defaults',
         '{"sort": false, "metadata_category": ""}'],
        [bdiv_cmd_id, 'Unweighted UniFrac',
-        '{"metrics": "unweighted_unifrac", "tree": ""}'],
+        '{"metric": "unweighted_unifrac", "tree": ""}'],
        [arare_cmd_id, 'Defaults',
         '{"max_rare_depth": "Default", "tree": "", "num_steps": 10, '
         '"min_rare_depth": 10, "metrics": ["chao1", "observed_otus"]}'],
@@ -669,7 +671,10 @@ def transfer_job(analysis, command_id, params, input_artifact_id, job_data,
                                           srare_cmd_out_id)
    else:
        # The BIOM table was not rarefied, use current table as initial
-       initial_biom_id = transfer_file_to_artifact()
+       initial_biom_id = transfer_file_to_artifact(
+           analysis['analysis_id'], analysis['timestamp'], None,
+           biom_data['data_type_id'], None, 7,
+           biom_data['filepath_id'])

    # Loop through all the jobs that used this biom table as input
    sql = """SELECT *
diff --git a/qiita_db/support_files/qiita-db.dbs b/qiita_db/support_files/qiita-db.dbs
index ca22db34c..84ac07efc 100644
--- a/qiita_db/support_files/qiita-db.dbs
+++ b/qiita_db/support_files/qiita-db.dbs
[regenerated DbSchema XML omitted: the qiita.software_command table definition gains an is_analysis column defaulting to 'False', and the surrounding layout/comment sections are updated accordingly]
diff --git a/qiita_db/support_files/qiita-db.html b/qiita_db/support_files/qiita-db.html
index b7b76b61e..97ddb21f5 100644
--- a/qiita_db/support_files/qiita-db.html
+++ b/qiita_db/support_files/qiita-db.html
[regenerated schema-browser SVG/tooltip markup omitted: the foreign-key tooltips are reordered and the software_command summary block is moved and extended with is_analysis; the detailed software_command table listings follow below]

@@ -5229,55 +5230,6 @@ -

[removed block: old software_command table documentation]
Table software_command
command_id bigserial NOT NULL
name varchar NOT NULL
software_id bigint NOT NULL
description varchar NOT NULL
active bool NOT NULL DEFAULT 'True'
Indexes
pk_soft_command primary key ON command_id
idx_soft_command ON software_id
Foreign Keys
fk_soft_command_software ( software_id ) ref software (software_id)
-

@@ -5799,4 +5751,58 @@
+

[added block: updated software_command table documentation, now including is_analysis]
Table software_command
command_id bigserial NOT NULL
name varchar NOT NULL
software_id bigint NOT NULL
description varchar NOT NULL
active bool NOT NULL DEFAULT 'True'
is_analysis bool NOT NULL DEFAULT 'False'
Indexes
pk_soft_command primary key ON command_id
idx_soft_command ON software_id
Foreign Keys
fk_soft_command_software ( software_id ) ref software (software_id)
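The schema rows above back the new analysis/processing split in qiita_db/software.py. As a minimal sketch only (it assumes a configured Qiita test database and the commands created in qiita_db/test/test_software.py), the flag can be queried through the API added in this patch:

```python
import qiita_db as qdb

# Study-processing view: analysis-only commands are excluded by default,
# because get_commands_by_input_type() now appends "AND is_analysis = False".
processing_cmds = list(qdb.software.Command.get_commands_by_input_type(
    ['FASTQ', 'SFF'], active_only=False))

# Passing exclude_analysis=False also yields commands registered with
# analysis_only=True (stored as is_analysis = TRUE in qiita.software_command).
all_cmds = list(qdb.software.Command.get_commands_by_input_type(
    ['FASTQ', 'SFF'], active_only=False, exclude_analysis=False))

# The new Command.analysis_only property reads the same column back.
analysis_cmds = [c for c in all_cmds if c.analysis_only]
```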
+ \ No newline at end of file diff --git a/qiita_db/test/test_software.py b/qiita_db/test/test_software.py index a19d577fe..9300aea23 100644 --- a/qiita_db/test/test_software.py +++ b/qiita_db/test/test_software.py @@ -55,6 +55,21 @@ def test_get_commands_by_input_type(self): exp = [qdb.software.Command(1), qdb.software.Command(2)] self.assertItemsEqual(obs, exp) + new_cmd = qdb.software.Command.create( + self.software, "Analysis Only Command", + "This is a command for testing", + {'req_art': ['artifact:["FASTQ"]', None]}, + analysis_only=True) + obs = list(qdb.software.Command.get_commands_by_input_type( + ['FASTQ', 'SFF'], active_only=False)) + exp = [qdb.software.Command(1), qdb.software.Command(2)] + self.assertItemsEqual(obs, exp) + + obs = list(qdb.software.Command.get_commands_by_input_type( + ['FASTQ', 'SFF'], active_only=False, exclude_analysis=False)) + exp = [qdb.software.Command(1), qdb.software.Command(2), new_cmd] + self.assertItemsEqual(obs, exp) + def test_get_html_artifact(self): obs = qdb.software.Command.get_html_generator('BIOM') exp = qdb.software.Command(5) @@ -278,10 +293,11 @@ def test_create(self): 'opt_choice_param': ['choice:["opt1", "opt2"]', 'opt1'], 'opt_bool': ['boolean', 'False']} self.assertEqual(obs.optional_parameters, exp_optional) + self.assertFalse(obs.analysis_only) obs = qdb.software.Command.create( self.software, "Test Command 2", "This is a command for testing", - self.parameters) + self.parameters, analysis_only=True) self.assertEqual(obs.name, "Test Command 2") self.assertEqual(obs.description, "This is a command for testing") exp_required = {'req_param': ('string', [None]), @@ -292,6 +308,7 @@ def test_create(self): 'opt_choice_param': ['choice:["opt1", "opt2"]', 'opt1'], 'opt_bool': ['boolean', 'False']} self.assertEqual(obs.optional_parameters, exp_optional) + self.assertTrue(obs.analysis_only) def test_activate(self): qdb.software.Software.deactivate_all() diff --git a/qiita_pet/handlers/analysis_handlers/base_handlers.py b/qiita_pet/handlers/analysis_handlers/base_handlers.py index 52c00a852..f69eda5be 100644 --- a/qiita_pet/handlers/analysis_handlers/base_handlers.py +++ b/qiita_pet/handlers/analysis_handlers/base_handlers.py @@ -52,13 +52,16 @@ def analysis_description_handler_get_request(analysis_id, user): job_info = loads(job_info) job_id = job_info['job_id'] if job_id: - redis_info = loads(r_client.get(job_id)) - if redis_info['status_msg'] == 'running': - alert_msg = 'An artifact is being deleted from this analysis' - elif redis_info['return'] is not None: - alert_type = redis_info['return']['status'] - alert_msg = redis_info['return']['message'].replace( - '\n', '
') + r_payload = r_client.get(job_id) + if r_payload: + redis_info = loads(r_client.get(job_id)) + if redis_info['status_msg'] == 'running': + alert_msg = ('An artifact is being deleted from this ' + 'analysis') + elif redis_info['return'] is not None: + alert_type = redis_info['return']['status'] + alert_msg = redis_info['return']['message'].replace( + '\n', '
') return {'analysis_name': analysis.name, 'analysis_id': analysis.id, diff --git a/qiita_pet/handlers/artifact_handlers/process_handlers.py b/qiita_pet/handlers/artifact_handlers/process_handlers.py new file mode 100644 index 000000000..3095d9e7b --- /dev/null +++ b/qiita_pet/handlers/artifact_handlers/process_handlers.py @@ -0,0 +1,48 @@ +# ----------------------------------------------------------------------------- +# Copyright (c) 2014--, The Qiita Development Team. +# +# Distributed under the terms of the BSD 3-clause License. +# +# The full license is in the file LICENSE, distributed with this software. +# ----------------------------------------------------------------------------- + +from tornado.web import authenticated + +from qiita_pet.handlers.base_handlers import BaseHandler +from qiita_pet.handlers.util import to_int +from qiita_db.artifact import Artifact + + +def process_artifact_handler_get_req(artifact_id): + """Returns the information for the process artifact handler + + Parameters + ---------- + artifact_id : int + The artifact to be processed + + Returns + ------- + dict of str + A dictionary containing the artifact information + {'status': str, + 'message': str, + 'name': str, + 'type': str} + """ + artifact = Artifact(artifact_id) + + return {'status': 'success', + 'message': '', + 'name': artifact.name, + 'type': artifact.artifact_type, + 'artifact_id': artifact.id} + + +class ProcessArtifactHandler(BaseHandler): + @authenticated + def get(self, artifact_id): + # Check if the user has access to the artifact + artifact_id = to_int(artifact_id) + res = process_artifact_handler_get_req(artifact_id) + self.render('artifact_ajax/processing_artifact.html', **res) diff --git a/qiita_pet/handlers/artifact_handlers/tests/test_process_handlers.py b/qiita_pet/handlers/artifact_handlers/tests/test_process_handlers.py new file mode 100644 index 000000000..81fe4878c --- /dev/null +++ b/qiita_pet/handlers/artifact_handlers/tests/test_process_handlers.py @@ -0,0 +1,25 @@ +# ----------------------------------------------------------------------------- +# Copyright (c) 2014--, The Qiita Development Team. +# +# Distributed under the terms of the BSD 3-clause License. +# +# The full license is in the file LICENSE, distributed with this software. +# ----------------------------------------------------------------------------- + +from unittest import TestCase, main + +from qiita_core.util import qiita_test_checker +from qiita_pet.handlers.artifact_handlers.process_handlers import ( + process_artifact_handler_get_req) + + +@qiita_test_checker() +class TestProcessHandlersUtils(TestCase): + def test_process_artifact_handler_get_req(self): + obs = process_artifact_handler_get_req(1) + exp = {} + self.assertEqual(obs, exp) + + +if __name__ == '__main__': + main() diff --git a/qiita_pet/templates/artifact_ajax/processing_artifact.html b/qiita_pet/templates/artifact_ajax/processing_artifact.html new file mode 100644 index 000000000..a38a19c9a --- /dev/null +++ b/qiita_pet/templates/artifact_ajax/processing_artifact.html @@ -0,0 +1,425 @@ +{% from qiita_core.qiita_settings import qiita_config %} + + + + +
[425-line Tornado template omitted: the stripped markup and JavaScript render a "Processing {{name}} (ID: {{artifact_id}})" heading, a "Processing workflow" panel for building the job graph, and the notes "Don't forget to hit "Run" once you are done with your workflow!" and "Wondering what to select? Check our data processing recommendations."]
diff --git a/qiita_pet/templates/list_analyses.html b/qiita_pet/templates/list_analyses.html index 689a9492f..af408ee59 100644 --- a/qiita_pet/templates/list_analyses.html +++ b/qiita_pet/templates/list_analyses.html @@ -37,9 +37,6 @@

  Create an analysis
  {{analysis.description}}
-
-  {{status}}
-
  {{ analysis.timestamp.strftime("%m/%d/%y %H:%M:%S")}}
diff --git a/qiita_pet/templates/sitebase.html b/qiita_pet/templates/sitebase.html
index 106907bae..367b1358c 100644
--- a/qiita_pet/templates/sitebase.html
+++ b/qiita_pet/templates/sitebase.html
@@ -390,7 +390,7 @@
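For context on how the new analysis_only flag reaches the endpoint changed in qiita_db/handlers/plugin.py, here is a hedged sketch of the request a plugin could make when registering an analysis-only command. The payload mirrors the new test case in qiita_db/handlers/tests/test_plugin.py; the server URL and OAuth token are placeholders, not values taken from this patch.

```python
from json import dumps
import requests

QIITA_URL = 'https://qiita.example.org'        # placeholder server URL
HEADERS = {'Authorization': 'Bearer <token>'}  # placeholder OAuth token

data = {
    'name': 'New analysis command',
    'description': 'Analysis command added for testing',
    'required_parameters': dumps({'in_data': ['artifact:["BIOM"]', None]}),
    'optional_parameters': dumps({'param1': ['string', 'default']}),
    'outputs': dumps({'outtable': 'BIOM'}),
    'default_parameter_sets': dumps({'dflt1': {'param1': 'test'}}),
    # New in this patch: the handler forwards this value to Command.create(),
    # which stores it in the is_analysis column of qiita.software_command.
    'analysis_only': True,
}

resp = requests.post(QIITA_URL + '/qiita_db/plugins/QIIME/1.9.1/commands/',
                     headers=HEADERS, data=data)
resp.raise_for_status()
```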