Commit
Merge pull request #189 from nf-core/database-check
Adds (native groovy!) database check
jfy133 authored Dec 13, 2022
2 parents 971f6a6 + 7c66968 commit 578081d
Showing 5 changed files with 49 additions and 53 deletions.
8 changes: 0 additions & 8 deletions conf/modules.config
@@ -12,14 +12,6 @@

process {

withName: DATABASE_CHECK {
publishDir = [
path: { "${params.outdir}/pipeline_info" },
mode: params.publish_dir_mode,
saveAs: { filename -> filename.equals('versions.yml') ? null : filename }
]
}

withName: FASTQC {
ext.args = '--quiet'
ext.prefix = { "${meta.id}_${meta.run_accession}_raw" }
29 changes: 0 additions & 29 deletions modules/local/database_check.nf

This file was deleted.

53 changes: 43 additions & 10 deletions subworkflows/local/db_check.nf
@@ -2,23 +2,34 @@
// Check input database sheet and get database channels
//

include { DATABASE_CHECK } from '../../modules/local/database_check'
include { UNTAR } from '../../modules/nf-core/untar/main'

workflow DB_CHECK {
take:
dbsheet // file: /path/to/dbsheet.csv

main:
ch_versions = Channel.empty()

// TODO: make database sheet check
// Checks:
// 1) no duplicates,
// 2) args do not have quotes, e.g. just `,,` and NOT `,"",`
parsed_samplesheet = DATABASE_CHECK ( dbsheet )
.csv
// special check _between_ rows: group rows by tool so duplicated database names can be detected
// note: this runs in parallel with the within-row validity checks, but should finish faster and therefore fail first
Channel.fromPath(dbsheet)
.splitCsv ( header:true, sep:',' )
.map {[it.tool, it.db_name] }
.groupTuple()
.map {
tool, db_name ->
def unique_names = db_name.unique(false)
if ( unique_names.size() < db_name.size() ) exit 1, "[nf-core/taxprofiler] ERROR: Each database for a tool must have a unique name, duplicates detected. Tool: ${tool}, Database name: ${unique_names}"
}

// normal checks for within-row validity, so these can be moved to separate functions
parsed_samplesheet = Channel.fromPath(dbsheet)
.splitCsv ( header:true, sep:',' )
.map { create_db_channels(it) }
.map {
validate_db_rows(it)
create_db_channels(it)
}

ch_dbs_for_untar = parsed_samplesheet
.branch {
Expand All @@ -29,12 +40,32 @@ workflow DB_CHECK {
// TODO Filter to only run UNTAR on DBs of tools actually using?
// TODO make optional whether to save
UNTAR ( ch_dbs_for_untar.untar )
ch_versions = ch_versions.mix(UNTAR.out.versions.first())

ch_final_dbs = ch_dbs_for_untar.skip.mix( UNTAR.out.untar )

emit:
dbs = ch_final_dbs // channel: [ val(meta), [ db ] ]
versions = DATABASE_CHECK.out.versions.mix(UNTAR.out.versions.first()) // channel: [ versions.yml ]
versions = ch_versions // channel: [ versions.yml ]
}

def validate_db_rows(LinkedHashMap row){

// check minimum number of columns
if (row.size() < 4) exit 1, "[nf-core/taxprofiler] ERROR: Invalid database input sheet - malformed row (e.g. missing column). See documentation for more information. Error in: ${row}"

// all columns there
def expected_headers = ['tool', 'db_name', 'db_params', 'db_path']
if ( !row.keySet().containsAll(expected_headers) ) exit 1, "[nf-core/taxprofiler] ERROR: Invalid database input sheet - malformed column names. Please check input CSV. Column names should be: ${expected_headers.join(", ")}"

// valid tools specified // TODO: finish list
def expected_tools = [ "bracken", "centrifuge", "diamond", "kaiju", "kraken2", "krakenuniq", "malt", "metaphlan3", "motus" ]
if ( !expected_tools.contains(row.tool) ) exit 1, "[nf-core/taxprofiler] ERROR: Invalid tool name. Please see documentation for all supported profilers. Error in: ${row}"

// detect quotes in params
if ( row.db_params.contains('"') ) exit 1, "[nf-core/taxprofiler] ERROR: Invalid database db_params entry. No quotes allowed. Error in: ${row}"
if ( row.db_params.contains("'") ) exit 1, "[nf-core/taxprofiler] ERROR: Invalid database db_params entry. No quotes allowed. Error in: ${row}"

}

def create_db_channels(LinkedHashMap row) {
Expand All @@ -45,9 +76,11 @@ def create_db_channels(LinkedHashMap row) {

def array = []
if (!file(row.db_path, type: 'dir').exists()) {
exit 1, "ERROR: Please check input samplesheet -> database could not be found!\n${row.db_path}"
exit 1, "ERROR: Please check input samplesheet -> database path could not be found!\n${row.db_path}"
}
array = [ meta, file(row.db_path) ]

return array
}
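
For reference, the between-row duplicate check added above hinges on Groovy's `unique(false)`, which returns a de-duplicated copy of a list without mutating the original, so a size difference against the original list signals a repeated database name for a tool. A minimal standalone Groovy sketch, using hypothetical tool and database names:

def tool     = 'kraken2'                          // hypothetical tool name
def db_names = ['standard', 'viral', 'standard']  // hypothetical database names, one duplicated

// unique(false) returns a copy with duplicates removed and leaves db_names untouched
def unique_names = db_names.unique(false)
if ( unique_names.size() < db_names.size() ) {
    println "ERROR: duplicate database name for ${tool}: ${unique_names}"
}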


10 changes: 5 additions & 5 deletions subworkflows/local/profiling.nf
@@ -41,14 +41,14 @@ workflow PROFILING {
}
.combine(databases)
.branch {
malt: it[2]['tool'] == 'malt'
kraken2: it[2]['tool'] == 'kraken2' || it[2]['tool'] == 'bracken' // to reuse the kraken module to produce the input data for bracken
metaphlan3: it[2]['tool'] == 'metaphlan3'
centrifuge: it[2]['tool'] == 'centrifuge'
kaiju: it[2]['tool'] == 'kaiju'
diamond: it[2]['tool'] == 'diamond'
motus: it[2]['tool'] == 'motus'
kaiju: it[2]['tool'] == 'kaiju'
kraken2: it[2]['tool'] == 'kraken2' || it[2]['tool'] == 'bracken' // to reuse the kraken module to produce the input data for bracken
krakenuniq: it[2]['tool'] == 'krakenuniq'
malt: it[2]['tool'] == 'malt'
metaphlan3: it[2]['tool'] == 'metaphlan3'
motus: it[2]['tool'] == 'motus'
unknown: true
}

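In the block above, `branch` routes each combined tuple to a per-profiler channel by its `tool` field, with `unknown: true` as the catch-all; the change itself only reorders the conditions alphabetically (and keeps bracken on the kraken2 branch). A minimal, simplified sketch of the routing, using hypothetical two-element [reads, db_meta] tuples rather than the pipeline's real structure:

// hypothetical [reads, db_meta] pairs, for illustration only
Channel
    .of(
        [ 'sample1.fq', [ tool: 'kraken2' ] ],
        [ 'sample1.fq', [ tool: 'diamond' ] ]
    )
    .branch {
        diamond: it[1]['tool'] == 'diamond'
        kraken2: it[1]['tool'] == 'kraken2'
        unknown: true
    }
    .set { ch_input_for_profiling }

ch_input_for_profiling.kraken2.view()   // emits only the kraken2 tuple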
2 changes: 1 addition & 1 deletion workflows/taxprofiler.nf
@@ -25,7 +25,7 @@ if ( params.input ) {
exit 1, "Input samplesheet, or PEP config and base directory not specified"
}

if (params.databases) { ch_databases = file(params.databases) } else { exit 1, 'Input database sheet not specified!' }
if (params.databases) { ch_databases = file(params.databases, checkIfExists: true) } else { exit 1, 'Input database sheet not specified!' }

if (params.shortread_qc_mergepairs && params.run_malt ) log.warn "[nf-core/taxprofiler] MALT does not accept uncollapsed paired-reads. Pairs will be profiled as separate files."
if (params.shortread_qc_includeunmerged && !params.shortread_qc_mergepairs) exit 1, "ERROR: [nf-core/taxprofiler] cannot include unmerged reads when merging is not turned on. Please specify --shortread_qc_mergepairs"
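
The `checkIfExists: true` option added here makes Nextflow's `file()` raise an error during pipeline initialisation if the given path does not exist, rather than letting the run fail later inside a downstream process. A minimal sketch, assuming a hypothetical databases.csv path:

params.databases = 'databases.csv'                             // hypothetical path
ch_databases     = file(params.databases, checkIfExists: true) // aborts immediately if the file is missing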
