Skip to content

Commit

Permalink
Merge pull request #455 from maxulysse/dev_tools_update_2
Browse files Browse the repository at this point in the history
Second attempt at updating all modules
  • Loading branch information
maxulysse authored Nov 25, 2021
2 parents 6890014 + 76df2dc commit d01122b
Show file tree
Hide file tree
Showing 169 changed files with 4,103 additions and 2,581 deletions.
2 changes: 2 additions & 0 deletions .gitattributes
Original file line number Diff line number Diff line change
@@ -1 +1,3 @@
*.config linguist-language=nextflow
modules/nf-core/** linguist-generated
subworkflows/nf-core/** linguist-generated
63 changes: 0 additions & 63 deletions .github/ISSUE_TEMPLATE/bug_report.md

This file was deleted.

52 changes: 52 additions & 0 deletions .github/ISSUE_TEMPLATE/bug_report.yml
Original file line number Diff line number Diff line change
@@ -0,0 +1,52 @@

name: Bug report
description: Report something that is broken or incorrect
labels: bug
body:

- type: markdown
attributes:
value: |
Before you post this issue, please check the documentation:
- [nf-core website: troubleshooting](https://nf-co.re/usage/troubleshooting)
- [nf-core/sarek pipeline documentation](https://nf-co.re/sarek/usage)
- type: textarea
id: description
attributes:
label: Description of the bug
description: A clear and concise description of what the bug is.
validations:
required: true

- type: textarea
id: command_used
attributes:
label: Command used and terminal output
description: Steps to reproduce the behaviour. Please paste the command you used to launch the pipeline and the output from your terminal.
render: console
placeholder: |
$ nextflow run ...
Some output where something broke
- type: textarea
id: files
attributes:
label: Relevant files
description: |
Please upload (drag and drop) any relevant files. Make into a `.zip` file if the extension is not allowed.
Your verbose log file `.nextflow.log` is often useful _(this is a hidden file in the directory where you launched the pipeline)_ as well as custom Nextflow configuration files.
- type: textarea
id: system
attributes:
label: System information
description: |
* Nextflow version _(eg. 21.04.01)_
* Hardware _(eg. HPC, Desktop, Cloud)_
* Executor _(eg. slurm, local, awsbatch)_
* Container engine: _(e.g. Docker, Singularity, Conda, Podman, Shifter or Charliecloud)_
* OS _(eg. CentOS Linux, macOS, Linux Mint)_
* Version of nf-core/sarek _(eg. 1.1, 1.5, 1.8.2)_
1 change: 0 additions & 1 deletion .github/ISSUE_TEMPLATE/config.yml
Original file line number Diff line number Diff line change
@@ -1,4 +1,3 @@
blank_issues_enabled: false
contact_links:
- name: Join nf-core
url: https://nf-co.re/join
Expand Down
32 changes: 0 additions & 32 deletions .github/ISSUE_TEMPLATE/feature_request.md

This file was deleted.

11 changes: 11 additions & 0 deletions .github/ISSUE_TEMPLATE/feature_request.yml
Original file line number Diff line number Diff line change
@@ -0,0 +1,11 @@
name: Feature request
description: Suggest an idea for the nf-core/sarek pipeline
labels: enhancement
body:
- type: textarea
id: description
attributes:
label: Description of feature
description: Please describe your suggestion for a new feature. It might help to describe a problem or use case, plus any alternatives that you have considered.
validations:
required: true
4 changes: 2 additions & 2 deletions .github/workflows/ci.yml
Original file line number Diff line number Diff line change
Expand Up @@ -32,9 +32,9 @@ jobs:
- 'gatk4_spark'
- 'save_bam_mapped'
- 'skip_markduplicates'
- 'targeted'
# - 'targeted'
- 'tumor_normal_pair'
- 'variant_calling'
# - 'variant_calling'
steps:
- name: Check out pipeline code
uses: actions/checkout@v2
Expand Down
2 changes: 1 addition & 1 deletion .github/workflows/linting.yml
Original file line number Diff line number Diff line change
Expand Up @@ -120,7 +120,7 @@ jobs:
- name: Install dependencies
run: |
python -m pip install --upgrade pip
pip install nf-core
pip install nf-core git+https://github.com/nf-core/tools.git@dev
- name: Run nf-core lint
env:
Expand Down
2 changes: 2 additions & 0 deletions .nf-core.yml
Original file line number Diff line number Diff line change
@@ -1,5 +1,7 @@
lint:
files_unchanged:
- .github/workflows/linting.yml
- assets/multiqc_config.yaml
- assets/nf-core-sarek_logo.png
- docs/images/nf-core-sarek_logo.png
- lib/NfcoreTemplate.groovy
2 changes: 1 addition & 1 deletion conf/genomes.config
Original file line number Diff line number Diff line change
Expand Up @@ -86,8 +86,8 @@ params {
'small_hg38' {
dbsnp = "${params.genomes_base}/data/genomics/homo_sapiens/genome/vcf/dbsnp_146.hg38.vcf.gz"
fasta = "${params.genomes_base}/data/genomics/homo_sapiens/genome/genome.fasta"
fasta_fai = "${params.genomes_base}/data/genomics/homo_sapiens/genome/genome.fasta.fai"
germline_resource = "${params.genomes_base}/data/genomics/homo_sapiens/genome/vcf/gnomAD.r2.1.1.vcf.gz"
intervals = "${params.genomes_base}/data/genomics/homo_sapiens/genome/genome.interval_list"
known_indels = "${params.genomes_base}/data/genomics/homo_sapiens/genome/vcf/mills_and_1000G.indels.vcf.gz"
snpeff_db = 'GRCh38.99'
vep_genome = 'GRCh38'
Expand Down
8 changes: 3 additions & 5 deletions conf/modules.config
Original file line number Diff line number Diff line change
Expand Up @@ -94,13 +94,11 @@ params {
}
'samtools_index_mapping' {
publish_by_meta = true
publish_files = ['bai':'mapped']
publish_files = ['bam': 'mapped', 'bai': 'mapped']
publish_dir = 'preprocessing'
}
'merge_bam_mapping' {
publish_by_meta = true
publish_files = ['bam':'mapped']
publish_dir = 'preprocessing'
publish_files = false
}
'seqkit_split2' {
args = "--by-size ${params.split_fastq}"
Expand All @@ -115,7 +113,7 @@ params {
publish_files = ['metrics': 'markduplicates']
}
'markduplicates' {
args = 'REMOVE_DUPLICATES=false VALIDATION_STRINGENCY=LENIENT'
args = '-REMOVE_DUPLICATES false -VALIDATION_STRINGENCY LENIENT'
suffix = '.md'
publish_by_meta = true
publish_dir = 'preprocessing'
Expand Down
18 changes: 9 additions & 9 deletions conf/test.config
Original file line number Diff line number Diff line change
Expand Up @@ -20,12 +20,12 @@ params {
max_time = 6.h

// Input data
input = 'https://raw.githubusercontent.com/nf-core/test-datasets/sarek/testdata/csv/tiny-manta-normal-https.csv'
input = "${baseDir}/tests/csv/3.0/fastq_single.csv"

// Small reference genome
igenomes_ignore = true
genome = 'smallGRCh37'
genomes_base = "https://raw.githubusercontent.com/nf-core/test-datasets/sarek/reference"
genome = 'small_hg38'
genomes_base = 'https://raw.githubusercontent.com/nf-core/test-datasets/modules'
snpeff_db = 'WBcel235.99'
vep_species = 'caenorhabditis_elegans'
vep_cache_version = '104'
Expand All @@ -42,7 +42,7 @@ profiles {
params.step = 'annotate'
}
pair {
params.input = 'https://raw.githubusercontent.com/nf-core/test-datasets/sarek/testdata/csv/tiny-manta-https.csv'
params.input = "${baseDir}/tests/csv/3.0/fastq_pair.csv"
}
prepare_recalibration {
params.input = 'https://raw.githubusercontent.com/nf-core/test-datasets/sarek/testdata/csv/tiny-mapped-normal-https.csv'
Expand Down Expand Up @@ -77,22 +77,22 @@ profiles {
params.trim_fastq = true
}
use_gatk_spark {
params.use_gatk_spark = 'markduplicates,bqsr'
params.use_gatk_spark = 'bqsr,markduplicates'
}
umi_quiaseq {
params.genome = 'smallGRCh38'
params.genomes_base = 'https://raw.githubusercontent.com/nf-core/test-datasets/sarek/reference/chr20_hg38'
params.input = 'https://raw.githubusercontent.com/nf-core/test-datasets/sarek/testdata/csv/tiny-umi-qiaseq-https.csv'
params.read_structure1 = "12M11S+T"
params.read_structure2 = "12M11S+T"
params.read_structure1 = '12M11S+T'
params.read_structure2 = '12M11S+T'
params.umi = true
}
umi_tso {
genome = 'smallGRCh38'
genomes_base = 'https://raw.githubusercontent.com/nf-core/test-datasets/sarek/reference/chr20_hg38'
input = 'https://raw.githubusercontent.com/nf-core/test-datasets/sarek/testdata/csv/tiny-umi-tso-https.csv'
read_structure1 = "7M1S+T"
read_structure2 = "7M1S+T"
read_structure1 = '7M1S+T'
read_structure2 = '7M1S+T'
umi = true
}
}
Expand Down
22 changes: 17 additions & 5 deletions lib/NfcoreSchema.groovy
Original file line number Diff line number Diff line change
Expand Up @@ -105,9 +105,13 @@ class NfcoreSchema {

// Collect expected parameters from the schema
def expectedParams = []
def enums = [:]
for (group in schemaParams) {
for (p in group.value['properties']) {
expectedParams.push(p.key)
if (group.value['properties'][p.key].containsKey('enum')) {
enums[p.key] = group.value['properties'][p.key]['enum']
}
}
}

Expand Down Expand Up @@ -155,7 +159,7 @@ class NfcoreSchema {
println ''
log.error 'ERROR: Validation of pipeline parameters failed!'
JSONObject exceptionJSON = e.toJSON()
printExceptions(exceptionJSON, params_json, log)
printExceptions(exceptionJSON, params_json, log, enums)
println ''
has_error = true
}
Expand Down Expand Up @@ -260,13 +264,12 @@ class NfcoreSchema {

// Get pipeline parameters defined in JSON Schema
def Map params_summary = [:]
def blacklist = ['hostnames']
def params_map = paramsLoad(getSchemaPath(workflow, schema_filename=schema_filename))
for (group in params_map.keySet()) {
def sub_params = new LinkedHashMap()
def group_params = params_map.get(group) // This gets the parameters of that particular group
for (param in group_params.keySet()) {
if (params.containsKey(param) && !blacklist.contains(param)) {
if (params.containsKey(param)) {
def params_value = params.get(param)
def schema_value = group_params.get(param).default
def param_type = group_params.get(param).type
Expand Down Expand Up @@ -330,7 +333,7 @@ class NfcoreSchema {
//
// Loop over nested exceptions and print the causingException
//
private static void printExceptions(ex_json, params_json, log) {
private static void printExceptions(ex_json, params_json, log, enums, limit=5) {
def causingExceptions = ex_json['causingExceptions']
if (causingExceptions.length() == 0) {
def m = ex_json['message'] =~ /required key \[([^\]]+)\] not found/
Expand All @@ -346,7 +349,16 @@ class NfcoreSchema {
else {
def param = ex_json['pointerToViolation'] - ~/^#\//
def param_val = params_json[param].toString()
log.error "* --${param}: ${ex_json['message']} (${param_val})"
if (enums.containsKey(param)) {
def error_msg = "* --${param}: '${param_val}' is not a valid choice (Available choices"
if (enums[param].size() > limit) {
log.error "${error_msg} (${limit} of ${enums[param].size()}): ${enums[param][0..limit-1].join(', ')}, ... )"
} else {
log.error "${error_msg}: ${enums[param].join(', ')})"
}
} else {
log.error "* --${param}: ${ex_json['message']} (${param_val})"
}
}
}
for (ex in causingExceptions) {
Expand Down
Loading

0 comments on commit d01122b

Please sign in to comment.