diff --git a/modules/nf-core/ensemblvep/filtervep/tests/nextflow.config b/modules/nf-core/ensemblvep/filtervep/tests/nextflow.config
index aee2e62b02f..16cfe993999 100644
--- a/modules/nf-core/ensemblvep/filtervep/tests/nextflow.config
+++ b/modules/nf-core/ensemblvep/filtervep/tests/nextflow.config
@@ -1,10 +1,10 @@
/*
-========================================================================================
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Nextflow config file for running tests
-========================================================================================
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
*/
params {
- vep_cache_version = "112"
- vep_genome = "WBcel235"
- vep_species = "caenorhabditis_elegans"
+ vep_cache_version = "112"
+ vep_genome = "WBcel235"
+ vep_species = "caenorhabditis_elegans"
}
diff --git a/modules/nf-core/ensemblvep/filtervep/tests/tab.gz.config b/modules/nf-core/ensemblvep/filtervep/tests/tab.gz.config
index 0aa5ea75228..cdad2d94d35 100644
--- a/modules/nf-core/ensemblvep/filtervep/tests/tab.gz.config
+++ b/modules/nf-core/ensemblvep/filtervep/tests/tab.gz.config
@@ -1,7 +1,7 @@
/*
-========================================================================================
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Nextflow config file for running tests
-========================================================================================
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
*/
process {
@@ -13,12 +13,12 @@ process {
}
withName: ENSEMBLVEP_VEP {
- ext.args = '--tab'
+ ext.args = '--tab'
ext.prefix = { "${meta.id}_vep" }
}
withName: ENSEMBLVEP_FILTERVEP {
- ext.args = '--filter "Feature_type is Transcript"'
+ ext.args = '--filter "Feature_type is Transcript"'
ext.suffix = "tab"
}
}
diff --git a/modules/nf-core/ensemblvep/filtervep/tests/vcf.config b/modules/nf-core/ensemblvep/filtervep/tests/vcf.config
index 0b65fe5c4f1..ee2aef5745b 100644
--- a/modules/nf-core/ensemblvep/filtervep/tests/vcf.config
+++ b/modules/nf-core/ensemblvep/filtervep/tests/vcf.config
@@ -1,7 +1,7 @@
/*
-========================================================================================
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Nextflow config file for running tests
-========================================================================================
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
*/
process {
@@ -13,7 +13,7 @@ process {
}
withName: ENSEMBLVEP_VEP {
- ext.args = '--vcf'
+ ext.args = '--vcf'
ext.prefix = { "${meta.id}_vep" }
}
diff --git a/subworkflows/nf-core/utils_nextflow_pipeline/main.nf b/subworkflows/nf-core/utils_nextflow_pipeline/main.nf
index 28e32b200e4..2b0dc67a6a0 100644
--- a/subworkflows/nf-core/utils_nextflow_pipeline/main.nf
+++ b/subworkflows/nf-core/utils_nextflow_pipeline/main.nf
@@ -3,13 +3,12 @@
//
/*
-========================================================================================
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
SUBWORKFLOW DEFINITION
-========================================================================================
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
*/
workflow UTILS_NEXTFLOW_PIPELINE {
-
take:
print_version // boolean: print version
dump_parameters // boolean: dump parameters
@@ -22,7 +21,7 @@ workflow UTILS_NEXTFLOW_PIPELINE {
// Print workflow version and exit on --version
//
if (print_version) {
- log.info "${workflow.manifest.name} ${getWorkflowVersion()}"
+ log.info("${workflow.manifest.name} ${getWorkflowVersion()}")
System.exit(0)
}
@@ -45,9 +44,9 @@ workflow UTILS_NEXTFLOW_PIPELINE {
}
/*
-========================================================================================
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
FUNCTIONS
-========================================================================================
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
*/
//
@@ -72,11 +71,11 @@ def getWorkflowVersion() {
// Dump pipeline parameters to a JSON file
//
def dumpParametersToJSON(outdir) {
- def timestamp = new java.util.Date().format( 'yyyy-MM-dd_HH-mm-ss')
- def filename = "params_${timestamp}.json"
- def temp_pf = new File(workflow.launchDir.toString(), ".${filename}")
- def jsonStr = groovy.json.JsonOutput.toJson(params)
- temp_pf.text = groovy.json.JsonOutput.prettyPrint(jsonStr)
+ def timestamp = new java.util.Date().format('yyyy-MM-dd_HH-mm-ss')
+ def filename = "params_${timestamp}.json"
+ def temp_pf = new File(workflow.launchDir.toString(), ".${filename}")
+ def jsonStr = groovy.json.JsonOutput.toJson(params)
+ temp_pf.text = groovy.json.JsonOutput.prettyPrint(jsonStr)
nextflow.extension.FilesEx.copyTo(temp_pf.toPath(), "${outdir}/pipeline_info/params_${timestamp}.json")
temp_pf.delete()
@@ -91,9 +90,14 @@ def checkCondaChannels() {
try {
def config = parser.load("conda config --show channels".execute().text)
channels = config.channels
- } catch(NullPointerException | IOException e) {
- log.warn "Could not verify conda channel configuration."
- return
+ }
+ catch (NullPointerException e) {
+ log.warn("Could not verify conda channel configuration.")
+ return null
+ }
+ catch (IOException e) {
+ log.warn("Could not verify conda channel configuration.")
+ return null
}
// Check that all channels are present
@@ -106,19 +110,13 @@ def checkCondaChannels() {
required_channels_in_order.eachWithIndex { channel, index ->
if (index < required_channels_in_order.size() - 1) {
- channel_priority_violation |= !(channels.indexOf(channel) < channels.indexOf(required_channels_in_order[index+1]))
+ channel_priority_violation |= !(channels.indexOf(channel) < channels.indexOf(required_channels_in_order[index + 1]))
}
}
if (channels_missing | channel_priority_violation) {
- log.warn "~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~\n" +
- " There is a problem with your Conda configuration!\n\n" +
- " You will need to set-up the conda-forge and bioconda channels correctly.\n" +
- " Please refer to https://bioconda.github.io/\n" +
- " The observed channel order is \n" +
- " ${channels}\n" +
- " but the following channel order is required:\n" +
- " ${required_channels_in_order}\n" +
- "~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~"
+ log.warn(
+ "~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~\n" + " There is a problem with your Conda configuration!\n\n" + " You will need to set-up the conda-forge and bioconda channels correctly.\n" + " Please refer to https://bioconda.github.io/\n" + " The observed channel order is \n" + " ${channels}\n" + " but the following channel order is required:\n" + " ${required_channels_in_order}\n" + "~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~"
+ )
}
}
diff --git a/subworkflows/nf-core/utils_nfcore_pipeline/main.nf b/subworkflows/nf-core/utils_nfcore_pipeline/main.nf
index cbd8495bb60..8cda47bcff3 100644
--- a/subworkflows/nf-core/utils_nfcore_pipeline/main.nf
+++ b/subworkflows/nf-core/utils_nfcore_pipeline/main.nf
@@ -3,13 +3,12 @@
//
/*
-========================================================================================
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
SUBWORKFLOW DEFINITION
-========================================================================================
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
*/
workflow UTILS_NFCORE_PIPELINE {
-
take:
nextflow_cli_args
@@ -22,9 +21,9 @@ workflow UTILS_NFCORE_PIPELINE {
}
/*
-========================================================================================
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
FUNCTIONS
-========================================================================================
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
*/
//
@@ -33,12 +32,9 @@ workflow UTILS_NFCORE_PIPELINE {
def checkConfigProvided() {
def valid_config = true as Boolean
if (workflow.profile == 'standard' && workflow.configFiles.size() <= 1) {
- log.warn "[$workflow.manifest.name] You are attempting to run the pipeline without any custom configuration!\n\n" +
- "This will be dependent on your local compute environment but can be achieved via one or more of the following:\n" +
- " (1) Using an existing pipeline profile e.g. `-profile docker` or `-profile singularity`\n" +
- " (2) Using an existing nf-core/configs for your Institution e.g. `-profile crick` or `-profile uppmax`\n" +
- " (3) Using your own local custom config e.g. `-c /path/to/your/custom.config`\n\n" +
- "Please refer to the quick start section and usage docs for the pipeline.\n "
+ log.warn(
+ "[${workflow.manifest.name}] You are attempting to run the pipeline without any custom configuration!\n\n" + "This will be dependent on your local compute environment but can be achieved via one or more of the following:\n" + " (1) Using an existing pipeline profile e.g. `-profile docker` or `-profile singularity`\n" + " (2) Using an existing nf-core/configs for your Institution e.g. `-profile crick` or `-profile uppmax`\n" + " (3) Using your own local custom config e.g. `-c /path/to/your/custom.config`\n\n" + "Please refer to the quick start section and usage docs for the pipeline.\n "
+ )
valid_config = false
}
return valid_config
@@ -49,12 +45,14 @@ def checkConfigProvided() {
//
def checkProfileProvided(nextflow_cli_args) {
if (workflow.profile.endsWith(',')) {
- error "The `-profile` option cannot end with a trailing comma, please remove it and re-run the pipeline!\n" +
- "HINT: A common mistake is to provide multiple values separated by spaces e.g. `-profile test, docker`.\n"
+ error(
+ "The `-profile` option cannot end with a trailing comma, please remove it and re-run the pipeline!\n" + "HINT: A common mistake is to provide multiple values separated by spaces e.g. `-profile test, docker`.\n"
+ )
}
if (nextflow_cli_args[0]) {
- log.warn "nf-core pipelines do not accept positional arguments. The positional argument `${nextflow_cli_args[0]}` has been detected.\n" +
- "HINT: A common mistake is to provide multiple values separated by spaces e.g. `-profile test, docker`.\n"
+ log.warn(
+ "nf-core pipelines do not accept positional arguments. The positional argument `${nextflow_cli_args[0]}` has been detected.\n" + "HINT: A common mistake is to provide multiple values separated by spaces e.g. `-profile test, docker`.\n"
+ )
}
}
@@ -70,13 +68,7 @@ def workflowCitation() {
manifest_doi.each { doi_ref ->
temp_doi_ref += " https://doi.org/${doi_ref.replace('https://doi.org/', '').replace(' ', '')}\n"
}
- return "If you use ${workflow.manifest.name} for your analysis please cite:\n\n" +
- "* The pipeline\n" +
- temp_doi_ref + "\n" +
- "* The nf-core framework\n" +
- " https://doi.org/10.1038/s41587-020-0439-x\n\n" +
- "* Software dependencies\n" +
- " https://github.com/${workflow.manifest.name}/blob/master/CITATIONS.md"
+ return "If you use ${workflow.manifest.name} for your analysis please cite:\n\n" + "* The pipeline\n" + temp_doi_ref + "\n" + "* The nf-core framework\n" + " https://doi.org/10.1038/s41587-020-0439-x\n\n" + "* Software dependencies\n" + " https://github.com/${workflow.manifest.name}/blob/master/CITATIONS.md"
}
//
@@ -102,7 +94,7 @@ def getWorkflowVersion() {
//
def processVersionsFromYAML(yaml_file) {
def yaml = new org.yaml.snakeyaml.Yaml()
- def versions = yaml.load(yaml_file).collectEntries { k, v -> [ k.tokenize(':')[-1], v ] }
+ def versions = yaml.load(yaml_file).collectEntries { k, v -> [k.tokenize(':')[-1], v] }
return yaml.dumpAsMap(versions).trim()
}
@@ -112,8 +104,8 @@ def processVersionsFromYAML(yaml_file) {
def workflowVersionToYAML() {
return """
Workflow:
- $workflow.manifest.name: ${getWorkflowVersion()}
- Nextflow: $workflow.nextflow.version
+ ${workflow.manifest.name}: ${getWorkflowVersion()}
+ Nextflow: ${workflow.nextflow.version}
""".stripIndent().trim()
}
@@ -121,11 +113,7 @@ def workflowVersionToYAML() {
// Get channel of software versions used in pipeline in YAML format
//
def softwareVersionsToYAML(ch_versions) {
- return ch_versions
- .unique()
- .map { version -> processVersionsFromYAML(version) }
- .unique()
- .mix(Channel.of(workflowVersionToYAML()))
+ return ch_versions.unique().map { version -> processVersionsFromYAML(version) }.unique().mix(Channel.of(workflowVersionToYAML()))
}
//
@@ -133,25 +121,31 @@ def softwareVersionsToYAML(ch_versions) {
//
def paramsSummaryMultiqc(summary_params) {
def summary_section = ''
- summary_params.keySet().each { group ->
- def group_params = summary_params.get(group) // This gets the parameters of that particular group
- if (group_params) {
- summary_section += "    <p style=\"font-size:110%\"><b>$group</b></p>\n"
- summary_section += "    <dl class=\"dl-horizontal\">\n"
- group_params.keySet().sort().each { param ->
- summary_section += "        <dt>$param</dt><dd><samp>${group_params.get(param) ?: '<span style=\"color:#999999;\">N/A</a>'}</samp></dd>\n"
+ summary_params
+ .keySet()
+ .each { group ->
+ def group_params = summary_params.get(group)
+ // This gets the parameters of that particular group
+ if (group_params) {
+ summary_section += "    <p style=\"font-size:110%\"><b>${group}</b></p>\n"
+ summary_section += "    <dl class=\"dl-horizontal\">\n"
+ group_params
+ .keySet()
+ .sort()
+ .each { param ->
+ summary_section += "        <dt>${param}</dt><dd><samp>${group_params.get(param) ?: '<span style=\"color:#999999;\">N/A</a>'}</samp></dd>\n"
+ }
+ summary_section += "    </dl>\n"
}
- summary_section += "    </dl>\n"
}
- }
- def yaml_file_text = "id: '${workflow.manifest.name.replace('/','-')}-summary'\n" as String
- yaml_file_text += "description: ' - this information is collected when the pipeline is started.'\n"
- yaml_file_text += "section_name: '${workflow.manifest.name} Workflow Summary'\n"
- yaml_file_text += "section_href: 'https://github.com/${workflow.manifest.name}'\n"
- yaml_file_text += "plot_type: 'html'\n"
- yaml_file_text += "data: |\n"
- yaml_file_text += "${summary_section}"
+ def yaml_file_text = "id: '${workflow.manifest.name.replace('/', '-')}-summary'\n" as String
+ yaml_file_text += "description: ' - this information is collected when the pipeline is started.'\n"
+ yaml_file_text += "section_name: '${workflow.manifest.name} Workflow Summary'\n"
+ yaml_file_text += "section_href: 'https://github.com/${workflow.manifest.name}'\n"
+ yaml_file_text += "plot_type: 'html'\n"
+ yaml_file_text += "data: |\n"
+ yaml_file_text += "${summary_section}"
return yaml_file_text
}
@@ -199,54 +193,54 @@ def logColours(monochrome_logs=true) {
colorcodes['hidden'] = monochrome_logs ? '' : "\033[8m"
// Regular Colors
- colorcodes['black'] = monochrome_logs ? '' : "\033[0;30m"
- colorcodes['red'] = monochrome_logs ? '' : "\033[0;31m"
- colorcodes['green'] = monochrome_logs ? '' : "\033[0;32m"
- colorcodes['yellow'] = monochrome_logs ? '' : "\033[0;33m"
- colorcodes['blue'] = monochrome_logs ? '' : "\033[0;34m"
- colorcodes['purple'] = monochrome_logs ? '' : "\033[0;35m"
- colorcodes['cyan'] = monochrome_logs ? '' : "\033[0;36m"
- colorcodes['white'] = monochrome_logs ? '' : "\033[0;37m"
+ colorcodes['black'] = monochrome_logs ? '' : "\033[0;30m"
+ colorcodes['red'] = monochrome_logs ? '' : "\033[0;31m"
+ colorcodes['green'] = monochrome_logs ? '' : "\033[0;32m"
+ colorcodes['yellow'] = monochrome_logs ? '' : "\033[0;33m"
+ colorcodes['blue'] = monochrome_logs ? '' : "\033[0;34m"
+ colorcodes['purple'] = monochrome_logs ? '' : "\033[0;35m"
+ colorcodes['cyan'] = monochrome_logs ? '' : "\033[0;36m"
+ colorcodes['white'] = monochrome_logs ? '' : "\033[0;37m"
// Bold
- colorcodes['bblack'] = monochrome_logs ? '' : "\033[1;30m"
- colorcodes['bred'] = monochrome_logs ? '' : "\033[1;31m"
- colorcodes['bgreen'] = monochrome_logs ? '' : "\033[1;32m"
- colorcodes['byellow'] = monochrome_logs ? '' : "\033[1;33m"
- colorcodes['bblue'] = monochrome_logs ? '' : "\033[1;34m"
- colorcodes['bpurple'] = monochrome_logs ? '' : "\033[1;35m"
- colorcodes['bcyan'] = monochrome_logs ? '' : "\033[1;36m"
- colorcodes['bwhite'] = monochrome_logs ? '' : "\033[1;37m"
+ colorcodes['bblack'] = monochrome_logs ? '' : "\033[1;30m"
+ colorcodes['bred'] = monochrome_logs ? '' : "\033[1;31m"
+ colorcodes['bgreen'] = monochrome_logs ? '' : "\033[1;32m"
+ colorcodes['byellow'] = monochrome_logs ? '' : "\033[1;33m"
+ colorcodes['bblue'] = monochrome_logs ? '' : "\033[1;34m"
+ colorcodes['bpurple'] = monochrome_logs ? '' : "\033[1;35m"
+ colorcodes['bcyan'] = monochrome_logs ? '' : "\033[1;36m"
+ colorcodes['bwhite'] = monochrome_logs ? '' : "\033[1;37m"
// Underline
- colorcodes['ublack'] = monochrome_logs ? '' : "\033[4;30m"
- colorcodes['ured'] = monochrome_logs ? '' : "\033[4;31m"
- colorcodes['ugreen'] = monochrome_logs ? '' : "\033[4;32m"
- colorcodes['uyellow'] = monochrome_logs ? '' : "\033[4;33m"
- colorcodes['ublue'] = monochrome_logs ? '' : "\033[4;34m"
- colorcodes['upurple'] = monochrome_logs ? '' : "\033[4;35m"
- colorcodes['ucyan'] = monochrome_logs ? '' : "\033[4;36m"
- colorcodes['uwhite'] = monochrome_logs ? '' : "\033[4;37m"
+ colorcodes['ublack'] = monochrome_logs ? '' : "\033[4;30m"
+ colorcodes['ured'] = monochrome_logs ? '' : "\033[4;31m"
+ colorcodes['ugreen'] = monochrome_logs ? '' : "\033[4;32m"
+ colorcodes['uyellow'] = monochrome_logs ? '' : "\033[4;33m"
+ colorcodes['ublue'] = monochrome_logs ? '' : "\033[4;34m"
+ colorcodes['upurple'] = monochrome_logs ? '' : "\033[4;35m"
+ colorcodes['ucyan'] = monochrome_logs ? '' : "\033[4;36m"
+ colorcodes['uwhite'] = monochrome_logs ? '' : "\033[4;37m"
// High Intensity
- colorcodes['iblack'] = monochrome_logs ? '' : "\033[0;90m"
- colorcodes['ired'] = monochrome_logs ? '' : "\033[0;91m"
- colorcodes['igreen'] = monochrome_logs ? '' : "\033[0;92m"
- colorcodes['iyellow'] = monochrome_logs ? '' : "\033[0;93m"
- colorcodes['iblue'] = monochrome_logs ? '' : "\033[0;94m"
- colorcodes['ipurple'] = monochrome_logs ? '' : "\033[0;95m"
- colorcodes['icyan'] = monochrome_logs ? '' : "\033[0;96m"
- colorcodes['iwhite'] = monochrome_logs ? '' : "\033[0;97m"
+ colorcodes['iblack'] = monochrome_logs ? '' : "\033[0;90m"
+ colorcodes['ired'] = monochrome_logs ? '' : "\033[0;91m"
+ colorcodes['igreen'] = monochrome_logs ? '' : "\033[0;92m"
+ colorcodes['iyellow'] = monochrome_logs ? '' : "\033[0;93m"
+ colorcodes['iblue'] = monochrome_logs ? '' : "\033[0;94m"
+ colorcodes['ipurple'] = monochrome_logs ? '' : "\033[0;95m"
+ colorcodes['icyan'] = monochrome_logs ? '' : "\033[0;96m"
+ colorcodes['iwhite'] = monochrome_logs ? '' : "\033[0;97m"
// Bold High Intensity
- colorcodes['biblack'] = monochrome_logs ? '' : "\033[1;90m"
- colorcodes['bired'] = monochrome_logs ? '' : "\033[1;91m"
- colorcodes['bigreen'] = monochrome_logs ? '' : "\033[1;92m"
- colorcodes['biyellow'] = monochrome_logs ? '' : "\033[1;93m"
- colorcodes['biblue'] = monochrome_logs ? '' : "\033[1;94m"
- colorcodes['bipurple'] = monochrome_logs ? '' : "\033[1;95m"
- colorcodes['bicyan'] = monochrome_logs ? '' : "\033[1;96m"
- colorcodes['biwhite'] = monochrome_logs ? '' : "\033[1;97m"
+ colorcodes['biblack'] = monochrome_logs ? '' : "\033[1;90m"
+ colorcodes['bired'] = monochrome_logs ? '' : "\033[1;91m"
+ colorcodes['bigreen'] = monochrome_logs ? '' : "\033[1;92m"
+ colorcodes['biyellow'] = monochrome_logs ? '' : "\033[1;93m"
+ colorcodes['biblue'] = monochrome_logs ? '' : "\033[1;94m"
+ colorcodes['bipurple'] = monochrome_logs ? '' : "\033[1;95m"
+ colorcodes['bicyan'] = monochrome_logs ? '' : "\033[1;96m"
+ colorcodes['biwhite'] = monochrome_logs ? '' : "\033[1;97m"
return colorcodes
}
@@ -261,14 +255,15 @@ def attachMultiqcReport(multiqc_report) {
mqc_report = multiqc_report.getVal()
if (mqc_report.getClass() == ArrayList && mqc_report.size() >= 1) {
if (mqc_report.size() > 1) {
- log.warn "[$workflow.manifest.name] Found multiple reports from process 'MULTIQC', will use only one"
+ log.warn("[${workflow.manifest.name}] Found multiple reports from process 'MULTIQC', will use only one")
}
mqc_report = mqc_report[0]
}
}
- } catch (all) {
+ }
+ catch (Exception all) {
if (multiqc_report) {
- log.warn "[$workflow.manifest.name] Could not attach MultiQC report to summary email"
+ log.warn("[${workflow.manifest.name}] Could not attach MultiQC report to summary email")
}
}
return mqc_report
@@ -280,26 +275,35 @@ def attachMultiqcReport(multiqc_report) {
def completionEmail(summary_params, email, email_on_fail, plaintext_email, outdir, monochrome_logs=true, multiqc_report=null) {
// Set up the e-mail variables
- def subject = "[$workflow.manifest.name] Successful: $workflow.runName"
+ def subject = "[${workflow.manifest.name}] Successful: ${workflow.runName}"
if (!workflow.success) {
- subject = "[$workflow.manifest.name] FAILED: $workflow.runName"
+ subject = "[${workflow.manifest.name}] FAILED: ${workflow.runName}"
}
def summary = [:]
- summary_params.keySet().sort().each { group ->
- summary << summary_params[group]
- }
+ summary_params
+ .keySet()
+ .sort()
+ .each { group ->
+ summary << summary_params[group]
+ }
def misc_fields = [:]
misc_fields['Date Started'] = workflow.start
misc_fields['Date Completed'] = workflow.complete
misc_fields['Pipeline script file path'] = workflow.scriptFile
misc_fields['Pipeline script hash ID'] = workflow.scriptId
- if (workflow.repository) misc_fields['Pipeline repository Git URL'] = workflow.repository
- if (workflow.commitId) misc_fields['Pipeline repository Git Commit'] = workflow.commitId
- if (workflow.revision) misc_fields['Pipeline Git branch/tag'] = workflow.revision
- misc_fields['Nextflow Version'] = workflow.nextflow.version
- misc_fields['Nextflow Build'] = workflow.nextflow.build
+ if (workflow.repository) {
+ misc_fields['Pipeline repository Git URL'] = workflow.repository
+ }
+ if (workflow.commitId) {
+ misc_fields['Pipeline repository Git Commit'] = workflow.commitId
+ }
+ if (workflow.revision) {
+ misc_fields['Pipeline Git branch/tag'] = workflow.revision
+ }
+ misc_fields['Nextflow Version'] = workflow.nextflow.version
+ misc_fields['Nextflow Build'] = workflow.nextflow.build
misc_fields['Nextflow Compile Timestamp'] = workflow.nextflow.timestamp
def email_fields = [:]
@@ -337,7 +341,7 @@ def completionEmail(summary_params, email, email_on_fail, plaintext_email, outdi
// Render the sendmail template
def max_multiqc_email_size = (params.containsKey('max_multiqc_email_size') ? params.max_multiqc_email_size : 0) as nextflow.util.MemoryUnit
- def smail_fields = [ email: email_address, subject: subject, email_txt: email_txt, email_html: email_html, projectDir: "${workflow.projectDir}", mqcFile: mqc_report, mqcMaxSize: max_multiqc_email_size.toBytes() ]
+ def smail_fields = [email: email_address, subject: subject, email_txt: email_txt, email_html: email_html, projectDir: "${workflow.projectDir}", mqcFile: mqc_report, mqcMaxSize: max_multiqc_email_size.toBytes()]
def sf = new File("${workflow.projectDir}/assets/sendmail_template.txt")
def sendmail_template = engine.createTemplate(sf).make(smail_fields)
def sendmail_html = sendmail_template.toString()
@@ -346,30 +350,32 @@ def completionEmail(summary_params, email, email_on_fail, plaintext_email, outdi
def colors = logColours(monochrome_logs) as Map
if (email_address) {
try {
- if (plaintext_email) { throw new org.codehaus.groovy.GroovyException('Send plaintext e-mail, not HTML') }
+ if (plaintext_email) {
+ throw new org.codehaus.groovy.GroovyException('Send plaintext e-mail, not HTML')
+ }
// Try to send HTML e-mail using sendmail
def sendmail_tf = new File(workflow.launchDir.toString(), ".sendmail_tmp.html")
sendmail_tf.withWriter { w -> w << sendmail_html }
- [ 'sendmail', '-t' ].execute() << sendmail_html
- log.info "-${colors.purple}[$workflow.manifest.name]${colors.green} Sent summary e-mail to $email_address (sendmail)-"
- } catch (all) {
+ ['sendmail', '-t'].execute() << sendmail_html
+ log.info("-${colors.purple}[${workflow.manifest.name}]${colors.green} Sent summary e-mail to ${email_address} (sendmail)-")
+ }
+ catch (Exception all) {
// Catch failures and try with plaintext
- def mail_cmd = [ 'mail', '-s', subject, '--content-type=text/html', email_address ]
+ def mail_cmd = ['mail', '-s', subject, '--content-type=text/html', email_address]
mail_cmd.execute() << email_html
- log.info "-${colors.purple}[$workflow.manifest.name]${colors.green} Sent summary e-mail to $email_address (mail)-"
+ log.info("-${colors.purple}[${workflow.manifest.name}]${colors.green} Sent summary e-mail to ${email_address} (mail)-")
}
}
// Write summary e-mail HTML to a file
def output_hf = new File(workflow.launchDir.toString(), ".pipeline_report.html")
output_hf.withWriter { w -> w << email_html }
- nextflow.extension.FilesEx.copyTo(output_hf.toPath(), "${outdir}/pipeline_info/pipeline_report.html");
+ nextflow.extension.FilesEx.copyTo(output_hf.toPath(), "${outdir}/pipeline_info/pipeline_report.html")
output_hf.delete()
// Write summary e-mail TXT to a file
def output_tf = new File(workflow.launchDir.toString(), ".pipeline_report.txt")
output_tf.withWriter { w -> w << email_txt }
- nextflow.extension.FilesEx.copyTo(output_tf.toPath(), "${outdir}/pipeline_info/pipeline_report.txt");
+ nextflow.extension.FilesEx.copyTo(output_tf.toPath(), "${outdir}/pipeline_info/pipeline_report.txt")
output_tf.delete()
}
@@ -380,12 +386,14 @@ def completionSummary(monochrome_logs=true) {
def colors = logColours(monochrome_logs) as Map
if (workflow.success) {
if (workflow.stats.ignoredCount == 0) {
- log.info "-${colors.purple}[$workflow.manifest.name]${colors.green} Pipeline completed successfully${colors.reset}-"
- } else {
- log.info "-${colors.purple}[$workflow.manifest.name]${colors.yellow} Pipeline completed successfully, but with errored process(es) ${colors.reset}-"
+ log.info("-${colors.purple}[${workflow.manifest.name}]${colors.green} Pipeline completed successfully${colors.reset}-")
+ }
+ else {
+ log.info("-${colors.purple}[${workflow.manifest.name}]${colors.yellow} Pipeline completed successfully, but with errored process(es) ${colors.reset}-")
}
- } else {
- log.info "-${colors.purple}[$workflow.manifest.name]${colors.red} Pipeline completed with errors${colors.reset}-"
+ }
+ else {
+ log.info("-${colors.purple}[${workflow.manifest.name}]${colors.red} Pipeline completed with errors${colors.reset}-")
}
}
@@ -394,21 +402,30 @@ def completionSummary(monochrome_logs=true) {
//
def imNotification(summary_params, hook_url) {
def summary = [:]
- summary_params.keySet().sort().each { group ->
- summary << summary_params[group]
- }
+ summary_params
+ .keySet()
+ .sort()
+ .each { group ->
+ summary << summary_params[group]
+ }
def misc_fields = [:]
- misc_fields['start'] = workflow.start
- misc_fields['complete'] = workflow.complete
- misc_fields['scriptfile'] = workflow.scriptFile
- misc_fields['scriptid'] = workflow.scriptId
- if (workflow.repository) misc_fields['repository'] = workflow.repository
- if (workflow.commitId) misc_fields['commitid'] = workflow.commitId
- if (workflow.revision) misc_fields['revision'] = workflow.revision
- misc_fields['nxf_version'] = workflow.nextflow.version
- misc_fields['nxf_build'] = workflow.nextflow.build
- misc_fields['nxf_timestamp'] = workflow.nextflow.timestamp
+ misc_fields['start'] = workflow.start
+ misc_fields['complete'] = workflow.complete
+ misc_fields['scriptfile'] = workflow.scriptFile
+ misc_fields['scriptid'] = workflow.scriptId
+ if (workflow.repository) {
+ misc_fields['repository'] = workflow.repository
+ }
+ if (workflow.commitId) {
+ misc_fields['commitid'] = workflow.commitId
+ }
+ if (workflow.revision) {
+ misc_fields['revision'] = workflow.revision
+ }
+ misc_fields['nxf_version'] = workflow.nextflow.version
+ misc_fields['nxf_build'] = workflow.nextflow.build
+ misc_fields['nxf_timestamp'] = workflow.nextflow.timestamp
def msg_fields = [:]
msg_fields['version'] = getWorkflowVersion()
@@ -433,13 +450,13 @@ def imNotification(summary_params, hook_url) {
def json_message = json_template.toString()
// POST
- def post = new URL(hook_url).openConnection();
+ def post = new URL(hook_url).openConnection()
post.setRequestMethod("POST")
post.setDoOutput(true)
post.setRequestProperty("Content-Type", "application/json")
- post.getOutputStream().write(json_message.getBytes("UTF-8"));
- def postRC = post.getResponseCode();
- if (! postRC.equals(200)) {
- log.warn(post.getErrorStream().getText());
+ post.getOutputStream().write(json_message.getBytes("UTF-8"))
+ def postRC = post.getResponseCode()
+ if (!postRC.equals(200)) {
+ log.warn(post.getErrorStream().getText())
}
}
diff --git a/subworkflows/nf-core/utils_nfvalidation_plugin/main.nf b/subworkflows/nf-core/utils_nfvalidation_plugin/main.nf
index 2585b65d1b0..e50a5e35ab5 100644
--- a/subworkflows/nf-core/utils_nfvalidation_plugin/main.nf
+++ b/subworkflows/nf-core/utils_nfvalidation_plugin/main.nf
@@ -3,9 +3,9 @@
//
/*
-========================================================================================
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
IMPORT NF-VALIDATION PLUGIN
-========================================================================================
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
*/
include { paramsHelp } from 'plugin/nf-validation'
@@ -13,13 +13,12 @@ include { paramsSummaryLog } from 'plugin/nf-validation'
include { validateParameters } from 'plugin/nf-validation'
/*
-========================================================================================
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
SUBWORKFLOW DEFINITION
-========================================================================================
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
*/
workflow UTILS_NFVALIDATION_PLUGIN {
-
take:
print_help // boolean: print help
workflow_command // string: default commmand used to run pipeline
@@ -30,7 +29,7 @@ workflow UTILS_NFVALIDATION_PLUGIN {
main:
- log.debug "Using schema file: ${schema_filename}"
+ log.debug("Using schema file: ${schema_filename}")
// Default values for strings
pre_help_text = pre_help_text ?: ''
@@ -41,19 +40,19 @@ workflow UTILS_NFVALIDATION_PLUGIN {
// Print help message if needed
//
if (print_help) {
- log.info pre_help_text + paramsHelp(workflow_command, parameters_schema: schema_filename) + post_help_text
+ log.info(pre_help_text + paramsHelp(workflow_command, parameters_schema: schema_filename) + post_help_text)
System.exit(0)
}
//
// Print parameter summary to stdout
//
- log.info pre_help_text + paramsSummaryLog(workflow, parameters_schema: schema_filename) + post_help_text
+ log.info(pre_help_text + paramsSummaryLog(workflow, parameters_schema: schema_filename) + post_help_text)
//
// Validate parameters relative to the parameter JSON schema
//
- if (validate_params){
+ if (validate_params) {
validateParameters(parameters_schema: schema_filename)
}