Commit
adjusted channels for failed spades
erinyoung committed May 17, 2024
1 parent 5299ef6 commit 583bf57
Showing 4 changed files with 48 additions and 38 deletions.
10 changes: 5 additions & 5 deletions subworkflows/blobtools.nf
@@ -5,14 +5,14 @@ include { blobtools_view } from '../modules/local/blobtools' addParams(param

 workflow blobtools {
   take:
-    ch_clean_reads
-    ch_contigs
-    ch_bams
+    ch_contig_bams
     ch_blast_db

   main:
-    blastn(ch_clean_reads.join(ch_contigs, by: 0).map{it -> tuple(it[0],it[2])}.combine(ch_blast_db))
-    blobtools_create(ch_contigs.join(blastn.out.blastn, by: 0).join(ch_bams, by: 0))
+    ch_contigs = ch_contig_bams.filter{it[1]}.map{it -> tuple(it[0], it[1])}
+
+    blastn(ch_contigs.combine(ch_blast_db))
+    blobtools_create(ch_contig_bams.join(blastn.out.blastn, by: 0, failOnMismatch: false, remainder: false))
     blobtools_view(blobtools_create.out.json)
     blobtools_plot(blobtools_create.out.json)

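Note on the change above: when SPAdes fails for a sample, its tuple now arrives with a null contigs element, so filter{it[1]} screens those samples out before blastn, and the join with remainder: false keeps only samples that have both the contig/bam inputs and a blastn result. The following is a standalone sketch of those two operators, not pipeline code; the sample names and file strings are invented for the example.

// standalone illustration of the operators used in subworkflows/blobtools.nf;
// sample names and file strings are made up for the example
workflow {
    // tuple(sample, contigs, bam); 'sample2' stands in for a failed spades run (null contigs)
    ch_contig_bams = Channel.of(
        ['sample1', 'sample1_contigs.fasta', 'sample1.bam'],
        ['sample2', null,                    'sample2.bam'] )

    // keep only samples that actually produced contigs
    ch_contigs = ch_contig_bams.filter{ it[1] }.map{ it -> tuple(it[0], it[1]) }
    ch_contigs.view{ "would run blastn on: $it" }

    // tuple(sample, blastn result); only sample1 has one
    ch_blastn = Channel.of( ['sample1', 'sample1.blastn.tsv'] )

    // remainder: false drops samples without a blastn result instead of emitting nulls
    ch_contig_bams
        .join(ch_blastn, by: 0, failOnMismatch: false, remainder: false)
        .view{ "would run blobtools_create on: $it" }
}

Running this sketch should print only the sample1 tuples: sample2 is removed by the filter and again by the join.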
10 changes: 5 additions & 5 deletions subworkflows/de_novo_alignment.nf
@@ -10,8 +10,7 @@ workflow de_novo_alignment {
     bbduk(reads)
     fastp(bbduk.out.fastq)

-    fastp.out.fastq
-      .join(fastp.out.fastp_results)
+    fastp.out.fastp_results
       .filter ({ it[2] as int >= params.minimum_reads })
       .map ( it -> tuple (it[0], it[1]))
       .set{ read_check }
@@ -20,10 +19,11 @@ workflow de_novo_alignment {

   emit:
     // for downstream analyses
-    clean_reads = fastp.out.fastq
-    contigs = spades.out.contigs
+    reads_contigs = spades.out.reads_contigs
+    clean_reads = fastp.out.fastq
+    contigs = spades.out.contigs.filter{it[1] != null}

     // for multiqc
     for_multiqc = fastp.out.fastp_files.mix(bbduk.out.stats)
-    versions = bbduk.out.versions.mix(fastp.out.versions).mix(spades.out.versions)
+    versions = bbduk.out.versions.first().mix(fastp.out.versions.first()).mix(spades.out.versions.first())
 }
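Two patterns above are worth calling out: spades.out.contigs.filter{it[1] != null} keeps a sample in the emitted contigs channel only when SPAdes actually produced an assembly, and .first() on each versions channel collapses per-task version emissions to a single item before they are mixed. A minimal, self-contained sketch with placeholder sample names and version strings, not the pipeline's real outputs:

// standalone sketch of filtering out null assemblies and deduplicating versions
workflow {
    // tuple(sample, contigs); a failed spades run is represented by a null placeholder
    ch_contigs = Channel.of(
        ['sample1', 'sample1_contigs.fasta'],
        ['sample2', null] )

    ch_contigs
        .filter{ it[1] != null }      // sample2 is dropped here
        .view{ "usable assembly: $it" }

    // versions are emitted once per task; .first() keeps a single copy per tool
    ch_bbduk_versions = Channel.of('bbduk 39.01', 'bbduk 39.01')
    ch_fastp_versions = Channel.of('fastp 0.23.4')

    ch_bbduk_versions.first()
        .mix(ch_fastp_versions.first())
        .view{ "versions entry: $it" }
}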
52 changes: 34 additions & 18 deletions subworkflows/information.nf
@@ -20,25 +20,41 @@ workflow information {
     jsoncon_script

   main:

-    // species specific
-    // TODO : add blobtools
-    int grouptuplesize = 2
-    if ( params.kraken2_db && ( params.sample_sheet || params.reads )) { grouptuplesize = grouptuplesize +1 }
-
-    //flag(ch_flag.groupTuple(size : grouptuplesize, remainder: true ))
-    flag(ch_flag.groupTuple())
-
-    amrfinderplus(ch_contigs.join(flag.out.organism, by:0))
-    drprg(ch_contigs.join(flag.out.myco_flag, by:0))
-    emmtyper(ch_contigs.join(flag.out.strepa_flag, by:0).combine(summfle_script))
-    kaptive(ch_contigs.join(flag.out.vibrio_flag, by:0))
-    kleborate(ch_contigs.join(flag.out.klebsiella_flag, by:0).combine(summfle_script))
-    elgato(ch_contigs.join(flag.out.legionella_flag, by:0))
-    mykrobe(ch_contigs.join(flag.out.myco_flag, by:0))
-    pbptyper(ch_contigs.join(flag.out.streppneu_flag, by:0))
-    seqsero2(ch_contigs.join(flag.out.salmonella_flag, by:0))
-    serotypefinder(ch_contigs.join(flag.out.ecoli_flag, by:0).combine(summfle_script))
-    shigatyper(ch_contigs.join(flag.out.ecoli_flag, by:0).combine(summfle_script))
+    // branch + join = faster than groupTuple
+    ch_flag
+      .branch {
+        blobtools: it[1] =~ /blobtools.txt/
+        kraken2: it[1] =~ /kraken2.csv/
+        mash: it[1] =~ /mash.csv/
+        fastani: it[1] =~ /fastani.csv/
+      }
+      .set { ch_flag_branch }
+
+    ch_contigs
+      .filter{it[1] != null}
+      .join(ch_flag_branch.blobtools, by:0, failOnMismatch: false, remainder: true)
+      .join(ch_flag_branch.kraken2, by:0, failOnMismatch: false, remainder: true)
+      .join(ch_flag_branch.mash, by:0, failOnMismatch: false, remainder: true)
+      .join(ch_flag_branch.fastani, by:0, failOnMismatch: false, remainder: true)
+      .filter{it[1] != null}
+      .map{ it -> tuple(it[0],[it[1], it[2], it[3], it[4], it[5]])}
+      .set {ch_for_flag}
+
+    flag(ch_for_flag)
+
+    amrfinderplus(flag.out.organism)
+    drprg(flag.out.myco_flag)
+    emmtyper(flag.out.strepa_flag.combine(summfle_script))
+    kaptive(flag.out.vibrio_flag)
+    kleborate(flag.out.klebsiella_flag.combine(summfle_script))
+    elgato(flag.out.legionella_flag)
+    mykrobe(flag.out.myco_flag)
+    pbptyper(flag.out.streppneu_flag)
+    seqsero2(flag.out.salmonella_flag)
+    serotypefinder(flag.out.ecoli_flag.combine(summfle_script))
+    shigatyper(flag.out.ecoli_flag.combine(summfle_script))

     json_convert(drprg.out.json.combine(jsoncon_script))

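The rewrite above replaces groupTuple with branch plus repeated join calls: branch splits the mixed ch_flag channel into one channel per result type, and each join with remainder: true re-attaches that result to the sample's tuple, padding the missing position with null instead of dropping the sample, so flag no longer has to wait for a fixed group size. Below is a standalone sketch of the pattern with invented sample names and file names (only three result types, to keep it short):

// standalone illustration of the branch + join pattern used in subworkflows/information.nf
workflow {
    // tuple(sample, file) coming from several upstream processes
    ch_flag = Channel.of(
        ['sample1', 'sample1_blobtools.txt'],
        ['sample1', 'sample1_kraken2.csv'],
        ['sample1', 'sample1_mash.csv'],
        ['sample2', 'sample2_mash.csv'] )

    // split the mixed channel into one channel per file type
    ch_flag
        .branch {
            blobtools: it[1] =~ /blobtools.txt/
            kraken2: it[1] =~ /kraken2.csv/
            mash: it[1] =~ /mash.csv/
        }
        .set { ch_flag_branch }

    ch_contigs = Channel.of(
        ['sample1', 'sample1_contigs.fasta'],
        ['sample2', 'sample2_contigs.fasta'] )

    // remainder: true keeps samples that are missing one of the inputs,
    // padding the absent position with null instead of dropping the sample
    ch_contigs
        .join(ch_flag_branch.blobtools, by: 0, failOnMismatch: false, remainder: true)
        .join(ch_flag_branch.kraken2, by: 0, failOnMismatch: false, remainder: true)
        .join(ch_flag_branch.mash, by: 0, failOnMismatch: false, remainder: true)
        .view{ "per-sample tuple for flag: $it" }
}

In this sketch, sample1 emerges with all three results attached while sample2 keeps nulls where blobtools and kraken2 output is missing, which mirrors what the subworkflow feeds into flag.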
14 changes: 4 additions & 10 deletions subworkflows/quality_assessment.nf
@@ -8,6 +8,7 @@ workflow quality_assessment {
   take:
     ch_reads
     ch_contigs
+    ch_reads_contigs
     summfle_script

   main:
@@ -20,13 +21,7 @@ workflow quality_assessment {
     if ( params.sample_sheet || params.reads || params.sra_accessions ) {
       fastqc(ch_reads)

-      ch_reads
-        .join(ch_contigs, by: 0, remainder: true)
-        .filter {it[1]}
-        .filter {it[2]}
-        .set { for_circulocov }
-
-      circulocov(for_circulocov)
+      circulocov(ch_reads_contigs.filter{it[1]}.filter{it[2]})

       for_multiqc = for_multiqc.mix(fastqc.out.for_multiqc)

@@ -46,12 +41,11 @@ workflow quality_assessment {

       ch_summary = ch_summary.mix(circulocov_summary).mix(fastqc_summary)
       ch_versions = ch_versions.mix(fastqc.out.versions.first()).mix(circulocov.out.versions.first())
-      ch_bams = ch_bams.mix(circulocov.out.bam)
-
+      ch_bams = ch_bams.mix(circulocov.out.contig_bam)
     }

     // contigs
-    quast(ch_contigs.join(ch_reads, by: 0, remainder: true ))
+    quast(ch_reads_contigs.filter{it[2]})
     mlst(ch_contigs.combine(summfle_script))
     plasmidfinder(ch_contigs.combine(summfle_script))

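With this change, quality_assessment receives the pre-joined reads-plus-contigs channel (ch_reads_contigs) from de_novo_alignment instead of re-joining ch_reads and ch_contigs itself: circulocov runs only when both elements are present, and quast only needs the contigs element. A standalone sketch with invented file names, assuming the tuple order is (sample, reads, contigs); adjust the indices if the real channel differs:

// standalone sketch of filtering the pre-joined tuple(sample, reads, contigs) channel
workflow {
    ch_reads_contigs = Channel.of(
        ['sample1', 'sample1.fastq.gz', 'sample1_contigs.fasta'],
        ['sample2', 'sample2.fastq.gz', null],                     // failed assembly
        ['sample3', null, 'sample3_contigs.fasta'] )               // assembly-only input

    // circulocov needs both reads and contigs
    ch_reads_contigs
        .filter{ it[1] }
        .filter{ it[2] }
        .view{ "circulocov input: $it" }

    // quast only needs the contigs element to be present
    ch_reads_contigs
        .filter{ it[2] }
        .view{ "quast input: $it" }
}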
