Skip to content

Commit

Permalink
updated tests
Browse files Browse the repository at this point in the history
  • Loading branch information
mattheww95 committed Nov 7, 2024
1 parent 733db44 commit 71260a9
Show file tree
Hide file tree
Showing 3 changed files with 54 additions and 10 deletions.
13 changes: 7 additions & 6 deletions subworkflows/local/input_check.nf
Original file line number Diff line number Diff line change
Expand Up @@ -23,32 +23,33 @@ workflow INPUT_CHECK {
skip_duplicate_check: true).map {
// Create grouping value
meta ->

// Remove any disallowed characters in the meta.id field
meta[0].id = meta[0].id.replaceAll(/[^A-Za-z0-9_.\-]/, '_')
meta[0].id = meta[0].id.replaceAll(/[^A-Za-z0-9_\-]/, '_')

if (meta[0].external_id != null) {
// remove any characters in the external_id that should not be used
meta[0].id = meta[0].external_id.replaceAll(/[^A-Za-z0-9_.\-]/, '_')
meta[0].id = meta[0].external_id.replaceAll(/[^A-Za-z0-9_\-]/, '_')
}else{
meta[0].external_id = meta[0].id
}


if(processedIDs.contains(meta.id) && params.skip_read_merging){
if(processedIDs.contains(meta[0].id) && params.skip_read_merging){
// If the id is already contained and read merging is not to be
// performed, then we make the id's unique to proceed with processing
// read merging is set to false by default, so that when it is run
// in IRIDANext reads are only merged in irida next
while (processedIDs.contains(meta.id)) {
meta.id = "${meta.id}_${meta.external_id}"
while (processedIDs.contains(meta[0].id)) {
meta[0].id = "${meta[0].id}_${meta[0].external_id}"
}
}else{
// Set the external id to the input ID.
meta[0].external_id = meta[0].id
}


processedIDs << meta.id
processedIDs << meta[0].id
tuple(meta[0].id, meta[0])
}

Expand Down
5 changes: 5 additions & 0 deletions tests/data/samplesheets/samplesheet-make-names-unique.csv
Original file line number Diff line number Diff line change
@@ -0,0 +1,5 @@
sample,fastq_1,fastq_2,long_reads,assembly
ha,https://github.com/phac-nml/mikrokondo/raw/dev/tests/data/reads/campy-staph1.fq.gz,https://github.com/phac-nml/mikrokondo/raw/dev/tests/data/reads/campy-staph2.fq.gz,,
ha,https://github.com/phac-nml/mikrokondo/raw/dev/tests/data/reads/1_R1.fq.gz,https://github.com/phac-nml/mikrokondo/raw/dev/tests/data/reads/1_R2.fq.gz,,
ha,https://github.com/phac-nml/mikrokondo/raw/dev/tests/data/reads/metagenomic_reads1.fq.gz,https://github.com/phac-nml/mikrokondo/raw/dev/tests/data/reads/metagenomic_reads2.fq.gz,,
ha,https://github.com/phac-nml/mikrokondo/raw/dev/tests/data/reads.fastq,https://github.com/phac-nml/mikrokondo/raw/dev/tests/data/reads/1_R2.fq,,
46 changes: 42 additions & 4 deletions tests/subworkflows/local/input_check/input_check.nf.test
Original file line number Diff line number Diff line change
Expand Up @@ -99,14 +99,52 @@ nextflow_workflow {

then {

// TODO beef up assertions
assert workflow.success
println workflow.out.reads
//assert workflow.out.reads[0][0].id == "better_faster_stronger_name"
assert workflow.out.reads[0][0].id == 'an_even_stronger_name_'
assert workflow.out.reads[0][0].merge == false
assert workflow.out.reads[1][0].id == 'better_faster_stronger_name'
assert workflow.out.reads[1][0].merge == false
assert workflow.out.reads[2][0].id == 'this_is_getting_ridiculous'
assert workflow.out.reads[2][0].merge == true
assert workflow.out.reads[2][1][0].endsWith("this_is_getting_ridiculous_R1.merged.fastq.gz")
assert workflow.out.reads[2][1][1].endsWith("this_is_getting_ridiculous_R2.merged.fastq.gz")
}

}

test("Test make ids unique") {
tag "pass_make_ids_unique"

when {

params {
input = "$baseDir/tests/data/samplesheets/samplesheet-make-names-unique.csv"
outdir = "results"
min_reads = 1
mash_sketch = "https://github.com/phac-nml/mikrokondo/raw/dev/tests/data/databases/campy-staph-ecoli.msh"
mh_min_kmer = 1
skip_read_merging = true
dehosting_idx = "https://github.com/phac-nml/mikrokondo/raw/dev/tests/data/databases/campy.mmi"

kraken2_db = "$baseDir/tests/data/kraken2/test"


max_memory = "2.GB"
max_cpus = 1
}

}

then {

assert workflow.success
assert workflow.out.reads[0][0].id = ha
assert workflow.out.reads[1][0].id = ha_ha
assert workflow.out.reads[2][0].id = ha_ha_ha
assert workflow.out.reads[3][0].id = ha_ha_ha_ha
}

}

// TODO add test case for making read ids unique

}

0 comments on commit 71260a9

Please sign in to comment.