Skip to content

Commit

Permalink
update test profile
Browse files Browse the repository at this point in the history
  • Loading branch information
jfy133 committed Oct 14, 2024
1 parent f71c6cd commit a0f7385
Show file tree
Hide file tree
Showing 5 changed files with 76 additions and 30 deletions.
31 changes: 30 additions & 1 deletion conf/modules.config
Original file line number Diff line number Diff line change
Expand Up @@ -47,8 +47,37 @@ process {
]
}

withName: BRACKEN_BUILD {
ext.prefix = { "${meta.id}-bracken" }
}

withName: CENTRIFUGE_BUILD {
ext.prefix = { "${meta.id}-centrifuge" }
}

withName: DIAMOND_MAKEDB {
ext.prefix = { "${meta.id}-diamond" }
}

withName: KAIJU_MKFMI {
ext.prefix = { "${meta.id}-kaiju" }
}

withName: KRAKEN2_ADD {
ext.prefix = { "${meta.id}-kraken2" }
}

withName: KRAKEN2_BUILD {
ext.prefix = { "${meta.id}-kraken2" }
}

withName: KRAKENUNIQ_BUILD {
ext.prefix = { "${meta.id}-krakenuniq" }
}

withName: MALT_BUILD {
ext.args = { "--sequenceType ${params.malt_sequencetype}" }
ext.prefix = { "${meta.id}-malt" }
ext.args = { "--sequenceType ${params.malt_sequencetype}" }
}

withName: TAR {
Expand Down
6 changes: 4 additions & 2 deletions conf/test.config
Original file line number Diff line number Diff line change
Expand Up @@ -19,8 +19,8 @@ process {
}

params {
config_profile_name = 'Test profile'
config_profile_description = 'Minimal test dataset to check pipeline function'
config_profile_name = 'Test profile'
config_profile_description = 'Minimal test dataset to check pipeline function'

// Input data
// TODO nf-core: Specify the paths to your test data on nf-core/test-datasets
Expand All @@ -44,8 +44,10 @@ params {
namesdmp = params.pipelines_testdata_base_path + 'createtaxdb/data/taxonomy/names.dmp'
malt_mapdb = 's3://ngi-igenomes/test-data/createtaxdb/taxonomy/megan-nucl-Feb2022.db.zip'

generate_tar_archive = true
generate_downstream_samplesheets = true
generate_pipeline_samplesheets = "taxprofiler"
generate_samplesheet_dbtype = 'raw'
}

process {
Expand Down
4 changes: 4 additions & 0 deletions nf-test.config
Original file line number Diff line number Diff line change
Expand Up @@ -13,4 +13,8 @@ config {

// run all test with the defined docker profile from the main nextflow.config
profile ""

plugins {
load "nft-csv@0.1.0"
}
}
39 changes: 24 additions & 15 deletions tests/test.nf.test
Original file line number Diff line number Diff line change
Expand Up @@ -15,24 +15,33 @@ nextflow_pipeline {
}

then {
def samplesheetlines = path("$outputDir/downstream_samplesheets/taxprofiler.csv").readLines()
assertAll(
{ assert workflow.success },
{ assert snapshot(
// Publish files
file("$outputDir/bracken/database/database100mers.kmer_distrib").name,
file("$outputDir/bracken/database/database100mers.kraken").name,
file("$outputDir/bracken/database/database.kraken").name,
path("$outputDir/centrifuge/"),
path("$outputDir/diamond/database.dmnd"),
path("$outputDir/kaiju/database.fmi"),
path("$outputDir/kraken2/database/hash.k2d"),
file("$outputDir/kraken2/database/opts.k2d").name,
path("$outputDir/kraken2/database/taxo.k2d"),
path("$outputDir/krakenuniq/database/database-build.log").readLines().last().contains('database.idx'),
file("$outputDir/krakenuniq/database/database.idx").name,
file("$outputDir/krakenuniq/database/database.kdb"),
file("$outputDir/krakenuniq/database/taxDB"),
file("$outputDir/bracken/database-bracken.tar.gz").name,
file("$outputDir/bracken/database-kraken2/database100mers.kmer_distrib").name,
file("$outputDir/bracken/database-kraken2/database100mers.kraken").name,
file("$outputDir/bracken/database-kraken2/database.kraken").name,
path("$outputDir/centrifuge/database-centrifuge/database-centrifuge.1.cf"),
path("$outputDir/centrifuge/database-centrifuge/database-centrifuge.2.cf"),
path("$outputDir/centrifuge/database-centrifuge/database-centrifuge.3.cf"),
path("$outputDir/centrifuge/database-centrifuge/database-centrifuge.4.cf"),
file("$outputDir/centrifuge/database-centrifuge.tar.gz").name,
path("$outputDir/diamond/database-diamond.dmnd"),
file("$outputDir/diamond/database-diamond.tar.gz").name,
path("$outputDir/downstream_samplesheets/taxprofiler.csv").csv.rowCount == 6,
path("$outputDir/kaiju/database-kaiju.fmi"),
file("$outputDir/kaiju/database-kaiju.tar.gz").name,
path("$outputDir/kraken2/database-kraken2/hash.k2d"),
file("$outputDir/kraken2/database-kraken2/opts.k2d").name,
path("$outputDir/kraken2/database-kraken2/taxo.k2d"),
// file("$outputDir/kraken2/database-kraken2.tar.gz").name, // COVERED BY BRACKEN - see docs
path("$outputDir/krakenuniq/database-krakenuniq/database-build.log").readLines().last().contains('database.idx'),
file("$outputDir/krakenuniq/database-krakenuniq/database.idx").name,
file("$outputDir/krakenuniq/database-krakenuniq/database.kdb"),
file("$outputDir/krakenuniq/database-krakenuniq/taxDB"),
file("$outputDir/krakenuniq/database-krakenuniq.tar.gz").name,
path("$outputDir/malt/malt-build.log").readLines().last().contains('Peak memory'),
path("$outputDir/malt/malt_index/index0.idx"),
path("$outputDir/malt/malt_index/ref.db"),
Expand All @@ -41,7 +50,7 @@ nextflow_pipeline {
path("$outputDir/malt/malt_index/taxonomy.idx"),
path("$outputDir/malt/malt_index/taxonomy.map"),
path("$outputDir/malt/malt_index/taxonomy.tre"),
samplesheetlines.size()
file("$outputDir/malt/database-malt.tar.gz").name
).match()
},
{ assert new File("$outputDir/pipeline_info/nf_core_pipeline_software_mqc_versions.yml").exists() },
Expand Down
26 changes: 14 additions & 12 deletions tests/test.nf.test.snap
Original file line number Diff line number Diff line change
@@ -1,26 +1,28 @@
{
"test_profile": {
"content": [
"database-bracken.tar.gz",
"database100mers.kmer_distrib",
"database100mers.kraken",
"database.kraken",
[
[
"database.1.cf:md5,1481615ab90b5573f6d9e57f97890178",
"database.2.cf:md5,d50fa66e215e80284314ff6521dcd4a4",
"database.3.cf:md5,beafa92166ba461f9bda1dac0b640f45",
"database.4.cf:md5,2902ec5df0db6da41a91b40d2f46b30d"
]
],
"database.dmnd:md5,b2ea49ef5490c526e2c56cae19bcb462",
"database.fmi:md5,54fd89f5e4eab61af30175e8aa389598",
"database-centrifuge.1.cf:md5,1481615ab90b5573f6d9e57f97890178",
"database-centrifuge.2.cf:md5,d50fa66e215e80284314ff6521dcd4a4",
"database-centrifuge.3.cf:md5,beafa92166ba461f9bda1dac0b640f45",
"database-centrifuge.4.cf:md5,2902ec5df0db6da41a91b40d2f46b30d",
"database-centrifuge.tar.gz",
"database-diamond.dmnd:md5,b2ea49ef5490c526e2c56cae19bcb462",
"database-diamond.tar.gz",
true,
"database-kaiju.fmi:md5,54fd89f5e4eab61af30175e8aa389598",
"database-kaiju.tar.gz",
"hash.k2d:md5,941118164b4bcc010593f7a7c7b30029",
"opts.k2d",
"taxo.k2d:md5,cd8170a8c5a1b763a9ac1ffa2107cc88",
true,
"database.idx",
"database.kdb:md5,a24fce43bedbc6c420f6e36d10c112a3",
"taxDB:md5,1aed1afa948daffc236deba1c5d635db",
"database-krakenuniq.tar.gz",
true,
"index0.idx:md5,876139dc930e68992cd2625e08bba48a",
"ref.db:md5,377073f58a9f9b85acca59fcf21744a9",
Expand All @@ -29,12 +31,12 @@
"taxonomy.idx:md5,1e2e8fdc703a6d2707e7cbefd2b6d93f",
"taxonomy.map:md5,5bb3f2192e925bca2e61e4b54f1671e0",
"taxonomy.tre:md5,f76fb2d5aa9b0d637234d48175841e0e",
7
"database-malt.tar.gz"
],
"meta": {
"nf-test": "0.9.0",
"nextflow": "24.04.4"
},
"timestamp": "2024-10-12T15:58:59.738949666"
"timestamp": "2024-10-14T12:31:26.458571925"
}
}

0 comments on commit a0f7385

Please sign in to comment.