forked from broadinstitute/gatk
-
Notifications
You must be signed in to change notification settings - Fork 0
/
testsettings.gradle
85 lines (77 loc) · 3.36 KB
/
testsettings.gradle
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
tasks.withType(Test) {
    outputs.upToDateWhen { false } //tests will never be "up to date" so you can always rerun them

    // Verbosity is driven by the environment so CI can pick terse output without
    // editing this file. NOTE: an unset variable interpolates as the string "null",
    // which simply falls through to the verbose branch below.
    String TEST_VERBOSITY = "$System.env.TEST_VERBOSITY"

    /**
     * Valid options for TEST_TYPE are:
     * cloud, integration, unit      : run one of the three disjoint partitions of the test suite
     * variantcalling, spark, conda  : run only the correspondingly tagged subset of tests
     * all                           : run all the tests
     * anything else                 : run the non-cloud tests
     */
    String TEST_TYPE = "$System.env.TEST_TYPE"

    // Restart the forked test JVM every 100 tests to limit memory and static-state buildup.
    forkEvery 100

    useTestNG {
        if (TEST_TYPE == "cloud") {
            // run only the cloud tests
            includeGroups "cloud", "bucket"
        } else if (TEST_TYPE == "integration") {
            include "**/*IntegrationTest.class"
            excludeGroups "cloud", "bucket", "python", "R", "funcotatorValidation", "variantcalling"
        } else if (TEST_TYPE == "unit") {
            exclude "**/*IntegrationTest.class"
            excludeGroups "cloud", "bucket", "python", "R", "funcotatorValidation", "variantcalling"
        } else if (TEST_TYPE == "variantcalling") {
            includeGroups "variantcalling"
            excludeGroups "cloud", "bucket", "python", "R", "funcotatorValidation"
        } else if (TEST_TYPE == "spark") {
            includeGroups "spark"
            excludeGroups "cloud", "bucket", "python", "R", "funcotatorValidation", "variantcalling"
        } else if (TEST_TYPE == "conda") {
            includeGroups "python", "R"
        } else if (TEST_TYPE == "all") {
            //include everything
        } else {
            // default: everything that doesn't need cloud credentials or extra runtimes
            excludeGroups "cloud", "bucket", "python", "R", "funcotatorValidation"
        }
    }

    // htsjdk I/O tuning for test runs: synchronous reads, async SAM writes, light compression
    systemProperty "samjdk.use_async_io_read_samtools", "false"
    systemProperty "samjdk.use_async_io_write_samtools", "true"
    systemProperty "samjdk.use_async_io_write_tribble", "false"
    systemProperty "samjdk.compression_level", "2"
    // forward the spark debug flag from the gradle invocation into the test JVM
    systemProperty "gatk.spark.debug", System.getProperty("gatk.spark.debug")

    // keep Spark bound to loopback so tests don't depend on host networking
    environment "SPARK_LOCAL_IP","127.0.0.1"
    environment "SUPPRESS_GCLOUD_CREDS_WARNING","true"

    // set heap size for the test JVM(s)
    minHeapSize = "500M"
    maxHeapSize = "3500M"

    if (TEST_VERBOSITY == "minimal") {
        int count = 0
        // listen to events in the test execution lifecycle; only emit a
        // progress line every 10000 tests so CI logs stay small.
        beforeTest { descriptor ->
            count++
            if (count % 10000 == 0) {
                // Log the current count WITHOUT mutating it again: the original
                // used count++ here, which double-incremented the counter each
                // time a milestone was logged and skewed every later milestone.
                logger.lifecycle("Finished " + Integer.toString(count) + " tests")
            }
        }
    } else {
        // show standard out and standard error of the test JVM(s) on the console
        testLogging.showStandardStreams = true
        beforeTest { descriptor ->
            logger.lifecycle("Running Test: " + descriptor)
        }
        // listen to standard out and standard error of the test JVM(s)
        onOutput { descriptor, event ->
            logger.lifecycle("Test: " + descriptor + " produced standard out/err: " + event.message)
        }
    }

    // Single testLogging block (the original nested a redundant duplicate
    // testLogging { testLogging { ... } }): report skipped/failed events with
    // full exception traces.
    testLogging {
        events "skipped", "failed"
        exceptionFormat = "full"
    }

    // Summarize the run once the outermost suite finishes. This hook belongs on
    // the Test task itself, not inside testLogging (where the original placed it
    // and where it only worked via closure-delegation fallback).
    afterSuite { desc, result ->
        if (!desc.parent) { // will match the outermost suite
            println "Results: ${result.resultType} (${result.testCount} tests, ${result.successfulTestCount} successes, ${result.failedTestCount} failures, ${result.skippedTestCount} skipped)"
        }
    }
}