This repository has been archived by the owner on Apr 19, 2023. It is now read-only.

Commit

Move to new config structure: from params.sc to params.tools. Decided not to go for backward compatibility, hence this is a breaking change, i.e. older configs will very likely not work.
dweemx committed Jul 9, 2021
1 parent b64291d commit e8b377d
Showing 224 changed files with 497 additions and 567 deletions.
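
For illustration, a minimal sketch of what the rename means for a pipeline config (a hedged example: the harmony container value is taken from the docs snippet further down in this commit, the surrounding layout is assumed):

// Old structure, no longer accepted after this commit
params {
    sc {
        harmony {
            container = 'vibsinglecellnf/harmony:1.0'
        }
    }
}

// New structure: tool parameters now live under params.tools
params {
    tools {
        harmony {
            container = 'vibsinglecellnf/harmony:1.0'
        }
    }
}
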
62 changes: 0 additions & 62 deletions conf/generic.config
@@ -24,66 +24,4 @@ params {
}
return [global: paramsGlobal, local: pL]
}
getToolParams = { toolKey ->
def _get = { p ->
if(p.containsKey("tools")) { // weirdly p?.tools gives a WARN (same for the other if statements)
return p.tools[toolKey]
}
if(p.containsKey("sc")) {
return p.sc[toolKey]
}
if(p.containsKey("toolKey")) {
return p[toolKey]
}
throw new Exception("VSN ERROR: Cannot get tool params from NXF params.")
}
if(!toolKey.contains(".")) {
return _get(params)
}
def entry = null
if(params.containsKey("tools")) {
entry = params.tools
} else if(params.containsKey("sc")) {
entry = params.sc
} else if(params.containsKey("sc")) {
entry = params
} else {
throw new Exception("VSN ERROR: Missing params.<sc|tools>.")
}

toolKey.split('\\.').each { entry = entry?.get(it) }
return entry
}
hasToolParams = { toolKey ->
if(params.containsKey("tools")) {
return params.tools.containsKey(toolKey)
}
if(params.containsKey("sc")) {
return params.sc.containsKey(toolKey)
}
if(params.containsKey("toolKey")) {
return params.containsKey(toolKey)
}
return false
}
hasUtilsParams = { utilityKey ->
if(params.utils.containsKey(utilityKey)) {
return true
}
// backward-compatible
if(params.containsKey("sc")) {
return params.sc.containsKey(utilityKey)
}
return false
}
getUtilsParams = { utilityKey ->
if(params.utils.containsKey(utilityKey)) {
return params.utils[utilityKey]
}
// backward-compatible
if(params.containsKey("sc")) {
return params.sc[utilityKey]
}
throw new Exception("Cannot find utility " + utilityKey + " in Nextflow config.")
}
}
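
With the getToolParams/hasToolParams/getUtilsParams/hasUtilsParams helper closures removed, tool parameters are addressed directly on the params map, as the docs/development.rst changes below illustrate; a brief before/after sketch:

// Old: indirection through the removed generic.config helpers
container params.getToolParams("harmony").container

// New: plain nested access under params.tools
container params.tools.harmony.container
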
2 changes: 1 addition & 1 deletion conf/nemesh.config
@@ -7,7 +7,7 @@ params {
qsubaccount = ''
}

sc {
tools {
nemesh {
// User can extract custom cell barcodes by providing it with a TSV containing all the barcodes
// custom_selected_barcodes = ''
2 changes: 1 addition & 1 deletion conf/test.config
@@ -1,5 +1,5 @@
params {
misc {
mitools {
test {
enabled = true
}
2 changes: 1 addition & 1 deletion conf/test__bbknn.config
@@ -8,7 +8,7 @@ params {
cellranger_mex = "testdata/*/outs/"
}
}
sc {
tools {
file_annotator {
metadataFilePath = ''
}
2 changes: 1 addition & 1 deletion conf/test__bbknn_scenic.config
@@ -8,7 +8,7 @@ params {
cellranger_mex = "testdata/*/outs/"
}
}
sc {
tools {
file_annotator {
metadataFilePath = ''
}
2 changes: 1 addition & 1 deletion conf/test__cell_annotate_filter.config
@@ -8,7 +8,7 @@ params {
cellranger_mex = 'sample_data/outs'
}
}
sc {
tools {
file_converter {
off = 'h5ad'
tagCellWithSampleId = false
2 changes: 1 addition & 1 deletion conf/test__decontx.config
@@ -8,7 +8,7 @@ params {
cellranger_mex = 'sample_data/outs'
}
}
sc {
tools {
file_annotator {
metadataFilePath = ''
}
2 changes: 1 addition & 1 deletion conf/test__harmony.config
@@ -8,7 +8,7 @@ params {
cellranger_mex = "testdata/*/outs/"
}
}
sc {
tools {
file_annotator {
metadataFilePath = ''
}
2 changes: 1 addition & 1 deletion conf/test__mnncorrect.config
@@ -8,7 +8,7 @@ params {
cellranger_mex = "testdata/*/outs/"
}
}
sc {
tools {
file_annotator {
metadataFilePath = ''
}
2 changes: 1 addition & 1 deletion conf/test__scenic.config
@@ -3,7 +3,7 @@ params {
global {
project_name = 'scenic_CI'
}
sc {
tools {
file_annotator {
metadataFilePath = ''
}
2 changes: 1 addition & 1 deletion conf/test__scenic_multiruns.config
@@ -3,7 +3,7 @@ params {
global {
project_name = 'scenic_multiruns_CI'
}
sc {
tools {
file_annotator {
metadataFilePath = ''
}
2 changes: 1 addition & 1 deletion conf/test__single_sample.config
@@ -8,7 +8,7 @@ params {
cellranger_mex = 'sample_data/outs'
}
}
sc {
tools {
file_annotator {
metadataFilePath = ''
}
2 changes: 1 addition & 1 deletion conf/test__single_sample_decontx_correct.config
@@ -8,7 +8,7 @@ params {
cellranger_mex = 'sample_data/outs'
}
}
sc {
tools {
file_annotator {
metadataFilePath = ''
}
2 changes: 1 addition & 1 deletion conf/test__single_sample_decontx_correct_scrublet.config
@@ -8,7 +8,7 @@ params {
cellranger_mex = 'sample_data/outs'
}
}
sc {
tools {
file_annotator {
metadataFilePath = ''
}
2 changes: 1 addition & 1 deletion conf/test__single_sample_decontx_filter.config
@@ -8,7 +8,7 @@ params {
cellranger_mex = 'sample_data/outs'
}
}
sc {
tools {
file_annotator {
metadataFilePath = ''
}
2 changes: 1 addition & 1 deletion conf/test__single_sample_param_exploration.config
@@ -8,7 +8,7 @@ params {
cellranger_mex = 'sample_data/outs'
}
}
sc {
tools {
file_annotator {
metadataFilePath = ''
}
2 changes: 1 addition & 1 deletion conf/test__single_sample_scenic.config
@@ -8,7 +8,7 @@ params {
cellranger_mex = 'sample_data/outs'
}
}
sc {
tools {
file_annotator {
metadataFilePath = ''
}
2 changes: 1 addition & 1 deletion conf/test__single_sample_scenic_multiruns.config
@@ -8,7 +8,7 @@ params {
cellranger_mex = 'sample_data/outs'
}
}
sc {
tools {
file_annotator {
metadataFilePath = ''
}
2 changes: 1 addition & 1 deletion conf/test__single_sample_scrublet.config
@@ -8,7 +8,7 @@ params {
cellranger_mex = 'sample_data/outs'
}
}
sc {
tools {
file_annotator {
metadataFilePath = ''
}
2 changes: 1 addition & 1 deletion conf/test_disabled.config
@@ -1,5 +1,5 @@
params {
misc {
mitools {
test {
enabled = false
}
26 changes: 13 additions & 13 deletions docs/development.rst
@@ -92,7 +92,7 @@ Steps:
.. code:: groovy
params {
sc {
tools {
harmony {
container = 'vibsinglecellnf/harmony:1.0'
report_ipynb = "${params.misc.test.enabled ? '../../..' : ''}/src/harmony/bin/reports/sc_harmony_report.ipynb"
@@ -239,7 +239,7 @@ Steps:
process SC__HARMONY__HARMONY_MATRIX {
container params.getToolParams("harmony").container
container params.tools.harmony.container
publishDir "${params.global.outdir}/data/intermediate", mode: 'symlink'
clusterOptions "-l nodes=1:ppn=${params.global.threads} -l walltime=1:00:00 -A ${params.global.qsubaccount}"
@@ -250,7 +250,7 @@ Steps:
tuple val(sampleId), path("${sampleId}.SC__HARMONY__HARMONY_MATRIX.tsv")
script:
def sampleParams = params.parseConfig(sampleId, params.global, params.getToolParams("harmony"))
def sampleParams = params.parseConfig(sampleId, params.global, params.tools.harmony)
processParams = sampleParams.local
varsUseAsArguments = processParams.varsUse.collect({ '--vars-use' + ' ' + it }).join(' ')
"""
@@ -364,7 +364,7 @@ Steps:
// Run clustering
// Define the parameters for clustering
def clusteringParams = SC__SCANPY__CLUSTERING_PARAMS( clean(params.getToolParams("scanpy").clustering) )
def clusteringParams = SC__SCANPY__CLUSTERING_PARAMS( clean(params.tools.scanpy.clustering) )
CLUSTER_IDENTIFICATION(
normalizedTransformedData,
DIM_REDUCTION_TSNE_UMAP.out.dimred_tsne_umap,
@@ -400,7 +400,7 @@ Steps:
)
harmony_report = GENERATE_DUAL_INPUT_REPORT(
becDualDataPrePost,
file(workflow.projectDir + params.getToolParams("harmony").report_ipynb),
file(workflow.projectDir + params.tools.harmony.report_ipynb),
"SC_BEC_HARMONY_report",
clusteringParams.isParameterExplorationModeOn()
)
@@ -490,10 +490,10 @@ Steps:
SC__FILE_CONVERTER | \
FILTER_AND_ANNOTATE_AND_CLEAN
if(params.getToolParams("scanpy").containsKey("filter")) {
if(params.tools.scanpy.containsKey("filter")) {
out = QC_FILTER( out ).filtered // Remove concat
}
if(params.getUtilsParams("file_concatenator")) {
if(params.utils.file_concatenator) {
out = SC__FILE_CONCATENATOR(
out.map {
it -> it[1]
@@ -502,7 +502,7 @@ Steps:
)
)
}
if(params.getToolParams("scanpy").containsKey("data_transformation") && params.getToolParams("scanpy").containsKey("normalization")) {
if(params.tools.scanpy.containsKey("data_transformation") && params.tools.scanpy.containsKey("normalization")) {
out = NORMALIZE_TRANSFORM( out )
}
out = HVG_SELECTION( out )
@@ -527,7 +527,7 @@ Steps:
// Conversion
// Convert h5ad to X (here we choose: loom format)
if(params.hasUtilsParams("file_concatenator")) {
if(params.utils?.file_concatenator) {
filteredloom = SC__H5AD_TO_FILTERED_LOOM( SC__FILE_CONCATENATOR.out )
scopeloom = FILE_CONVERTER(
BEC_HARMONY.out.data.groupTuple(),
@@ -552,7 +552,7 @@ Steps:
// Collect the reports:
// Define the parameters for clustering
def clusteringParams = SC__SCANPY__CLUSTERING_PARAMS( clean(params.getToolParams("scanpy").clustering) )
def clusteringParams = SC__SCANPY__CLUSTERING_PARAMS( clean(params.tools.scanpy.clustering) )
// Pairing clustering reports with bec reports
if(!clusteringParams.isParameterExplorationModeOn()) {
clusteringBECReports = BEC_HARMONY.out.cluster_report.map {
@@ -722,8 +722,8 @@ Workflows import multiple processes and define the workflow by name:
workflow CELLRANGER {
main:
SC__CELLRANGER__MKFASTQ(file(params.getToolParams("cellranger").mkfastq.csv), path(params.getToolParams("cellranger").mkfastq.runFolder))
SC__CELLRANGER__COUNT(file(params.getToolParams("cellranger").count.transcriptome), SC__CELLRANGER__MKFASTQ.out.flatten())
SC__CELLRANGER__MKFASTQ(file(params.tools.cellranger.mkfastq.csv), path(params.tools.cellranger.mkfastq.runFolder))
SC__CELLRANGER__COUNT(file(params.tools.cellranger.count.transcriptome), SC__CELLRANGER__MKFASTQ.out.flatten())
emit:
SC__CELLRANGER__COUNT.out
@@ -775,7 +775,7 @@ The parameter structure internally (post-merge) is:
project_name = "MCF7"
...
}
sc {
tools {
utils {
file_converter {
...
(Diff truncated: the remaining changed files are not shown here.)
