From d7f3b5eab4e5f70df83538318db574dbd7f57dff Mon Sep 17 00:00:00 2001 From: Chris Flerin Date: Tue, 8 Dec 2020 13:18:17 +0100 Subject: [PATCH 01/26] Add option to change the copy mode for the PUBLISH workflow. - Options are now 'copy', 'link', 'symlink', etc. and are set in params.utils.publish.mode - Default value is 'link' - Fixes #265 --- src/utils/conf/base.config | 1 + src/utils/processes/utils.nf | 14 ++++---------- 2 files changed, 5 insertions(+), 10 deletions(-) diff --git a/src/utils/conf/base.config b/src/utils/conf/base.config index 03cea928..69d2d1f5 100644 --- a/src/utils/conf/base.config +++ b/src/utils/conf/base.config @@ -5,6 +5,7 @@ params { // pipelineOutputSuffix = '' compressionLevel = 6 annotateWithBatchVariableName = false + mode = 'link' } } sc { diff --git a/src/utils/processes/utils.nf b/src/utils/processes/utils.nf index 8388e204..0b99fe3f 100644 --- a/src/utils/processes/utils.nf +++ b/src/utils/processes/utils.nf @@ -414,11 +414,8 @@ def getPublishDir = { outDir, toolName -> process SC__PUBLISH_PROXY { publishDir "${params.global.outdir}/data/intermediate", \ - mode: 'symlink', \ - overwrite: true, \ - saveAs: { - filename -> "${outputFileName}" - } + mode: "${params.utils.publish.mode}", \ + saveAs: { filename -> "${outputFileName}" } label 'compute_resources__minimal' @@ -458,11 +455,8 @@ process SC__PUBLISH { publishDir \ "${getPublishDir(params.global.outdir,toolName)}", \ - mode: 'link', \ - overwrite: true, \ - saveAs: { - filename -> "${outputFileName}" - } + mode: "${params.utils.publish.mode}", \ + saveAs: { filename -> "${outputFileName}" } label 'compute_resources__minimal' From adaf89b8ea351e801891d9ec1d553ace359c4606 Mon Sep 17 00:00:00 2001 From: Chris Flerin Date: Tue, 8 Dec 2020 14:22:43 +0100 Subject: [PATCH 02/26] Cleanup and streamline PUBLISH workflow - Avoid duplicating the input file, this is replaced with a hard link - Input/output file collisions are avoided by renaming the input file before creating the link - 
SC__PUBLISH_PROXY is removed from the workflow. The PUBLISH workflow now only needs a single process (instead of two) to publish a file --- src/utils/processes/utils.nf | 52 ++++++------------------------------ src/utils/workflows/utils.nf | 20 +++----------- 2 files changed, 12 insertions(+), 60 deletions(-) diff --git a/src/utils/processes/utils.nf b/src/utils/processes/utils.nf index 0b99fe3f..061161b8 100644 --- a/src/utils/processes/utils.nf +++ b/src/utils/processes/utils.nf @@ -411,45 +411,6 @@ def getPublishDir = { outDir, toolName -> return "${outDir}/data/${toolName.toLowerCase()}" } -process SC__PUBLISH_PROXY { - - publishDir "${params.global.outdir}/data/intermediate", \ - mode: "${params.utils.publish.mode}", \ - saveAs: { filename -> "${outputFileName}" } - - label 'compute_resources__minimal' - - input: - tuple \ - val(tag), \ - path(f), \ - val(stashedParams) - val(fileOutputSuffix) - val(toolName) - val(isParameterExplorationModeOn) - - output: - tuple \ - val(tag), \ - path(outputFileName), \ - val(stashedParams) - - script: - outputFileName = getOutputFileName( - params, - tag, - f, - fileOutputSuffix, - false, - null - ) - """ - if [ ! -f ${outputFileName} ]; then - ln -s $f "${outputFileName}" - fi - """ - -} process SC__PUBLISH { @@ -484,15 +445,18 @@ process SC__PUBLISH { isParameterExplorationModeOn, stashedParams ) + /* avoid cases where the input and output files have identical names: + Move the input file to a unique name, then create a link to + the input file */ """ - cp -rL $f tmp - rm $f - ln tmp "${outputFileName}" - rm tmp + mv $f tmp + if [ ! 
-f ${outputFileName} ]; then + ln -L tmp "${outputFileName}" + fi """ - } + process COMPRESS_HDF5() { container "vibsinglecellnf/hdf5:1.10.5-r2" diff --git a/src/utils/workflows/utils.nf b/src/utils/workflows/utils.nf index 790ec73f..c15f8989 100644 --- a/src/utils/workflows/utils.nf +++ b/src/utils/workflows/utils.nf @@ -12,7 +12,6 @@ include { isParamNull; COMPRESS_HDF5; SC__PUBLISH; - SC__PUBLISH_PROXY; } from "./../processes/utils.nf" params(params) formatsAllowed = ['h5ad', 'loom'] @@ -45,22 +44,11 @@ workflow PUBLISH { isParamNull(toolName) ? 'NULL' : toolName, ) } - - // Publish - SC__PUBLISH( - out.map { - // if stashedParams not there, just put null 3rd arg - it -> tuple(it[0], it[1], it.size() > 2 ? it[2]: null) - }, - isParamNull(fileOutputSuffix) ? 'NULL' : fileOutputSuffix, - isParamNull(toolName) ? 'NULL' : toolName, - isParameterExplorationModeOn - ) } - // Proxy to avoid file name collision - SC__PUBLISH_PROXY( - data.map { + // Publish + SC__PUBLISH( + out.map { // if stashedParams not there, just put null 3rd arg it -> tuple(it[0], it[1], it.size() > 2 ? 
it[2]: null) }, @@ -70,7 +58,7 @@ workflow PUBLISH { ) emit: - SC__PUBLISH_PROXY.out + SC__PUBLISH.out } From 5526882d774d33669174f595137962c3159dbfd2 Mon Sep 17 00:00:00 2001 From: Chris Flerin Date: Tue, 8 Dec 2020 20:15:23 +0100 Subject: [PATCH 03/26] Add CI test for parameter exploration - Uses single_sample + two clustering parameters --- .../single_sample_param_exploration.yml | 33 ++++++++++++++++++ ...st__single_sample_param_exploration.config | 34 +++++++++++++++++++ nextflow.config | 3 ++ 3 files changed, 70 insertions(+) create mode 100644 .github/workflows/single_sample_param_exploration.yml create mode 100644 conf/test__single_sample_param_exploration.config diff --git a/.github/workflows/single_sample_param_exploration.yml b/.github/workflows/single_sample_param_exploration.yml new file mode 100644 index 00000000..c28ff6b5 --- /dev/null +++ b/.github/workflows/single_sample_param_exploration.yml @@ -0,0 +1,33 @@ +name: single_sample_param_exploration + +on: + push: + branches: + - master + pull_request: + branches: + - master + +jobs: + build: + + runs-on: ubuntu-latest + + steps: + - uses: actions/checkout@v2 + with: + submodules: true + - name: Install Nextflow + run: | + export NXF_VER='20.04.1' + wget -qO- get.nextflow.io | bash + sudo mv nextflow /usr/local/bin/ + - name: Get sample data + run: | + wget https://raw.githubusercontent.com/aertslab/SCENICprotocol/master/example/sample_data_tiny.tar.gz + tar xzvf sample_data_tiny.tar.gz + - name: Run single_sample_param_exploration test + run: | + nextflow run ${GITHUB_WORKSPACE} -profile single_sample,test__single_sample_param_exploration,test__compute_resources,docker -entry single_sample -ansi-log false + cat .nextflow.log + diff --git a/conf/test__single_sample_param_exploration.config b/conf/test__single_sample_param_exploration.config new file mode 100644 index 00000000..82e51612 --- /dev/null +++ b/conf/test__single_sample_param_exploration.config @@ -0,0 +1,34 @@ + +params { + global { + 
project_name = 'single_sample_param_exploration_CI' + } + data { + tenx { + cellranger_mex = 'sample_data/outs' + } + } + sc { + file_annotator { + metadataFilePath = '' + } + scanpy { + filter { + cellFilterMinNGenes = 1 + } + neighborhood_graph { + nPcs = 2 + } + dim_reduction { + pca { + method = 'pca' + nComps = 2 + } + } + clustering { + resolutions = [0.8,1.0] + } + } + } +} + diff --git a/nextflow.config b/nextflow.config index 67b1571f..37a1968a 100644 --- a/nextflow.config +++ b/nextflow.config @@ -368,6 +368,9 @@ profiles { test__single_sample_scrublet { includeConfig 'conf/test__single_sample_scrublet.config' } + test__single_sample_param_exploration { + includeConfig 'conf/test__single_sample_param_exploration.config' + } test__scenic { includeConfig 'conf/genomes/hg38.config' includeConfig 'conf/test__scenic.config' From fdabdc4c913e5c25b04c1ba036475ef8416a808f Mon Sep 17 00:00:00 2001 From: Chris Flerin Date: Fri, 18 Dec 2020 16:27:42 +0100 Subject: [PATCH 04/26] Extend the run time of the pcacv process --- src/pcacv/pcacv.config | 1 + 1 file changed, 1 insertion(+) diff --git a/src/pcacv/pcacv.config b/src/pcacv/pcacv.config index dd57cb39..5b2867c9 100644 --- a/src/pcacv/pcacv.config +++ b/src/pcacv/pcacv.config @@ -23,6 +23,7 @@ params { process { withLabel: 'compute_resources__pcacv' { cpus = 1 + time = '24h' } } From ae65800359d421a1655a2655a0e3641432a31fef Mon Sep 17 00:00:00 2001 From: Chris Flerin Date: Tue, 5 Jan 2021 12:01:36 +0100 Subject: [PATCH 05/26] Updated development docs, added attributions --- docs/attributions.rst | 40 ++++++++++++++++++++++++++++++++++++++++ docs/development.rst | 4 ++-- 2 files changed, 42 insertions(+), 2 deletions(-) create mode 100644 docs/attributions.rst diff --git a/docs/attributions.rst b/docs/attributions.rst new file mode 100644 index 00000000..e317cca9 --- /dev/null +++ b/docs/attributions.rst @@ -0,0 +1,40 @@ +Attributions +============ + + +VSN-Pipelines is a collection of workflows targeted toward 
the analysis of single cell data. +VSN is dependent on, and takes functions from many tools, developed both internally and externally, which are listed here. + +Tools +---------------------------------------------------- + + +- `GreenleafLab/ArchR `_ +- `caleblareau/bap `_ +- `lh3/bwa `_ +- `Samtools `_ +- `campbio/celda `_ +- Directs +- `DropletUtils `_ +- `Drop-seq Tools `_ +- `EDirect `_ +- `OpenGene/fastp `_ +- `hangnoh/flybaseR `_ +- `dweemx/flybaseR `_ +- `immunogenomics/harmony `_ +- pcacv +- `Picard `_ +- `statgen/popscle `_ +- `aertslab/popscle_helper_tools `_ +- `aertslab/cisTopic `_ +- `theislab/scanpy `_ +- `aertslab/pySCENIC `_ +- `aertslab/SCENIC `_ +- `swolock/scrublet `_ +- `aertslab/single_cell_toolkit `_ +- `timoast/sinto `_ +- `constantAmateur/SoupX `_ +- `ncbi/sra-tools `_ +- `alexdobin/STAR `_ +- `Trim Galore `_ + diff --git a/docs/development.rst b/docs/development.rst index 9018ff90..3ed80745 100644 --- a/docs/development.rst +++ b/docs/development.rst @@ -1,5 +1,5 @@ -Development -============ +Development Guide +================= Create module ------------- From 3e11e1291ae17344115de39b8b199415014e36ac Mon Sep 17 00:00:00 2001 From: Chris Flerin Date: Tue, 5 Jan 2021 12:03:21 +0100 Subject: [PATCH 06/26] Docs updates: - Moved two-pass writeup to features file - Split input formats to a separate file - Cleanup/streamline getting-started --- docs/features.rst | 33 ++++- docs/getting-started.rst | 73 ++++++++-- docs/input_formats.rst | 252 ++++++++++++++++++++++++++++++++ docs/pipelines.rst | 308 --------------------------------------- 4 files changed, 346 insertions(+), 320 deletions(-) create mode 100644 docs/input_formats.rst diff --git a/docs/features.rst b/docs/features.rst index e6fa68cc..fe9268b7 100644 --- a/docs/features.rst +++ b/docs/features.rst @@ -1,5 +1,27 @@ -Features -========= +Advanced Features +================= + +Two-pass strategy +----------------- + +Typically, cell- and gene-level filtering is one of the first steps 
performed in the analysis pipelines. +This usually results in the pipeline being run in two passes. +In the **first pass**, the default filters are applied (which are probably not valid for new datasets), and a separate QC report is generated for each sample. +These QC reports can be inspected and the filters can be adjusted in the config file +either for all samples (by editing the ``params.sc.scanpy.filter`` settings directly, or for individual samples by using the strategy described in multi-sample parameters. +Then, the **second pass** restarts the pipeline with the correct filtering parameters applied (use ``nextflow run ... -resume`` to skip already completed steps). + +Other notes +^^^^^^^^^^^ +In order to run a specific pipeline (e.g. ``single_sample``), +the pipeline name must be specified as a **profile** when running ``nextflow config ...`` (so that the default parameters are included), +and as the **entry** workflow when running the pipeline with ``nextflow run``. + +One exception to this is that the ``-entry`` pipeline can be one that is a subset of the one present in the config file. +For example, in a pipeline with long running step that occurs after filtering (e.g. ``single_sample_scenic``), +it can be useful to generate the full config file (``nextflow config vib-singlecell-nf/vsn-pipelines -profile single_sample_scenic``), +then run a first pass for filtering using ``nextflow run vib-singlecell-nf/vsn-pipelines -entry single_sample``, and a second pass using the full pipeline ``-entry single_sample_scenic``). + Avoid re-running SCENIC and use pre-existing results ---------------------------------------------------- @@ -107,6 +129,9 @@ Remarks: Currently, only the Scanpy related pipelines have this feature implemented. + +.. _Cell annotation: + Cell-based metadata annotation ------------------------------ @@ -180,6 +205,8 @@ If ``obo`` is used, the following parameters are required: .. 
_`Input Data Formats`: https://vsn-pipelines.readthedocs.io/en/develop/pipelines.html#input-data-formats +.. _Sample annotation: + Sample-based metadata annotation -------------------------------- @@ -293,6 +320,8 @@ If ``external`` used, the following additional parameters are required: - `optional` ``filterColumnName`` is the column name from ``cellMetaDataFilePath`` which be used to filter out cells. +.. _Multi-sample parameters: + Multi-sample parameters ------------------------ diff --git a/docs/getting-started.rst b/docs/getting-started.rst index 6c8cd24b..49cf9b6c 100644 --- a/docs/getting-started.rst +++ b/docs/getting-started.rst @@ -2,7 +2,7 @@ Getting Started ================ Prerequisite ------------- +************ Make sure that ``LANG`` and ``LC_ALL`` environment variables have been set. You can use the following command to check this: @@ -18,7 +18,7 @@ If some are not set, you can set them to the default language for instance: export LC_ALL="C" Dependencies -^^^^^^^^^^^^ +------------ Make sure you have the following software installed, - Nextflow_ @@ -35,14 +35,12 @@ Make sure you have the following software installed, .. _Singularity: https://www.sylabs.io/singularity/ Quick start ------------ +*********** To run a quick test of the single sample analysis pipeline, we can use the 1k PBMC datasets provided by 10x Genomics. This will take only **~3min** to run. -1. The data first needs to be downloaded (instructions can be found here_) - -.. _here: ../data/README.md +1. The data first needs to be downloaded (instructions can be found `here `_). 2. Next, update to the latest pipeline version:: @@ -60,7 +58,7 @@ This will take only **~3min** to run. -entry single_sample Example Output -^^^^^^^^^^^^^^ +-------------- .. 
code:: shell @@ -118,14 +116,69 @@ Example Output Succeeded : 83 +Output +------ + The pipelines will generate 3 types of results in the output directory (`params.global.outdir`), by default ``out/`` - ``data``: contains the workflow output file (in h5ad format), plus symlinks to all the intermediate files. - ``loom``: contains final loom files which can be imported inside SCope visualization tool for further visualization of the results. - ``notebooks``: contains all the notebooks generated along the pipeline (e.g.: Quality control report) +- ``pipeline_reports``: Nextflow dag, execution, timeline, and trace reports - - See the example output report from the 1k PBMC data `here `_ +For a full list of the pipelines available please see the `pipelines <./pipelines.html>`_ page. -- ``pipeline_reports``: Nextflow dag, execution, timeline, and trace reports -If you would like to use the pipelines on a custom dataset, please see the `pipelines <./pipelines.html>`_ section below. +---- + + +Further pipeline configuration details +************************************** + +This pipeline can be fully configured and run on custom data with a few steps. +The recommended method is to first run ``nextflow config ...`` to generate a complete config file (with the default parameters) in your working directory. +The tool-specific parameters, as well as Docker/Singularity profiles, are included when specifying the appropriate profiles to ``nextflow config``. + +1. First, update to the latest pipeline version (this will update the Nextflow cache of the repository, typically located in ``~/.nextflow/assets/vib-singlecell-nf/``):: + + nextflow pull vib-singlecell-nf/vsn-pipelines + + +2. Next, a config file needs to be generated. + This step will merge parameters from multiple profiles together to create a master config which specifies **all** parameters used by the pipeline. 
+ In this example, these are ``tenx`` for the input data, ``singularity`` to use the Singularity system (replace with ``docker`` if necessary), and ``single_sample`` to load the defaults for the single sample pipeline. + In your working directory, run ``nextflow config ...`` with the appropriate profiles:: + + nextflow config vib-singlecell-nf/vsn-pipelines \ + -profile tenx,singularity,single_sample > single_sample.config + + + +3. Now, edits can be made to ``single_sample.config``. + Generally, the default values are acceptable to use for a first pass, but certain variables (input directory, etc.) need to be changed. + + In particular, the following parameters are frequently modified in practice: + + * ``params.global.project_name``: a project name which will be included in some of the output file names. + * ``params.data.tenx.cellranger_mex``, which should point to the ``outs/`` folder generated by Cell Ranger (if using 10x data). See :ref:`Information on using 10x Genomics datasets <using_10x_datasets>` for additional info. + * Filtering parameters (``params.sc.scanpy.filter``): filtering parameters, which will be applied to all samples, can be set here: min/max genes, mitochondrial read fraction, and min cells. See :ref:`Multi-sample parameters` for additional info on how to specify sample-specific parameters. + * Louvain cluster resolution: ``params.sc.scanpy.clustering.resolution``. + * :ref:`Cell <Cell annotation>`- and :ref:`sample <Sample annotation>`-level annotations are also possible. + + +4. Run the workflow using the new config file (using ``-C`` is recommended to use **only** this file), specifying the proper workflow as the entry point:: + + nextflow -C single_sample.config \ + run vib-singlecell-nf/vsn-pipelines \ + -entry single_sample + + +Additional resources for running on custom data +----------------------------------------------- + +- `Input file formats available <./input_formats.html>`_. +- `Available pipelines <./pipelines.html>`_. 
+- `Advanced features for customizing pipelines <./features.html>`_. + +Finally, see the list of case studies with specific examples and full config files at `VSN-Pipelines-examples `_. + diff --git a/docs/input_formats.rst b/docs/input_formats.rst new file mode 100644 index 00000000..898a7a0b --- /dev/null +++ b/docs/input_formats.rst @@ -0,0 +1,252 @@ +Input Data Formats +=================== + +Depending on the type of data you run the pipeline with, one or more appropriate profiles should be set when running ``nextflow config``. +These profiles are indicated in the sections below. + +Specifying multiple samples +*************************** + +All the input data parameters are compatible with the following features: + +- Glob patterns + +.. code:: + + "data/10x/1k_pbmc/1k_pbmc_*/outs/" + +- Comma separated paths (paths can contain glob patterns) + +.. code:: + + "data/10x/1k_pbmc/1k_pbmc_v2_chemistry/outs/, data/10x/1k_pbmc/1k_pbmc_v3_chemistry/outs/" + +- Array of paths (paths can contain glob patterns) + +.. code:: + + [ + "data/10x/1k_pbmc/1k_pbmc_v2_chemistry/outs/", + "data/10x/1k_pbmc/1k_pbmc_v3_chemistry/outs/" + ] + +---- + +.. _using_10x_datasets: + +Cell Ranger (10x Genomics) +************************** + +Data from a standard Cell Ranger output directory can be easily ingested into the pipeline by using the proper input channel (``tenx_mex`` or ``tenx_h5``, depending on which file should be used). +Multiple samples can be selected by providing the path to this directory using glob patterns. + +.. 
code:: + + /home/data/ + └── cellranger + ├── sample_A + │   └── outs + │   ├── filtered_feature_bc_matrix + │   │   ├── barcodes.tsv + │   │   ├── genes.tsv + │   │   └── matrix.mtx + │   └── filtered_feature_bc_matrix.h5 + └── sample_B + └── outs + ├── filtered_feature_bc_matrix + │   ├── barcodes.tsv + │   ├── genes.tsv + │   └── matrix.mtx + └── filtered_feature_bc_matrix.h5 + + +MEX +___ + +To use the Cell Ranger Market Exchange (**MEX**) files, use the following profile when generating the config file:: + + -profile tenx + +This profile adds the following parameter (``params.data.tenx.cellranger_mex``) into the generated .config file:: + + [...] + data { + tenx { + cellranger_mex = "/home/data/cellranger/sample*/outs/" + } + } + [...] + + +H5 +__ + +To use the Cell Ranger ``h5`` file as input, use the following profile:: + + -profile tenx_h5 + +This profile adds the ``params.data.tenx.cellranger_h5`` parameter into the generated .config file:: + + [...] + data { + tenx { + cellranger_h5 = "/home/data/cellranger/sample*/outs/" + } + } + [...] + + +Input file detection +____________________ + +Setting the input directory appropriately, using a glob in the directory path in place of the sample names, will collect all the samples listed in the ``filtered_[feature|gene]_bc_matrix`` directories listed above. +For example, in ``params.data.tenx``, setting:: + + cellranger_mex = "/home/data/cellranger/sample*/outs/" + +or + +.. code:: + + cellranger_h5 = "/home/data/cellranger/sample*/outs/" + +will recursively find all 10x samples in that directory. + +The pipeline will use either the ``outs/filtered_feature_bc_matrix/`` or the ``outs/raw_feature_bc_matrix/`` depending on the setting of the ``params.sc.file_converter.useFilteredMatrix`` (``true`` uses filtered; ``false`` uses raw). 
+ +---- + +H5AD (Scanpy) +************* +Use the following profile when generating the config file:: + + -profile h5ad + + +In the generated .config file, make sure the ``file_paths`` parameter is set with the paths to the ``.h5ad`` files:: + + [...] + data { + h5ad { + file_paths = "data/1k_pbmc_v*_chemistry_SUFFIX.SC__FILE_CONVERTER.h5ad" + suffix = "_SUFFIX.SC__FILE_CONVERTER.h5ad" + } + } + [...] + +- The ``suffix`` parameter is used to infer the sample name from the file paths (it is removed from the input file path to derive a sample name). + +In case there are multiple .h5ad files that need to be processed with different suffixes, the multi-labelled strategy should be used to define the h5ad parameter:: + + [...] + data { + h5ad { + GROUP1 { + file_paths = "[path-to-group1-files]/*.SUFFIX1.h5ad" + suffix = ".SUFFIX1.h5ad" + } + GROUP2 { + file_paths = "[path-to-group1-files]/*.SUFFIX2.h5ad" + suffix = ".SUFFIX2.h5ad" + } + } + } + [...] + +Notes: + +- ``GROUP1``, ``GROUP2`` are just example names here. They can be replaced by any value as long as they are alphanumeric (underscores are allowed). +- ``file_paths`` and ``suffix`` do allow list of paths/globs in the multi-labelled strategy. + +---- + +Loom +**** + +Use the following profile when generating the config file:: + + -profile loom + + +In the generated .config file, make sure the ``file_paths`` parameter is set with the paths to the ``.loom`` files:: + + [...] + data { + loom { + file_paths = "data/1k_pbmc_v*_chemistry_SUFFIX.SC__FILE_CONVERTER.loom" + suffix = "_SUFFIX.SC__FILE_CONVERTER.loom" + } + } + [...] + +- The ``suffix`` parameter is used to infer the sample name from the file paths (it is removed from the input file path to derive a sample name). 
+ +---- + +Seurat Rds +********** + +Use the following profile when generating the config file:: + + -profile seurat_rds + + +In the generated .config file, make sure the ``file_paths`` parameter is set with the paths to the ``.Rds`` files:: + + [...] + data { + seurat_rds { + file_paths = "data/1k_pbmc_v*_chemistry_SUFFIX.SC__FILE_CONVERTER.Rds" + suffix = "_SUFFIX.SC__FILE_CONVERTER.Rds" + } + } + [...] + +- The pipelines expect a Seurat v3 object contained in the .Rds file. (Seurat v2 objects are currently not supported). +- The ``suffix`` parameter is used to infer the sample name from the file paths (it is removed from the input file path to derive a sample name). + +---- + +TSV +*** +Use the following profile when generating the config file:: + + -profile tsv + + +In the generated .config file, make sure the ``file_paths`` parameter is set with the paths to the ``.tsv`` files:: + + [...] + data { + h5ad { + file_paths = "data/1k_pbmc_v*_chemistry_SUFFIX.SC__FILE_CONVERTER.tsv" + suffix = "_SUFFIX.SC__FILE_CONVERTER.tsv" + } + } + [...] + +- The ``suffix`` parameter is used to infer the sample name from the file paths (it is removed from the input file path to derive a sample name). + +---- + +CSV +*** +Use the following profile when generating the config file:: + + -profile csv + + +In the generated .config file, make sure the ``file_paths`` parameter is set with the paths to the ``.csv`` files:: + + [...] + data { + h5ad { + file_paths = "data/1k_pbmc_v*_chemistry_SUFFIX.SC__FILE_CONVERTER.csv" + suffix = "_SUFFIX.SC__FILE_CONVERTER.csv" + } + } + [...] + +- The ``suffix`` parameter is used to infer the sample name from the file paths (it is removed from the input file path to derive a sample name). 
+ + diff --git a/docs/pipelines.rst b/docs/pipelines.rst index 9178c23f..7a4f01e9 100644 --- a/docs/pipelines.rst +++ b/docs/pipelines.rst @@ -1,70 +1,6 @@ Pipelines ========== -Generating a config file and running the pipeline -************************************************* - -This pipeline can be configured and run on custom data with a few steps. -The recommended method is to first run ``nextflow config ...`` to generate a complete config file (with the default parameters) in your working directory. -The tool-specific parameters, as well as Docker/Singularity profiles, are included when specifying the appropriate profiles to ``nextflow config``. - -1. First, update to the latest pipeline version (this will update the Nextflow cache of the repository, typically located in ``~/.nextflow/assets/vib-singlecell-nf/``):: - - nextflow pull vib-singlecell-nf/vsn-pipelines - - -2. Next, a config file needs to be generated. - This step will merge parameters from multiple profiles together to create a master config which specifies **all** parameters used by the pipeline. - In this example, these are ``tenx`` for the input data, ``singularity`` to use the Singularity system (replace with ``docker`` if necessary), and ``single_sample`` to load the defaults for the single sample pipeline. - In your working directory, run ``nextflow config ...`` with the appropriate profiles:: - - nextflow config vib-singlecell-nf/vsn-pipelines \ - -profile tenx,singularity,single_sample > single_sample.config - - - -3. Now, edits can be made to ``single_sample.config``. - Generally, the default values are acceptable to use for a first pass, but certain variables (input directory, etc.) need to be changed. - - In particular, the following parameters are frequently modified in practice: - - * ``params.global.project_name``: a project name which will be included in some of the output file names. 
- * ``params.data.tenx.cellranger_mex``, which should point to the ``outs/`` folder generated by Cell Ranger (if using 10x data). See ``Information on using 10x Genomics datasets`` for additional info. - * Filtering parameters (``params.sc.scanpy.filter``): filtering parameters, which will be applied to all samples, can be set here: min/max genes, mitochondrial read fraction, and min cells. See ``Multi-sample parameters`` for additional info on how to specify sample-specific parameters. - * Louvain cluster resolution: ``params.sc.scanpy.clustering.resolution``. - * For cell- and sample-level annotations, see ``here`` for additional info. - - -4. Run the workflow using the new config file (using ``-C`` is recommended to use **only** this file), specifying the proper workflow as the entry point:: - - nextflow -C single_sample.config \ - run vib-singlecell-nf/vsn-pipelines \ - -entry single_sample - - -Two-pass strategy ---------------------- - -Typically, cell- and gene-level filtering is one of the first steps performed in the analysis pipelines. -This usually results in the pipeline being run in two passes. -In the **first pass**, the default filters are applied (which are probably not valid for new datasets), and a separate QC report is generated for each sample. -These QC reports can be inspected and the filters can be adjusted in the config file -either for all samples (by editing the ``params.sc.scanpy.filter`` settings directly, or for individual samples by using the strategy described in multi-sample parameters. -Then, the **second pass** restarts the pipeline with the correct filtering parameters applied (use ``nextflow run ... -resume`` to skip already completed steps). - -Other notes ----------------- -In order to run a specific pipeline (e.g. 
``single_sample``), -the pipeline name must be specified as a **profile** when running ``nextflow config ...`` (so that the default parameters are included), -and as the **entry** workflow when running the pipeline with ``nextflow run``. - -One exception to this is that the ``-entry`` pipeline can be one that is a subset of the one present in the config file. -For example, in a pipeline with long running step that occurs after filtering (e.g. ``single_sample_scenic``), -it can be useful to generate the full config file (``nextflow config vib-singlecell-nf/vsn-pipelines -profile single_sample_scenic``), -then run a first pass for filtering using ``nextflow run vib-singlecell-nf/vsn-pipelines -entry single_sample``, and a second pass using the full pipeline ``-entry single_sample_scenic``). - ----- - Single-sample Pipelines *********************** Pipelines to run on a single sample or multiple samples separately and in parallel. @@ -620,247 +556,3 @@ Now we can run it with the following command: run ~/vib-singlecell-nf/vsn-pipelines \ -entry cell_filter -Input Data Formats -******************* - -Depending on the type of data you run the pipeline with, one or more appropriate profiles should be set when running ``nextflow config``. - -All the input data parameters are compatible with the following features: - -- Glob patterns - -.. code:: - - "data/10x/1k_pbmc/1k_pbmc_*/outs/" - -- Comma separated paths (paths can contain glob patterns) - -.. code:: - - "data/10x/1k_pbmc/1k_pbmc_v2_chemistry/outs/, data/10x/1k_pbmc/1k_pbmc_v3_chemistry/outs/" - -- Array of paths (paths can contain glob patterns) - -.. 
code:: - - [ - "data/10x/1k_pbmc/1k_pbmc_v2_chemistry/outs/", - "data/10x/1k_pbmc/1k_pbmc_v3_chemistry/outs/" - ] - ----- - -Cell Ranger (10x Genomics) --------------------------- - -Data from a standard Cell Ranger output directory can be easily ingested into the pipeline by using the proper input channel (``tenx_mex`` or ``tenx_h5``, depending on which file should be used). -Multiple samples can be selected by providing the path to this directory using glob patterns. - -.. code:: - - /home/data/ - └── cellranger - ├── sample_A - │   └── outs - │   ├── filtered_feature_bc_matrix - │   │   ├── barcodes.tsv - │   │   ├── genes.tsv - │   │   └── matrix.mtx - │   └── filtered_feature_bc_matrix.h5 - └── sample_B - └── outs - ├── filtered_feature_bc_matrix - │   ├── barcodes.tsv - │   ├── genes.tsv - │   └── matrix.mtx - └── filtered_feature_bc_matrix.h5 - - -MEX -___ - -To use the Cell Ranger Market Exchange (**MEX**) files, use the following profile when generating the config file:: - - -profile tenx - -This profile adds the following parameter (``params.data.tenx.cellranger_mex``) into the generated .config file:: - - [...] - data { - tenx { - cellranger_mex = "/home/data/cellranger/sample*/outs/" - } - } - [...] - - -H5 -__ - -To use the Cell Ranger ``h5`` file as input, use the following profile:: - - -profile tenx_h5 - -This profile adds the ``params.data.tenx.cellranger_h5`` parameter into the generated .config file:: - - [...] - data { - tenx { - cellranger_h5 = "/home/data/cellranger/sample*/outs/" - } - } - [...] - - -Input file detection -____________________ - -Setting the input directory appropriately, using a glob in the directory path in place of the sample names, will collect all the samples listed in the ``filtered_[feature|gene]_bc_matrix`` directories listed above. -For example, in ``params.data.tenx``, setting:: - - cellranger_mex = "/home/data/cellranger/sample*/outs/" - -or - -.. 
code:: - - cellranger_h5 = "/home/data/cellranger/sample*/outs/" - -will recursively find all 10x samples in that directory. - -The pipeline will use either the ``outs/filtered_feature_bc_matrix/`` or the ``outs/raw_feature_bc_matrix/`` depending on the setting of the ``params.sc.file_converter.useFilteredMatrix`` (``true`` uses filtered; ``false`` uses raw). - ----- - -H5AD (Scanpy) -------------- -Use the following profile when generating the config file:: - - -profile h5ad - - -In the generated .config file, make sure the ``file_paths`` parameter is set with the paths to the ``.h5ad`` files:: - - [...] - data { - h5ad { - file_paths = "data/1k_pbmc_v*_chemistry_SUFFIX.SC__FILE_CONVERTER.h5ad" - suffix = "_SUFFIX.SC__FILE_CONVERTER.h5ad" - } - } - [...] - -- The ``suffix`` parameter is used to infer the sample name from the file paths (it is removed from the input file path to derive a sample name). - -In case there are multiple .h5ad files that need to be processed with different suffixes, the multi-labelled strategy should be used to define the h5ad parameter:: - - [...] - data { - h5ad { - GROUP1 { - file_paths = "[path-to-group1-files]/*.SUFFIX1.h5ad" - suffix = ".SUFFIX1.h5ad" - } - GROUP2 { - file_paths = "[path-to-group1-files]/*.SUFFIX2.h5ad" - suffix = ".SUFFIX2.h5ad" - } - } - } - [...] - -Notes: - -- ``GROUP1``, ``GROUP2`` are just example names here. They can be replaced by any value as long as they are alphanumeric (underscores are allowed). -- ``file_paths`` and ``suffix`` do allow list of paths/globs in the multi-labelled strategy. - ----- - -Loom ----- -Use the following profile when generating the config file:: - - -profile loom - - -In the generated .config file, make sure the ``file_paths`` parameter is set with the paths to the ``.loom`` files:: - - [...] - data { - loom { - file_paths = "data/1k_pbmc_v*_chemistry_SUFFIX.SC__FILE_CONVERTER.loom" - suffix = "_SUFFIX.SC__FILE_CONVERTER.loom" - } - } - [...] 
- -- The ``suffix`` parameter is used to infer the sample name from the file paths (it is removed from the input file path to derive a sample name). - ----- - -Seurat Rds ----------- - -Use the following profile when generating the config file:: - - -profile seurat_rds - - -In the generated .config file, make sure the ``file_paths`` parameter is set with the paths to the ``.Rds`` files:: - - [...] - data { - seurat_rds { - file_paths = "data/1k_pbmc_v*_chemistry_SUFFIX.SC__FILE_CONVERTER.Rds" - suffix = "_SUFFIX.SC__FILE_CONVERTER.Rds" - } - } - [...] - -- The pipelines expect a Seurat v3 object contained in the .Rds file. (Seurat v2 objects are currently not supported). -- The ``suffix`` parameter is used to infer the sample name from the file paths (it is removed from the input file path to derive a sample name). - ----- - -TSV ---- -Use the following profile when generating the config file:: - - -profile tsv - - -In the generated .config file, make sure the ``file_paths`` parameter is set with the paths to the ``.tsv`` files:: - - [...] - data { - h5ad { - file_paths = "data/1k_pbmc_v*_chemistry_SUFFIX.SC__FILE_CONVERTER.tsv" - suffix = "_SUFFIX.SC__FILE_CONVERTER.tsv" - } - } - [...] - -- The ``suffix`` parameter is used to infer the sample name from the file paths (it is removed from the input file path to derive a sample name). - ----- - -CSV ---- -Use the following profile when generating the config file:: - - -profile csv - - -In the generated .config file, make sure the ``file_paths`` parameter is set with the paths to the ``.csv`` files:: - - [...] - data { - h5ad { - file_paths = "data/1k_pbmc_v*_chemistry_SUFFIX.SC__FILE_CONVERTER.csv" - suffix = "_SUFFIX.SC__FILE_CONVERTER.csv" - } - } - [...] - -- The ``suffix`` parameter is used to infer the sample name from the file paths (it is removed from the input file path to derive a sample name). 
- From 2fda1438b3f01f4a9af994f990cd35937e4611bb Mon Sep 17 00:00:00 2001 From: Chris Flerin Date: Tue, 5 Jan 2021 12:05:50 +0100 Subject: [PATCH 07/26] Docs cleanup/restructure: - Structure index with additonal toc entries - Rename headers in some files - Other cleanup --- README.rst | 2 +- docs/index.rst | 17 ++++++++++++++++- docs/scatac-seq.rst | 20 ++++++++------------ src/directs/README.rst | 5 ++--- 4 files changed, 27 insertions(+), 17 deletions(-) diff --git a/README.rst b/README.rst index 4be64d21..3fd4b620 100644 --- a/README.rst +++ b/README.rst @@ -109,7 +109,7 @@ Sample Aggregation Workflows - |mnncorrect| ---- +---- In addition, the pySCENIC_ implementation of the SCENIC_ workflow is integrated here and can be run in conjunction with any of the above workflows. The output of each of the main workflows is a loom_-format file, which is ready for import into the interactive single-cell web visualization tool SCope_. diff --git a/docs/index.rst b/docs/index.rst index 561517db..34efaead 100644 --- a/docs/index.rst +++ b/docs/index.rst @@ -9,12 +9,27 @@ Home getting-started + input_formats pipelines features case-studies - development + +.. toctree:: + :maxdepth: 2 + :hidden: + :caption: scATAC-seq + scatac-seq +.. toctree:: + :maxdepth: 2 + :hidden: + :caption: Development + + development + attributions + + .. include:: ../README.rst .. Indices and tables diff --git a/docs/scatac-seq.rst b/docs/scatac-seq.rst index 189a5103..efe75894 100644 --- a/docs/scatac-seq.rst +++ b/docs/scatac-seq.rst @@ -1,10 +1,6 @@ -scATAC-seq Pipelines -==================== +scATAC-seq Preprocessing +======================== ----- - -scATAC-seq preprocessing -************************ This pipeline takes fastq files from paired end single cell ATAC-seq, and applies preprocessing steps to align the reads to a reference genome, and produce a bam file and scATAC-seq fragments file. 
The full steps are: @@ -25,7 +21,7 @@ The full steps are: - A fragments file is created using `Sinto `_. Input ------ +***** The input to this pipeline is a (tab-delimited) metadata table with the sample ID, sequencing technology, and locations of the fastq files: @@ -63,13 +59,13 @@ The columns represent: - ``fastq_PE2_path``: The full path to the fastq file for the second read in a pair. Technology ----------- +********** This controls how both barcode correction and debarcoding is applied to the input fastq files. Available options are: ``standard`` -____________ +------------ The ``standard`` setting assumes a typical 10x Genomics style format with two read pair fastqs and a barcode fastq: @@ -113,14 +109,14 @@ which transforms this input into two paired fastq files with the barcode integra ``multiome`` -____________ +------------ The ``multiome`` setting works the same as ``standard`` with the exception of the whitelist used for barcode correction. The whitelists are supplied in the params file (``params.tools.singlecelltoolkit.barcode_correction.whitelist``). ``biorad`` -__________ +---------- The ``biorad`` setting processes BioRad data using `BAP `_. This takes input data: @@ -158,7 +154,7 @@ And produces paired fastq files with the barcode integrated into the read name ( Running the workflow --------------------- +******************** To generate a config file, use the ``atac_preprocess`` profile along with ``docker`` or ``singularity``. 
Note that the full path to ``vib-singlecell-nf/vsn-pipelines/main_atac.nf`` must be used: diff --git a/src/directs/README.rst b/src/directs/README.rst index 0616bae6..e55008c8 100644 --- a/src/directs/README.rst +++ b/src/directs/README.rst @@ -1,6 +1,5 @@ -VSN-Pipelines template -====================== +Directs +======= -This is a template repository for VIB-SingleCell-NF (VSN) pipelines tools From e8fa79602974a00968f6225e41e17eddeebcb12e Mon Sep 17 00:00:00 2001 From: Chris Flerin Date: Wed, 13 Jan 2021 12:52:56 +0100 Subject: [PATCH 08/26] Disable loom vaidation in file converter - Fixes #286 --- src/utils/bin/sc_file_converter.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/src/utils/bin/sc_file_converter.py b/src/utils/bin/sc_file_converter.py index 39ada1d6..c41ee4d9 100755 --- a/src/utils/bin/sc_file_converter.py +++ b/src/utils/bin/sc_file_converter.py @@ -253,7 +253,8 @@ def tag_cell(adata, tag): elif INPUT_FORMAT == 'loom' and OUTPUT_FORMAT == 'h5ad': adata = sc.read_loom( FILE_PATH_IN, - sparse=False + sparse=False, + validate=False ) adata = add_sample_id( adata=adata, From da65c15243c6a62fdb9494e9bd1722d1475aebd8 Mon Sep 17 00:00:00 2001 From: Chris Flerin Date: Wed, 13 Jan 2021 14:43:17 +0100 Subject: [PATCH 09/26] Switch to sparse loading from loom files --- src/utils/bin/sc_file_converter.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/utils/bin/sc_file_converter.py b/src/utils/bin/sc_file_converter.py index c41ee4d9..e2a30977 100755 --- a/src/utils/bin/sc_file_converter.py +++ b/src/utils/bin/sc_file_converter.py @@ -253,7 +253,7 @@ def tag_cell(adata, tag): elif INPUT_FORMAT == 'loom' and OUTPUT_FORMAT == 'h5ad': adata = sc.read_loom( FILE_PATH_IN, - sparse=False, + sparse=True, validate=False ) adata = add_sample_id( From 868ea935340b57eaa278914ebec9da2dcdb6de70 Mon Sep 17 00:00:00 2001 From: Chris Flerin Date: Fri, 15 Jan 2021 17:01:38 +0100 Subject: [PATCH 10/26] Fix incorrectly specified labels 
in cellranger count processes --- src/cellranger-atac/processes/count.nf | 2 +- src/cellranger/processes/count.nf | 4 ++-- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/src/cellranger-atac/processes/count.nf b/src/cellranger-atac/processes/count.nf index a00101dc..bfb9c608 100644 --- a/src/cellranger-atac/processes/count.nf +++ b/src/cellranger-atac/processes/count.nf @@ -63,7 +63,7 @@ process SC__CELLRANGER_ATAC__COUNT_WITH_METADATA { cache 'deep' container toolParams.container publishDir "${params.global.outdir}/counts", mode: 'link', overwrite: true - label 'compute_resources__cellranger' + label 'compute_resources__cellranger_count' input: tuple \ diff --git a/src/cellranger/processes/count.nf b/src/cellranger/processes/count.nf index ce1ba180..6ba6d9e7 100644 --- a/src/cellranger/processes/count.nf +++ b/src/cellranger/processes/count.nf @@ -121,7 +121,7 @@ process SC__CELLRANGER__COUNT_WITH_LIBRARIES { cache 'deep' container toolParams.container publishDir "${params.global.outdir}/counts", saveAs: {"${sampleId}/outs"}, mode: 'link', overwrite: true - label 'compute_resources__cellranger' + label 'compute_resources__cellranger_count' input: path(transcriptome) @@ -168,7 +168,7 @@ process SC__CELLRANGER__COUNT_WITH_METADATA { cache 'deep' container toolParams.container publishDir "${params.global.outdir}/counts", saveAs: {"${sampleId}/outs"}, mode: 'link', overwrite: true - label 'compute_resources__cellranger' + label 'compute_resources__cellranger_count' input: path(transcriptome) From 2a5673e506933bbf444c739cc811abdd16e48f84 Mon Sep 17 00:00:00 2001 From: Chris Flerin Date: Fri, 15 Jan 2021 17:06:01 +0100 Subject: [PATCH 11/26] Fix missing task param in cellranger process --- src/cellranger/processes/count.nf | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/src/cellranger/processes/count.nf b/src/cellranger/processes/count.nf index 6ba6d9e7..fda72ce1 100644 --- a/src/cellranger/processes/count.nf +++ 
b/src/cellranger/processes/count.nf @@ -11,7 +11,8 @@ def generateCellRangerCountCommandDefaults = { processParams, transcriptome, expectCells, - chemistry -> + chemistry, + task -> _expectCells = null // --expect-cells argument if(!isParamNull(expectCells)) { @@ -53,7 +54,7 @@ def runCellRangerCount = { expectCells = null, chemistry = null -> return ( - generateCellRangerCountCommandDefaults(processParams, transcriptome, expectCells, chemistry) + \ + generateCellRangerCountCommandDefaults(processParams, transcriptome, expectCells, chemistry, task) + \ """ \ --id=${id} \ --sample=${sample} \ @@ -72,7 +73,7 @@ def runCellRangerCountLibraries = { expectCells = null, chemistry = null -> return ( - generateCellRangerCountCommandDefaults(processParams, transcriptome, expectCells, chemistry) + \ + generateCellRangerCountCommandDefaults(processParams, transcriptome, expectCells, chemistry, task) + \ """ \ --id ${id} \ --libraries ${libraries} \ From 7985a74a1c9a9675872df18f36bb00121da6d12b Mon Sep 17 00:00:00 2001 From: Chris Flerin Date: Fri, 15 Jan 2021 23:43:22 +0100 Subject: [PATCH 12/26] Rename channel variables --- src/channels/file.nf | 12 ++++++------ src/channels/tenx.nf | 14 +++++++------- 2 files changed, 13 insertions(+), 13 deletions(-) diff --git a/src/channels/file.nf b/src/channels/file.nf index 7de23074..f51f866c 100644 --- a/src/channels/file.nf +++ b/src/channels/file.nf @@ -15,14 +15,14 @@ workflow getChannel { if(glob.contains(',')) { glob = Arrays.asList(glob.split(',')); } - channel = Channel + data_channel = Channel .fromPath(glob, checkIfExists: true) .map { path -> tuple(extractSample( "${path}", sampleSuffixWithExtension ), file("${path}")) } emit: - channel + data_channel } @@ -38,14 +38,14 @@ workflow getChannelWithIndex { if(glob.contains(',')) { glob = Arrays.asList(glob.split(',')); } - channel = Channel + data_channel = Channel .fromPath(glob, checkIfExists: true) .map { path -> tuple(extractSample( "${path}", sampleSuffixWithExtension ), 
file("${path}"), file("${path}${indexFileExtension}")) } emit: - channel + data_channel } @@ -56,13 +56,13 @@ workflow getChannelFromFilePath { sampleSuffixWithExtension // Suffix after the sample name in the file paths main: - channel = Channel.of( + data_channel = Channel.of( tuple(filePath) ).map { it -> tuple(extractSample( "${it[0]}", sampleSuffixWithExtension ), file("${it[0]}")) } emit: - channel + data_channel } diff --git a/src/channels/tenx.nf b/src/channels/tenx.nf index dc54436f..debc6077 100644 --- a/src/channels/tenx.nf +++ b/src/channels/tenx.nf @@ -22,14 +22,14 @@ workflow getOutsChannel { if(glob.contains(',')) { glob = Arrays.asList(glob.split(',')); } - channel = Channel + data_channel = Channel .fromPath(glob, type: 'dir', checkIfExists: true) .map { filePath -> tuple(extractSampleFromOuts( "${filePath}" ), file("${filePath}")) } emit: - channel + data_channel } @@ -51,14 +51,14 @@ workflow getH5Channel { if(glob.contains(',')) { glob = Arrays.asList(glob.split(',')); } - channel = Channel + data_channel = Channel .fromPath(glob, type: 'file', checkIfExists: true) .map { filePath -> tuple(extractSampleFromH5( "${filePath}" ), file("${filePath}")) } emit: - channel + data_channel } @@ -82,13 +82,13 @@ workflow getMEXChannel { if(glob.contains(',')) { glob = Arrays.asList(glob.split(',')); } - channel = Channel + data_channel = Channel .fromPath(glob, type: 'dir', checkIfExists: true) .map { filePath -> tuple(extractSampleFromMEX( "${filePath}" ), file("${filePath}")) } emit: - channel + data_channel -} \ No newline at end of file +} From 005dd98290617fd80b06c852041ee48d01c78a5e Mon Sep 17 00:00:00 2001 From: Chris Flerin Date: Fri, 15 Jan 2021 23:44:27 +0100 Subject: [PATCH 13/26] Update Nextflow version in the manifest --- nextflow.config | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/nextflow.config b/nextflow.config index 5580b47e..878cf990 100644 --- a/nextflow.config +++ b/nextflow.config @@ -6,7 +6,7 @@ manifest { version 
= '0.24.0' mainScript = 'main.nf' defaultBranch = 'master' - nextflowVersion = '!20.04.1' // with ! prefix, stop execution if current version does not match required version. + nextflowVersion = '!20.10.0' // with ! prefix, stop execution if current version does not match required version. } // load these configs first: From 0f1ad1b324f55499bb99892da5381073c2bea22c Mon Sep 17 00:00:00 2001 From: Chris Flerin Date: Sat, 16 Jan 2021 00:07:09 +0100 Subject: [PATCH 14/26] Update all includes to new format --- main.nf | 4 +++- src/cellranger-atac/main.nf | 12 ++++++++--- src/cellranger/main.nf | 18 ++++++++++------ src/cellranger/workflows/mkfastq.nf | 2 -- src/directs/main.nf | 20 ------------------ src/harmony/workflows/harmony_only.nf | 8 +++++-- src/scanpy/workflows/dim_reduction.nf | 4 +++- src/scrublet/main.nf | 30 --------------------------- src/star/main.nf | 12 ++++++++--- src/trimgalore/main.nf | 26 ----------------------- workflows/star.nf | 4 +++- 11 files changed, 45 insertions(+), 95 deletions(-) delete mode 100644 src/directs/main.nf delete mode 100644 src/scrublet/main.nf delete mode 100644 src/trimgalore/main.nf diff --git a/main.nf b/main.nf index 4c22aa16..951bea55 100644 --- a/main.nf +++ b/main.nf @@ -371,7 +371,9 @@ workflow single_sample_scenic { workflow pcacv { - include PCACV__FIND_OPTIMAL_NPCS from './src/pcacv/processes/runPCACV' params(params) + include { + PCACV__FIND_OPTIMAL_NPCS; + } from './src/pcacv/processes/runPCACV' params(params) getDataChannel().map { it -> tuple(it[0], it[1]) } diff --git a/src/cellranger-atac/main.nf b/src/cellranger-atac/main.nf index 74f6e8e5..ea278503 100644 --- a/src/cellranger-atac/main.nf +++ b/src/cellranger-atac/main.nf @@ -5,9 +5,15 @@ nextflow.preview.dsl=2 ////////////////////////////////////////////////////// // Define the parameters for current testing proces -include SC__CELLRANGER_ATAC__MKFASTQ from './processes/mkfastq' params(params) -include SC__CELLRANGER_ATAC__COUNT from './processes/count' 
params(params) -include CELLRANGER_ATAC_COUNT_WITH_METADATA from './workflows/cellRangerCountWithMetadata' params(params) +include { + SC__CELLRANGER_ATAC__MKFASTQ; +} from './processes/mkfastq' params(params) +include { + SC__CELLRANGER_ATAC__COUNT; +} from './processes/count' params(params) +include { + CELLRANGER_ATAC_COUNT_WITH_METADATA; +} from './workflows/cellRangerCountWithMetadata' params(params) ////////////////////////////////////////////////////// diff --git a/src/cellranger/main.nf b/src/cellranger/main.nf index f6ecad83..2519f7df 100644 --- a/src/cellranger/main.nf +++ b/src/cellranger/main.nf @@ -1,14 +1,20 @@ nextflow.preview.dsl=2 -// include groupParams from '../../utils/utils.nf' - ////////////////////////////////////////////////////// // Define the parameters for current testing proces -include SC__CELLRANGER__MKFASTQ from './processes/mkfastq' params(params) -include SC__CELLRANGER__COUNT from './processes/count' params(params) -include CELLRANGER_COUNT_WITH_METADATA from './workflows/cellRangerCountWithMetadata' params(params) -include MKFASTQ from './workflows/mkfastq' params(params) +include { + SC__CELLRANGER__MKFASTQ; +} from './processes/mkfastq' params(params) +include { + SC__CELLRANGER__COUNT; +} from './processes/count' params(params) +include { + CELLRANGER_COUNT_WITH_METADATA; +} from './workflows/cellRangerCountWithMetadata' params(params) +include { + MKFASTQ; +} from './workflows/mkfastq' params(params) workflow CELLRANGER { diff --git a/src/cellranger/workflows/mkfastq.nf b/src/cellranger/workflows/mkfastq.nf index a93b199d..28c0ae2f 100644 --- a/src/cellranger/workflows/mkfastq.nf +++ b/src/cellranger/workflows/mkfastq.nf @@ -1,7 +1,5 @@ nextflow.preview.dsl=2 -// include groupParams from '../../utils/utils.nf' - ////////////////////////////////////////////////////// // Define the parameters for current testing proces diff --git a/src/directs/main.nf b/src/directs/main.nf deleted file mode 100644 index 044278ed..00000000 --- 
a/src/directs/main.nf +++ /dev/null @@ -1,20 +0,0 @@ -nextflow.preview.dsl=2 - -////////////////////////////////////////////////////// -// Import sub-workflows from the modules: - -include SC__TEMPLATE__PROCESS1 from './processes/process1.nf' params(params) - - -////////////////////////////////////////////////////// -// Define the workflow - -workflow template { - - take: - data - - main: - SC__TEMPLATE__PROCESS1(data) - -} diff --git a/src/harmony/workflows/harmony_only.nf b/src/harmony/workflows/harmony_only.nf index 9d30f99e..5b858ab2 100644 --- a/src/harmony/workflows/harmony_only.nf +++ b/src/harmony/workflows/harmony_only.nf @@ -3,8 +3,12 @@ nextflow.preview.dsl=2 ////////////////////////////////////////////////////// // process imports: -include SC__HARMONY__HARMONY_MATRIX from './../processes/runHarmony.nf' params(params) -include SC__H5AD_UPDATE_X_PCA from './../../utils/processes/h5adUpdate.nf' params(params) +include { + SC__HARMONY__HARMONY_MATRIX; +} from './../processes/runHarmony.nf' params(params) +include { +SC__H5AD_UPDATE_X_PCA; +} from './../../utils/processes/h5adUpdate.nf' params(params) ////////////////////////////////////////////////////// // Define the workflow diff --git a/src/scanpy/workflows/dim_reduction.nf b/src/scanpy/workflows/dim_reduction.nf index 3aa48b69..7179e32e 100644 --- a/src/scanpy/workflows/dim_reduction.nf +++ b/src/scanpy/workflows/dim_reduction.nf @@ -15,7 +15,9 @@ include { } from '../processes/dim_reduction.nf' params(params + [method: "umap"]) // reporting: -include GENERATE_REPORT from './create_report.nf' params(params) +include { + GENERATE_REPORT; +} from './create_report.nf' params(params) ////////////////////////////////////////////////////// diff --git a/src/scrublet/main.nf b/src/scrublet/main.nf deleted file mode 100644 index 48a299ec..00000000 --- a/src/scrublet/main.nf +++ /dev/null @@ -1,30 +0,0 @@ -nextflow.preview.dsl=2 - -////////////////////////////////////////////////////// -// Import sub-workflows 
from the modules: - -include { - SC__FILE_CONVERTER; -} from '../utils/processes/utils.nf' params(params) - -include { - SC__TEMPLATE__PROCESS1; -} from './processes/process1.nf' params(params) - - -////////////////////////////////////////////////////// -// Define the workflow - -workflow template { - - take: - data - - main: - data = SC__FILE_CONVERTER(data) - data.view() - - SC__TEMPLATE__PROCESS1(data) - -} - diff --git a/src/star/main.nf b/src/star/main.nf index ba55509f..93402f0a 100644 --- a/src/star/main.nf +++ b/src/star/main.nf @@ -3,9 +3,15 @@ nextflow.preview.dsl=2 ////////////////////////////////////////////////////// // Define the parameters for current testing proces -include SC__STAR__LOAD_GENOME from './processes/load_genome' params(params) -include SC__STAR__MAP_COUNT from './processes/map_count' params(params) -include SC__STAR__UNLOAD_GENOME from './processes/unload_genome' params(params) +include { + SC__STAR__LOAD_GENOME; +} from './processes/load_genome' params(params) +include { + SC__STAR__MAP_COUNT; +} from './processes/map_count' params(params) +include { + SC__STAR__UNLOAD_GENOME; +} from './processes/unload_genome' params(params) ////////////////////////////////////////////////////// // Define the workflow diff --git a/src/trimgalore/main.nf b/src/trimgalore/main.nf deleted file mode 100644 index b233be85..00000000 --- a/src/trimgalore/main.nf +++ /dev/null @@ -1,26 +0,0 @@ -nextflow.preview.dsl=2 - -////////////////////////////////////////////////////// -// Import sub-workflows from the modules: - -include SC__FILE_CONVERTER from '../utils/processes/utils.nf' params(params) - -include SC__TEMPLATE__PROCESS1 from './processes/process1.nf' params(params) - - -////////////////////////////////////////////////////// -// Define the workflow - -workflow template { - - take: - data - - main: - data = SC__FILE_CONVERTER(data) - data.view() - - SC__TEMPLATE__PROCESS1(data) - -} - diff --git a/workflows/star.nf b/workflows/star.nf index 
5e1c2bb7..8e07cb68 100644 --- a/workflows/star.nf +++ b/workflows/star.nf @@ -16,7 +16,9 @@ include { SC__STAR_CONCATENATOR; } from '../src/utils/processes/utils.nf' params(params) -include getChannel as getSingleEndChannel from '../src/channels/singleend.nf' params(params) +include { + getChannel; +} as getSingleEndChannel from '../src/channels/singleend.nf' params(params) ////////////////////////////////////////////////////// // Define the workflow From 5041068cdb521877e9d3e18c7de7e23882493f02 Mon Sep 17 00:00:00 2001 From: Chris Flerin Date: Sat, 16 Jan 2021 00:29:55 +0100 Subject: [PATCH 15/26] All files to nextflow.enable.dsl=2 - Resolves #253 --- main.nf | 2 +- main_atac.nf | 2 +- src/archr/processes/cell_calling.nf | 2 +- src/archr/processes/createArrow_unfiltered.nf | 2 +- src/bap/processes/barcode_multiplet.nf | 2 +- src/bap/processes/biorad_debarcode.nf | 2 +- src/bap/workflows/bap_debarcode.nf | 2 +- src/bwamaptools/main.nf | 2 +- src/bwamaptools/processes/add_barcode_as_tag.nf | 2 +- src/bwamaptools/processes/index.nf | 2 +- src/bwamaptools/processes/mapping.nf | 2 +- src/bwamaptools/processes/mapping_summary.nf | 2 +- src/celda/main.nf | 2 +- src/celda/main.test.nf | 2 +- src/celda/processes/runDecontX.nf | 2 +- src/celda/processes/utils.nf | 2 +- src/celda/workflows/decontX.nf | 2 +- src/cellranger-atac/main.nf | 2 +- src/cellranger-atac/processes/count.nf | 2 +- src/cellranger-atac/processes/mkfastq.nf | 2 +- src/cellranger-atac/workflows/cellRangerCountWithMetadata.nf | 2 +- src/cellranger/main.nf | 2 +- src/cellranger/processes/count.nf | 2 +- src/cellranger/processes/mkfastq.nf | 2 +- src/cellranger/processes/utils.nf | 2 +- src/cellranger/workflows/cellRangerCountWithLibraries.nf | 2 +- src/cellranger/workflows/cellRangerCountWithMetadata.nf | 2 +- src/cellranger/workflows/cellranger_libraries.nf | 2 +- src/cellranger/workflows/mkfastq.nf | 2 +- src/channels/channels.nf | 2 +- src/channels/file.nf | 2 +- src/channels/singleend.nf | 2 +- 
src/channels/sra.nf | 2 +- src/channels/tenx.nf | 2 +- src/directs/main.test.nf | 2 +- src/directs/processes/selectDefaultClustering.nf | 2 +- src/dropletutils/processes/barcode_selection.nf | 2 +- src/dropseqtools/processes/bam_tag_histogram.nf | 2 +- src/dropseqtools/processes/convert_to_ref_flat.nf | 2 +- src/dropseqtools/processes/detect_bead_synthesis_errors.nf | 2 +- src/dropseqtools/processes/digital_expression.nf | 2 +- src/dropseqtools/processes/filter_bam.nf | 2 +- src/dropseqtools/processes/gzip.nf | 2 +- src/dropseqtools/processes/polya_trimmer.nf | 2 +- src/dropseqtools/processes/tag_read_with_gene_exon.nf | 2 +- src/dropseqtools/processes/trim_starting_sequence.nf | 2 +- src/edirect/processes/sra_metadata.nf | 2 +- src/edirect/workflows/sra_fastq_urls.nf | 2 +- src/fastp/processes/clean_and_fastqc.nf | 2 +- src/flybaser/processes/convertNomenclature.nf | 2 +- src/harmony/processes/runHarmony.nf | 2 +- src/harmony/workflows/bec_harmony.nf | 2 +- src/harmony/workflows/harmony_only.nf | 2 +- src/pcacv/processes/runPCACV.nf | 2 +- src/picard/processes/create_sequence_dictionary.nf | 2 +- src/picard/processes/fastq_to_bam.nf | 2 +- src/picard/processes/merge_bam_alignment.nf | 2 +- src/picard/processes/sam_to_fastq.nf | 2 +- src/picard/processes/sort_sam.nf | 2 +- src/popscle/main.nf | 2 +- src/popscle/processes/demuxlet.nf | 2 +- src/popscle/processes/dsc_pileup.nf | 2 +- src/popscle/workflows/demuxlet.nf | 2 +- src/popscle/workflows/dsc_pileup.nf | 2 +- src/pycistopic/processes/macs2_call_peaks.nf | 2 +- src/scanpy/main.nf | 2 +- src/scanpy/main.test.nf | 2 +- src/scanpy/processes/batch_effect_correct.nf | 2 +- src/scanpy/processes/cluster.nf | 2 +- src/scanpy/processes/dim_reduction.nf | 2 +- src/scanpy/processes/feature_selection.nf | 2 +- src/scanpy/processes/filter.nf | 2 +- src/scanpy/processes/marker_genes.nf | 2 +- src/scanpy/processes/neighborhood_graph.nf | 2 +- src/scanpy/processes/regress_out.nf | 2 +- src/scanpy/processes/reports.nf | 2 +- 
src/scanpy/processes/transform.nf | 2 +- src/scanpy/workflows/bec_bbknn.nf | 2 +- src/scanpy/workflows/bec_mnncorrect.nf | 2 +- src/scanpy/workflows/cluster_identification.nf | 2 +- src/scanpy/workflows/combine_reports.nf | 2 +- src/scanpy/workflows/create_report.nf | 2 +- src/scanpy/workflows/dim_reduction.nf | 2 +- src/scanpy/workflows/dim_reduction_pca.nf | 2 +- src/scanpy/workflows/hvg_selection.nf | 2 +- src/scanpy/workflows/neighborhood_graph.nf | 2 +- src/scanpy/workflows/normalize_transform.nf | 2 +- src/scanpy/workflows/qc_filter.nf | 2 +- src/scanpy/workflows/single_sample.nf | 2 +- src/scenic/main.nf | 2 +- src/scenic/main.test.nf | 2 +- src/scenic/processes/add_correlation.nf | 2 +- src/scenic/processes/arboreto_with_multiprocessing.nf | 2 +- src/scenic/processes/aucell.nf | 2 +- src/scenic/processes/cistarget.nf | 2 +- src/scenic/processes/loomHandler.nf | 2 +- src/scenic/processes/multiruns/aggregateFeatures.nf | 2 +- src/scenic/processes/multiruns/aggregateRegulons.nf | 2 +- src/scenic/processes/multiruns/aucellFromFolder.nf | 2 +- src/scenic/processes/multiruns/convertMotifsToRegulons.nf | 2 +- src/scenic/processes/multiruns/saveToLoom.nf | 2 +- src/scenic/processes/reports.nf | 2 +- src/scenic/workflows/aggregateMultiRuns.nf | 2 +- src/scrublet/processes/doublet_detection.nf | 2 +- src/scrublet/processes/reports.nf | 2 +- src/scrublet/workflows/doublet_removal.nf | 2 +- src/singlecelltoolkit/processes/barcode_correction.nf | 2 +- src/singlecelltoolkit/processes/debarcode_10x_scatac_fastqs.nf | 2 +- src/sinto/main.nf | 2 +- src/sinto/processes/fragments.nf | 2 +- src/soupx/main.nf | 2 +- src/soupx/main.test.nf | 2 +- src/soupx/processes/process1.nf | 2 +- src/soupx/processes/runSoupX.nf | 2 +- src/soupx/workflows/soupX.nf | 2 +- src/sratoolkit/processes/downloadFastQ.nf | 2 +- src/star/main.nf | 2 +- src/star/processes/build_genome.nf | 2 +- src/star/processes/load_genome.nf | 2 +- src/star/processes/map_count.nf | 2 +- 
src/star/processes/solo_map_count.nf | 2 +- src/star/processes/unload_genome.nf | 2 +- src/trimgalore/processes/trim.nf | 2 +- src/utils/main.test.nf | 2 +- src/utils/processes/gtf.nf | 2 +- src/utils/processes/h5adAnnotate.nf | 2 +- src/utils/processes/h5adExtractMetadata.nf | 2 +- src/utils/processes/h5adMerge.nf | 2 +- src/utils/processes/h5adSubset.nf | 2 +- src/utils/processes/h5adToLoom.nf | 2 +- src/utils/processes/h5adUpdate.nf | 2 +- src/utils/processes/h5adUpdateMetadata.nf | 2 +- src/utils/processes/reports.nf | 2 +- src/utils/processes/sra.nf | 2 +- src/utils/processes/utils.nf | 2 +- src/utils/workflows/annotateByCellMetadata.nf | 2 +- src/utils/workflows/downloadFromSRA.nf | 2 +- src/utils/workflows/fileConverter.nf | 2 +- src/utils/workflows/filterAnnotateClean.nf | 2 +- src/utils/workflows/filterByCellMetadata.nf | 2 +- src/utils/workflows/finalize.nf | 2 +- src/utils/workflows/updateFeatureNomenclature.nf | 2 +- src/utils/workflows/utils.nf | 2 +- workflows/atac/preprocess.nf | 2 +- workflows/atac/qc_filtering.nf | 2 +- workflows/bbknn.nf | 2 +- workflows/harmony.nf | 2 +- workflows/mnncorrect.nf | 2 +- workflows/multi_sample.nf | 2 +- workflows/nemesh.nf | 2 +- workflows/single_sample.nf | 2 +- workflows/single_sample_star.nf | 2 +- workflows/star.nf | 2 +- 153 files changed, 153 insertions(+), 153 deletions(-) diff --git a/main.nf b/main.nf index 951bea55..9efa3aaf 100644 --- a/main.nf +++ b/main.nf @@ -1,6 +1,6 @@ import static groovy.json.JsonOutput.* -nextflow.preview.dsl=2 +nextflow.enable.dsl=2 include { INIT; diff --git a/main_atac.nf b/main_atac.nf index 0d381c70..a3f7762a 100644 --- a/main_atac.nf +++ b/main_atac.nf @@ -1,6 +1,6 @@ import static groovy.json.JsonOutput.* -nextflow.preview.dsl=2 +nextflow.enable.dsl=2 include { INIT; diff --git a/src/archr/processes/cell_calling.nf b/src/archr/processes/cell_calling.nf index db08eca3..cdff4674 100644 --- a/src/archr/processes/cell_calling.nf +++ b/src/archr/processes/cell_calling.nf @@ -1,4 
+1,4 @@ -nextflow.preview.dsl=2 +nextflow.enable.dsl=2 binDir = !params.containsKey("test") ? "${workflow.projectDir}/src/archr/bin/" : "" diff --git a/src/archr/processes/createArrow_unfiltered.nf b/src/archr/processes/createArrow_unfiltered.nf index 5b20af98..c82cac6e 100644 --- a/src/archr/processes/createArrow_unfiltered.nf +++ b/src/archr/processes/createArrow_unfiltered.nf @@ -1,4 +1,4 @@ -nextflow.preview.dsl=2 +nextflow.enable.dsl=2 binDir = !params.containsKey("test") ? "${workflow.projectDir}/src/archr/bin/" : "" diff --git a/src/bap/processes/barcode_multiplet.nf b/src/bap/processes/barcode_multiplet.nf index 48cbbeda..578aaa49 100644 --- a/src/bap/processes/barcode_multiplet.nf +++ b/src/bap/processes/barcode_multiplet.nf @@ -1,4 +1,4 @@ -nextflow.preview.dsl=2 +nextflow.enable.dsl=2 binDir = !params.containsKey("test") ? "${workflow.projectDir}/src/template/bin/" : "" diff --git a/src/bap/processes/biorad_debarcode.nf b/src/bap/processes/biorad_debarcode.nf index 381fdeed..8fc438b2 100644 --- a/src/bap/processes/biorad_debarcode.nf +++ b/src/bap/processes/biorad_debarcode.nf @@ -1,4 +1,4 @@ -nextflow.preview.dsl=2 +nextflow.enable.dsl=2 binDir = !params.containsKey("test") ? 
"${workflow.projectDir}/src/template/bin/" : "" diff --git a/src/bap/workflows/bap_debarcode.nf b/src/bap/workflows/bap_debarcode.nf index 5e40e529..00934604 100644 --- a/src/bap/workflows/bap_debarcode.nf +++ b/src/bap/workflows/bap_debarcode.nf @@ -1,4 +1,4 @@ -nextflow.preview.dsl=2 +nextflow.enable.dsl=2 ////////////////////////////////////////////////////// // Import sub-workflows from the modules: diff --git a/src/bwamaptools/main.nf b/src/bwamaptools/main.nf index 1b6eacba..56b85c16 100644 --- a/src/bwamaptools/main.nf +++ b/src/bwamaptools/main.nf @@ -1,4 +1,4 @@ -nextflow.preview.dsl=2 +nextflow.enable.dsl=2 import java.nio.file.Paths diff --git a/src/bwamaptools/processes/add_barcode_as_tag.nf b/src/bwamaptools/processes/add_barcode_as_tag.nf index 647e65dc..474cdbc9 100644 --- a/src/bwamaptools/processes/add_barcode_as_tag.nf +++ b/src/bwamaptools/processes/add_barcode_as_tag.nf @@ -1,4 +1,4 @@ -nextflow.preview.dsl=2 +nextflow.enable.dsl=2 // binDir = !params.containsKey("test") ? "${workflow.projectDir}/src/template/bin/" : "" diff --git a/src/bwamaptools/processes/index.nf b/src/bwamaptools/processes/index.nf index 48e3ffce..9ae964e9 100644 --- a/src/bwamaptools/processes/index.nf +++ b/src/bwamaptools/processes/index.nf @@ -1,4 +1,4 @@ -nextflow.preview.dsl=2 +nextflow.enable.dsl=2 // binDir = !params.containsKey("test") ? "${workflow.projectDir}/src/template/bin/" : "" diff --git a/src/bwamaptools/processes/mapping.nf b/src/bwamaptools/processes/mapping.nf index dad8be3e..d82ef45a 100644 --- a/src/bwamaptools/processes/mapping.nf +++ b/src/bwamaptools/processes/mapping.nf @@ -1,4 +1,4 @@ -nextflow.preview.dsl=2 +nextflow.enable.dsl=2 // binDir = !params.containsKey("test") ? 
"${workflow.projectDir}/src/template/bin/" : "" diff --git a/src/bwamaptools/processes/mapping_summary.nf b/src/bwamaptools/processes/mapping_summary.nf index 1703374d..3517efad 100644 --- a/src/bwamaptools/processes/mapping_summary.nf +++ b/src/bwamaptools/processes/mapping_summary.nf @@ -1,4 +1,4 @@ -nextflow.preview.dsl=2 +nextflow.enable.dsl=2 binDir = !params.containsKey("test") ? "${workflow.projectDir}/src/bwamaptools/bin/" : "" diff --git a/src/celda/main.nf b/src/celda/main.nf index c96ed573..7eb95c0a 100644 --- a/src/celda/main.nf +++ b/src/celda/main.nf @@ -1,4 +1,4 @@ -nextflow.preview.dsl=2 +nextflow.enable.dsl=2 // Should be set in case this pipeline is run with other pipelines (e.g.: single_sample) diff --git a/src/celda/main.test.nf b/src/celda/main.test.nf index 30be2558..10d8f1b6 100644 --- a/src/celda/main.test.nf +++ b/src/celda/main.test.nf @@ -1,4 +1,4 @@ -nextflow.preview.dsl=2 +nextflow.enable.dsl=2 include { INIT; diff --git a/src/celda/processes/runDecontX.nf b/src/celda/processes/runDecontX.nf index 109955e7..0c749555 100644 --- a/src/celda/processes/runDecontX.nf +++ b/src/celda/processes/runDecontX.nf @@ -1,4 +1,4 @@ -nextflow.preview.dsl=2 +nextflow.enable.dsl=2 import java.nio.file.Paths diff --git a/src/celda/processes/utils.nf b/src/celda/processes/utils.nf index b8742b96..0cecc6f7 100644 --- a/src/celda/processes/utils.nf +++ b/src/celda/processes/utils.nf @@ -1,4 +1,4 @@ -nextflow.preview.dsl=2 +nextflow.enable.dsl=2 import java.nio.file.Paths diff --git a/src/celda/workflows/decontX.nf b/src/celda/workflows/decontX.nf index e8afdd49..17c74af7 100644 --- a/src/celda/workflows/decontX.nf +++ b/src/celda/workflows/decontX.nf @@ -1,4 +1,4 @@ -nextflow.preview.dsl=2 +nextflow.enable.dsl=2 //////////////////////////////////////////////////////// // Import sub-workflows/processes from the utils module: diff --git a/src/cellranger-atac/main.nf b/src/cellranger-atac/main.nf index ea278503..52ae94f6 100644 --- a/src/cellranger-atac/main.nf 
+++ b/src/cellranger-atac/main.nf @@ -1,4 +1,4 @@ -nextflow.preview.dsl=2 +nextflow.enable.dsl=2 // include groupParams from '../../utils/utils.nf' diff --git a/src/cellranger-atac/processes/count.nf b/src/cellranger-atac/processes/count.nf index bfb9c608..7d28ffd5 100644 --- a/src/cellranger-atac/processes/count.nf +++ b/src/cellranger-atac/processes/count.nf @@ -1,4 +1,4 @@ -nextflow.preview.dsl=2 +nextflow.enable.dsl=2 toolParams = params.sc.cellranger_atac diff --git a/src/cellranger-atac/processes/mkfastq.nf b/src/cellranger-atac/processes/mkfastq.nf index cddebb5c..3694039a 100644 --- a/src/cellranger-atac/processes/mkfastq.nf +++ b/src/cellranger-atac/processes/mkfastq.nf @@ -1,4 +1,4 @@ -nextflow.preview.dsl=2 +nextflow.enable.dsl=2 toolParams = params.sc.cellranger_atac diff --git a/src/cellranger-atac/workflows/cellRangerCountWithMetadata.nf b/src/cellranger-atac/workflows/cellRangerCountWithMetadata.nf index 44432a17..b0850e1d 100644 --- a/src/cellranger-atac/workflows/cellRangerCountWithMetadata.nf +++ b/src/cellranger-atac/workflows/cellRangerCountWithMetadata.nf @@ -1,4 +1,4 @@ -nextflow.preview.dsl=2 +nextflow.enable.dsl=2 import java.nio.file.Paths diff --git a/src/cellranger/main.nf b/src/cellranger/main.nf index 2519f7df..6854e2ec 100644 --- a/src/cellranger/main.nf +++ b/src/cellranger/main.nf @@ -1,4 +1,4 @@ -nextflow.preview.dsl=2 +nextflow.enable.dsl=2 ////////////////////////////////////////////////////// // Define the parameters for current testing proces diff --git a/src/cellranger/processes/count.nf b/src/cellranger/processes/count.nf index fda72ce1..845bb7fe 100644 --- a/src/cellranger/processes/count.nf +++ b/src/cellranger/processes/count.nf @@ -1,4 +1,4 @@ -nextflow.preview.dsl=2 +nextflow.enable.dsl=2 include { isParamNull; diff --git a/src/cellranger/processes/mkfastq.nf b/src/cellranger/processes/mkfastq.nf index ecbd9954..85e23da4 100644 --- a/src/cellranger/processes/mkfastq.nf +++ b/src/cellranger/processes/mkfastq.nf @@ -1,4 
+1,4 @@ -nextflow.preview.dsl=2 +nextflow.enable.dsl=2 toolParams = params.sc.cellranger diff --git a/src/cellranger/processes/utils.nf b/src/cellranger/processes/utils.nf index a0e736f2..2dbe14c4 100644 --- a/src/cellranger/processes/utils.nf +++ b/src/cellranger/processes/utils.nf @@ -1,4 +1,4 @@ -nextflow.preview.dsl=2 +nextflow.enable.dsl=2 if(!params.containsKey("test")) { binDir = "${workflow.projectDir}/src/cellranger/bin/" diff --git a/src/cellranger/workflows/cellRangerCountWithLibraries.nf b/src/cellranger/workflows/cellRangerCountWithLibraries.nf index e9f7e7df..9835ed3a 100644 --- a/src/cellranger/workflows/cellRangerCountWithLibraries.nf +++ b/src/cellranger/workflows/cellRangerCountWithLibraries.nf @@ -1,4 +1,4 @@ -nextflow.preview.dsl=2 +nextflow.enable.dsl=2 import java.nio.file.Paths diff --git a/src/cellranger/workflows/cellRangerCountWithMetadata.nf b/src/cellranger/workflows/cellRangerCountWithMetadata.nf index 39f4f7e8..3fd6fc31 100644 --- a/src/cellranger/workflows/cellRangerCountWithMetadata.nf +++ b/src/cellranger/workflows/cellRangerCountWithMetadata.nf @@ -1,4 +1,4 @@ -nextflow.preview.dsl=2 +nextflow.enable.dsl=2 import java.nio.file.Paths diff --git a/src/cellranger/workflows/cellranger_libraries.nf b/src/cellranger/workflows/cellranger_libraries.nf index 1c06ad4e..b7c78c65 100644 --- a/src/cellranger/workflows/cellranger_libraries.nf +++ b/src/cellranger/workflows/cellranger_libraries.nf @@ -1,4 +1,4 @@ -nextflow.preview.dsl=2 +nextflow.enable.dsl=2 import java.nio.file.Paths diff --git a/src/cellranger/workflows/mkfastq.nf b/src/cellranger/workflows/mkfastq.nf index 28c0ae2f..229e73d4 100644 --- a/src/cellranger/workflows/mkfastq.nf +++ b/src/cellranger/workflows/mkfastq.nf @@ -1,4 +1,4 @@ -nextflow.preview.dsl=2 +nextflow.enable.dsl=2 ////////////////////////////////////////////////////// // Define the parameters for current testing proces diff --git a/src/channels/channels.nf b/src/channels/channels.nf index b8da023f..99b589d8 100644 
--- a/src/channels/channels.nf +++ b/src/channels/channels.nf @@ -1,4 +1,4 @@ -nextflow.preview.dsl=2 +nextflow.enable.dsl=2 include { getOutsChannel as getTenXCellRangerOutsChannel; diff --git a/src/channels/file.nf b/src/channels/file.nf index f51f866c..e017b07c 100644 --- a/src/channels/file.nf +++ b/src/channels/file.nf @@ -1,4 +1,4 @@ -nextflow.preview.dsl=2 +nextflow.enable.dsl=2 include { extractSample diff --git a/src/channels/singleend.nf b/src/channels/singleend.nf index 721ad9c6..c2f717aa 100644 --- a/src/channels/singleend.nf +++ b/src/channels/singleend.nf @@ -1,4 +1,4 @@ -nextflow.preview.dsl=2 +nextflow.enable.dsl=2 def extractSample(path) { pattern = /(.+)\/(.+)_R[1-2](.*)\.fastq(\.gz)?/ diff --git a/src/channels/sra.nf b/src/channels/sra.nf index eaab85e4..7d7dd37d 100644 --- a/src/channels/sra.nf +++ b/src/channels/sra.nf @@ -1,4 +1,4 @@ -nextflow.preview.dsl=2 +nextflow.enable.dsl=2 workflow getChannel { diff --git a/src/channels/tenx.nf b/src/channels/tenx.nf index debc6077..5c5a6168 100644 --- a/src/channels/tenx.nf +++ b/src/channels/tenx.nf @@ -1,4 +1,4 @@ -nextflow.preview.dsl=2 +nextflow.enable.dsl=2 CELLRANGER_OUTS_REGEX = /(.+)\/(.+)\/outs/ CELLRANGER_H5_REGEX = /(.+)\/(.+)\/outs\/(.+)\.h5/ diff --git a/src/directs/main.test.nf b/src/directs/main.test.nf index b7f2a5ed..677b76a3 100644 --- a/src/directs/main.test.nf +++ b/src/directs/main.test.nf @@ -1,4 +1,4 @@ -nextflow.preview.dsl=2 +nextflow.enable.dsl=2 include { SC__DIRECTS__SELECT_DEFAULT_CLUSTERING diff --git a/src/directs/processes/selectDefaultClustering.nf b/src/directs/processes/selectDefaultClustering.nf index 2894f91a..ea600809 100644 --- a/src/directs/processes/selectDefaultClustering.nf +++ b/src/directs/processes/selectDefaultClustering.nf @@ -1,4 +1,4 @@ -nextflow.preview.dsl=2 +nextflow.enable.dsl=2 binDir = !params.containsKey("test") ? 
"${workflow.projectDir}/src/directs/bin/" : "" diff --git a/src/dropletutils/processes/barcode_selection.nf b/src/dropletutils/processes/barcode_selection.nf index 8ae5d147..c4f10d03 100644 --- a/src/dropletutils/processes/barcode_selection.nf +++ b/src/dropletutils/processes/barcode_selection.nf @@ -1,4 +1,4 @@ -nextflow.preview.dsl=2 +nextflow.enable.dsl=2 process SC__DROPLET_UTILS__BARCODE_SELECTION { diff --git a/src/dropseqtools/processes/bam_tag_histogram.nf b/src/dropseqtools/processes/bam_tag_histogram.nf index e08aabca..38e8489e 100644 --- a/src/dropseqtools/processes/bam_tag_histogram.nf +++ b/src/dropseqtools/processes/bam_tag_histogram.nf @@ -1,4 +1,4 @@ -nextflow.preview.dsl=2 +nextflow.enable.dsl=2 process SC__DROP_SEQ_TOOLS__BAM_TAG_HISTOGRAM { diff --git a/src/dropseqtools/processes/convert_to_ref_flat.nf b/src/dropseqtools/processes/convert_to_ref_flat.nf index b0efe5f9..ee91ad93 100644 --- a/src/dropseqtools/processes/convert_to_ref_flat.nf +++ b/src/dropseqtools/processes/convert_to_ref_flat.nf @@ -1,4 +1,4 @@ -nextflow.preview.dsl=2 +nextflow.enable.dsl=2 process SC__DROP_SEQ_TOOLS__CONVERT_TO_REFFLAT { diff --git a/src/dropseqtools/processes/detect_bead_synthesis_errors.nf b/src/dropseqtools/processes/detect_bead_synthesis_errors.nf index 1dc40f4b..22d875bc 100644 --- a/src/dropseqtools/processes/detect_bead_synthesis_errors.nf +++ b/src/dropseqtools/processes/detect_bead_synthesis_errors.nf @@ -1,4 +1,4 @@ -nextflow.preview.dsl=2 +nextflow.enable.dsl=2 process SC__DROP_SEQ_TOOLS__DETECT_REPAIR_BARCODE_SYNTHESIS_ERRORS { diff --git a/src/dropseqtools/processes/digital_expression.nf b/src/dropseqtools/processes/digital_expression.nf index 60faaf3b..755fd548 100644 --- a/src/dropseqtools/processes/digital_expression.nf +++ b/src/dropseqtools/processes/digital_expression.nf @@ -1,4 +1,4 @@ -nextflow.preview.dsl=2 +nextflow.enable.dsl=2 process SC__DROP_SEQ_TOOLS__DIGITAL_EXPRESSION { diff --git a/src/dropseqtools/processes/filter_bam.nf 
b/src/dropseqtools/processes/filter_bam.nf index b3a9bc07..d99b02e4 100644 --- a/src/dropseqtools/processes/filter_bam.nf +++ b/src/dropseqtools/processes/filter_bam.nf @@ -1,4 +1,4 @@ -nextflow.preview.dsl=2 +nextflow.enable.dsl=2 process SC__DROP_SEQ_TOOLS__FILTER_UNALIGNED_TAGGED_BAM { diff --git a/src/dropseqtools/processes/gzip.nf b/src/dropseqtools/processes/gzip.nf index 66fd9b27..d17e149a 100644 --- a/src/dropseqtools/processes/gzip.nf +++ b/src/dropseqtools/processes/gzip.nf @@ -1,4 +1,4 @@ -nextflow.preview.dsl=2 +nextflow.enable.dsl=2 process GZIP { diff --git a/src/dropseqtools/processes/polya_trimmer.nf b/src/dropseqtools/processes/polya_trimmer.nf index b2c0c1f5..dd6cdef7 100644 --- a/src/dropseqtools/processes/polya_trimmer.nf +++ b/src/dropseqtools/processes/polya_trimmer.nf @@ -1,4 +1,4 @@ -nextflow.preview.dsl=2 +nextflow.enable.dsl=2 process SC__DROP_SEQ_TOOLS__TRIM_POLYA_UNALIGNED_TAGGED_TRIMMED_SMART { diff --git a/src/dropseqtools/processes/tag_read_with_gene_exon.nf b/src/dropseqtools/processes/tag_read_with_gene_exon.nf index f512cf0d..20a227bc 100644 --- a/src/dropseqtools/processes/tag_read_with_gene_exon.nf +++ b/src/dropseqtools/processes/tag_read_with_gene_exon.nf @@ -1,4 +1,4 @@ -nextflow.preview.dsl=2 +nextflow.enable.dsl=2 process SC__DROP_SEQ_TOOLS__TAG_READ_WITH_GENE_EXON { diff --git a/src/dropseqtools/processes/trim_starting_sequence.nf b/src/dropseqtools/processes/trim_starting_sequence.nf index 72c25797..58436846 100644 --- a/src/dropseqtools/processes/trim_starting_sequence.nf +++ b/src/dropseqtools/processes/trim_starting_sequence.nf @@ -1,4 +1,4 @@ -nextflow.preview.dsl=2 +nextflow.enable.dsl=2 process SC__DROP_SEQ_TOOLS__TRIM_SMART_UNALIGNED_TAGGED_FILTERED_BAM { diff --git a/src/edirect/processes/sra_metadata.nf b/src/edirect/processes/sra_metadata.nf index 31f75407..49a8f6c8 100644 --- a/src/edirect/processes/sra_metadata.nf +++ b/src/edirect/processes/sra_metadata.nf @@ -1,4 +1,4 @@ -nextflow.preview.dsl=2 
+nextflow.enable.dsl=2 process EDIRECT__SRAID_TO_SAMPLENAME { diff --git a/src/edirect/workflows/sra_fastq_urls.nf b/src/edirect/workflows/sra_fastq_urls.nf index cf375e09..883ef89b 100644 --- a/src/edirect/workflows/sra_fastq_urls.nf +++ b/src/edirect/workflows/sra_fastq_urls.nf @@ -1,4 +1,4 @@ -nextflow.preview.dsl=2 +nextflow.enable.dsl=2 include { EDIRECT__SRAID_TO_SAMPLENAME diff --git a/src/fastp/processes/clean_and_fastqc.nf b/src/fastp/processes/clean_and_fastqc.nf index f1281f9c..3550b84c 100644 --- a/src/fastp/processes/clean_and_fastqc.nf +++ b/src/fastp/processes/clean_and_fastqc.nf @@ -1,4 +1,4 @@ -nextflow.preview.dsl=2 +nextflow.enable.dsl=2 /** * Preprocess + FastQC diff --git a/src/flybaser/processes/convertNomenclature.nf b/src/flybaser/processes/convertNomenclature.nf index 1b9c1ea1..7db1134c 100644 --- a/src/flybaser/processes/convertNomenclature.nf +++ b/src/flybaser/processes/convertNomenclature.nf @@ -1,4 +1,4 @@ -nextflow.preview.dsl=2 +nextflow.enable.dsl=2 if(!params.containsKey("test")) { binDir = "${workflow.projectDir}/src/flybaser/bin/" diff --git a/src/harmony/processes/runHarmony.nf b/src/harmony/processes/runHarmony.nf index b2b6c24e..d2884598 100644 --- a/src/harmony/processes/runHarmony.nf +++ b/src/harmony/processes/runHarmony.nf @@ -1,4 +1,4 @@ -nextflow.preview.dsl=2 +nextflow.enable.dsl=2 binDir = !params.containsKey("test") ? 
"${workflow.projectDir}/src/harmony/bin/" : "" diff --git a/src/harmony/workflows/bec_harmony.nf b/src/harmony/workflows/bec_harmony.nf index 5fc40244..44345a0f 100644 --- a/src/harmony/workflows/bec_harmony.nf +++ b/src/harmony/workflows/bec_harmony.nf @@ -1,4 +1,4 @@ -nextflow.preview.dsl=2 +nextflow.enable.dsl=2 ////////////////////////////////////////////////////// // process imports: diff --git a/src/harmony/workflows/harmony_only.nf b/src/harmony/workflows/harmony_only.nf index 5b858ab2..63a0d756 100644 --- a/src/harmony/workflows/harmony_only.nf +++ b/src/harmony/workflows/harmony_only.nf @@ -1,4 +1,4 @@ -nextflow.preview.dsl=2 +nextflow.enable.dsl=2 ////////////////////////////////////////////////////// // process imports: diff --git a/src/pcacv/processes/runPCACV.nf b/src/pcacv/processes/runPCACV.nf index f12a69d1..6a7dc94e 100644 --- a/src/pcacv/processes/runPCACV.nf +++ b/src/pcacv/processes/runPCACV.nf @@ -1,4 +1,4 @@ -nextflow.preview.dsl=2 +nextflow.enable.dsl=2 import java.nio.file.Paths diff --git a/src/picard/processes/create_sequence_dictionary.nf b/src/picard/processes/create_sequence_dictionary.nf index 6ea1f02f..4b638ead 100644 --- a/src/picard/processes/create_sequence_dictionary.nf +++ b/src/picard/processes/create_sequence_dictionary.nf @@ -1,4 +1,4 @@ -nextflow.preview.dsl=2 +nextflow.enable.dsl=2 process PICARD__CREATE_SEQUENCE_DICTIONARY { diff --git a/src/picard/processes/fastq_to_bam.nf b/src/picard/processes/fastq_to_bam.nf index 593b14e9..e97dcd99 100644 --- a/src/picard/processes/fastq_to_bam.nf +++ b/src/picard/processes/fastq_to_bam.nf @@ -1,4 +1,4 @@ -nextflow.preview.dsl=2 +nextflow.enable.dsl=2 process PICARD__FASTQ_TO_BAM { diff --git a/src/picard/processes/merge_bam_alignment.nf b/src/picard/processes/merge_bam_alignment.nf index 7eb2ff4f..b19a9b27 100644 --- a/src/picard/processes/merge_bam_alignment.nf +++ b/src/picard/processes/merge_bam_alignment.nf @@ -1,4 +1,4 @@ -nextflow.preview.dsl=2 +nextflow.enable.dsl=2 process 
PICARD__MERGE_BAM_ALIGNMENT { diff --git a/src/picard/processes/sam_to_fastq.nf b/src/picard/processes/sam_to_fastq.nf index 2defe85c..ee87f02d 100644 --- a/src/picard/processes/sam_to_fastq.nf +++ b/src/picard/processes/sam_to_fastq.nf @@ -1,4 +1,4 @@ -nextflow.preview.dsl=2 +nextflow.enable.dsl=2 process PICARD__BAM_TO_FASTQ { diff --git a/src/picard/processes/sort_sam.nf b/src/picard/processes/sort_sam.nf index 0ef076ed..32acc056 100644 --- a/src/picard/processes/sort_sam.nf +++ b/src/picard/processes/sort_sam.nf @@ -1,4 +1,4 @@ -nextflow.preview.dsl=2 +nextflow.enable.dsl=2 process PICARD__SORT_SAM { diff --git a/src/popscle/main.nf b/src/popscle/main.nf index c701aa10..3a814a9d 100644 --- a/src/popscle/main.nf +++ b/src/popscle/main.nf @@ -1,4 +1,4 @@ -nextflow.preview.dsl=2 +nextflow.enable.dsl=2 ////////////////////////////////////////////////////// // Import sub-workflows from the modules: diff --git a/src/popscle/processes/demuxlet.nf b/src/popscle/processes/demuxlet.nf index 493ea8c3..07f79e09 100644 --- a/src/popscle/processes/demuxlet.nf +++ b/src/popscle/processes/demuxlet.nf @@ -1,4 +1,4 @@ -nextflow.preview.dsl=2 +nextflow.enable.dsl=2 binDir = !params.containsKey("test") ? "${workflow.projectDir}/src/popscle/bin/" : "" diff --git a/src/popscle/processes/dsc_pileup.nf b/src/popscle/processes/dsc_pileup.nf index 3df63672..c7c0bde7 100644 --- a/src/popscle/processes/dsc_pileup.nf +++ b/src/popscle/processes/dsc_pileup.nf @@ -1,4 +1,4 @@ -nextflow.preview.dsl=2 +nextflow.enable.dsl=2 binDir = !params.containsKey("test") ? 
"${workflow.projectDir}/src/popscle/bin/" : "" diff --git a/src/popscle/workflows/demuxlet.nf b/src/popscle/workflows/demuxlet.nf index 34f82feb..d6212d92 100644 --- a/src/popscle/workflows/demuxlet.nf +++ b/src/popscle/workflows/demuxlet.nf @@ -1,4 +1,4 @@ -nextflow.preview.dsl=2 +nextflow.enable.dsl=2 ////////////////////////////////////////////////////// // Import sub-workflows from the modules: diff --git a/src/popscle/workflows/dsc_pileup.nf b/src/popscle/workflows/dsc_pileup.nf index 24ba349a..d360ac14 100644 --- a/src/popscle/workflows/dsc_pileup.nf +++ b/src/popscle/workflows/dsc_pileup.nf @@ -1,4 +1,4 @@ -nextflow.preview.dsl=2 +nextflow.enable.dsl=2 ////////////////////////////////////////////////////// // Import sub-workflows from the modules: diff --git a/src/pycistopic/processes/macs2_call_peaks.nf b/src/pycistopic/processes/macs2_call_peaks.nf index 4930c9bc..ff43cef2 100644 --- a/src/pycistopic/processes/macs2_call_peaks.nf +++ b/src/pycistopic/processes/macs2_call_peaks.nf @@ -1,4 +1,4 @@ -nextflow.preview.dsl=2 +nextflow.enable.dsl=2 // binDir = !params.containsKey("test") ? 
"${workflow.projectDir}/src/template/bin/" : "" diff --git a/src/scanpy/main.nf b/src/scanpy/main.nf index 6d0b4cd7..e0e10ff9 100644 --- a/src/scanpy/main.nf +++ b/src/scanpy/main.nf @@ -1,4 +1,4 @@ -nextflow.preview.dsl=2 +nextflow.enable.dsl=2 import static groovy.json.JsonOutput.* diff --git a/src/scanpy/main.test.nf b/src/scanpy/main.test.nf index 548b7e16..656f7d52 100644 --- a/src/scanpy/main.test.nf +++ b/src/scanpy/main.test.nf @@ -1,4 +1,4 @@ -nextflow.preview.dsl=2 +nextflow.enable.dsl=2 /////////////////////////////////////////// // Define the parameters for all processes diff --git a/src/scanpy/processes/batch_effect_correct.nf b/src/scanpy/processes/batch_effect_correct.nf index 3ff66836..a1822719 100644 --- a/src/scanpy/processes/batch_effect_correct.nf +++ b/src/scanpy/processes/batch_effect_correct.nf @@ -1,4 +1,4 @@ -nextflow.preview.dsl=2 +nextflow.enable.dsl=2 binDir = !params.containsKey("test") ? "${workflow.projectDir}/src/scanpy/bin/" : "" diff --git a/src/scanpy/processes/cluster.nf b/src/scanpy/processes/cluster.nf index eafd19cc..24bce01b 100644 --- a/src/scanpy/processes/cluster.nf +++ b/src/scanpy/processes/cluster.nf @@ -1,4 +1,4 @@ -nextflow.preview.dsl=2 +nextflow.enable.dsl=2 import java.nio.file.Paths import groovy.transform.TupleConstructor diff --git a/src/scanpy/processes/dim_reduction.nf b/src/scanpy/processes/dim_reduction.nf index 7ac489f8..6bee40eb 100644 --- a/src/scanpy/processes/dim_reduction.nf +++ b/src/scanpy/processes/dim_reduction.nf @@ -1,4 +1,4 @@ -nextflow.preview.dsl=2 +nextflow.enable.dsl=2 import java.nio.file.Paths import groovy.transform.TupleConstructor diff --git a/src/scanpy/processes/feature_selection.nf b/src/scanpy/processes/feature_selection.nf index f3c074ea..8124bb0a 100644 --- a/src/scanpy/processes/feature_selection.nf +++ b/src/scanpy/processes/feature_selection.nf @@ -1,4 +1,4 @@ -nextflow.preview.dsl=2 +nextflow.enable.dsl=2 import java.nio.file.Paths diff --git a/src/scanpy/processes/filter.nf 
b/src/scanpy/processes/filter.nf index ba0263e3..dffdf9b2 100644 --- a/src/scanpy/processes/filter.nf +++ b/src/scanpy/processes/filter.nf @@ -1,4 +1,4 @@ -nextflow.preview.dsl=2 +nextflow.enable.dsl=2 import java.nio.file.Paths diff --git a/src/scanpy/processes/marker_genes.nf b/src/scanpy/processes/marker_genes.nf index 03a6f196..da900fa8 100644 --- a/src/scanpy/processes/marker_genes.nf +++ b/src/scanpy/processes/marker_genes.nf @@ -1,4 +1,4 @@ -nextflow.preview.dsl=2 +nextflow.enable.dsl=2 import java.nio.file.Paths diff --git a/src/scanpy/processes/neighborhood_graph.nf b/src/scanpy/processes/neighborhood_graph.nf index c085acbe..3c36b3b4 100644 --- a/src/scanpy/processes/neighborhood_graph.nf +++ b/src/scanpy/processes/neighborhood_graph.nf @@ -1,4 +1,4 @@ -nextflow.preview.dsl=2 +nextflow.enable.dsl=2 import java.nio.file.Paths import groovy.transform.TupleConstructor diff --git a/src/scanpy/processes/regress_out.nf b/src/scanpy/processes/regress_out.nf index 244298b7..93e6d3f9 100644 --- a/src/scanpy/processes/regress_out.nf +++ b/src/scanpy/processes/regress_out.nf @@ -1,4 +1,4 @@ -nextflow.preview.dsl=2 +nextflow.enable.dsl=2 binDir = !params.containsKey("test") ? 
"${workflow.projectDir}/src/scanpy/bin/" : "" diff --git a/src/scanpy/processes/reports.nf b/src/scanpy/processes/reports.nf index 6fd62827..2a731722 100644 --- a/src/scanpy/processes/reports.nf +++ b/src/scanpy/processes/reports.nf @@ -1,4 +1,4 @@ -nextflow.preview.dsl=2 +nextflow.enable.dsl=2 import static groovy.json.JsonOutput.* import org.yaml.snakeyaml.Yaml diff --git a/src/scanpy/processes/transform.nf b/src/scanpy/processes/transform.nf index 1d844cd1..7b1017e1 100644 --- a/src/scanpy/processes/transform.nf +++ b/src/scanpy/processes/transform.nf @@ -1,4 +1,4 @@ -nextflow.preview.dsl=2 +nextflow.enable.dsl=2 import java.nio.file.Paths diff --git a/src/scanpy/workflows/bec_bbknn.nf b/src/scanpy/workflows/bec_bbknn.nf index 3fed0565..f37ab71e 100644 --- a/src/scanpy/workflows/bec_bbknn.nf +++ b/src/scanpy/workflows/bec_bbknn.nf @@ -5,7 +5,7 @@ * - batch effect correction using python package bbknn (Park et al. (2018), Fast Batch Alignment of Single Cell Transcriptomes Unifies Multiple Mouse Cell Atlases into an Integrated Landscape) */ -nextflow.preview.dsl=2 +nextflow.enable.dsl=2 //////////////////////////////////////////////////////// // Import sub-workflows/processes from the utils module: diff --git a/src/scanpy/workflows/bec_mnncorrect.nf b/src/scanpy/workflows/bec_mnncorrect.nf index d4d38cee..5dd8161d 100644 --- a/src/scanpy/workflows/bec_mnncorrect.nf +++ b/src/scanpy/workflows/bec_mnncorrect.nf @@ -3,7 +3,7 @@ * - batch effect correction using python package mnnpy (fast and python version of mnnCorrect (Haghverdi et al, 2018) */ -nextflow.preview.dsl=2 +nextflow.enable.dsl=2 //////////////////////////////////////////////////////// // Import sub-workflows/processes from the utils module: diff --git a/src/scanpy/workflows/cluster_identification.nf b/src/scanpy/workflows/cluster_identification.nf index c75d820b..62563549 100644 --- a/src/scanpy/workflows/cluster_identification.nf +++ b/src/scanpy/workflows/cluster_identification.nf @@ -1,4 +1,4 @@ 
-nextflow.preview.dsl=2 +nextflow.enable.dsl=2 //////////////////////////////////////////////////////// // Import sub-workflows/processes from the utils module: diff --git a/src/scanpy/workflows/combine_reports.nf b/src/scanpy/workflows/combine_reports.nf index 155a94c5..45c2c54c 100644 --- a/src/scanpy/workflows/combine_reports.nf +++ b/src/scanpy/workflows/combine_reports.nf @@ -1,4 +1,4 @@ -nextflow.preview.dsl=2 +nextflow.enable.dsl=2 include { clean; diff --git a/src/scanpy/workflows/create_report.nf b/src/scanpy/workflows/create_report.nf index 1943d0cb..8ff0f391 100644 --- a/src/scanpy/workflows/create_report.nf +++ b/src/scanpy/workflows/create_report.nf @@ -1,4 +1,4 @@ -nextflow.preview.dsl=2 +nextflow.enable.dsl=2 ////////////////////////////////////////////////////// // process imports: diff --git a/src/scanpy/workflows/dim_reduction.nf b/src/scanpy/workflows/dim_reduction.nf index 7179e32e..e9d3a1a3 100644 --- a/src/scanpy/workflows/dim_reduction.nf +++ b/src/scanpy/workflows/dim_reduction.nf @@ -1,4 +1,4 @@ -nextflow.preview.dsl=2 +nextflow.enable.dsl=2 ////////////////////////////////////////////////////// // process imports: diff --git a/src/scanpy/workflows/dim_reduction_pca.nf b/src/scanpy/workflows/dim_reduction_pca.nf index b9793dbf..27a615dd 100644 --- a/src/scanpy/workflows/dim_reduction_pca.nf +++ b/src/scanpy/workflows/dim_reduction_pca.nf @@ -1,4 +1,4 @@ -nextflow.preview.dsl=2 +nextflow.enable.dsl=2 ////////////////////////////////////////////////////// // process imports: diff --git a/src/scanpy/workflows/hvg_selection.nf b/src/scanpy/workflows/hvg_selection.nf index 0d91d8cd..6381b91a 100644 --- a/src/scanpy/workflows/hvg_selection.nf +++ b/src/scanpy/workflows/hvg_selection.nf @@ -1,4 +1,4 @@ -nextflow.preview.dsl=2 +nextflow.enable.dsl=2 ////////////////////////////////////////////////////// // process imports: diff --git a/src/scanpy/workflows/neighborhood_graph.nf b/src/scanpy/workflows/neighborhood_graph.nf index b2c4735f..37a1299e 
100644 --- a/src/scanpy/workflows/neighborhood_graph.nf +++ b/src/scanpy/workflows/neighborhood_graph.nf @@ -1,4 +1,4 @@ -nextflow.preview.dsl=2 +nextflow.enable.dsl=2 ////////////////////////////////////////////////////// // process imports: diff --git a/src/scanpy/workflows/normalize_transform.nf b/src/scanpy/workflows/normalize_transform.nf index e1e4ad7c..dc1e62a6 100644 --- a/src/scanpy/workflows/normalize_transform.nf +++ b/src/scanpy/workflows/normalize_transform.nf @@ -1,4 +1,4 @@ -nextflow.preview.dsl=2 +nextflow.enable.dsl=2 ////////////////////////////////////////////////////// // process imports: diff --git a/src/scanpy/workflows/qc_filter.nf b/src/scanpy/workflows/qc_filter.nf index 20e93495..b3c07494 100644 --- a/src/scanpy/workflows/qc_filter.nf +++ b/src/scanpy/workflows/qc_filter.nf @@ -1,4 +1,4 @@ -nextflow.preview.dsl=2 +nextflow.enable.dsl=2 //////////////////////////////////////////////////////// // Import sub-workflows/processes from the utils module: diff --git a/src/scanpy/workflows/single_sample.nf b/src/scanpy/workflows/single_sample.nf index 59eefd53..2de848cb 100644 --- a/src/scanpy/workflows/single_sample.nf +++ b/src/scanpy/workflows/single_sample.nf @@ -1,4 +1,4 @@ -nextflow.preview.dsl=2 +nextflow.enable.dsl=2 import static groovy.json.JsonOutput.* diff --git a/src/scenic/main.nf b/src/scenic/main.nf index 22763a8a..34424537 100644 --- a/src/scenic/main.nf +++ b/src/scenic/main.nf @@ -1,4 +1,4 @@ -nextflow.preview.dsl=2 +nextflow.enable.dsl=2 include { resolveParams; diff --git a/src/scenic/main.test.nf b/src/scenic/main.test.nf index 80915828..9ceef3d2 100644 --- a/src/scenic/main.test.nf +++ b/src/scenic/main.test.nf @@ -40,7 +40,7 @@ // nextflow -C conf/multi_runs.config,conf/test.config run main.test.nf --test SAVE_SCENIC_MULTI_RUNS_TO_LOOM -nextflow.preview.dsl=2 +nextflow.enable.dsl=2 /////////////////////////////////////////// // Define the parameters for all processes diff --git a/src/scenic/processes/add_correlation.nf 
b/src/scenic/processes/add_correlation.nf index ae3b1abe..a857f712 100644 --- a/src/scenic/processes/add_correlation.nf +++ b/src/scenic/processes/add_correlation.nf @@ -1,4 +1,4 @@ -nextflow.preview.dsl=2 +nextflow.enable.dsl=2 // binDir = !params.containsKey("test") ? "${workflow.projectDir}/src/scenic/bin/" : "" diff --git a/src/scenic/processes/arboreto_with_multiprocessing.nf b/src/scenic/processes/arboreto_with_multiprocessing.nf index 2f3fd059..747ba1f1 100644 --- a/src/scenic/processes/arboreto_with_multiprocessing.nf +++ b/src/scenic/processes/arboreto_with_multiprocessing.nf @@ -1,4 +1,4 @@ -nextflow.preview.dsl=2 +nextflow.enable.dsl=2 binDir = !params.containsKey("test") ? "${workflow.projectDir}/src/scenic/bin/" : "" diff --git a/src/scenic/processes/aucell.nf b/src/scenic/processes/aucell.nf index d0fcdd5d..f52c6cbf 100644 --- a/src/scenic/processes/aucell.nf +++ b/src/scenic/processes/aucell.nf @@ -1,4 +1,4 @@ -nextflow.preview.dsl=2 +nextflow.enable.dsl=2 binDir = !params.containsKey("test") ? "${workflow.projectDir}/src/scenic/bin/" : "" diff --git a/src/scenic/processes/cistarget.nf b/src/scenic/processes/cistarget.nf index db6cd33b..6621c4a0 100644 --- a/src/scenic/processes/cistarget.nf +++ b/src/scenic/processes/cistarget.nf @@ -1,4 +1,4 @@ -nextflow.preview.dsl=2 +nextflow.enable.dsl=2 binDir = !params.containsKey("test") ? "${workflow.projectDir}/src/scenic/bin/" : "" diff --git a/src/scenic/processes/loomHandler.nf b/src/scenic/processes/loomHandler.nf index 458790d8..cebf52c2 100644 --- a/src/scenic/processes/loomHandler.nf +++ b/src/scenic/processes/loomHandler.nf @@ -1,4 +1,4 @@ -nextflow.preview.dsl=2 +nextflow.enable.dsl=2 binDir = !params.containsKey("test") ? 
"${workflow.projectDir}/src/scenic/bin/" : "" diff --git a/src/scenic/processes/multiruns/aggregateFeatures.nf b/src/scenic/processes/multiruns/aggregateFeatures.nf index f574a556..a7da8ece 100644 --- a/src/scenic/processes/multiruns/aggregateFeatures.nf +++ b/src/scenic/processes/multiruns/aggregateFeatures.nf @@ -1,4 +1,4 @@ -nextflow.preview.dsl=2 +nextflow.enable.dsl=2 binDir = !params.containsKey("test") ? "${workflow.projectDir}/src/scenic/bin/" : "" diff --git a/src/scenic/processes/multiruns/aggregateRegulons.nf b/src/scenic/processes/multiruns/aggregateRegulons.nf index 557eec4a..da1546a2 100644 --- a/src/scenic/processes/multiruns/aggregateRegulons.nf +++ b/src/scenic/processes/multiruns/aggregateRegulons.nf @@ -1,4 +1,4 @@ -nextflow.preview.dsl=2 +nextflow.enable.dsl=2 binDir = !params.containsKey("test") ? "${workflow.projectDir}/src/scenic/bin/" : "" diff --git a/src/scenic/processes/multiruns/aucellFromFolder.nf b/src/scenic/processes/multiruns/aucellFromFolder.nf index fba8e0c9..2a3c88f9 100644 --- a/src/scenic/processes/multiruns/aucellFromFolder.nf +++ b/src/scenic/processes/multiruns/aucellFromFolder.nf @@ -1,4 +1,4 @@ -nextflow.preview.dsl=2 +nextflow.enable.dsl=2 binDir = !params.containsKey("test") ? "${workflow.projectDir}/src/scenic/bin/" : "" diff --git a/src/scenic/processes/multiruns/convertMotifsToRegulons.nf b/src/scenic/processes/multiruns/convertMotifsToRegulons.nf index fc2122a6..f735446f 100644 --- a/src/scenic/processes/multiruns/convertMotifsToRegulons.nf +++ b/src/scenic/processes/multiruns/convertMotifsToRegulons.nf @@ -1,4 +1,4 @@ -nextflow.preview.dsl=2 +nextflow.enable.dsl=2 binDir = !params.containsKey("test") ? 
"${workflow.projectDir}/src/scenic/bin/" : "" diff --git a/src/scenic/processes/multiruns/saveToLoom.nf b/src/scenic/processes/multiruns/saveToLoom.nf index b0e5da50..d2d8b6a7 100644 --- a/src/scenic/processes/multiruns/saveToLoom.nf +++ b/src/scenic/processes/multiruns/saveToLoom.nf @@ -1,4 +1,4 @@ -nextflow.preview.dsl=2 +nextflow.enable.dsl=2 binDir = !params.containsKey("test") ? "${workflow.projectDir}/src/scenic/bin/" : "" diff --git a/src/scenic/processes/reports.nf b/src/scenic/processes/reports.nf index 2fcbb5ae..b8552dc0 100644 --- a/src/scenic/processes/reports.nf +++ b/src/scenic/processes/reports.nf @@ -1,4 +1,4 @@ -nextflow.preview.dsl=2 +nextflow.enable.dsl=2 binDir = !params.containsKey("test") ? "${workflow.projectDir}/src/scenic/bin/" : "" diff --git a/src/scenic/workflows/aggregateMultiRuns.nf b/src/scenic/workflows/aggregateMultiRuns.nf index f3ff0ab7..657b0950 100644 --- a/src/scenic/workflows/aggregateMultiRuns.nf +++ b/src/scenic/workflows/aggregateMultiRuns.nf @@ -1,4 +1,4 @@ -nextflow.preview.dsl=2 +nextflow.enable.dsl=2 ////////////////////////////////////////////////////// // process imports: diff --git a/src/scrublet/processes/doublet_detection.nf b/src/scrublet/processes/doublet_detection.nf index 5cd2f33a..e43c12a0 100644 --- a/src/scrublet/processes/doublet_detection.nf +++ b/src/scrublet/processes/doublet_detection.nf @@ -1,4 +1,4 @@ -nextflow.preview.dsl=2 +nextflow.enable.dsl=2 import java.nio.file.Paths import groovy.transform.TupleConstructor diff --git a/src/scrublet/processes/reports.nf b/src/scrublet/processes/reports.nf index 180e1882..86c96b5d 100644 --- a/src/scrublet/processes/reports.nf +++ b/src/scrublet/processes/reports.nf @@ -1,4 +1,4 @@ -nextflow.preview.dsl=2 +nextflow.enable.dsl=2 process SC__SCRUBLET__DOUBLET_DETECTION_REPORT { diff --git a/src/scrublet/workflows/doublet_removal.nf b/src/scrublet/workflows/doublet_removal.nf index d76cca4a..eb50af71 100644 --- a/src/scrublet/workflows/doublet_removal.nf +++ 
b/src/scrublet/workflows/doublet_removal.nf @@ -1,4 +1,4 @@ -nextflow.preview.dsl=2 +nextflow.enable.dsl=2 //////////////////////////////////////////////////////// // Import sub-workflows/processes from the utils module: diff --git a/src/singlecelltoolkit/processes/barcode_correction.nf b/src/singlecelltoolkit/processes/barcode_correction.nf index bd3122c6..4fe9d9ac 100644 --- a/src/singlecelltoolkit/processes/barcode_correction.nf +++ b/src/singlecelltoolkit/processes/barcode_correction.nf @@ -1,4 +1,4 @@ -nextflow.preview.dsl=2 +nextflow.enable.dsl=2 //binDir = !params.containsKey("test") ? "${workflow.projectDir}/src/singlecelltoolkit/bin/" : "" diff --git a/src/singlecelltoolkit/processes/debarcode_10x_scatac_fastqs.nf b/src/singlecelltoolkit/processes/debarcode_10x_scatac_fastqs.nf index 3d6ad361..0567aea1 100644 --- a/src/singlecelltoolkit/processes/debarcode_10x_scatac_fastqs.nf +++ b/src/singlecelltoolkit/processes/debarcode_10x_scatac_fastqs.nf @@ -1,4 +1,4 @@ -nextflow.preview.dsl=2 +nextflow.enable.dsl=2 //binDir = !params.containsKey("test") ? "${workflow.projectDir}/src/singlecelltoolkit/bin/" : "" diff --git a/src/sinto/main.nf b/src/sinto/main.nf index 715c7ab2..ccb2e4f7 100644 --- a/src/sinto/main.nf +++ b/src/sinto/main.nf @@ -1,4 +1,4 @@ -nextflow.preview.dsl=2 +nextflow.enable.dsl=2 ////////////////////////////////////////////////////// // Import sub-workflows from the modules: diff --git a/src/sinto/processes/fragments.nf b/src/sinto/processes/fragments.nf index b9fdbff4..a23c6b80 100644 --- a/src/sinto/processes/fragments.nf +++ b/src/sinto/processes/fragments.nf @@ -1,4 +1,4 @@ -nextflow.preview.dsl=2 +nextflow.enable.dsl=2 //binDir = !params.containsKey("test") ? 
"${workflow.projectDir}/src/template/bin/" : "" diff --git a/src/soupx/main.nf b/src/soupx/main.nf index a615618d..d46da119 100644 --- a/src/soupx/main.nf +++ b/src/soupx/main.nf @@ -1,4 +1,4 @@ -nextflow.preview.dsl=2 +nextflow.enable.dsl=2 // Should be set in case this pipeline is run with other pipelines (e.g.: single_sample) diff --git a/src/soupx/main.test.nf b/src/soupx/main.test.nf index 8d627f87..bf06320f 100644 --- a/src/soupx/main.test.nf +++ b/src/soupx/main.test.nf @@ -1,4 +1,4 @@ -nextflow.preview.dsl=2 +nextflow.enable.dsl=2 include { INIT; diff --git a/src/soupx/processes/process1.nf b/src/soupx/processes/process1.nf index b62be9dd..a572755f 100644 --- a/src/soupx/processes/process1.nf +++ b/src/soupx/processes/process1.nf @@ -1,4 +1,4 @@ -nextflow.preview.dsl=2 +nextflow.enable.dsl=2 binDir = !params.containsKey("test") ? "${workflow.projectDir}/src/template/bin/" : "" diff --git a/src/soupx/processes/runSoupX.nf b/src/soupx/processes/runSoupX.nf index cb2c84f9..11553a2f 100644 --- a/src/soupx/processes/runSoupX.nf +++ b/src/soupx/processes/runSoupX.nf @@ -1,4 +1,4 @@ -nextflow.preview.dsl=2 +nextflow.enable.dsl=2 import java.nio.file.Paths diff --git a/src/soupx/workflows/soupX.nf b/src/soupx/workflows/soupX.nf index 7d771825..2c50b617 100644 --- a/src/soupx/workflows/soupX.nf +++ b/src/soupx/workflows/soupX.nf @@ -1,4 +1,4 @@ -nextflow.preview.dsl=2 +nextflow.enable.dsl=2 //////////////////////////////////////////////////////// // Import sub-workflows/processes from the utils module: diff --git a/src/sratoolkit/processes/downloadFastQ.nf b/src/sratoolkit/processes/downloadFastQ.nf index 297b69ca..4768e388 100644 --- a/src/sratoolkit/processes/downloadFastQ.nf +++ b/src/sratoolkit/processes/downloadFastQ.nf @@ -1,4 +1,4 @@ -nextflow.preview.dsl=2 +nextflow.enable.dsl=2 if(!params.containsKey("test")) { binDir = "${workflow.projectDir}/src/sratoolkit/bin/" diff --git a/src/star/main.nf b/src/star/main.nf index 93402f0a..eafd9381 100644 --- 
a/src/star/main.nf +++ b/src/star/main.nf @@ -1,4 +1,4 @@ -nextflow.preview.dsl=2 +nextflow.enable.dsl=2 ////////////////////////////////////////////////////// // Define the parameters for current testing proces diff --git a/src/star/processes/build_genome.nf b/src/star/processes/build_genome.nf index e51b187e..273ee58b 100644 --- a/src/star/processes/build_genome.nf +++ b/src/star/processes/build_genome.nf @@ -1,4 +1,4 @@ -nextflow.preview.dsl=2 +nextflow.enable.dsl=2 process SC__STAR__BUILD_INDEX { diff --git a/src/star/processes/load_genome.nf b/src/star/processes/load_genome.nf index a25aeb67..adc794a7 100644 --- a/src/star/processes/load_genome.nf +++ b/src/star/processes/load_genome.nf @@ -1,4 +1,4 @@ -nextflow.preview.dsl=2 +nextflow.enable.dsl=2 process SC__STAR__LOAD_GENOME { diff --git a/src/star/processes/map_count.nf b/src/star/processes/map_count.nf index fd0e9505..1fac3f90 100644 --- a/src/star/processes/map_count.nf +++ b/src/star/processes/map_count.nf @@ -1,4 +1,4 @@ -nextflow.preview.dsl=2 +nextflow.enable.dsl=2 process SC__STAR__MAP_COUNT { diff --git a/src/star/processes/solo_map_count.nf b/src/star/processes/solo_map_count.nf index 0e26c225..b839329e 100644 --- a/src/star/processes/solo_map_count.nf +++ b/src/star/processes/solo_map_count.nf @@ -1,4 +1,4 @@ -nextflow.preview.dsl=2 +nextflow.enable.dsl=2 process SC__STAR__SOLO_MAP_COUNT { container params.sc.star.container diff --git a/src/star/processes/unload_genome.nf b/src/star/processes/unload_genome.nf index 932286f7..77d5a429 100644 --- a/src/star/processes/unload_genome.nf +++ b/src/star/processes/unload_genome.nf @@ -1,4 +1,4 @@ -nextflow.preview.dsl=2 +nextflow.enable.dsl=2 process SC__STAR__UNLOAD_GENOME { diff --git a/src/trimgalore/processes/trim.nf b/src/trimgalore/processes/trim.nf index 3a02e8de..02e9a9f9 100644 --- a/src/trimgalore/processes/trim.nf +++ b/src/trimgalore/processes/trim.nf @@ -1,4 +1,4 @@ -nextflow.preview.dsl=2 +nextflow.enable.dsl=2 // binDir = 
!params.containsKey("test") ? "${workflow.projectDir}/src/template/bin/" : "" diff --git a/src/utils/main.test.nf b/src/utils/main.test.nf index c99b294b..92159c70 100644 --- a/src/utils/main.test.nf +++ b/src/utils/main.test.nf @@ -1,4 +1,4 @@ -nextflow.preview.dsl=2 +nextflow.enable.dsl=2 include { INIT; diff --git a/src/utils/processes/gtf.nf b/src/utils/processes/gtf.nf index 11c111e5..0e568550 100644 --- a/src/utils/processes/gtf.nf +++ b/src/utils/processes/gtf.nf @@ -1,4 +1,4 @@ -nextflow.preview.dsl=2 +nextflow.enable.dsl=2 process FORMAT_GTF { diff --git a/src/utils/processes/h5adAnnotate.nf b/src/utils/processes/h5adAnnotate.nf index df6c9f44..bdab3c1b 100644 --- a/src/utils/processes/h5adAnnotate.nf +++ b/src/utils/processes/h5adAnnotate.nf @@ -1,4 +1,4 @@ -nextflow.preview.dsl=2 +nextflow.enable.dsl=2 import java.nio.file.Paths diff --git a/src/utils/processes/h5adExtractMetadata.nf b/src/utils/processes/h5adExtractMetadata.nf index 23f9a0e6..f25d3430 100644 --- a/src/utils/processes/h5adExtractMetadata.nf +++ b/src/utils/processes/h5adExtractMetadata.nf @@ -1,4 +1,4 @@ -nextflow.preview.dsl=2 +nextflow.enable.dsl=2 import java.nio.file.Paths diff --git a/src/utils/processes/h5adMerge.nf b/src/utils/processes/h5adMerge.nf index 25dbafba..43acb06b 100644 --- a/src/utils/processes/h5adMerge.nf +++ b/src/utils/processes/h5adMerge.nf @@ -1,4 +1,4 @@ -nextflow.preview.dsl=2 +nextflow.enable.dsl=2 import java.nio.file.Paths diff --git a/src/utils/processes/h5adSubset.nf b/src/utils/processes/h5adSubset.nf index 61290f74..30f41f60 100644 --- a/src/utils/processes/h5adSubset.nf +++ b/src/utils/processes/h5adSubset.nf @@ -1,4 +1,4 @@ -nextflow.preview.dsl=2 +nextflow.enable.dsl=2 import java.nio.file.Paths diff --git a/src/utils/processes/h5adToLoom.nf b/src/utils/processes/h5adToLoom.nf index 8c4c90b7..d1d6cf5c 100644 --- a/src/utils/processes/h5adToLoom.nf +++ b/src/utils/processes/h5adToLoom.nf @@ -1,4 +1,4 @@ -nextflow.preview.dsl=2 +nextflow.enable.dsl=2 
import java.nio.file.Paths diff --git a/src/utils/processes/h5adUpdate.nf b/src/utils/processes/h5adUpdate.nf index 2b308426..5cb7506a 100644 --- a/src/utils/processes/h5adUpdate.nf +++ b/src/utils/processes/h5adUpdate.nf @@ -1,4 +1,4 @@ -nextflow.preview.dsl=2 +nextflow.enable.dsl=2 import java.nio.file.Paths import static groovy.json.JsonOutput.* diff --git a/src/utils/processes/h5adUpdateMetadata.nf b/src/utils/processes/h5adUpdateMetadata.nf index 18e98882..fb829160 100644 --- a/src/utils/processes/h5adUpdateMetadata.nf +++ b/src/utils/processes/h5adUpdateMetadata.nf @@ -1,4 +1,4 @@ -nextflow.preview.dsl=2 +nextflow.enable.dsl=2 import java.nio.file.Paths diff --git a/src/utils/processes/reports.nf b/src/utils/processes/reports.nf index d23970bf..3779f98c 100644 --- a/src/utils/processes/reports.nf +++ b/src/utils/processes/reports.nf @@ -1,4 +1,4 @@ -nextflow.preview.dsl=2 +nextflow.enable.dsl=2 import static groovy.json.JsonOutput.* diff --git a/src/utils/processes/sra.nf b/src/utils/processes/sra.nf index 7d3f682c..2fc2afc5 100644 --- a/src/utils/processes/sra.nf +++ b/src/utils/processes/sra.nf @@ -1,4 +1,4 @@ -nextflow.preview.dsl=2 +nextflow.enable.dsl=2 import java.nio.file.Files import java.nio.file.Paths diff --git a/src/utils/processes/utils.nf b/src/utils/processes/utils.nf index 8388e204..b3647ed9 100644 --- a/src/utils/processes/utils.nf +++ b/src/utils/processes/utils.nf @@ -1,4 +1,4 @@ -nextflow.preview.dsl=2 +nextflow.enable.dsl=2 import java.nio.file.Paths import nextflow.config.ConfigParser diff --git a/src/utils/workflows/annotateByCellMetadata.nf b/src/utils/workflows/annotateByCellMetadata.nf index 887a662c..3fceb5e7 100644 --- a/src/utils/workflows/annotateByCellMetadata.nf +++ b/src/utils/workflows/annotateByCellMetadata.nf @@ -1,4 +1,4 @@ -nextflow.preview.dsl=2 +nextflow.enable.dsl=2 ////////////////////////////////////////////////////// // Process imports: diff --git a/src/utils/workflows/downloadFromSRA.nf 
b/src/utils/workflows/downloadFromSRA.nf index 178a1e72..e94ea27e 100644 --- a/src/utils/workflows/downloadFromSRA.nf +++ b/src/utils/workflows/downloadFromSRA.nf @@ -1,4 +1,4 @@ -nextflow.preview.dsl=2 +nextflow.enable.dsl=2 import java.nio.file.Files import java.nio.file.Paths diff --git a/src/utils/workflows/fileConverter.nf b/src/utils/workflows/fileConverter.nf index 602a7ff7..7fd55fd2 100644 --- a/src/utils/workflows/fileConverter.nf +++ b/src/utils/workflows/fileConverter.nf @@ -1,6 +1,6 @@ import nextflow.util.ArrayBag -nextflow.preview.dsl=2 +nextflow.enable.dsl=2 ////////////////////////////////////////////////////// // process imports: diff --git a/src/utils/workflows/filterAnnotateClean.nf b/src/utils/workflows/filterAnnotateClean.nf index 74a57623..aca918cf 100644 --- a/src/utils/workflows/filterAnnotateClean.nf +++ b/src/utils/workflows/filterAnnotateClean.nf @@ -1,4 +1,4 @@ -nextflow.preview.dsl=2 +nextflow.enable.dsl=2 ////////////////////////////////////////////////////// // Process imports: diff --git a/src/utils/workflows/filterByCellMetadata.nf b/src/utils/workflows/filterByCellMetadata.nf index 158ce902..4e6b26e6 100644 --- a/src/utils/workflows/filterByCellMetadata.nf +++ b/src/utils/workflows/filterByCellMetadata.nf @@ -1,4 +1,4 @@ -nextflow.preview.dsl=2 +nextflow.enable.dsl=2 ////////////////////////////////////////////////////// // Process imports: diff --git a/src/utils/workflows/finalize.nf b/src/utils/workflows/finalize.nf index 041964ad..a30a84d8 100644 --- a/src/utils/workflows/finalize.nf +++ b/src/utils/workflows/finalize.nf @@ -1,4 +1,4 @@ -nextflow.preview.dsl=2 +nextflow.enable.dsl=2 include { SC__H5AD_TO_FILTERED_LOOM diff --git a/src/utils/workflows/updateFeatureNomenclature.nf b/src/utils/workflows/updateFeatureNomenclature.nf index 62f11986..3e9c4df4 100644 --- a/src/utils/workflows/updateFeatureNomenclature.nf +++ b/src/utils/workflows/updateFeatureNomenclature.nf @@ -4,7 +4,7 @@ * */ -nextflow.preview.dsl=2 
+nextflow.enable.dsl=2 ////////////////////////////////////////////////////// // process imports: diff --git a/src/utils/workflows/utils.nf b/src/utils/workflows/utils.nf index 790ec73f..392afdf9 100644 --- a/src/utils/workflows/utils.nf +++ b/src/utils/workflows/utils.nf @@ -1,4 +1,4 @@ -nextflow.preview.dsl=2 +nextflow.enable.dsl=2 import static groovy.json.JsonOutput.* diff --git a/workflows/atac/preprocess.nf b/workflows/atac/preprocess.nf index f5e958f9..6d33d919 100644 --- a/workflows/atac/preprocess.nf +++ b/workflows/atac/preprocess.nf @@ -1,4 +1,4 @@ -nextflow.preview.dsl=2 +nextflow.enable.dsl=2 ////////////////////////////////////////////////////// // process imports: diff --git a/workflows/atac/qc_filtering.nf b/workflows/atac/qc_filtering.nf index 397581fd..5f2358f7 100644 --- a/workflows/atac/qc_filtering.nf +++ b/workflows/atac/qc_filtering.nf @@ -1,4 +1,4 @@ -nextflow.preview.dsl=2 +nextflow.enable.dsl=2 ////////////////////////////////////////////////////// // process imports: diff --git a/workflows/bbknn.nf b/workflows/bbknn.nf index 1cecb647..816667f8 100644 --- a/workflows/bbknn.nf +++ b/workflows/bbknn.nf @@ -1,4 +1,4 @@ -nextflow.preview.dsl=2 +nextflow.enable.dsl=2 //////////////////////////////////////////////////////// // Import sub-workflows/processes from the utils module: diff --git a/workflows/harmony.nf b/workflows/harmony.nf index f8a383f0..5743c008 100644 --- a/workflows/harmony.nf +++ b/workflows/harmony.nf @@ -1,4 +1,4 @@ -nextflow.preview.dsl=2 +nextflow.enable.dsl=2 //////////////////////////////////////////////////////// // Import sub-workflows/processes from the utils module: diff --git a/workflows/mnncorrect.nf b/workflows/mnncorrect.nf index 06e003f8..584621be 100644 --- a/workflows/mnncorrect.nf +++ b/workflows/mnncorrect.nf @@ -1,4 +1,4 @@ -nextflow.preview.dsl=2 +nextflow.enable.dsl=2 //////////////////////////////////////////////////////// // Import sub-workflows/processes from the utils module: diff --git 
a/workflows/multi_sample.nf b/workflows/multi_sample.nf index a8ee593f..ff7f5368 100644 --- a/workflows/multi_sample.nf +++ b/workflows/multi_sample.nf @@ -1,4 +1,4 @@ -nextflow.preview.dsl=2 +nextflow.enable.dsl=2 //////////////////////////////////////////////////////// // Import sub-workflows/processes from the utils module: diff --git a/workflows/nemesh.nf b/workflows/nemesh.nf index a4d5073c..d489e46c 100644 --- a/workflows/nemesh.nf +++ b/workflows/nemesh.nf @@ -1,4 +1,4 @@ -nextflow.preview.dsl=2 +nextflow.enable.dsl=2 ////////////////////////////////////////////////////// // Define the parameters for current testing proces diff --git a/workflows/single_sample.nf b/workflows/single_sample.nf index ce04497a..1100c25e 100644 --- a/workflows/single_sample.nf +++ b/workflows/single_sample.nf @@ -1,4 +1,4 @@ -nextflow.preview.dsl=2 +nextflow.enable.dsl=2 // Utils include { diff --git a/workflows/single_sample_star.nf b/workflows/single_sample_star.nf index 9f95175a..893aba35 100644 --- a/workflows/single_sample_star.nf +++ b/workflows/single_sample_star.nf @@ -1,4 +1,4 @@ -nextflow.preview.dsl=2 +nextflow.enable.dsl=2 //////////////////////////////////////////////////////// // Import sub-workflows/processes from the utils module: diff --git a/workflows/star.nf b/workflows/star.nf index 8e07cb68..e4065e5f 100644 --- a/workflows/star.nf +++ b/workflows/star.nf @@ -1,4 +1,4 @@ -nextflow.preview.dsl=2 +nextflow.enable.dsl=2 ////////////////////////////////////////////////////// // Define the parameters for current testing proces From 0aec6f4f7aa672f872263bc4c1bd0bcdaa3937cc Mon Sep 17 00:00:00 2001 From: Chris Flerin Date: Sat, 16 Jan 2021 00:37:12 +0100 Subject: [PATCH 16/26] Update CI tests to use NF 20.10.0 --- .github/workflows/bbknn.yml | 2 +- .github/workflows/bbknn_scenic.yml | 2 +- .github/workflows/decontx.yml | 2 +- .github/workflows/harmony.yml | 2 +- .github/workflows/mnncorrect.yml | 2 +- .github/workflows/scenic.yml | 2 +- 
.github/workflows/scenic_multiruns.yml | 2 +- .github/workflows/single_sample.yml | 2 +- .github/workflows/single_sample_decontx.yml | 2 +- .github/workflows/single_sample_decontx_scrublet.yml | 2 +- .github/workflows/single_sample_scenic.yml | 2 +- .github/workflows/single_sample_scenic_multiruns.yml | 2 +- .github/workflows/single_sample_scrublet.yml | 2 +- 13 files changed, 13 insertions(+), 13 deletions(-) diff --git a/.github/workflows/bbknn.yml b/.github/workflows/bbknn.yml index 1b14ee55..d43e1140 100644 --- a/.github/workflows/bbknn.yml +++ b/.github/workflows/bbknn.yml @@ -19,7 +19,7 @@ jobs: submodules: true - name: Install Nextflow run: | - export NXF_VER='20.04.1' + export NXF_VER='20.10.0' wget -qO- get.nextflow.io | bash sudo mv nextflow /usr/local/bin/ - name: Get sample data diff --git a/.github/workflows/bbknn_scenic.yml b/.github/workflows/bbknn_scenic.yml index b7c98cdd..8aa51b8d 100644 --- a/.github/workflows/bbknn_scenic.yml +++ b/.github/workflows/bbknn_scenic.yml @@ -19,7 +19,7 @@ jobs: submodules: true - name: Install Nextflow run: | - export NXF_VER='20.04.1' + export NXF_VER='20.10.0' wget -qO- get.nextflow.io | bash sudo mv nextflow /usr/local/bin/ - name: Get sample data diff --git a/.github/workflows/decontx.yml b/.github/workflows/decontx.yml index bf22fa5c..2a5205ed 100644 --- a/.github/workflows/decontx.yml +++ b/.github/workflows/decontx.yml @@ -19,7 +19,7 @@ jobs: submodules: true - name: Install Nextflow run: | - export NXF_VER='20.04.1' + export NXF_VER='20.10.0' wget -qO- get.nextflow.io | bash sudo mv nextflow /usr/local/bin/ - name: Get sample data diff --git a/.github/workflows/harmony.yml b/.github/workflows/harmony.yml index 313c3a65..c74dfdf4 100644 --- a/.github/workflows/harmony.yml +++ b/.github/workflows/harmony.yml @@ -19,7 +19,7 @@ jobs: submodules: true - name: Install Nextflow run: | - export NXF_VER='20.04.1' + export NXF_VER='20.10.0' wget -qO- get.nextflow.io | bash sudo mv nextflow /usr/local/bin/ - name: Get 
sample data diff --git a/.github/workflows/mnncorrect.yml b/.github/workflows/mnncorrect.yml index 8d2c804e..11bc0be0 100644 --- a/.github/workflows/mnncorrect.yml +++ b/.github/workflows/mnncorrect.yml @@ -19,7 +19,7 @@ jobs: submodules: true - name: Install Nextflow run: | - export NXF_VER='20.04.1' + export NXF_VER='20.10.0' wget -qO- get.nextflow.io | bash sudo mv nextflow /usr/local/bin/ - name: Get sample data diff --git a/.github/workflows/scenic.yml b/.github/workflows/scenic.yml index 52ab0f1f..10ff02b0 100644 --- a/.github/workflows/scenic.yml +++ b/.github/workflows/scenic.yml @@ -19,7 +19,7 @@ jobs: submodules: true - name: Install Nextflow run: | - export NXF_VER='20.04.1' + export NXF_VER='20.10.0' wget -qO- get.nextflow.io | bash sudo mv nextflow /usr/local/bin/ - name: Run scenic test diff --git a/.github/workflows/scenic_multiruns.yml b/.github/workflows/scenic_multiruns.yml index 3e3c7077..69e5c367 100644 --- a/.github/workflows/scenic_multiruns.yml +++ b/.github/workflows/scenic_multiruns.yml @@ -19,7 +19,7 @@ jobs: submodules: true - name: Install Nextflow run: | - export NXF_VER='20.04.1' + export NXF_VER='20.10.0' wget -qO- get.nextflow.io | bash sudo mv nextflow /usr/local/bin/ - name: Run scenic test diff --git a/.github/workflows/single_sample.yml b/.github/workflows/single_sample.yml index 0cc537be..68d2d945 100644 --- a/.github/workflows/single_sample.yml +++ b/.github/workflows/single_sample.yml @@ -19,7 +19,7 @@ jobs: submodules: true - name: Install Nextflow run: | - export NXF_VER='20.04.1' + export NXF_VER='20.10.0' wget -qO- get.nextflow.io | bash sudo mv nextflow /usr/local/bin/ - name: Get sample data diff --git a/.github/workflows/single_sample_decontx.yml b/.github/workflows/single_sample_decontx.yml index 59916597..bf384455 100644 --- a/.github/workflows/single_sample_decontx.yml +++ b/.github/workflows/single_sample_decontx.yml @@ -19,7 +19,7 @@ jobs: submodules: true - name: Install Nextflow run: | - export NXF_VER='20.04.1' 
+ export NXF_VER='20.10.0' wget -qO- get.nextflow.io | bash sudo mv nextflow /usr/local/bin/ - name: Get sample data diff --git a/.github/workflows/single_sample_decontx_scrublet.yml b/.github/workflows/single_sample_decontx_scrublet.yml index f056f068..8140b41e 100644 --- a/.github/workflows/single_sample_decontx_scrublet.yml +++ b/.github/workflows/single_sample_decontx_scrublet.yml @@ -19,7 +19,7 @@ jobs: submodules: true - name: Install Nextflow run: | - export NXF_VER='20.04.1' + export NXF_VER='20.10.0' wget -qO- get.nextflow.io | bash sudo mv nextflow /usr/local/bin/ - name: Get sample data diff --git a/.github/workflows/single_sample_scenic.yml b/.github/workflows/single_sample_scenic.yml index 1c03b4a6..30f24122 100644 --- a/.github/workflows/single_sample_scenic.yml +++ b/.github/workflows/single_sample_scenic.yml @@ -19,7 +19,7 @@ jobs: submodules: true - name: Install Nextflow run: | - export NXF_VER='20.04.1' + export NXF_VER='20.10.0' wget -qO- get.nextflow.io | bash sudo mv nextflow /usr/local/bin/ - name: Get sample data diff --git a/.github/workflows/single_sample_scenic_multiruns.yml b/.github/workflows/single_sample_scenic_multiruns.yml index 2dbd3191..ae0b3785 100644 --- a/.github/workflows/single_sample_scenic_multiruns.yml +++ b/.github/workflows/single_sample_scenic_multiruns.yml @@ -19,7 +19,7 @@ jobs: submodules: true - name: Install Nextflow run: | - export NXF_VER='20.04.1' + export NXF_VER='20.10.0' wget -qO- get.nextflow.io | bash sudo mv nextflow /usr/local/bin/ - name: Get sample data diff --git a/.github/workflows/single_sample_scrublet.yml b/.github/workflows/single_sample_scrublet.yml index ff7043e0..7d28784b 100644 --- a/.github/workflows/single_sample_scrublet.yml +++ b/.github/workflows/single_sample_scrublet.yml @@ -19,7 +19,7 @@ jobs: submodules: true - name: Install Nextflow run: | - export NXF_VER='20.04.1' + export NXF_VER='20.10.0' wget -qO- get.nextflow.io | bash sudo mv nextflow /usr/local/bin/ - name: Get sample data 
From b4d9cc95893adfb0bc7a239c8262066cd52f0f09 Mon Sep 17 00:00:00 2001 From: Chris Flerin Date: Tue, 19 Jan 2021 21:04:15 +0100 Subject: [PATCH 17/26] Update parameter exp. CI to use nextflow 20.10.0 --- .github/workflows/single_sample_param_exploration.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/single_sample_param_exploration.yml b/.github/workflows/single_sample_param_exploration.yml index c28ff6b5..cdb86202 100644 --- a/.github/workflows/single_sample_param_exploration.yml +++ b/.github/workflows/single_sample_param_exploration.yml @@ -19,7 +19,7 @@ jobs: submodules: true - name: Install Nextflow run: | - export NXF_VER='20.04.1' + export NXF_VER='20.10.0' wget -qO- get.nextflow.io | bash sudo mv nextflow /usr/local/bin/ - name: Get sample data From 8db06b5f4bddd6edc575a75398298a2276fb8b23 Mon Sep 17 00:00:00 2001 From: Chris Flerin Date: Wed, 20 Jan 2021 11:24:47 +0100 Subject: [PATCH 18/26] Allow newer nextflow versions in manifest --- nextflow.config | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/nextflow.config b/nextflow.config index 878cf990..235c541f 100644 --- a/nextflow.config +++ b/nextflow.config @@ -6,7 +6,7 @@ manifest { version = '0.24.0' mainScript = 'main.nf' defaultBranch = 'master' - nextflowVersion = '!20.10.0' // with ! prefix, stop execution if current version does not match required version. 
+ nextflowVersion = '!>=20.10.0' } // load these configs first: From 73c2824eb6f6a1fc6e82f2ba76040cc8fb5553d5 Mon Sep 17 00:00:00 2001 From: KrisDavie Date: Wed, 20 Jan 2021 14:39:52 +0100 Subject: [PATCH 19/26] Update pysradb --- src/utils/Dockerfile | 8 ++++---- src/utils/conf/base.config | 2 +- 2 files changed, 5 insertions(+), 5 deletions(-) diff --git a/src/utils/Dockerfile b/src/utils/Dockerfile index 1e5c0cd1..e5df8987 100644 --- a/src/utils/Dockerfile +++ b/src/utils/Dockerfile @@ -1,4 +1,4 @@ -FROM python:3.6.8-slim-stretch AS compile-image +FROM python:3.7.9-slim-stretch AS compile-image RUN apt-get update && \ apt-get install -y --no-install-recommends build-essential gcc apt-utils cmake openssh-client git && \ @@ -12,12 +12,12 @@ ENV PATH="/opt/venv/bin:$PATH" RUN python3 -m pip install ipykernel && \ pip install --no-cache-dir papermill && \ - pip install --no-cache-dir pysradb==0.10.2 && \ + pip install --no-cache-dir pysradb==1.0.0 && \ pip install --no-cache-dir nbconvert==5.6.0 && \ pip install --no-cache-dir nbmerge==0.0.4 && \ - pip install --no-cache-dir nbformat==4.4.0 + pip install --no-cache-dir nbformat==5.0.8 -FROM python:3.6.8-slim-stretch AS build-image +FROM python:3.7.9-slim-stretch AS build-image RUN apt-get -y update && \ # Need to run ps apt-get -y install procps && \ diff --git a/src/utils/conf/base.config b/src/utils/conf/base.config index 69d2d1f5..34559dc1 100644 --- a/src/utils/conf/base.config +++ b/src/utils/conf/base.config @@ -1,6 +1,6 @@ params { utils { - container = 'vibsinglecellnf/utils:0.3.0' + container = 'vibsinglecellnf/utils:0.4.0' publish { // pipelineOutputSuffix = '' compressionLevel = 6 From 795d2c261961ad5e041b9122a3d058dafc63e817 Mon Sep 17 00:00:00 2001 From: dweemx Date: Wed, 20 Jan 2021 17:05:28 +0100 Subject: [PATCH 20/26] Fix #288 for python converter Add new remove10xGEMWell to be backward compatible --- src/utils/bin/sc_file_converter.py | 36 ++++++++++++++++++++++-------- src/utils/conf/base.config | 
1 + src/utils/processes/utils.nf | 2 ++ 3 files changed, 30 insertions(+), 9 deletions(-) diff --git a/src/utils/bin/sc_file_converter.py b/src/utils/bin/sc_file_converter.py index e2a30977..5cb44290 100755 --- a/src/utils/bin/sc_file_converter.py +++ b/src/utils/bin/sc_file_converter.py @@ -7,6 +7,7 @@ import numpy as np from scipy.sparse import csr_matrix + def str2bool(v): if isinstance(v, bool): return v @@ -68,6 +69,15 @@ def str2bool(v): help="Tag each cell with the given sample_id." ) +parser.add_argument( + "-r", "--remove-10x-gem-well", + type=str2bool, + action="store", + dest="remove_10x_gem_well", + default=False, + help="If tag_cell_with_sample_id is passed, remove the GEM well number from the barcode." +) + parser.add_argument( "-u", "--make-var-index-unique", type=str2bool, @@ -120,14 +130,17 @@ def add_sample_id(adata, args): return adata -def tag_cell(adata, tag): +def tag_cell(adata, tag, remove_10x_gem_well=False): # Check the number of untagged cells # We consider an untagged cell as matching the following pattern: [barcode-id]-[sample-index] where # - [barcode-id] is sequence of A,C,G,T letters # - [sample-index] is a natural number - num_untagged_cells = sum(list(map(lambda x: len(re.findall(r"[ACGT]*-[0-9]+$", x)), adata.obs.index))) - if num_untagged_cells != 0: - adata.obs.index = list(map(lambda x: re.sub(r"([ACGT]*)-.*", rf'\1-{tag}', x), adata.obs.index)) + if remove_10x_gem_well: + num_untagged_cells = sum(list(map(lambda x: len(re.findall(r"[ACGT]*-[0-9]+$", x)), adata.obs.index))) + if num_untagged_cells != 0: + adata.obs.index = list(map(lambda x: re.sub(r"([ACGT]*)-.*", rf'\1-{tag}', x), adata.obs.index)) + else: + adata.obs.index = [cell_barcode + "___" + tag for cell_barcode in adata.obs.index] return adata @@ -148,7 +161,8 @@ def tag_cell(adata, tag): if args.tag_cell_with_sample_id: adata = tag_cell( adata=adata, - tag=args.sample_id + tag=args.sample_id, + remove_10x_gem_well=args.remove_10x_gem_well ) adata.var.index = 
adata.var.index.astype(str) # Check if var index is unique @@ -178,7 +192,8 @@ def tag_cell(adata, tag): if args.tag_cell_with_sample_id: adata = tag_cell( adata=adata, - tag=args.sample_id + tag=args.sample_id, + remove_10x_gem_well=args.remove_10x_gem_well ) adata.var.index = adata.var.index.astype(str) # Check if var index is unique @@ -213,7 +228,8 @@ def tag_cell(adata, tag): if args.tag_cell_with_sample_id: adata = tag_cell( adata=adata, - tag=args.sample_id + tag=args.sample_id, + remove_10x_gem_well=args.remove_10x_gem_well ) adata.var.index = adata.var.index.astype(str) # Check if var index is unique @@ -238,7 +254,8 @@ def tag_cell(adata, tag): if args.tag_cell_with_sample_id: adata = tag_cell( adata=adata, - tag=args.sample_id + tag=args.sample_id, + remove_10x_gem_well=args.remove_10x_gem_well ) adata.var.index = adata.var.index.astype(str) # Check if var index is unique @@ -264,7 +281,8 @@ def tag_cell(adata, tag): if args.tag_cell_with_sample_id: adata = tag_cell( adata=adata, - tag=args.sample_id + tag=args.sample_id, + remove_10x_gem_well=args.remove_10x_gem_well ) adata.var.index = adata.var.index.astype(str) # Check if var index is unique diff --git a/src/utils/conf/base.config b/src/utils/conf/base.config index 34559dc1..7f872648 100644 --- a/src/utils/conf/base.config +++ b/src/utils/conf/base.config @@ -12,6 +12,7 @@ params { file_converter { off = 'h5ad' tagCellWithSampleId = true + remove10xGEMWell = false useFilteredMatrix = true makeVarIndexUnique = false } diff --git a/src/utils/processes/utils.nf b/src/utils/processes/utils.nf index 8e3e357a..e730bfc1 100644 --- a/src/utils/processes/utils.nf +++ b/src/utils/processes/utils.nf @@ -102,6 +102,7 @@ def runPythonConverter = { --sample-id "${sampleId}" \ ${(processParams.containsKey('makeVarIndexUnique')) ? '--make-var-index-unique '+ processParams.makeVarIndexUnique : ''} \ ${(processParams.containsKey('tagCellWithSampleId')) ? 
'--tag-cell-with-sample-id '+ processParams.tagCellWithSampleId : ''} \ + ${(processParams.containsKey('remove10xGEMWell')) ? '--remove-10x-gem-well '+ processParams.remove10xGEMWell : ''} \ --input-format $inputDataType \ --output-format $outputDataType \ ${f} \ @@ -125,6 +126,7 @@ def runRConverter = { ${binDir}/sc_file_converter.R \ --sample-id "${sampleId}" \ ${(processParams.containsKey('tagCellWithSampleId')) ? '--tag-cell-with-sample-id '+ processParams.tagCellWithSampleId : ''} \ + ${(processParams.containsKey('remove10xGEMWell')) ? '--remove-10x-gem-well '+ processParams.remove10xGEMWell : ''} \ ${(processParams.containsKey('seuratAssay')) ? '--seurat-assay '+ processParams.seuratAssay : ''} \ ${(processParams.containsKey('seuratMainLayer')) ? '--seurat-main-assay '+ processParams.seuratMainLayer : ''} \ ${sceMainLayer != null ? '--sce-main-layer '+ sceMainLayer : ''} \ From 659cf51d2f32cd125216dac54da16a05e395a565 Mon Sep 17 00:00:00 2001 From: dweemx Date: Wed, 20 Jan 2021 17:16:07 +0100 Subject: [PATCH 21/26] Fix #288 for R converter --- src/utils/bin/sc_file_converter.R | 54 +++++++++++++++++++++---------- 1 file changed, 37 insertions(+), 17 deletions(-) diff --git a/src/utils/bin/sc_file_converter.R b/src/utils/bin/sc_file_converter.R index 55848721..7d7593d8 100755 --- a/src/utils/bin/sc_file_converter.R +++ b/src/utils/bin/sc_file_converter.R @@ -54,6 +54,14 @@ parser$add_argument( default = TRUE, help = "Sample ID of the given input file." ) +parser$add_argument( + '--remove-10x-gem-well', + type="character", + dest='remove_10x_gem_well', + action = "store", + default = FALSE, + help = "If tag_cell_with_sample_id is passed, remove the GEM well number from the barcode." 
+) parser$add_argument( '--seurat-assay', type="character", @@ -120,11 +128,15 @@ if(INPUT_FORMAT == 'seurat_rds' & OUTPUT_FORMAT == 'h5ad') { } # Tag cell with sample ID if(isTrue(x = args$`tag_cell_with_sample_id`)) { - new.names <- gsub( - pattern = "-([0-9]+)$", - replace = paste0("-", args$`sample_id`), - x = colnames(x = seurat) - ) + if(isTrue(x = args$`remove_10x_gem_well`)) { + new.names <- gsub( + pattern = "-([0-9]+)$", + replace = paste0("-", args$`sample_id`), + x = colnames(x = seurat) + ) + } else { + new.names <- paste0(colnames(x = seurat), "___", args$`sample_id`) + } seurat <- Seurat::RenameCells( object = seurat, new.names = new.names @@ -195,12 +207,16 @@ if(INPUT_FORMAT == 'seurat_rds' & OUTPUT_FORMAT == 'h5ad') { } # Tag cell with sample ID if(isTrue(x = args$`tag_cell_with_sample_id`)) { - new.names <- gsub( - pattern = "-([0-9]+)$", - replace = paste0("-", args$`sample_id`), - x = colnames(x = sce) - ) - colnames(x = sce) <- new.names + if(isTrue(x = args$`remove_10x_gem_well`)) { + new.names <- gsub( + pattern = "-([0-9]+)$", + replace = paste0("-", args$`sample_id`), + x = colnames(x = sce) + ) + } else { + new.names <- paste0(colnames(x = sce), "___", args$`sample_id`) + } + colnames(x = sce) <- new.names } # Add sample ID as colData entry col_data <- SummarizedExperiment::colData(x = sce) @@ -229,12 +245,16 @@ if(INPUT_FORMAT == 'seurat_rds' & OUTPUT_FORMAT == 'h5ad') { colnames(x = sce) <- SummarizedExperiment::colData(x = sce)$Barcode # Tag cell with sample ID if(isTrue(x = args$`tag_cell_with_sample_id`)) { - new.names <- gsub( - pattern = "-([0-9]+)$", - replace = paste0("-", args$`sample_id`), - x = colnames(x = sce) - ) - colnames(x = sce) <- new.names + if(isTrue(x = args$`remove_10x_gem_well`)) { + new.names <- gsub( + pattern = "-([0-9]+)$", + replace = paste0("-", args$`sample_id`), + x = colnames(x = sce) + ) + } else { + new.names <- paste0(colnames(x = sce), "___", args$`sample_id`) + } + colnames(x = sce) <- new.names } # 
Add sample ID as colData entry col_data <- SummarizedExperiment::colData(x = sce) From dad009b7f7a9a9f2ecb36876e297c385548d7f62 Mon Sep 17 00:00:00 2001 From: Chris Flerin Date: Tue, 26 Jan 2021 10:07:17 +0100 Subject: [PATCH 22/26] Allow for missing publish mode in params --- src/utils/processes/utils.nf | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/utils/processes/utils.nf b/src/utils/processes/utils.nf index e730bfc1..d59162ed 100644 --- a/src/utils/processes/utils.nf +++ b/src/utils/processes/utils.nf @@ -418,7 +418,7 @@ process SC__PUBLISH { publishDir \ "${getPublishDir(params.global.outdir,toolName)}", \ - mode: "${params.utils.publish.mode}", \ + mode: "${params.utils.publish?.mode ? params.utils.publish.mode: 'link'}", \ saveAs: { filename -> "${outputFileName}" } label 'compute_resources__minimal' From 491ad8b90a86eaee29f09ed00c03119f5c9fe277 Mon Sep 17 00:00:00 2001 From: Chris Flerin Date: Tue, 26 Jan 2021 10:08:34 +0100 Subject: [PATCH 23/26] Update version to 0.25.0 --- nextflow.config | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/nextflow.config b/nextflow.config index 5287022f..035199b1 100644 --- a/nextflow.config +++ b/nextflow.config @@ -3,7 +3,7 @@ manifest { name = 'vib-singlecell-nf/vsn-pipelines' description = 'A repository of pipelines for single-cell data in Nextflow DSL2' homePage = 'https://github.com/vib-singlecell-nf/vsn-pipelines' - version = '0.24.0' + version = '0.25.0' mainScript = 'main.nf' defaultBranch = 'master' nextflowVersion = '!>=20.10.0' From 201c4ff8d546ba9cd43c79a7ca15beef62c66550 Mon Sep 17 00:00:00 2001 From: Chris Flerin Date: Tue, 26 Jan 2021 11:51:39 +0100 Subject: [PATCH 24/26] Enable hard links in CI tests --- .github/workflows/bbknn.yml | 1 + .github/workflows/bbknn_scenic.yml | 1 + .github/workflows/decontx.yml | 1 + .github/workflows/harmony.yml | 1 + .github/workflows/mnncorrect.yml | 1 + .github/workflows/scenic.yml | 1 + .github/workflows/scenic_multiruns.yml | 1 + 
.github/workflows/single_sample.yml | 1 + .github/workflows/single_sample_decontx.yml | 1 + .github/workflows/single_sample_decontx_scrublet.yml | 1 + .github/workflows/single_sample_param_exploration.yml | 1 + .github/workflows/single_sample_scenic.yml | 1 + .github/workflows/single_sample_scenic_multiruns.yml | 1 + .github/workflows/single_sample_scrublet.yml | 1 + 14 files changed, 14 insertions(+) diff --git a/.github/workflows/bbknn.yml b/.github/workflows/bbknn.yml index d43e1140..d0feb42f 100644 --- a/.github/workflows/bbknn.yml +++ b/.github/workflows/bbknn.yml @@ -22,6 +22,7 @@ jobs: export NXF_VER='20.10.0' wget -qO- get.nextflow.io | bash sudo mv nextflow /usr/local/bin/ + echo 0 | sudo tee /proc/sys/fs/protected_hardlinks - name: Get sample data run: | mkdir testdata diff --git a/.github/workflows/bbknn_scenic.yml b/.github/workflows/bbknn_scenic.yml index 8aa51b8d..dc2fb4af 100644 --- a/.github/workflows/bbknn_scenic.yml +++ b/.github/workflows/bbknn_scenic.yml @@ -22,6 +22,7 @@ jobs: export NXF_VER='20.10.0' wget -qO- get.nextflow.io | bash sudo mv nextflow /usr/local/bin/ + echo 0 | sudo tee /proc/sys/fs/protected_hardlinks - name: Get sample data run: | mkdir testdata diff --git a/.github/workflows/decontx.yml b/.github/workflows/decontx.yml index 2a5205ed..cb6bbe2b 100644 --- a/.github/workflows/decontx.yml +++ b/.github/workflows/decontx.yml @@ -22,6 +22,7 @@ jobs: export NXF_VER='20.10.0' wget -qO- get.nextflow.io | bash sudo mv nextflow /usr/local/bin/ + echo 0 | sudo tee /proc/sys/fs/protected_hardlinks - name: Get sample data run: | wget https://raw.githubusercontent.com/aertslab/SCENICprotocol/master/example/sample_data.tar.gz diff --git a/.github/workflows/harmony.yml b/.github/workflows/harmony.yml index c74dfdf4..d1afe9b5 100644 --- a/.github/workflows/harmony.yml +++ b/.github/workflows/harmony.yml @@ -22,6 +22,7 @@ jobs: export NXF_VER='20.10.0' wget -qO- get.nextflow.io | bash sudo mv nextflow /usr/local/bin/ + echo 0 | sudo tee 
/proc/sys/fs/protected_hardlinks - name: Get sample data run: | mkdir testdata diff --git a/.github/workflows/mnncorrect.yml b/.github/workflows/mnncorrect.yml index 11bc0be0..5034b664 100644 --- a/.github/workflows/mnncorrect.yml +++ b/.github/workflows/mnncorrect.yml @@ -22,6 +22,7 @@ jobs: export NXF_VER='20.10.0' wget -qO- get.nextflow.io | bash sudo mv nextflow /usr/local/bin/ + echo 0 | sudo tee /proc/sys/fs/protected_hardlinks - name: Get sample data run: | mkdir testdata diff --git a/.github/workflows/scenic.yml b/.github/workflows/scenic.yml index 10ff02b0..afa199c0 100644 --- a/.github/workflows/scenic.yml +++ b/.github/workflows/scenic.yml @@ -22,6 +22,7 @@ jobs: export NXF_VER='20.10.0' wget -qO- get.nextflow.io | bash sudo mv nextflow /usr/local/bin/ + echo 0 | sudo tee /proc/sys/fs/protected_hardlinks - name: Run scenic test run: | nextflow run ${GITHUB_WORKSPACE} -profile scenic,test__scenic,test__compute_resources,docker -entry scenic -ansi-log false diff --git a/.github/workflows/scenic_multiruns.yml b/.github/workflows/scenic_multiruns.yml index 69e5c367..02de22c4 100644 --- a/.github/workflows/scenic_multiruns.yml +++ b/.github/workflows/scenic_multiruns.yml @@ -22,6 +22,7 @@ jobs: export NXF_VER='20.10.0' wget -qO- get.nextflow.io | bash sudo mv nextflow /usr/local/bin/ + echo 0 | sudo tee /proc/sys/fs/protected_hardlinks - name: Run scenic test run: | nextflow run ${GITHUB_WORKSPACE} -profile scenic_multiruns,test__scenic_multiruns,test__compute_resources,docker -entry scenic -ansi-log false diff --git a/.github/workflows/single_sample.yml b/.github/workflows/single_sample.yml index 68d2d945..19a682f6 100644 --- a/.github/workflows/single_sample.yml +++ b/.github/workflows/single_sample.yml @@ -22,6 +22,7 @@ jobs: export NXF_VER='20.10.0' wget -qO- get.nextflow.io | bash sudo mv nextflow /usr/local/bin/ + echo 0 | sudo tee /proc/sys/fs/protected_hardlinks - name: Get sample data run: | wget 
https://raw.githubusercontent.com/aertslab/SCENICprotocol/master/example/sample_data_tiny.tar.gz diff --git a/.github/workflows/single_sample_decontx.yml b/.github/workflows/single_sample_decontx.yml index bf384455..e4980925 100644 --- a/.github/workflows/single_sample_decontx.yml +++ b/.github/workflows/single_sample_decontx.yml @@ -22,6 +22,7 @@ jobs: export NXF_VER='20.10.0' wget -qO- get.nextflow.io | bash sudo mv nextflow /usr/local/bin/ + echo 0 | sudo tee /proc/sys/fs/protected_hardlinks - name: Get sample data run: | wget https://raw.githubusercontent.com/aertslab/SCENICprotocol/master/example/sample_data.tar.gz diff --git a/.github/workflows/single_sample_decontx_scrublet.yml b/.github/workflows/single_sample_decontx_scrublet.yml index 8140b41e..21e1f8b8 100644 --- a/.github/workflows/single_sample_decontx_scrublet.yml +++ b/.github/workflows/single_sample_decontx_scrublet.yml @@ -22,6 +22,7 @@ jobs: export NXF_VER='20.10.0' wget -qO- get.nextflow.io | bash sudo mv nextflow /usr/local/bin/ + echo 0 | sudo tee /proc/sys/fs/protected_hardlinks - name: Get sample data run: | wget https://raw.githubusercontent.com/aertslab/SCENICprotocol/master/example/sample_data.tar.gz diff --git a/.github/workflows/single_sample_param_exploration.yml b/.github/workflows/single_sample_param_exploration.yml index cdb86202..4c5e79c4 100644 --- a/.github/workflows/single_sample_param_exploration.yml +++ b/.github/workflows/single_sample_param_exploration.yml @@ -22,6 +22,7 @@ jobs: export NXF_VER='20.10.0' wget -qO- get.nextflow.io | bash sudo mv nextflow /usr/local/bin/ + echo 0 | sudo tee /proc/sys/fs/protected_hardlinks - name: Get sample data run: | wget https://raw.githubusercontent.com/aertslab/SCENICprotocol/master/example/sample_data_tiny.tar.gz diff --git a/.github/workflows/single_sample_scenic.yml b/.github/workflows/single_sample_scenic.yml index 30f24122..07a196bc 100644 --- a/.github/workflows/single_sample_scenic.yml +++ 
b/.github/workflows/single_sample_scenic.yml @@ -22,6 +22,7 @@ jobs: export NXF_VER='20.10.0' wget -qO- get.nextflow.io | bash sudo mv nextflow /usr/local/bin/ + echo 0 | sudo tee /proc/sys/fs/protected_hardlinks - name: Get sample data run: | wget https://raw.githubusercontent.com/aertslab/SCENICprotocol/master/example/sample_data_small.tar.gz diff --git a/.github/workflows/single_sample_scenic_multiruns.yml b/.github/workflows/single_sample_scenic_multiruns.yml index ae0b3785..8f773d66 100644 --- a/.github/workflows/single_sample_scenic_multiruns.yml +++ b/.github/workflows/single_sample_scenic_multiruns.yml @@ -22,6 +22,7 @@ jobs: export NXF_VER='20.10.0' wget -qO- get.nextflow.io | bash sudo mv nextflow /usr/local/bin/ + echo 0 | sudo tee /proc/sys/fs/protected_hardlinks - name: Get sample data run: | wget https://raw.githubusercontent.com/aertslab/SCENICprotocol/master/example/sample_data_small.tar.gz diff --git a/.github/workflows/single_sample_scrublet.yml b/.github/workflows/single_sample_scrublet.yml index 7d28784b..623f7598 100644 --- a/.github/workflows/single_sample_scrublet.yml +++ b/.github/workflows/single_sample_scrublet.yml @@ -22,6 +22,7 @@ jobs: export NXF_VER='20.10.0' wget -qO- get.nextflow.io | bash sudo mv nextflow /usr/local/bin/ + echo 0 | sudo tee /proc/sys/fs/protected_hardlinks - name: Get sample data run: | wget https://raw.githubusercontent.com/aertslab/SCENICprotocol/master/example/sample_data.tar.gz From 941e853aeddb0c59446e1e2c5986457a8729d136 Mon Sep 17 00:00:00 2001 From: Chris Flerin Date: Tue, 26 Jan 2021 12:05:02 +0100 Subject: [PATCH 25/26] Run CI tests on pushes and PRs to develop --- .github/workflows/bbknn.yml | 2 ++ .github/workflows/bbknn_scenic.yml | 2 ++ .github/workflows/decontx.yml | 2 ++ .github/workflows/harmony.yml | 2 ++ .github/workflows/mnncorrect.yml | 2 ++ .github/workflows/scenic.yml | 2 ++ .github/workflows/scenic_multiruns.yml | 2 ++ .github/workflows/single_sample.yml | 2 ++ 
.github/workflows/single_sample_decontx.yml | 2 ++ .github/workflows/single_sample_decontx_scrublet.yml | 2 ++ .github/workflows/single_sample_param_exploration.yml | 2 ++ .github/workflows/single_sample_scenic.yml | 2 ++ .github/workflows/single_sample_scenic_multiruns.yml | 2 ++ .github/workflows/single_sample_scrublet.yml | 2 ++ 14 files changed, 28 insertions(+) diff --git a/.github/workflows/bbknn.yml b/.github/workflows/bbknn.yml index d0feb42f..d71bf0d2 100644 --- a/.github/workflows/bbknn.yml +++ b/.github/workflows/bbknn.yml @@ -4,9 +4,11 @@ on: push: branches: - master + - develop pull_request: branches: - master + - develop jobs: build: diff --git a/.github/workflows/bbknn_scenic.yml b/.github/workflows/bbknn_scenic.yml index dc2fb4af..1f874e8e 100644 --- a/.github/workflows/bbknn_scenic.yml +++ b/.github/workflows/bbknn_scenic.yml @@ -4,9 +4,11 @@ on: push: branches: - master + - develop pull_request: branches: - master + - develop jobs: build: diff --git a/.github/workflows/decontx.yml b/.github/workflows/decontx.yml index cb6bbe2b..4a1a787b 100644 --- a/.github/workflows/decontx.yml +++ b/.github/workflows/decontx.yml @@ -4,9 +4,11 @@ on: push: branches: - master + - develop pull_request: branches: - master + - develop jobs: build: diff --git a/.github/workflows/harmony.yml b/.github/workflows/harmony.yml index d1afe9b5..a952eeb5 100644 --- a/.github/workflows/harmony.yml +++ b/.github/workflows/harmony.yml @@ -4,9 +4,11 @@ on: push: branches: - master + - develop pull_request: branches: - master + - develop jobs: build: diff --git a/.github/workflows/mnncorrect.yml b/.github/workflows/mnncorrect.yml index 5034b664..d47811fc 100644 --- a/.github/workflows/mnncorrect.yml +++ b/.github/workflows/mnncorrect.yml @@ -4,9 +4,11 @@ on: push: branches: - master + - develop pull_request: branches: - master + - develop jobs: build: diff --git a/.github/workflows/scenic.yml b/.github/workflows/scenic.yml index afa199c0..82703c35 100644 --- 
a/.github/workflows/scenic.yml +++ b/.github/workflows/scenic.yml @@ -4,9 +4,11 @@ on: push: branches: - master + - develop pull_request: branches: - master + - develop jobs: build: diff --git a/.github/workflows/scenic_multiruns.yml b/.github/workflows/scenic_multiruns.yml index 02de22c4..d8cf866a 100644 --- a/.github/workflows/scenic_multiruns.yml +++ b/.github/workflows/scenic_multiruns.yml @@ -4,9 +4,11 @@ on: push: branches: - master + - develop pull_request: branches: - master + - develop jobs: build: diff --git a/.github/workflows/single_sample.yml b/.github/workflows/single_sample.yml index 19a682f6..1149884c 100644 --- a/.github/workflows/single_sample.yml +++ b/.github/workflows/single_sample.yml @@ -4,9 +4,11 @@ on: push: branches: - master + - develop pull_request: branches: - master + - develop jobs: build: diff --git a/.github/workflows/single_sample_decontx.yml b/.github/workflows/single_sample_decontx.yml index e4980925..3e8c891b 100644 --- a/.github/workflows/single_sample_decontx.yml +++ b/.github/workflows/single_sample_decontx.yml @@ -4,9 +4,11 @@ on: push: branches: - master + - develop pull_request: branches: - master + - develop jobs: build: diff --git a/.github/workflows/single_sample_decontx_scrublet.yml b/.github/workflows/single_sample_decontx_scrublet.yml index 21e1f8b8..84d48fe5 100644 --- a/.github/workflows/single_sample_decontx_scrublet.yml +++ b/.github/workflows/single_sample_decontx_scrublet.yml @@ -4,9 +4,11 @@ on: push: branches: - master + - develop pull_request: branches: - master + - develop jobs: build: diff --git a/.github/workflows/single_sample_param_exploration.yml b/.github/workflows/single_sample_param_exploration.yml index 4c5e79c4..ae2d12f5 100644 --- a/.github/workflows/single_sample_param_exploration.yml +++ b/.github/workflows/single_sample_param_exploration.yml @@ -4,9 +4,11 @@ on: push: branches: - master + - develop pull_request: branches: - master + - develop jobs: build: diff --git 
a/.github/workflows/single_sample_scenic.yml b/.github/workflows/single_sample_scenic.yml index 07a196bc..36a3288a 100644 --- a/.github/workflows/single_sample_scenic.yml +++ b/.github/workflows/single_sample_scenic.yml @@ -4,9 +4,11 @@ on: push: branches: - master + - develop pull_request: branches: - master + - develop jobs: build: diff --git a/.github/workflows/single_sample_scenic_multiruns.yml b/.github/workflows/single_sample_scenic_multiruns.yml index 8f773d66..6ac0d90e 100644 --- a/.github/workflows/single_sample_scenic_multiruns.yml +++ b/.github/workflows/single_sample_scenic_multiruns.yml @@ -4,9 +4,11 @@ on: push: branches: - master + - develop pull_request: branches: - master + - develop jobs: build: diff --git a/.github/workflows/single_sample_scrublet.yml b/.github/workflows/single_sample_scrublet.yml index 623f7598..59455e2a 100644 --- a/.github/workflows/single_sample_scrublet.yml +++ b/.github/workflows/single_sample_scrublet.yml @@ -4,9 +4,11 @@ on: push: branches: - master + - develop pull_request: branches: - master + - develop jobs: build: From 71aac257fe870a948b2f88925fb05fc9560ae041 Mon Sep 17 00:00:00 2001 From: Chris Flerin Date: Tue, 26 Jan 2021 12:06:51 +0100 Subject: [PATCH 26/26] Remove submodules from CI tests --- .github/workflows/bbknn.yml | 2 -- .github/workflows/bbknn_scenic.yml | 2 -- .github/workflows/decontx.yml | 2 -- .github/workflows/harmony.yml | 2 -- .github/workflows/mnncorrect.yml | 2 -- .github/workflows/scenic.yml | 2 -- .github/workflows/scenic_multiruns.yml | 2 -- .github/workflows/single_sample.yml | 2 -- .github/workflows/single_sample_decontx.yml | 2 -- .github/workflows/single_sample_decontx_scrublet.yml | 2 -- .github/workflows/single_sample_param_exploration.yml | 2 -- .github/workflows/single_sample_scenic.yml | 2 -- .github/workflows/single_sample_scenic_multiruns.yml | 2 -- .github/workflows/single_sample_scrublet.yml | 2 -- 14 files changed, 28 deletions(-) diff --git a/.github/workflows/bbknn.yml 
b/.github/workflows/bbknn.yml index d71bf0d2..2c6f77f2 100644 --- a/.github/workflows/bbknn.yml +++ b/.github/workflows/bbknn.yml @@ -17,8 +17,6 @@ jobs: steps: - uses: actions/checkout@v2 - with: - submodules: true - name: Install Nextflow run: | export NXF_VER='20.10.0' diff --git a/.github/workflows/bbknn_scenic.yml b/.github/workflows/bbknn_scenic.yml index 1f874e8e..74271a0e 100644 --- a/.github/workflows/bbknn_scenic.yml +++ b/.github/workflows/bbknn_scenic.yml @@ -17,8 +17,6 @@ jobs: steps: - uses: actions/checkout@v2 - with: - submodules: true - name: Install Nextflow run: | export NXF_VER='20.10.0' diff --git a/.github/workflows/decontx.yml b/.github/workflows/decontx.yml index 4a1a787b..39ef0eee 100644 --- a/.github/workflows/decontx.yml +++ b/.github/workflows/decontx.yml @@ -17,8 +17,6 @@ jobs: steps: - uses: actions/checkout@v2 - with: - submodules: true - name: Install Nextflow run: | export NXF_VER='20.10.0' diff --git a/.github/workflows/harmony.yml b/.github/workflows/harmony.yml index a952eeb5..3193e562 100644 --- a/.github/workflows/harmony.yml +++ b/.github/workflows/harmony.yml @@ -17,8 +17,6 @@ jobs: steps: - uses: actions/checkout@v2 - with: - submodules: true - name: Install Nextflow run: | export NXF_VER='20.10.0' diff --git a/.github/workflows/mnncorrect.yml b/.github/workflows/mnncorrect.yml index d47811fc..4640c989 100644 --- a/.github/workflows/mnncorrect.yml +++ b/.github/workflows/mnncorrect.yml @@ -17,8 +17,6 @@ jobs: steps: - uses: actions/checkout@v2 - with: - submodules: true - name: Install Nextflow run: | export NXF_VER='20.10.0' diff --git a/.github/workflows/scenic.yml b/.github/workflows/scenic.yml index 82703c35..854dc639 100644 --- a/.github/workflows/scenic.yml +++ b/.github/workflows/scenic.yml @@ -17,8 +17,6 @@ jobs: steps: - uses: actions/checkout@v2 - with: - submodules: true - name: Install Nextflow run: | export NXF_VER='20.10.0' diff --git a/.github/workflows/scenic_multiruns.yml 
b/.github/workflows/scenic_multiruns.yml index d8cf866a..b7e02f4b 100644 --- a/.github/workflows/scenic_multiruns.yml +++ b/.github/workflows/scenic_multiruns.yml @@ -17,8 +17,6 @@ jobs: steps: - uses: actions/checkout@v2 - with: - submodules: true - name: Install Nextflow run: | export NXF_VER='20.10.0' diff --git a/.github/workflows/single_sample.yml b/.github/workflows/single_sample.yml index 1149884c..1afa41c2 100644 --- a/.github/workflows/single_sample.yml +++ b/.github/workflows/single_sample.yml @@ -17,8 +17,6 @@ jobs: steps: - uses: actions/checkout@v2 - with: - submodules: true - name: Install Nextflow run: | export NXF_VER='20.10.0' diff --git a/.github/workflows/single_sample_decontx.yml b/.github/workflows/single_sample_decontx.yml index 3e8c891b..ea763ca2 100644 --- a/.github/workflows/single_sample_decontx.yml +++ b/.github/workflows/single_sample_decontx.yml @@ -17,8 +17,6 @@ jobs: steps: - uses: actions/checkout@v2 - with: - submodules: true - name: Install Nextflow run: | export NXF_VER='20.10.0' diff --git a/.github/workflows/single_sample_decontx_scrublet.yml b/.github/workflows/single_sample_decontx_scrublet.yml index 84d48fe5..a34c8485 100644 --- a/.github/workflows/single_sample_decontx_scrublet.yml +++ b/.github/workflows/single_sample_decontx_scrublet.yml @@ -17,8 +17,6 @@ jobs: steps: - uses: actions/checkout@v2 - with: - submodules: true - name: Install Nextflow run: | export NXF_VER='20.10.0' diff --git a/.github/workflows/single_sample_param_exploration.yml b/.github/workflows/single_sample_param_exploration.yml index ae2d12f5..87407c62 100644 --- a/.github/workflows/single_sample_param_exploration.yml +++ b/.github/workflows/single_sample_param_exploration.yml @@ -17,8 +17,6 @@ jobs: steps: - uses: actions/checkout@v2 - with: - submodules: true - name: Install Nextflow run: | export NXF_VER='20.10.0' diff --git a/.github/workflows/single_sample_scenic.yml b/.github/workflows/single_sample_scenic.yml index 36a3288a..03eeb7cc 100644 --- 
a/.github/workflows/single_sample_scenic.yml +++ b/.github/workflows/single_sample_scenic.yml @@ -17,8 +17,6 @@ jobs: steps: - uses: actions/checkout@v2 - with: - submodules: true - name: Install Nextflow run: | export NXF_VER='20.10.0' diff --git a/.github/workflows/single_sample_scenic_multiruns.yml b/.github/workflows/single_sample_scenic_multiruns.yml index 6ac0d90e..f3044683 100644 --- a/.github/workflows/single_sample_scenic_multiruns.yml +++ b/.github/workflows/single_sample_scenic_multiruns.yml @@ -17,8 +17,6 @@ jobs: steps: - uses: actions/checkout@v2 - with: - submodules: true - name: Install Nextflow run: | export NXF_VER='20.10.0' diff --git a/.github/workflows/single_sample_scrublet.yml b/.github/workflows/single_sample_scrublet.yml index 59455e2a..9c2e2872 100644 --- a/.github/workflows/single_sample_scrublet.yml +++ b/.github/workflows/single_sample_scrublet.yml @@ -17,8 +17,6 @@ jobs: steps: - uses: actions/checkout@v2 - with: - submodules: true - name: Install Nextflow run: | export NXF_VER='20.10.0'