Skip to content

Commit

Permalink
WIP: enable in epi2me
Browse files Browse the repository at this point in the history
  • Loading branch information
rmcolq committed Nov 23, 2023
1 parent 3c5809f commit 71f2900
Show file tree
Hide file tree
Showing 4 changed files with 237 additions and 11 deletions.
9 changes: 9 additions & 0 deletions Dockerfile
Original file line number Diff line number Diff line change
@@ -0,0 +1,9 @@
FROM condaforge/mambaforge:latest AS conda

# Copy the environment spec to an explicit absolute path so the build does
# not depend on the base image's implicit WORKDIR being "/".
COPY environment.yml /environment.yml

# Create the "artic_rsv" conda environment, then drop the package caches
# to keep the image small.
RUN /opt/conda/bin/mamba env create -f /environment.yml \
    && /opt/conda/bin/mamba clean --all --yes

# Put the environment's binaries first on PATH so its tools (artic,
# bedtools, nextflow, ...) are found without activating the env.
ENV PATH=/opt/conda/envs/artic_rsv/bin:$PATH

CMD ["/bin/bash"]
15 changes: 15 additions & 0 deletions environment.yml
Original file line number Diff line number Diff line change
@@ -0,0 +1,15 @@
# Conda environment for the artic_rsv workflow.
# Built into the Docker image (see Dockerfile) and exposed there via PATH.
name: artic_rsv
channels:
  - bioconda
  - conda-forge
  - defaults
dependencies:
  - python<=3.10  # pinned at or below 3.10 -- presumably for artic compatibility; TODO confirm
  - biopython
  - pysam
  - nextflow
  - bedtools
  - artic
  - pip:
      # ampli_clean is not on PyPI/conda, so it is installed straight from GitHub.
      # NOTE: the pip package list must be nested UNDER the "pip:" key -- it was
      # previously at the same list level, which is not a valid pip sub-list.
      - git+https://github.com/Desperate-Dan/ampli_clean.git

16 changes: 5 additions & 11 deletions clean_cons.nf → main.nf
Original file line number Diff line number Diff line change
@@ -1,12 +1,5 @@
// Declare syntax version
nextflow.enable.dsl=2

// Script parameters
params.refs = "/home/dmmalone/RSV_analysis/testing_ground/RSV_refs.fasta"
params.bed = "/home/dmmalone/RSV_analysis/testing_ground/RSVA.primer.bed"
params.fastqIn = "/home/dmmalone/RSV_analysis/RSVLO_A_Run2/fastq_pass/*/*"

process ampliClean {
container "${params.wf.container}@${params.wf.container_sha}"
input:
tuple val(key), file(samples)
path refs
Expand All @@ -26,6 +19,7 @@ process ampliClean {
}

process articMinion {
container "${params.wf.container}@${params.wf.container_sha}"
input:
path input_reads
val base
Expand All @@ -39,9 +33,9 @@ process articMinion {
}

workflow {
def ref_ch = Channel.value(params.refs)
def bed_ch = Channel.value(params.bed)
def fastqIn_ch = Channel.fromPath(params.fastqIn, checkIfExists:true)
ref_ch = file(params.refs)
bed_ch = file(params.bed)
fastqIn_ch = Channel.fromPath(params.fastq, checkIfExists:true)
| map { file ->
def key = file.parent.toString().tokenize('/').last()
return tuple(key, file)
Expand Down
208 changes: 208 additions & 0 deletions nextflow.config
Original file line number Diff line number Diff line change
@@ -0,0 +1,208 @@
//
// Notes to End Users.
//
// The workflow should run without editing this configuration file,
// however there may be instances in which you wish to edit this
// file for compute performance or other reasons. Please see:
//
// https://nextflow.io/docs/latest/config.html#configuration
//
// for further help editing this file.

params {
    help = false
    version = false

    // Required inputs -- no defaults, supplied on the command line / by the
    // launcher (e.g. --refs refs.fasta --bed scheme.bed --fastq 'dir/*/*').
    refs = null       // reference FASTA used by ampliClean
    bed = null        // primer scheme BED file
    fastq = null      // input fastq path/glob (consumed by Channel.fromPath)

    // Declared here because the timeline/report/trace/dag scopes below
    // interpolate them; without defaults those file paths resolve against
    // null. Launchers such as EPI2ME override unique_id per run.
    tracedir = "pipeline_info"
    unique_id = "local"

    wf {
        example_cmd = [
            "--fastq test_data/barcode01/reads.fastq.gz",
        ]
        agent = null
        // Container image (by digest) used by the workflow processes.
        container = "rmcolq/artic_rsv"
        container_sha = "sha256:68100b5e1d4dc995d707ee716bda9373e827879904c381b59a71105476c36f58"
    }
}


// Workflow metadata shown by `nextflow info` and used for version checks.
manifest {
    name = 'Desperate-Dan/RSV_nextflow'
    author = 'Daniel Maloney'
    homePage = 'https://github.com/Desperate-Dan/RSV_nextflow/'
    description = 'Generate assemblies from RSV sequence data generated using the ARTIC primer scheme.'
    mainScript = 'main.nf'
    // Minimum Nextflow version required to run this pipeline.
    nextflowVersion = '>=20.10.0'
    version = 'v0.0.1'
}

// Execution profiles: select one (or more) with `nextflow run -profile <name>`.
// Each container-engine profile enables exactly one engine and disables the
// rest so engines cannot conflict.
profiles {
    // Default profile: Docker, running as the host user.
    standard {
        docker {
            enabled = true
            // this ensures container is run as host user and group, but
            // also adds host user to the within-container group
            runOptions = "--user \$(id -u):\$(id -g) --group-add 100"
        }
    }
    // Debugging aids: keep work dirs, dump task hashes, log the host.
    debug {
        dumpHashes = true
        process.beforeScript = 'echo $HOSTNAME'
        cleanup = false
    }
    conda {
        conda.enabled = true
        docker.enabled = false
        singularity.enabled = false
        podman.enabled = false
        shifter.enabled = false
        charliecloud.enabled = false
        apptainer.enabled = false
    }
    // Like conda, but resolve environments with the faster mamba solver.
    mamba {
        conda.enabled = true
        conda.useMamba = true
        docker.enabled = false
        singularity.enabled = false
        podman.enabled = false
        shifter.enabled = false
        charliecloud.enabled = false
        apptainer.enabled = false
    }
    docker {
        docker.enabled = true
        docker.registry = 'docker.io'
        docker.userEmulation = true
        conda.enabled = false
        singularity.enabled = false
        podman.enabled = false
        shifter.enabled = false
        charliecloud.enabled = false
        apptainer.enabled = false
    }
    // Combine with the docker profile on Apple-silicon / ARM hosts to force
    // amd64 images.
    arm {
        docker.runOptions = '-u $(id -u):$(id -g) --platform=linux/amd64'
    }
    singularity {
        singularity.enabled = true
        singularity.autoMounts = true
        conda.enabled = false
        docker.enabled = false
        podman.enabled = false
        shifter.enabled = false
        charliecloud.enabled = false
        apptainer.enabled = false
    }
    podman {
        podman.enabled = true
        podman.registry = 'quay.io'
        conda.enabled = false
        docker.enabled = false
        singularity.enabled = false
        shifter.enabled = false
        charliecloud.enabled = false
        apptainer.enabled = false
    }
    shifter {
        shifter.enabled = true
        conda.enabled = false
        docker.enabled = false
        singularity.enabled = false
        podman.enabled = false
        charliecloud.enabled = false
        apptainer.enabled = false
    }
    charliecloud {
        charliecloud.enabled = true
        conda.enabled = false
        docker.enabled = false
        singularity.enabled = false
        podman.enabled = false
        shifter.enabled = false
        apptainer.enabled = false
    }
    apptainer {
        apptainer.enabled = true
        conda.enabled = false
        docker.enabled = false
        singularity.enabled = false
        podman.enabled = false
        shifter.enabled = false
        charliecloud.enabled = false
    }
    // Resource sizing for Gitpod workspaces.
    gitpod {
        executor.name = 'local'
        executor.cpus = 16
        executor.memory = 60.GB
    }
    // NOTE(review): these include conf/test.config / conf/test_full.config,
    // which are not visible in this commit -- confirm they exist in the repo.
    test { includeConfig 'conf/test.config' }
    test_full { includeConfig 'conf/test_full.config' }
}


// Export these variables to prevent local Python/R libraries from conflicting with those in the container
// The JULIA depot path has been adjusted to a fixed path `/usr/local/share/julia` that needs to be used for packages in the container.
// See https://apeltzer.github.io/post/03-julia-lang-nextflow/ for details on that. Once we have a common agreement on where to keep Julia packages, this is adjustable.

env {
    PYTHONNOUSERSITE = 1
    R_PROFILE_USER = "/.Rprofile"
    R_ENVIRON_USER = "/.Renviron"
    JULIA_DEPOT_PATH = "/usr/local/share/julia"
}

// Capture exit codes from upstream processes when piping
process.shell = ['/bin/bash', '-euo', 'pipefail']

// Execution reports. params.tracedir / params.unique_id are not guaranteed
// to be declared when this config is used on its own, so fall back to safe
// defaults instead of interpolating null into the file paths.
timeline {
    enabled = true
    file = "${params.tracedir ?: 'pipeline_info'}/execution_timeline_${params.unique_id ?: 'local'}.html"
}
report {
    enabled = true
    file = "${params.tracedir ?: 'pipeline_info'}/execution_report_${params.unique_id ?: 'local'}.html"
}
trace {
    enabled = true
    file = "${params.tracedir ?: 'pipeline_info'}/execution_trace_${params.unique_id ?: 'local'}.txt"
}
dag {
    enabled = true
    file = "${params.tracedir ?: 'pipeline_info'}/pipeline_dag_${params.unique_id ?: 'local'}.html"
}


// Function to ensure that resource requirements don't go beyond
// a maximum limit.
//
//   obj  - requested resource (a MemoryUnit, a Duration, or an int cpu count)
//   type - one of 'memory', 'time', 'cpus'
//
// Returns the smaller of the request and the corresponding params.max_*
// cap; if the cap is missing or malformed, warns and returns the request
// unchanged. NOTE: Comparable.compareTo only guarantees the SIGN of its
// result, not the value 1, so the comparisons test `> 0` rather than `== 1`.
def check_max(obj, type) {
    if (type == 'memory') {
        try {
            if (obj.compareTo(params.max_memory as nextflow.util.MemoryUnit) > 0)
                return params.max_memory as nextflow.util.MemoryUnit
            else
                return obj
        } catch (all) {
            println " ### ERROR ### Max memory '${params.max_memory}' is not valid! Using default value: $obj"
            return obj
        }
    } else if (type == 'time') {
        try {
            if (obj.compareTo(params.max_time as nextflow.util.Duration) > 0)
                return params.max_time as nextflow.util.Duration
            else
                return obj
        } catch (all) {
            println " ### ERROR ### Max time '${params.max_time}' is not valid! Using default value: $obj"
            return obj
        }
    } else if (type == 'cpus') {
        try {
            return Math.min( obj, params.max_cpus as int )
        } catch (all) {
            println " ### ERROR ### Max cpus '${params.max_cpus}' is not valid! Using default value: $obj"
            return obj
        }
    }
}

0 comments on commit 71f2900

Please sign in to comment.