Commit 57ad911f authored by mmassaviol's avatar mmassaviol
Browse files

Update Metabarcoding workflow

parent c75d8605
File mode changed from 100755 to 100644
# Metabarcoding workflow configuration (block style).
pipeline: Metabarcoding
params:
  results_dir: /Results
  sample_dir: /Data
  sample_suffix: .fastq.gz
  SeOrPe: PE
  # 'null' is a tool name here, quoted so YAML does not read it as a null value.
  trimming: 'null'
  cutadapt_PE_output_dir: cutadapt_PE
  cutadapt_threads: 4
  cutadapt_qc_score: --quality-base=33
  cutadapt_qc_min: 20
  cutadapt_max_N: 0
  cutadapt_a: ''
  cutadapt_A: ''
  cutadapt_SE_output_dir: cutadapt_SE
  null_output_dir: ''
  dada2: dada2
  dada2_output_dir: dada2
  dada2_threads: 4
  dada2_no_filter_and_trim: false
  dada2_remove_chim: true
  dada2_marker_type: 16S
  dada2_taxonomypath: silva132
  dada2_truncLen_forward: 0
  dada2_truncLen_reverse: 0
  # -1 presumably means "no maxEE filter" — TODO confirm against the dada2 rule.
  dada2_maxEE_forward: -1
  dada2_maxEE_reverse: -1
samples: []
groups: []
final_step: all
steps:
- title: Trimming
  name: trimming
  tools:
  - cutadapt
  - 'null'
  default: 'null'
- title: Dada2
  name: dada2
  tools:
  - dada2
  default: dada2
# UI metadata: widget type (and owning tool/rule) for each parameter above.
params_info:
  results_dir:
    type: output_dir
  sample_dir:
    type: input_dir
  sample_suffix:
    type: text
  SeOrPe:
    type: radio
  cutadapt_threads:
    tool: cutadapt
    rule: cutadapt_SE
    type: numeric
  cutadapt_qc_score:
    tool: cutadapt
    rule: cutadapt_SE
    type: radio
  cutadapt_qc_min:
    tool: cutadapt
    rule: cutadapt_SE
    type: numeric
  cutadapt_max_N:
    tool: cutadapt
    rule: cutadapt_PE
    type: numeric
  cutadapt_a:
    tool: cutadapt
    rule: cutadapt_SE
    type: text
  cutadapt_A:
    tool: cutadapt
    rule: cutadapt_PE
    type: text
  dada2_threads:
    tool: dada2
    rule: dada2
    type: numeric
  dada2_no_filter_and_trim:
    tool: dada2
    rule: dada2
    type: checkbox
  dada2_remove_chim:
    tool: dada2
    rule: dada2
    type: checkbox
  dada2_marker_type:
    tool: dada2
    rule: dada2
    type: select
  dada2_taxonomypath:
    tool: dada2
    rule: dada2
    type: select
  dada2_truncLen_forward:
    tool: dada2
    rule: dada2
    type: numeric
  dada2_truncLen_reverse:
    tool: dada2
    rule: dada2
    type: numeric
  dada2_maxEE_forward:
    tool: dada2
    rule: dada2
    type: numeric
  dada2_maxEE_reverse:
    tool: dada2
    rule: dada2
    type: numeric
prepare_report_scripts: []
prepare_report_outputs: {}
# Declared output files per tool/rule; '{sample}' is expanded per sample.
outputs:
  cutadapt:
    cutadapt_PE:
    - name: read_trimmed
      file: '{sample}_trimmed_R1.fq.gz'
      description: Reads forward trimmed
    - name: read2_trimmed
      file: '{sample}_trimmed_R2.fq.gz'
      description: Reads reverse trimmed
    cutadapt_SE:
    - name: read
      file: '{sample}_trimmed.fq.gz'
      description: Reads trimmed
  'null':
    'null': []
  dada2:
    dada2:
    - name: qualityForward
      file: Quality_forward_mqc.png
      description: Quality graph of the forward reads of the first two samples
    - name: qualityReverse
      file: Quality_reverse_mqc.png
      description: Quality graph of the reverse reads of the first two samples
    - name: filter_and_trim_out
      file: filter_and_trim_out.rds
      description: Post-filter and trimming report
    - name: filtFs
      file: filtered/{sample}_F_filt.fastq.gz
      description: List of filtered forward reads
    - name: filtRs
      file: filtered/{sample}_R_filt.fastq.gz
      description: List of filtered reverse reads
    - name: errorsForward
      file: Errors_forward_mqc.png
      description: Error rate graph on forward reads
    - name: errorsReverse
      file: Errors_reverse_mqc.png
      description: Error rate graph on reverse reads
    - name: errF_rds
      file: errF.rds
      description: Error prediction for forward reads
    - name: errR_rds
      file: errR.rds
      description: Error prediction for reverse reads
    - name: track
      file: Track_reads_mqc.tsv
      description: Read count in the different steps
    - name: seqtab
      file: seqtab.rds
      description: Sequence table
    - name: seqtab_nochim_csv
      file: seqtab_nochim.csv
      description: Sequence table after removal of chimeras
    - name: seqtab_nochim_rds
      file: seqtab_nochim.rds
      description: Sequence table after removal of chimeras
    - name: uniques_nochim
      file: uniques_nochim.fasta
      description: Unique sequences after removal of chimeras
    - name: taxa_bootstrap
      file: Taxa_bootstrap_mqc.tsv
      description: Bootstrap of taxa
    - name: taxa_rds
      file: taxa.rds
      description: Taxa
    - name: otu_table
      file: otu_table.csv
      description: OTU table
    - name: tax_table_csv
      file: tax_table.csv
      description: Taxa table
    - name: richness
      file: Richness_mqc.png
      description: Richness plot
    - name: top20
      file: Top_20_mqc.png
      description: Top 20 OTU
    - name: ps_out
      file: ps_out.rds
      description: Phyloseq object
    - name: ordination
      file: Ordination_mqc.png
      description: Ordination plot
    - name: tax_table_biom
      file: tax_table.qiime.txt
      description: Taxa table for qiime
    - name: out_table_biom
      file: otu_table.qiime.biom
      description: OTU table for qiime
# MultiQC module to use per tool ('custom' = custom-content files, *_mqc.*).
multiqc:
  cutadapt: cutadapt
  'null': custom
  dada2: custom
Bootstrap: localimage
From: ../base.simg
From: ../base.sif
%environment
export PATH=/opt/biotools/bin:$PATH
......@@ -17,8 +17,8 @@ From: ../base.simg
echo "This container contains two apps (UI and Snakemake)."
echo "UI is a user interface to set up the workflow and launch it."
echo "Snakemake let you provide your configfile and other parameters to the snakemake command and launch it."
echo "To get help for an app :\nsingularity help --app appName this_container.simg"
echo "To run an app :\nsingularity run --app appName this_container.simg"
echo "To get help for an app :\nsingularity help --app appName this_container.sif"
echo "To run an app :\nsingularity run --app appName this_container.sif"
########################
......@@ -30,7 +30,7 @@ echo "To run an app :\nsingularity run --app appName this_container.simg"
%apphelp UI
To run the UI app you should bind data and results directories like in the following example.
You must also provide the host address and port where the shiny app will be launched
exemple : singularity run --app UI -B /path/to/data/directory:/Data -B /path/to/store/Results:/Results this_container.simg 127.0.0.1 1234
example: singularity run --app UI -B /path/to/data/directory:/Data -B /path/to/store/Results:/Results this_container.sif 127.0.0.1 1234
########################
......@@ -46,7 +46,7 @@ exemple : singularity run --app UI -B /path/to/data/directory:/Data -B /path/to/
%apphelp Snakemake
To run the Snakemake app you should bind data and results directories like in the following example.
You must also provide the configfile and the number of cores provided to snakemake command (you can add other parameters after these two)
exemple : singularity run --app Snakemake -B /path/to/data/directory:/Data -B /path/to/store/Results:/Results this_container.simg myconfig.yml 16 otherparams
example: singularity run --app Snakemake -B /path/to/data/directory:/Data -B /path/to/store/Results:/Results this_container.sif myconfig.yml 16 otherparams
########################
# App getConfigfile
......@@ -56,7 +56,7 @@ exemple : singularity run --app Snakemake -B /path/to/data/directory:/Data -B /p
%apphelp getConfigfile
To run the getConfigfile app you don't need to bind directories. This app will only copy the default parameters file from the container to your local disk.
exemple : singularity run --app getConfigfile this_container.simg
example: singularity run --app getConfigfile this_container.sif
%help
This container contains three apps (UI, Snakemake and getConfigfile).
......@@ -64,9 +64,9 @@ This container contains three apps (UI, Snakemake and getConfigfile).
* Snakemake let you provide your configfile and other parameters to the snakemake command and launch it.
* getConfigfile give you a copy of a default parameters file to fill and use with the Snakemake app
To get help for an app :
singularity help --app appName this_container.simg
singularity help --app appName this_container.sif
To run an app :
singularity run --app appName this_container.simg
singularity run --app appName this_container.sif
%files
......@@ -78,6 +78,7 @@ singularity run --app appName this_container.simg
%post
mkdir /Data
mkdir /Results
apt-get update -y
pip3 install cutadapt==2.3
......
......@@ -13,6 +13,6 @@ def read_yaml(filepath):
def write_yaml(filepath, data):
    """Serialize ``data`` to ``filepath`` as block-style YAML.

    Args:
        filepath: Destination path of the YAML file to write.
        data: A PyYAML-serializable structure (typically a dict).

    I/O errors while opening the file are printed rather than raised,
    matching the existing best-effort error handling in this module.
    """
    try:
        with open(filepath, 'w') as file:
            # default_flow_style=False forces readable block-style output
            # (the diff artifact that called yaml.dump twice is removed).
            yaml.dump(data, file, default_flow_style=False)
    except IOError as e:
        print("Error in file opening:", e)
\ No newline at end of file
Supports Markdown
0% or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment