Commit 25485914 authored by mmassaviol

Initial commit

FROM mbbteam/mbb_workflows_base:latest as alltools
RUN cd /opt/biotools \
&& git clone https://github.com/GonzalezLab/T-lex3.git \
&& mv T-lex3/tlex-open-v3.0.pl bin/tlex \
&& chmod +x bin/tlex
RUN apt-get update \
&& apt-get install -y perl unzip bzip2 libkrb5-3 \
&& wget http://security.ubuntu.com/ubuntu/pool/main/libp/libpng/libpng12-0_1.2.54-1ubuntu1.1_amd64.deb && dpkg -i libpng12-0_1.2.54-1ubuntu1.1_amd64.deb \
&& apt-get clean -y
RUN cd /opt/biotools \
&& wget ftp://ftp.ncbi.nlm.nih.gov/blast/executables/rmblast/2.2.28/ncbi-rmblastn-2.2.28-x64-linux.tar.gz \
&& wget ftp://ftp.ncbi.nlm.nih.gov/blast/executables/blast+/2.9.0/ncbi-blast-2.9.0+-x64-linux.tar.gz \
&& tar -xzf ncbi-rmblastn* \
&& tar -xzf ncbi-blast* \
&& mv ncbi*/bin/* bin \
&& rm -rf ncbi*
RUN cd /opt/biotools \
&& wget http://tandem.bu.edu/trf/downloads/trf407b.linux64 \
&& mv trf*.linux64 bin/trf \
&& chmod +x bin/trf
ENV PATH $PATH:/opt/biotools/RepeatMasker
RUN cd /opt/biotools \
&& wget http://www.repeatmasker.org/RepeatMasker-open-4-0-9-p2.tar.gz \
&& tar -xzf RepeatMasker-open-4-0-9-p2.tar.gz \
&& rm -rf RepeatMasker-open-4-0-9-p2.tar.gz \
&& perl -0p -e 's/\/usr\/local\/hmmer/\/usr\/bin/g;' -e 's/\/usr\/local\/rmblast/\/opt\/biotools\/bin/g;' -e 's/DEFAULT_SEARCH_ENGINE = "crossmatch"/DEFAULT_SEARCH_ENGINE = "ncbi"/g;' -e 's/TRF_PRGM = ""/TRF_PRGM = "\/opt\/biotools\/bin\/trf"/g;' RepeatMasker/RepeatMaskerConfig.tmpl > RepeatMasker/RepeatMaskerConfig.pm \
&& cd RepeatMasker \
&& perl -i -0pe 's/^#\!.*perl.*/#\!\/usr\/bin\/env perl/g' RepeatMasker DateRepeats ProcessRepeats RepeatProteinMask DupMasker util/queryRepeatDatabase.pl util/queryTaxonomyDatabase.pl util/rmOutToGFF3.pl util/rmToUCSCTables.pl \
&& cpan Text::Soundex
ENV PATH $PATH:/opt/biotools/SHRiMP_2_2_3
RUN cd /opt/biotools \
&& wget http://compbio.cs.toronto.edu/shrimp/releases/SHRiMP_2_2_3.lx26.x86_64.tar.gz \
&& tar -xzf SHRiMP_2_2_3.lx26.x86_64.tar.gz \
&& rm -rf SHRiMP_2_2_3.lx26.x86_64.tar.gz
ENV PATH $PATH:/opt/biotools/blat36
RUN cd /opt/biotools \
&& mkdir blat36 \
&& wget https://hgwdev.gi.ucsc.edu/~kent/exe/linux/blatSuite.36.zip \
&& unzip blatSuite.36.zip -d /opt/biotools/blat36 \
&& rm -rf blatSuite.36.zip
RUN cd /opt/biotools \
&& wget https://github.com/samtools/samtools/releases/download/1.9/samtools-1.9.tar.bz2 \
&& tar -xvjf samtools-1.9.tar.bz2 \
&& cd samtools-1.9 \
&& ./configure && make \
&& cd .. \
&& mv samtools-1.9/samtools bin/samtools \
&& rm -r samtools-1.9 samtools-1.9.tar.bz2
RUN cd /opt/biotools \
&& wget https://github.com/samtools/bcftools/releases/download/1.9/bcftools-1.9.tar.bz2 \
&& tar -xvjf bcftools-1.9.tar.bz2 \
&& cd bcftools-1.9 \
&& ./configure --prefix=/opt/biotools \
&& make -j 10 \
&& make install \
&& mv bcftools /opt/biotools/bin/ \
&& cd .. && rm -r bcftools-1.9.tar.bz2 bcftools-1.9
RUN cd /opt/biotools \
&& wget https://github.com/lh3/bwa/releases/download/v0.7.17/bwa-0.7.17.tar.bz2 \
&& tar -xvjf bwa-0.7.17.tar.bz2 \
&& cd bwa-0.7.17 \
&& make -j 10 \
&& mv bwa ../bin/ \
&& cd .. \
&& rm -r bwa-0.7.17 bwa-0.7.17.tar.bz2
RUN apt-get update -q \
&& apt-get install -y -qqq build-essential pkg-config autoconf \
&& cd /tmp \
&& wget https://github.com/agordon/libgtextutils/releases/download/0.7/libgtextutils-0.7.tar.gz \
&& tar -xzf libgtextutils-0.7.tar.gz \
&& cd libgtextutils-0.7 \
&& ./reconf \
&& ./configure \
&& make CXXFLAGS='-std=c++03' \
&& make install \
&& cd /tmp \
&& wget https://github.com/agordon/fastx_toolkit/releases/download/0.0.14/fastx_toolkit-0.0.14.tar.bz2 \
&& tar -xjf fastx_toolkit-0.0.14.tar.bz2 \
&& cd fastx_toolkit-0.0.14 \
&& ./reconf \
&& ./configure \
&& make \
&& make install \
&& apt-get remove -y -qqq build-essential pkg-config autoconf \
&& apt-get autoremove -y -qqq \
&& apt-get clean -y -qqq \
&& cd /tmp \
&& rm -rf /var/lib/apt/lists/* \
&& rm -rf /tmp/*
RUN apt-get update -q \
&& apt-get install -y emboss \
&& apt-get clean -y
RUN cd /tmp \
&& wget http://bioinfo.ut.ee/download/dl.php?file=28 -O fastagrep.tar.gz \
&& tar -xzf fastagrep.tar.gz \
&& mv ./fastagrep_v2.0_64bit_linux_2_6 /opt/biotools/bin/fastagrep \
&& rm -rf fastagrep.tar.gz
ENV LANG en_US.UTF-8
ENV LANGUAGE en_US:en
ENV LC_ALL en_US.UTF-8
# This part is necessary to run on the ISEM cluster
RUN mkdir -p /share/apps/bin \
&& mkdir -p /share/apps/lib \
&& mkdir -p /share/apps/gridengine \
&& mkdir -p /share/bio \
&& mkdir -p /opt/gridengine \
&& mkdir -p /export/scrach \
&& mkdir -p /usr/lib64 \
&& ln -s /bin/bash /bin/mbb_bash \
&& ln -s /bin/bash /bin/isem_bash \
&& /usr/sbin/groupadd --system --gid 400 sge \
&& /usr/sbin/useradd --system --uid 400 --gid 400 -c GridEngine --shell /bin/true --home /opt/gridengine sge
EXPOSE 3838
CMD ["Rscript", "-e", "setwd('/sagApp/'); shiny::runApp('/sagApp/app.R',port=3838 , host='0.0.0.0')"]
FROM alltools
COPY files /workflow
COPY sagApp /sagApp
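# A minimal build-and-run sketch (the image tag matches the one used by the deployment
# scripts below; the host paths are hypothetical):
#   docker build . -t tlex3_workflow:latest
#   docker run --rm -p 8787:3838 \
#     --mount type=bind,src=/path/to/Data,dst=/Data \
#     --mount type=bind,src=/path/to/Results,dst=/Results \
#     tlex3_workflow:latest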
#!/bin/bash
#This script will help to run a workflow in a docker image.
if [ $# -lt 4 ]
then
echo usage : $0 dataDir resultsDir configFile nbCores '[dockerHub|local]'
exit
fi
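# Hypothetical example call (script name and paths are placeholders):
#   bash <this_script> /home/user/Data /home/user/Results /Data/run1/config.yaml 8 dockerHub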
# Docker volumes
# MBB Workflows read data from /Data and write results to /Results
Data=$1
Results=$2
if [ ! -d "$Data" ]; then
echo "can't find $Data directory !"
exit;
fi
mkdir -p $Results
DOCK_VOL+=" --mount type=bind,src=$Data,dst=/Data"
DOCK_VOL+=" --mount type=bind,src=$Results,dst=/Results"
# config file must be in /Data or /Results !
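# e.g. (hypothetical paths): a file stored as $Data/run1/config.yaml on the host
# must be passed as /Data/run1/config.yaml, its path as seen inside the container.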
config=$3
cores=$4
if [ $# -lt 5 ]
then
APP_IMG="mbbteam/tlex3_workflow:latest"
else
IMG_SRC=$5
case $IMG_SRC in
dockerHub )
APP_IMG="mbbteam/tlex3_workflow:latest" ;;
local)
docker build . -t tlex3_workflow:latest
APP_IMG="tlex3_workflow:latest" ;;
mbb)
#APP_IMG="X.X.X.X:5000/tlex3_workflow:latest" ;;
esac
fi
docker run --rm $DOCK_VOL --cidfile="CID.txt" $APP_IMG snakemake -s /workflow/Snakefile all --configfile $config --cores $cores
CONTAINER_ID=$(cat CID.txt)
if [ $CONTAINER_ID ]
then
echo " "
echo Results were written to : $2
echo " "
else
echo Failed to run the docker container !!
fi
#!/bin/bash
# This script helps deploy a Docker image on an MBB bigmem machine
if [ $# -lt 2 ]
then
echo usage : $0 dataDir resultsDir '[dockerHub|local]'
exit
fi
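# Hypothetical example call (script name and paths are placeholders):
#   bash <this_script> /data/myproject/Data /data/myproject/Results dockerHub
# The Shiny interface is then served on the first free port between 8787 and 8800.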
#nginx
##### nginx install #####
#sudo apt-get install -y nginx
# HOST_NAME="192.168.100.49"
# HTTP_ENDP="https://$HOST_NAME"
# openssl req -x509 -nodes -days 365 -newkey rsa:2048 -keyout /etc/ssl/private/nginx-selfsigned.key -out /etc/ssl/certs/nginx-selfsigned.crt -subj "/C=FR/ST=LR/L=Montpellier/O=CNRS/OU=CNRS-ISEM/CN=mbb.univ-montp2.fr"
# openssl dhparam -out /etc/ssl/certs/dhparam.pem 2048
# mkdir -p /etc/nginx/snippets
# echo "ssl_certificate /etc/ssl/certs/nginx-selfsigned.crt;" > /etc/nginx/snippets/self-signed.conf
# echo "ssl_certificate_key /etc/ssl/private/nginx-selfsigned.key;" >> /etc/nginx/snippets/self-signed.conf
# cp system/nginx_snippets_ssl-params.conf /etc/nginx/snippets/ssl-params.conf
# cp /etc/nginx/sites-available/default /etc/nginx/sites-available/default.bak
# cp system/nginx_sites-available_default /etc/nginx/sites-available/default
# sed -i "s|server_domain_or_IP|$HOST_NAME|" /etc/nginx/sites-available/default
# useradd nginx
# cp system/nginx_nginx.conf /etc/nginx/nginx.conf
# cp system/nginx_conf.d_10-rstudio.conf /etc/nginx/conf.d/10-rstudio.conf
# sed -i "s|example.com|$HOST_NAME|" /etc/nginx/conf.d/10-rstudio.conf
# systemctl restart nginx
# systemctl enable nginx
# try a range of ports between 8787 and 8800
#APP_PORT=$2
APP_PORT=8787
while [[ $(ss -tulw | grep $APP_PORT) != "" && $APP_PORT -lt 8800 ]]
do
APP_PORT=$(( $APP_PORT + 1))
done
if [[ $(ss -tulw | grep $APP_PORT) != "" ]]
then
echo "No tcp port available !!"
exit 1
fi
# Docker volumes
# MBB Workflows read data from /Data and write results to /Results
if [ $SUDO_USER ]; then realUSER=$SUDO_USER; else realUSER=`whoami`; fi
Data=$1
Results=$2
mkdir -p $Data
mkdir -p $Results
DOCK_VOL+=" --mount type=bind,src=$Data,dst=/Data"
DOCK_VOL+=" --mount type=bind,src=$Results,dst=/Results"
if [ $# -lt 3 ]
then
APP_IMG="mbbteam/tlex3_workflow:latest"
else
IMG_SRC=$3
case $IMG_SRC in
dockerHub )
APP_IMG="mbbteam/tlex3_workflow:latest" ;;
local)
docker build . -t tlex3_workflow:latest
APP_IMG="tlex3_workflow:latest" ;;
mbb)
#APP_IMG="X.X.X.X:5000/tlex3_workflow:latest" ;;
esac
fi
CONTAINER_ID=$( docker run --rm -d -p $APP_PORT:3838 $DOCK_VOL $APP_IMG )
if [ $CONTAINER_ID ]
then
echo " "
echo You have to put your Data on : $1
echo " "
echo Results will be written to : $2
echo " "
hostname -I | grep -E -o "162.38.181.[0-9]{1,3}" | awk -v port=$APP_PORT '{print "You can access the workflow interface at : http://"$1":"port}'
echo " "
echo To start a Bash session inside the container : docker exec -it $CONTAINER_ID /bin/bash
else
echo Failed to run the docker container !!
fi
#!/bin/bash
# This script is executed on the virtual machine during the *Deployment* phase.
# It is used to apply parameters specific to the current deployment.
# It runs second during a cloud deployment on IFB-Biosphere, after the *Installation* phase.
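# The optional first argument selects where the workflow image comes from
# (ifb | docker | local | mbb); a hypothetical manual call: bash <this_script> ifb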
if [ $# -lt 1 ]
then
APP_IMG="mbbteam/tlex3_workflow:latest"
else
IMG_SRC=$1
case $IMG_SRC in
ifb)
APP_IMG="gitlab-registry.in2p3.fr/ifb-biosphere/apps/tlex3_workflow:master" ;;
docker )
APP_IMG="mbbteam/tlex3_workflow:latest" ;;
local)
docker build . -t tlex3_workflow:latest
APP_IMG="tlex3_workflow:latest" ;;
mbb)
#APP_IMG="X.X.X.X:5000/tlex3_workflow:latest" ;;
esac
fi
# Tuning depending on whether the cloud site uses a proxy or not
#CLOUD_SERVICE = $(ss-get cloudservice)
#CLOUD_SERVICE="ifb-genouest-genostack"
#HOST_NAME=$( ss-get --timeout=3 hostname )
HOST_NAME="192.168.100.49"
#if [ "$CLOUD_SERVICE" == "ifb-genouest-genostack" ]; then
# Cloud site WITH a site proxy
# APP_PORT=80
# PROXIED_IP=$( echo $HOST_NAME | sed "s|\.|-|g")
# HOST_NAME="openstack-${PROXIED_IP}.genouest.org"
# HTTP_ENDP="https://$HOST_NAME"
# systemctl stop nginx
#else
# Cloud site WITHOUT a site proxy
APP_PORT=8787
HTTP_ENDP="https://$HOST_NAME"
openssl req -x509 -nodes -days 365 -newkey rsa:2048 -keyout /etc/ssl/private/nginx-selfsigned.key -out /etc/ssl/certs/nginx-selfsigned.crt -subj "/C=FR/ST=AURA/L=Lyon/O=IFB/OU=IFB-biosphere/CN=myrstudio.biosphere.france-bioinformatique.fr"
openssl dhparam -out /etc/ssl/certs/dhparam.pem 2048
mkdir -p /etc/nginx/snippets
echo "ssl_certificate /etc/ssl/certs/nginx-selfsigned.crt;" > /etc/nginx/snippets/self-signed.conf
echo "ssl_certificate_key /etc/ssl/private/nginx-selfsigned.key;" >> /etc/nginx/snippets/self-signed.conf
cp system/nginx_snippets_ssl-params.conf /etc/nginx/snippets/ssl-params.conf
cp /etc/nginx/sites-available/default /etc/nginx/sites-available/default.bak
cp system/nginx_sites-available_default /etc/nginx/sites-available/default
sed -i "s|server_domain_or_IP|$HOST_NAME|" /etc/nginx/sites-available/default
useradd nginx
cp system/nginx_nginx.conf /etc/nginx/nginx.conf
cp system/nginx_conf.d_10-rstudio.conf /etc/nginx/conf.d/10-rstudio.conf
sed -i "s|example.com|$HOST_NAME|" /etc/nginx/conf.d/10-rstudio.conf
systemctl restart nginx
systemctl enable nginx
#fi
# Docker volumes
# mydatalocal: from the system disk or ephemeral one
IFB_DATADIR="/ifb/data/"
source /etc/profile.d/ifb.sh
VOL_NAME="mydatalocal"
VOL_DEV=$(readlink -f -n $IFB_DATADIR/$VOL_NAME )
DOCK_VOL=" --mount type=bind,src=$VOL_DEV,dst=$IFB_DATADIR/$VOL_NAME"
# MBB Workflows read data from /Data and write results to /Results
mkdir ${VOL_DEV}/Data
mkdir ${VOL_DEV}/Results
DOCK_VOL+=" --mount type=bind,src=$VOL_DEV/Data,dst=/Data"
DOCK_VOL+=" --mount type=bind,src=$VOL_DEV/Results,dst=/Results"
# NFS mounts: from ifb_share configuration in autofs
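# Each line of /etc/auto.ifb_share is assumed to look roughly like
#   <share_name> -fstype=nfs <nfs_server>:<exported_path>
# so that splitting on spaces and ':' below yields VOL_NAME, VOL_TYPE, VOL_IP and VOL_DEV.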
IFS_ORI=$IFS
while IFS=" :" read VOL_NAME VOL_TYPE VOL_IP VOL_DEV ; do
DOCK_VOL+=" --mount type=volume,volume-driver=local,volume-opt=type=nfs,src=$VOL_NAME,dst=$IFB_DATADIR/$VOL_NAME,volume-opt=device=:$VOL_DEV,volume-opt=o=addr=$VOL_IP"
done < /etc/auto.ifb_share
IFS=$IFS_ORI
CONTAINER_ID=$( docker run -d -p $APP_PORT:3838 $DOCK_VOL $APP_IMG )
VM_IP=$(curl bot.whatismyipaddress.com)
if [ $CONTAINER_ID ]
then
echo " "
echo You have to put your Data on : ${VOL_DEV}/Data
echo " "
echo Results will be written to : ${VOL_DEV}/Results
echo " "
echo You can access the workflow interface at : https://${VM_IP}
echo " "
echo To start a Bash session inside the container : docker exec -it $CONTAINER_ID /bin/bash
echo " "
echo To run the workflow without the interface : docker exec -it $CONTAINER_ID snakemake -s /workflow/Snakefile all --configfile config --cores XX
echo " "
echo config is a configuration file that must be located in a subfolder of ${VOL_DEV}/Data or ${VOL_DEV}/Results
echo " "
echo e.g. if the file is in ${VOL_DEV}/Data/run1/maconfig1.yml : docker exec -it $CONTAINER_ID snakemake -s /workflow/Snakefile all --configfile /Data/run1/maconfig1.yml --cores XX
echo " "
echo You can use the graphical interface to generate a configuration file.
echo " "
echo XX being the number of cores that will be used by the workflow.
else
echo Failed to run the docker container !!
fi
#!/bin/bash
# This script helps deploy a Docker image on your local machine
if [ $# -lt 2 ]
then
echo usage : $0 dataDir resultsDir '[dockerHub|local]'
exit
fi
#nginx
##### nginx install #####
#sudo apt-get install -y nginx
# HOST_NAME="192.168.100.49"
# HTTP_ENDP="https://$HOST_NAME"
# openssl req -x509 -nodes -days 365 -newkey rsa:2048 -keyout /etc/ssl/private/nginx-selfsigned.key -out /etc/ssl/certs/nginx-selfsigned.crt -subj "/C=FR/ST=LR/L=Montpellier/O=CNRS/OU=CNRS-ISEM/CN=mbb.univ-montp2.fr"
# openssl dhparam -out /etc/ssl/certs/dhparam.pem 2048
# mkdir -p /etc/nginx/snippets
# echo "ssl_certificate /etc/ssl/certs/nginx-selfsigned.crt;" > /etc/nginx/snippets/self-signed.conf
# echo "ssl_certificate_key /etc/ssl/private/nginx-selfsigned.key;" >> /etc/nginx/snippets/self-signed.conf
# cp system/nginx_snippets_ssl-params.conf /etc/nginx/snippets/ssl-params.conf
# cp /etc/nginx/sites-available/default /etc/nginx/sites-available/default.bak
# cp system/nginx_sites-available_default /etc/nginx/sites-available/default
# sed -i "s|server_domain_or_IP|$HOST_NAME|" /etc/nginx/sites-available/default
# useradd nginx
# cp system/nginx_nginx.conf /etc/nginx/nginx.conf
# cp system/nginx_conf.d_10-rstudio.conf /etc/nginx/conf.d/10-rstudio.conf
# sed -i "s|example.com|$HOST_NAME|" /etc/nginx/conf.d/10-rstudio.conf
# systemctl restart nginx
# systemctl enable nginx
# try a range of ports between 8787 and 8800
#APP_PORT=$2
APP_PORT=8787
while [[ $(ss -tulw | grep $APP_PORT) != "" && $APP_PORT -lt 8800 ]]
do
APP_PORT=$(( $APP_PORT + 1))
done
if [[ $(ss -tulw | grep $APP_PORT) != "" ]]
then
echo "No tcp port available !!"
exit 1
fi
# Docker volumes
# MBB Workflows read data from /Data and write results to /Results
if [ $SUDO_USER ]; then realUSER=$SUDO_USER; else realUSER=`whoami`; fi
Data=$1
Results=$2
mkdir -p $Data
mkdir -p $Results
DOCK_VOL+=" --mount type=bind,src=$Data,dst=/Data"
DOCK_VOL+=" --mount type=bind,src=$Results,dst=/Results"
if [ $# -lt 3 ]
then
APP_IMG="mbbteam/tlex3_workflow:latest"
else
IMG_SRC=$3
case $IMG_SRC in
dockerHub )
APP_IMG="mbbteam/tlex3_workflow:latest" ;;
local)
docker build . -t tlex3_workflow:latest
APP_IMG="tlex3_workflow:latest" ;;
mbb)
#APP_IMG="X.X.X.X:5000/tlex3_workflow:latest" ;;
esac
fi
IMG_NAME=$(echo $APP_IMG"-"$APP_PORT | sed 's|[:/]|-|g' ) # ':' and '/' are not allowed in container names
CONTAINER_ID=$( docker run --rm -d --name $IMG_NAME -p $APP_PORT:3838 $DOCK_VOL $APP_IMG )
if [ $CONTAINER_ID ]
then
echo " "
echo You have to put your Data on : $1
echo " "
echo Results will be written to : $2
echo " "
echo localhost | awk -v port=$APP_PORT '{print "You can access the shiny workflow interface at : http://"$1":"port}'
echo " "
echo To start a Bash session inside the container : docker exec -it $IMG_NAME /bin/bash
else
echo Failed to run the docker container !!
fi
from tools import *
from raw_read_dir import raw_read_dir
workdir: config['params']['results_dir']
import os
import re
import snakemake.utils
import csv
#############
# Wildcards #
#############
STEPS = config["steps"]
PREPARE_REPORT_OUTPUTS = config["prepare_report_outputs"]
PREPARE_REPORT_SCRIPTS = config["prepare_report_scripts"]
OUTPUTS = config["outputs"]
PARAMS_INFO = config["params_info"]
config = config["params"]
##########
# Inputs #
##########
# raw_inputs function call
raw_read_dir = raw_read_dir(config['results_dir'], config['sample_dir'], config['SeOrPe'])
config.update(raw_read_dir)
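# The raw_read_dir() helper is assumed to return a dict of raw input locations
# containing at least "read_dir"; its entries are merged into config above and
# reused by tlex3_inputs() below.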
# Tools inputs functions
def tlex3_inputs():
    inputs = dict()
    inputs["read_dir"] = raw_read_dir["read_dir"]
    return inputs
def prepare_report_inputs():
    inputs = list()
    for step in STEPS:
        inputs.extend(step_outputs(step["name"]))
    return inputs
def prepare_report_scripts():
    scripts = list()
    for step in STEPS:
        tool = config[step["name"]]
        prefix = tool+".prepare.report."
        if type(PREPARE_REPORT_SCRIPTS) == type(""):
            if prefix in PREPARE_REPORT_SCRIPTS:
                scripts.append("/workflow/scripts/"+PREPARE_REPORT_SCRIPTS)
        else:
            script = [s for s in PREPARE_REPORT_SCRIPTS if prefix in s]
            if (len(script)==1):
                scripts.append("/workflow/scripts/"+script[0])
    return scripts
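# Note: PREPARE_REPORT_SCRIPTS (consumed above) may be a single script name or a list
# of names; hypothetical examples: "tlex3.prepare.report.R" or
# ["tlex3.prepare.report.R", "<other_tool>.prepare.report.R"].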
def prepare_report_outputs():
    outputs = list()
    outputs.append(config["results_dir"] + "/outputs_mqc.csv")
    for step in STEPS:
        tool = config[step["name"]]
        if (tool in PREPARE_REPORT_OUTPUTS.keys()):
            if type(PREPARE_REPORT_OUTPUTS[tool]) == type(""):
                outputs.append(config["results_dir"]+"/"+tool+"/"+PREPARE_REPORT_OUTPUTS[tool])
            else:
                for output in PREPARE_REPORT_OUTPUTS[tool]:
                    outputs.append(config["results_dir"]+"/"+tool+"/"+output)
    return outputs
def multiqc_inputs():
    # Need the prepare_report inputs and outputs, in case prepare_report has no outputs
    return prepare_report_outputs()
###########
# Outputs #
###########
def step_outputs(step):
    outputs = list()
    if (step == "tlex3"):
        outputs = rules.tlex3.output
    if (step == "all"):
        outputs = list(rules.multiqc.output)
    return outputs
# get the outputs for each chosen tool
def workflow_outputs(step):
    outputs = list()
    outputs.extend(step_outputs(step))
    return outputs
#########
# Rules #
#########
rule tlex3:
    input:
        **tlex3_inputs()
    output:
        Tresults = config["results_dir"] + "/" + config["tlex3_output_dir"] + "/tlex_output/Tresults"
    params:
        output_dir = config["results_dir"] + "/" + config["tlex3_output_dir"],
        command = config["tlex3_command"],
        pairends = "yes" if config["SeOrPe"] == "PE" else "no",
        species = "-s " + config["tlex3_species"] if config["tlex3_species"] != "" else "",
        te_list = config["tlex3_te_list"],
        te_annotations = config["tlex3_te_annotations"],
        genome_fasta = config["tlex3_genome_fasta"],
    log:
        config["results_dir"]+"/logs/tlex3/tlex3_log.txt"
    shell:
        "cd {params.output_dir}; "
        "{params.command} "
        "-T {params.te_list} "
        "-M {params.te_annotations} "
        "-G {params.genome_fasta} "
        "-R {input.read_dir} "
        "-pairends {params.pairends} "
        "{params.species} "
        "|& tee {log}"
import collections
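# With hypothetical configuration values, the tlex3 shell directive above renders to
# something like:
#   cd /Results/tlex3; tlex -T te_list.txt -M te_annotations.gff -G genome.fa \
#     -R /Data/reads -pairends yes -s dmel |& tee /Results/logs/tlex3/tlex3_log.txt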
rule prepare_report:
    input: