Skip to content
Snippets Groups Projects
Commit b387731b authored by Blaise Li's avatar Blaise Li
Browse files

Trying to make container slurm-compatible.

parent 68dd17fb
No related branches found
No related tags found
No related merge requests found
......@@ -114,7 +114,7 @@ megabytes_resource=$(echo "${kilobytes_tot} / 1100" | bc)
from_dir=$(pwd)
#cmd="(cd ${output_dir}; snakemake -s ${snakefile} --configfile ${config_base} --resources mem_mb=${megabytes_resource} $@)"
# TODO: check that this works
cmd="(cd ${output_dir}; echo ${USER}@${HOSTNAME}:${from_dir}/${output_dir} > pwd.txt; snakemake -s ${snakefile_base} --configfile ${config_base} --rulegraph | dot -Tpdf > rulegraph.pdf; ${SBATCH} snakemake -s ${snakefile_base} --configfile ${config_base} --resources mem_mb=${megabytes_resource} $@)"
cmd="(cd ${output_dir}; echo ${USER}@${HOSTNAME}:${from_dir}/${output_dir} > pwd.txt; snakemake -s ${snakefile_base} --configfile ${config_base} --rulegraph | dot -Tpdf > rulegraph.pdf; snakemake -s ${snakefile_base} --configfile ${config_base} --resources mem_mb=${megabytes_resource} $@)"
echo ${cmd} | tee -a ${log_base}.log
# https://unix.stackexchange.com/a/245610/55127
......
......@@ -58,7 +58,13 @@ From:workflows_base.sif
/usr/local/share/doc/qaf_demux_version.txt
%post
apt-get -y install fakeroot
# Slurm client prerequisites added by this commit: munge is Slurm's
# authentication service; hwloc/libevent/pmix are runtime libraries the
# host's Slurm tools link against when bind-mounted into the container.
apt-get -y install munge libmunge2
apt-get -y install hwloc libhwloc5
apt-get -y install libevent-2.1-6
apt-get -y install libpmix2
# Mount point for the host's Slurm installation (bind-mounted at run time;
# see the `-B /opt/hpc/slurm` option in the wrapper script).
mkdir -p /opt/hpc/slurm
# Create a passwd entry for the slurm user so host Slurm daemons/tools can
# resolve it inside the container.
# NOTE(review): uid 497 / gid 493 presumably match the host's slurm user —
# confirm on the target cluster, a mismatch would break munge authentication.
echo slurm:x:497:493::/home/slurm:/sbin/nologin >> /etc/passwd
# Record the qaf_demux version alongside the other bundled program versions.
cat /usr/local/share/doc/qaf_demux_version.txt >> /usr/local/share/doc/program_versions.txt
# To use the "local" python, not the system one.
export PATH="/usr/local/bin":$PATH
......@@ -101,6 +107,7 @@ From:workflows_base.sif
# To avoid using python things installed in the HOME of the user
# (that will be mounted during container execution)
export PYTHONNOUSERSITE=1
# Added by this commit: expose the host's Slurm binaries (sbatch, squeue, ...)
# inside the container. Only useful when /opt/hpc/slurm is bind-mounted from
# the host and contains a `current` symlink — TODO confirm that layout.
export PATH=/opt/hpc/slurm/current/bin:"${PATH}"
export PATH=/usr/local/src/bioinfo_utils:"${PATH}"
export PATH=/usr/local/src/bioinfo_utils/Genome_preparation:"${PATH}"
......
......@@ -66,7 +66,7 @@ then
# Detect an environment-modules installation; have_modules stays unset when
# `module` is unavailable.
# NOTE(review): `[ ${have_modules} ]` relies on the unset variable expanding
# to nothing ([ ] is false); `[ -n "${have_modules:-}" ]` would be sturdier.
module --version 2> /dev/null && have_modules=1
if [ ${have_modules} ]
then
# Diff residue: the first `module load` line is the pre-change version
# (singularity), the second is the post-change version (apptainer).
# NOTE(review): the error message still says "singularity" after the switch
# to the apptainer module — consider updating it for consistency.
module load singularity || error_exit "singularity is needed to run the pipelines (see ${install_doc})"
module load apptainer || error_exit "singularity is needed to run the pipelines (see ${install_doc})"
else
error_exit "singularity is needed to run the pipelines (see ${install_doc})"
fi
......@@ -83,14 +83,25 @@ then
fi
fi
# Diff residue: the lowercase genome_dir/gene_lists_dir assignments are the
# pre-change version; this commit replaces them with overridable uppercase
# environment variables (caller may pre-set GENOME_DIR / GENE_LISTS_DIR).
genome_dir="/pasteur/entites/Mhe/Genomes"
gene_lists_dir="/pasteur/entites/Mhe/Gene_lists"
# This should actually be taken from the pipeline config file.
[[ ${GENOME_DIR} ]] || GENOME_DIR="/pasteur/entites/Mhe/Genomes"
[[ ${GENE_LISTS_DIR} ]] || GENE_LISTS_DIR="/pasteur/entites/Mhe/Gene_lists"
# Fail early if the shared data directories are not reachable.
# Pre-change test line (removed), then post-change test line (added).
# NOTE(review): `-o` inside `[ ]` is deprecated/ambiguous per POSIX; prefer
# `[ ! -e "${GENOME_DIR}" ] || [ ! -e "${GENE_LISTS_DIR}" ]`, with quotes.
if [ ! -e ${genome_dir} -o ! -e ${gene_lists_dir} ]
if [ ! -e ${GENOME_DIR} -o ! -e ${GENE_LISTS_DIR} ]
then
error_exit "The pipelines will look for genome data in ${genome_dir} and gene lists in ${gene_lists_dir}. Make sure it's there."
error_exit "The pipelines will look for genome data in ${GENOME_DIR} and gene lists in ${GENE_LISTS_DIR}. Make sure it's there."
fi
#############################################
# To run with sbatch on slurm queing system #
#############################################
# Defaults for the Slurm submission; both may be overridden by the caller.
[[ ${QOS} ]] || QOS="normal"
[[ ${PART} ]] || PART="common"
# NOTE(review): sbatch's canonical option is --partition; `--part` works only
# through prefix matching — TODO confirm against the installed Slurm version.
#cluster_opts="--cores 20 --cluster \'sbatch --mem={cluster.ram} --cpus-per-task={threads} --job-name={rule}-{wildcards} --qos=${QOS} --part=${PART} --mpi=none\' -j 300"
#cmd="APPTAINERENV_USER=${USER} apptainer run --cleanenv -B /opt/hpc/slurm -B /var/run/munge -B /pasteur ${container} ${PROGNAME} ${pipeline_config} ${cluster_opts} --cluster-config ${cluster_config}"
# Command to submit: run the container with the host Slurm install and the
# munge socket bind-mounted so jobs can be submitted from inside.
# NOTE(review): `$@` is unquoted inside a double-quoted string — arguments
# containing spaces will be re-split when ${cmd} is executed; prefer "$@"
# handling or `printf '%q'` quoting.
cmd="APPTAINERENV_USER=${USER} apptainer run --cleanenv -B /opt/hpc/slurm -B /var/run/munge -B /pasteur ${container} ${PROGNAME} $@"
# This script can be called from various symbolic links.
# The name of the link determines which snakefile to use.
# PRO-seq and GRO-seq are actually the same pipeline
......@@ -101,4 +112,4 @@ fi
# so that it finds the Genome configuration and gene lists
# that are expected to be in a specific location there.
# singularity run -B /pasteur -B /run/shm:/run/shm ${container} ${PROGNAME} $@
# Diff residue: the plain `singularity run` line below is the pre-change
# version; this commit replaces it with a host-dependent dispatch.
SINGULARITYENV_USER=${USER} singularity run --cleanenv -B /pasteur -B /run/shm:/run/shm ${container} ${PROGNAME} $@
# Post-change line: run directly on host "pisa", otherwise submit ${cmd}
# through sbatch.
# NOTE(review): `A && B || C` is not if/else — if the direct singularity run
# fails on pisa, the sbatch branch ALSO runs; an explicit if/else would be
# safer. `$@` is unquoted as well (see note above in the cmd definition).
[[ $(hostname) = "pisa" ]] && SINGULARITYENV_USER=${USER} singularity run --cleanenv -B /pasteur -B /run/shm:/run/shm ${container} ${PROGNAME} $@ || sbatch --qos=${QOS} --part=${PART} --wrap="${cmd}"
Loading…
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment