diff --git a/README.md b/README.md
index 4d4c818f943cd6ace4ff473df59c7c826e099e0c..b94817b620411f2b22f37ebff877c25c8d3dd367 100644
--- a/README.md
+++ b/README.md
@@ -92,6 +92,24 @@
 workflow, as well as a shell wrapper and symbolic links that can be used in
 the same way as described in the previous section.
 
+#### Running on a cluster with the Slurm workload manager
+
+In July 2024, support was added for running the containerized pipeline on a
+computing cluster via the Slurm workload manager. The above-mentioned shell
+wrapper (and the symbolic links to it) will, by default, attempt to run the
+pipeline using `sbatch`, and recognizes environment variables specifying the
+quality of service (`QOS`), the partition (`PART`), and the locations of the
+gene lists (`GENE_LISTS_DIR`) and installed genomes (`GENOME_DIR`).
+
+Example command to run the pipeline on a computing cluster:
+
+    QOS="hubbioit" PART="hubbioit" GENOME_DIR="/pasteur/appa/homes/bli/test/cecere_pipelines_tests/Genomes" GENE_LISTS_DIR="/pasteur/appa/homes/bli/test/cecere_pipelines_tests/Gene_lists" run_sRNA-seq_pipeline 20221219_FS10001183_sRNA-seq_2cells_embryos_testprotocol_purif_pippinprep.yaml --cores 20 -j 300
+
+To run the pipeline on a non-cluster machine, set the `DEFAULT_HOSTNAME`
+variable to that machine's hostname (either on the command line, as for the
+other variables above, or by editing the wrapper script).
+
+
 ### Genome preparation
 
 A genome preparation workflow is available at
@@ -115,6 +133,6 @@ This upload upon error can be inactivated by adding `--config upload_on_err=Fals
 
 ## Citing
 
-If you use these tools, please cite the following papers:
+If you use these tools, please cite the following paper:
 
 > Barucci et al, 2020 (doi: [10.1038/s41556-020-0462-7](https://doi.org/10.1038/s41556-020-0462-7))
diff --git a/singularity/run_pipeline.sh b/singularity/run_pipeline.sh
index c3485ce7844ba27c60d8356885986732a6bf6d1c..50d2430580ecbf8a344bab2baee462eb3294a100 100755
--- a/singularity/run_pipeline.sh
+++ b/singularity/run_pipeline.sh
@@ -56,6 +56,8 @@ BASEDIR=$(dirname "${SCRIPT}")
 container="${BASEDIR}/run_pipeline"
 wrapper="${BASEDIR}/wrap_in_container.sh"
 cluster_config="${BASEDIR}/cluster_config.json"
+# If we are on this machine, then the pipeline will be run without sbatch
+[[ ${DEFAULT_HOSTNAME} ]] || DEFAULT_HOSTNAME="pisa"
 
 
 # Do we have singularity?
@@ -128,6 +130,6 @@ cmd="APPTAINERENV_USER=${USER} apptainer run -B /opt/hpc/slurm -B /var/run/munge
 # that are expected to be in a specific location there.
 # singularity run -B /pasteur -B /run/shm:/run/shm ${container} ${PROGNAME} $@
 #[[ $(hostname) = "pisa" ]] && SINGULARITYENV_USER=${USER} singularity run --cleanenv -B /pasteur -B /run/shm:/run/shm ${container} ${PROGNAME} $@ || sbatch --qos=${QOS} --part=${PART} --wrap="${cmd}"
-[[ $(hostname) = "pisa" ]] && SINGULARITYENV_USER=${USER} singularity run -B /pasteur -B /local -B /run/shm:/run/shm ${container} ${PROGNAME} $@ || sbatch --qos=${QOS} --part=${PART} --wrap="${cmd}"
+[[ $(hostname) = ${DEFAULT_HOSTNAME} ]] && SINGULARITYENV_USER=${USER} singularity run -B /pasteur -B /local -B /run/shm:/run/shm ${container} ${PROGNAME} $@ || sbatch --qos=${QOS} --part=${PART} --wrap="${cmd}"
 
 exit 0
diff --git a/singularity/workflows_shell.sh b/singularity/workflows_shell.sh
index fcce92b5df630371f8da5ed22e15eb25bb289899..53723bdb17fd3a7b879d2626789e30ac37b34ae5 100755
--- a/singularity/workflows_shell.sh
+++ b/singularity/workflows_shell.sh
@@ -37,7 +37,8 @@ SCRIPT=$(readlink -f "${0}")
 # Absolute path this script is in
 BASEDIR=$(dirname "${SCRIPT}")
 container="${BASEDIR}/run_pipeline"
-
+# If we are on this machine, then the pipeline will be run without sbatch
+[[ ${DEFAULT_HOSTNAME} ]] || DEFAULT_HOSTNAME="pisa"
 
 # Do we have singularity?
 singularity --version 2> /dev/null && have_singularity=1
@@ -77,6 +78,6 @@ case ${1} in
 # -B /pasteur will mount /pasteur in the container
 # so that it finds the Genome configuration and gene lists
 # that are expected to be in a specific location there.
-[[ $(hostname) = "pisa" ]] && SINGULARITYENV_USER=${USER} singularity shell -B /pasteur -B /local -B /run/shm:/run/shm ${container} $@ || APPTAINERENV_USER=${USER} apptainer shell -B /opt/hpc/slurm -B /var/run/munge -B /pasteur -B /local ${container} $@
+[[ $(hostname) = ${DEFAULT_HOSTNAME} ]] && SINGULARITYENV_USER=${USER} singularity shell -B /pasteur -B /local -B /run/shm:/run/shm ${container} $@ || APPTAINERENV_USER=${USER} apptainer shell -B /opt/hpc/slurm -B /var/run/munge -B /pasteur -B /local ${container} $@
 ;;
 esac
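
For illustration, a minimal sketch of how the `DEFAULT_HOSTNAME` override documented in the README addition above is meant to be used on a non-cluster machine. The configuration file name, directory paths, and core count below are placeholders; only the variable names and the `run_sRNA-seq_pipeline` link come from the patch:

    # Setting DEFAULT_HOSTNAME to the current hostname makes the wrapper's
    # [[ $(hostname) = ${DEFAULT_HOSTNAME} ]] test succeed, so the pipeline
    # is run directly through singularity instead of being submitted via sbatch.
    DEFAULT_HOSTNAME="$(hostname)" GENOME_DIR="/path/to/Genomes" GENE_LISTS_DIR="/path/to/Gene_lists" run_sRNA-seq_pipeline my_config.yaml --cores 8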