bioinfo_utils / Commits

Commit b387731b, authored 1 year ago by Blaise Li

Trying to make container slurm-compatible.

Parent: 68dd17fb

Showing 3 changed files with 26 additions and 8 deletions:

- run_pipeline.sh (+1, -1)
- singularity/run_pipeline.def (+8, -1)
- singularity/run_pipeline.sh (+17, -6)

run_pipeline.sh (+1, -1)
@@ -114,7 +114,7 @@ megabytes_resource=$(echo "${kilobytes_tot} / 1100" | bc)
 from_dir=$(pwd)
 #cmd="(cd ${output_dir}; snakemake -s ${snakefile} --configfile ${config_base} --resources mem_mb=${megabytes_resource} $@)"
 # TODO: check that this works
-cmd="(cd ${output_dir}; echo ${USER}@${HOSTNAME}:${from_dir}/${output_dir} > pwd.txt; snakemake -s ${snakefile_base} --configfile ${config_base} --rulegraph | dot -Tpdf > rulegraph.pdf; ${SBATCH} snakemake -s ${snakefile_base} --configfile ${config_base} --resources mem_mb=${megabytes_resource} $@)"
+cmd="(cd ${output_dir}; echo ${USER}@${HOSTNAME}:${from_dir}/${output_dir} > pwd.txt; snakemake -s ${snakefile_base} --configfile ${config_base} --rulegraph | dot -Tpdf > rulegraph.pdf; snakemake -s ${snakefile_base} --configfile ${config_base} --resources mem_mb=${megabytes_resource} $@)"
 echo ${cmd} | tee -a ${log_base}.log
 # https://unix.stackexchange.com/a/245610/55127
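
The only change here is that the ${SBATCH} prefix is dropped from the second snakemake invocation: job submission is no longer attempted from inside the container, but is handled by singularity/run_pipeline.sh below. For illustration only, with hypothetical values output_dir=results, snakefile_base=RNA-seq.snakefile, config_base=config.yaml and megabytes_resource=48000, and the script run by user alice on host node01 from /home/alice/project, the new cmd string would expand roughly to (wrapped here for readability; any extra arguments passed to the wrapper via $@ are appended to the final snakemake call):

    (cd results; echo alice@node01:/home/alice/project/results > pwd.txt; \
     snakemake -s RNA-seq.snakefile --configfile config.yaml --rulegraph | dot -Tpdf > rulegraph.pdf; \
     snakemake -s RNA-seq.snakefile --configfile config.yaml --resources mem_mb=48000)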

singularity/run_pipeline.def (+8, -1)
@@ -58,7 +58,13 @@ From:workflows_base.sif
     /usr/local/share/doc/qaf_demux_version.txt

 %post
     apt-get -y install fakeroot
+    apt-get -y install munge libmunge2
+    apt-get -y install hwloc libhwloc5
+    apt-get -y install libevent-2.1-6
+    apt-get -y install libpmix2
+    mkdir -p /opt/hpc/slurm
+    echo slurm:x:497:493::/home/slurm:/sbin/nologin >> /etc/passwd
     cat /usr/local/share/doc/qaf_demux_version.txt >> /usr/local/share/doc/program_versions.txt
     # To use the "local" python, not the system one.
     export PATH="/usr/local/bin":$PATH
@@ -101,6 +107,7 @@ From:workflows_base.sif
     # To avoid using python things installed in the HOME of the user
     # (that will be mounted during container execution)
     export PYTHONNOUSERSITE=1
+    export PATH=/opt/hpc/slurm/current/bin:"${PATH}"
     export PATH=/usr/local/src/bioinfo_utils:"${PATH}"
     export PATH=/usr/local/src/bioinfo_utils/Genome_preparation:"${PATH}"
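
The new PATH entry only becomes useful when the host's Slurm installation is bind-mounted at /opt/hpc/slurm, which the updated singularity/run_pipeline.sh does below with -B /opt/hpc/slurm -B /var/run/munge. Assuming the host really provides client binaries under /opt/hpc/slurm/current/bin and a running munge daemon, a check from outside the container might be (image name again a placeholder):

    # Assumes a host Slurm tree under /opt/hpc/slurm/current and a running munge daemon.
    apptainer exec -B /opt/hpc/slurm -B /var/run/munge run_pipeline.sif which sbatch
    apptainer exec -B /opt/hpc/slurm -B /var/run/munge run_pipeline.sif sinfo --version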

singularity/run_pipeline.sh (+17, -6)
@@ -66,7 +66,7 @@ then
 module --version 2> /dev/null && have_modules=1
 if [ ${have_modules} ]
 then
-    module load singularity || error_exit "singularity is needed to run the pipelines (see ${install_doc})"
+    module load apptainer || error_exit "singularity is needed to run the pipelines (see ${install_doc})"
 else
     error_exit "singularity is needed to run the pipelines (see ${install_doc})"
 fi
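
Note that this swaps the module name unconditionally: clusters that still only provide a singularity module would now fail at this step. A possible fallback, purely a sketch and not what this commit does, would be to try both module names:

    # Sketch only: prefer the apptainer module, fall back to singularity.
    if [ ${have_modules} ]
    then
        module load apptainer 2> /dev/null \
            || module load singularity \
            || error_exit "singularity/apptainer is needed to run the pipelines (see ${install_doc})"
    fi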
@@ -83,14 +83,25 @@ then
     fi
 fi

-genome_dir="/pasteur/entites/Mhe/Genomes"
-gene_lists_dir="/pasteur/entites/Mhe/Gene_lists"
+# This should actually be taken from the pipeline config file.
+[[ ${GENOME_DIR} ]] || GENOME_DIR="/pasteur/entites/Mhe/Genomes"
+[[ ${GENE_LISTS_DIR} ]] || GENE_LISTS_DIR="/pasteur/entites/Mhe/Gene_lists"

-if [ ! -e ${genome_dir} -o ! -e ${gene_lists_dir} ]
+if [ ! -e ${GENOME_DIR} -o ! -e ${GENE_LISTS_DIR} ]
 then
-    error_exit "The pipelines will look for genome data in ${genome_dir} and gene lists in ${gene_lists_dir}. Make sure it's there."
+    error_exit "The pipelines will look for genome data in ${GENOME_DIR} and gene lists in ${GENE_LISTS_DIR}. Make sure it's there."
 fi

+#############################################
+# To run with sbatch on slurm queing system #
+#############################################
+[[ ${QOS} ]] || QOS="normal"
+[[ ${PART} ]] || PART="common"
+#cluster_opts="--cores 20 --cluster \'sbatch --mem={cluster.ram} --cpus-per-task={threads} --job-name={rule}-{wildcards} --qos=${QOS} --part=${PART} --mpi=none\' -j 300"
+#cmd="APPTAINERENV_USER=${USER} apptainer run --cleanenv -B /opt/hpc/slurm -B /var/run/munge -B /pasteur ${container} ${PROGNAME} ${pipeline_config} ${cluster_opts} --cluster-config ${cluster_config}"
+cmd="APPTAINERENV_USER=${USER} apptainer run --cleanenv -B /opt/hpc/slurm -B /var/run/munge -B /pasteur ${container} ${PROGNAME} $@"
+
 # This script can be called from various symbolic links.
 # The name of the link determines which snakefile to use.
 # PRO-seq and GRO-seq are actually the same pipeline
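
With these defaults in place, the genome location, gene lists directory and Slurm submission parameters can be overridden from the environment instead of editing the script. A hypothetical invocation (the symlink name RNA-seq.sh, the paths and the QOS/partition values are made up for illustration):

    # Example only: names, paths and values are illustrative.
    GENOME_DIR=/data/Genomes \
    GENE_LISTS_DIR=/data/Gene_lists \
    QOS=long PART=dedicated \
    ./RNA-seq.sh pipeline_config.yaml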
@@ -101,4 +112,4 @@ fi
 # so that it finds the Genome configuration and gene lists
 # that are expected to be in a specific location there.
 # singularity run -B /pasteur -B /run/shm:/run/shm ${container} ${PROGNAME} $@
-SINGULARITYENV_USER=${USER} singularity run --cleanenv -B /pasteur -B /run/shm:/run/shm ${container} ${PROGNAME} $@
+[[ $(hostname) = "pisa" ]] && SINGULARITYENV_USER=${USER} singularity run --cleanenv -B /pasteur -B /run/shm:/run/shm ${container} ${PROGNAME} $@ || sbatch --qos=${QOS} --part=${PART} --wrap="${cmd}"