diff --git a/.pdm-python b/.pdm-python
new file mode 100644
index 0000000000000000000000000000000000000000..ec0931a3addbe9aefeecb81e15bfdaf53f3c5830
--- /dev/null
+++ b/.pdm-python
@@ -0,0 +1 @@
+C:/Users/timot/Documents/Scripts/python/pypelines/.venv/Scripts/python.exe
\ No newline at end of file
diff --git a/pdm.lock b/pdm.lock
new file mode 100644
index 0000000000000000000000000000000000000000..9af7c149489522bdcaf2e60cd9189c7ced41f222
--- /dev/null
+++ b/pdm.lock
@@ -0,0 +1,485 @@
+# This file is @generated by PDM.
+# It is not intended for manual editing.
+
+[metadata]
+groups = ["default"]
+strategy = ["cross_platform", "inherit_metadata"]
+lock_version = "4.4.1"
+content_hash = "sha256:a07dbe37aabc8e1da53a5cac01f0c368a13ca13f13ae10a644403a436a8a416b"
+
+[[package]]
+name = "coloredlogs"
+version = "15.0.1"
+requires_python = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*"
+summary = "Colored terminal output for Python's logging module"
+groups = ["default"]
+dependencies = [
+    "humanfriendly>=9.1",
+]
+files = [
+    {file = "coloredlogs-15.0.1-py2.py3-none-any.whl", hash = "sha256:612ee75c546f53e92e70049c9dbfcc18c935a2b9a53b66085ce9ef6a6e5c0934"},
+    {file = "coloredlogs-15.0.1.tar.gz", hash = "sha256:7c991aa71a4577af2f82600d8f8f3a89f936baeaf9b50a9c197da014e5bf16b0"},
+]
+
+[[package]]
+name = "contourpy"
+version = "1.2.1"
+requires_python = ">=3.9"
+summary = "Python library for calculating contours of 2D quadrilateral grids"
+groups = ["default"]
+dependencies = [
+    "numpy>=1.20",
+]
+files = [
+    {file = "contourpy-1.2.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:bd7c23df857d488f418439686d3b10ae2fbf9bc256cd045b37a8c16575ea1040"},
+    {file = "contourpy-1.2.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:5b9eb0ca724a241683c9685a484da9d35c872fd42756574a7cfbf58af26677fd"},
+    {file = "contourpy-1.2.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4c75507d0a55378240f781599c30e7776674dbaf883a46d1c90f37e563453480"},
+    {file = "contourpy-1.2.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:11959f0ce4a6f7b76ec578576a0b61a28bdc0696194b6347ba3f1c53827178b9"},
+    {file = "contourpy-1.2.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:eb3315a8a236ee19b6df481fc5f997436e8ade24a9f03dfdc6bd490fea20c6da"},
+    {file = "contourpy-1.2.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:39f3ecaf76cd98e802f094e0d4fbc6dc9c45a8d0c4d185f0f6c2234e14e5f75b"},
+    {file = "contourpy-1.2.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:94b34f32646ca0414237168d68a9157cb3889f06b096612afdd296003fdd32fd"},
+    {file = "contourpy-1.2.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:457499c79fa84593f22454bbd27670227874cd2ff5d6c84e60575c8b50a69619"},
+    {file = "contourpy-1.2.1-cp310-cp310-win32.whl", hash = "sha256:ac58bdee53cbeba2ecad824fa8159493f0bf3b8ea4e93feb06c9a465d6c87da8"},
+    {file = "contourpy-1.2.1-cp310-cp310-win_amd64.whl", hash = "sha256:9cffe0f850e89d7c0012a1fb8730f75edd4320a0a731ed0c183904fe6ecfc3a9"},
+    {file = "contourpy-1.2.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6022cecf8f44e36af10bd9118ca71f371078b4c168b6e0fab43d4a889985dbb5"},
+    {file = "contourpy-1.2.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:ef5adb9a3b1d0c645ff694f9bca7702ec2c70f4d734f9922ea34de02294fdf72"},
+    {file = "contourpy-1.2.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6150ffa5c767bc6332df27157d95442c379b7dce3a38dff89c0f39b63275696f"},
+    {file = "contourpy-1.2.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4c863140fafc615c14a4bf4efd0f4425c02230eb8ef02784c9a156461e62c965"},
+    {file = "contourpy-1.2.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:00e5388f71c1a0610e6fe56b5c44ab7ba14165cdd6d695429c5cd94021e390b2"},
+    {file = "contourpy-1.2.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d4492d82b3bc7fbb7e3610747b159869468079fe149ec5c4d771fa1f614a14df"},
+    {file = "contourpy-1.2.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:49e70d111fee47284d9dd867c9bb9a7058a3c617274900780c43e38d90fe1205"},
+    {file = "contourpy-1.2.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:b59c0ffceff8d4d3996a45f2bb6f4c207f94684a96bf3d9728dbb77428dd8cb8"},
+    {file = "contourpy-1.2.1-cp311-cp311-win32.whl", hash = "sha256:7b4182299f251060996af5249c286bae9361fa8c6a9cda5efc29fe8bfd6062ec"},
+    {file = "contourpy-1.2.1-cp311-cp311-win_amd64.whl", hash = "sha256:2855c8b0b55958265e8b5888d6a615ba02883b225f2227461aa9127c578a4922"},
+    {file = "contourpy-1.2.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:62828cada4a2b850dbef89c81f5a33741898b305db244904de418cc957ff05dc"},
+    {file = "contourpy-1.2.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:309be79c0a354afff9ff7da4aaed7c3257e77edf6c1b448a779329431ee79d7e"},
+    {file = "contourpy-1.2.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2e785e0f2ef0d567099b9ff92cbfb958d71c2d5b9259981cd9bee81bd194c9a4"},
+    {file = "contourpy-1.2.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1cac0a8f71a041aa587410424ad46dfa6a11f6149ceb219ce7dd48f6b02b87a7"},
+    {file = "contourpy-1.2.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:af3f4485884750dddd9c25cb7e3915d83c2db92488b38ccb77dd594eac84c4a0"},
+    {file = "contourpy-1.2.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9ce6889abac9a42afd07a562c2d6d4b2b7134f83f18571d859b25624a331c90b"},
+    {file = "contourpy-1.2.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:a1eea9aecf761c661d096d39ed9026574de8adb2ae1c5bd7b33558af884fb2ce"},
+    {file = "contourpy-1.2.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:187fa1d4c6acc06adb0fae5544c59898ad781409e61a926ac7e84b8f276dcef4"},
+    {file = "contourpy-1.2.1-cp312-cp312-win32.whl", hash = "sha256:c2528d60e398c7c4c799d56f907664673a807635b857df18f7ae64d3e6ce2d9f"},
+    {file = "contourpy-1.2.1-cp312-cp312-win_amd64.whl", hash = "sha256:1a07fc092a4088ee952ddae19a2b2a85757b923217b7eed584fdf25f53a6e7ce"},
+    {file = "contourpy-1.2.1-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:a31f94983fecbac95e58388210427d68cd30fe8a36927980fab9c20062645609"},
+    {file = "contourpy-1.2.1-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ef2b055471c0eb466033760a521efb9d8a32b99ab907fc8358481a1dd29e3bd3"},
+    {file = "contourpy-1.2.1-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:b33d2bc4f69caedcd0a275329eb2198f560b325605810895627be5d4b876bf7f"},
+    {file = "contourpy-1.2.1.tar.gz", hash = "sha256:4d8908b3bee1c889e547867ca4cdc54e5ab6be6d3e078556814a22457f49423c"},
+]
+
+[[package]]
+name = "cycler"
+version = "0.12.1"
+requires_python = ">=3.8"
+summary = "Composable style cycles"
+groups = ["default"]
+files = [
+    {file = "cycler-0.12.1-py3-none-any.whl", hash = "sha256:85cef7cff222d8644161529808465972e51340599459b8ac3ccbac5a854e0d30"},
+    {file = "cycler-0.12.1.tar.gz", hash = "sha256:88bb128f02ba341da8ef447245a9e138fae777f6a23943da4540077d3601eb1c"},
+]
+
+[[package]]
+name = "dynaconf"
+version = "3.2.5"
+requires_python = ">=3.8"
+summary = "The dynamic configurator for your Python Project"
+groups = ["default"]
+files = [
+    {file = "dynaconf-3.2.5-py2.py3-none-any.whl", hash = "sha256:12202fc26546851c05d4194c80bee00197e7c2febcb026e502b0863be9cbbdd8"},
+    {file = "dynaconf-3.2.5.tar.gz", hash = "sha256:42c8d936b32332c4b84e4d4df6dd1626b6ef59c5a94eb60c10cd3c59d6b882f2"},
+]
+
+[[package]]
+name = "fonttools"
+version = "4.51.0"
+requires_python = ">=3.8"
+summary = "Tools to manipulate font files"
+groups = ["default"]
+files = [
+    {file = "fonttools-4.51.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:84d7751f4468dd8cdd03ddada18b8b0857a5beec80bce9f435742abc9a851a74"},
+    {file = "fonttools-4.51.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:8b4850fa2ef2cfbc1d1f689bc159ef0f45d8d83298c1425838095bf53ef46308"},
+    {file = "fonttools-4.51.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b5b48a1121117047d82695d276c2af2ee3a24ffe0f502ed581acc2673ecf1037"},
+    {file = "fonttools-4.51.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:180194c7fe60c989bb627d7ed5011f2bef1c4d36ecf3ec64daec8302f1ae0716"},
+    {file = "fonttools-4.51.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:96a48e137c36be55e68845fc4284533bda2980f8d6f835e26bca79d7e2006438"},
+    {file = "fonttools-4.51.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:806e7912c32a657fa39d2d6eb1d3012d35f841387c8fc6cf349ed70b7c340039"},
+    {file = "fonttools-4.51.0-cp310-cp310-win32.whl", hash = "sha256:32b17504696f605e9e960647c5f64b35704782a502cc26a37b800b4d69ff3c77"},
+    {file = "fonttools-4.51.0-cp310-cp310-win_amd64.whl", hash = "sha256:c7e91abdfae1b5c9e3a543f48ce96013f9a08c6c9668f1e6be0beabf0a569c1b"},
+    {file = "fonttools-4.51.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:a8feca65bab31479d795b0d16c9a9852902e3a3c0630678efb0b2b7941ea9c74"},
+    {file = "fonttools-4.51.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:8ac27f436e8af7779f0bb4d5425aa3535270494d3bc5459ed27de3f03151e4c2"},
+    {file = "fonttools-4.51.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0e19bd9e9964a09cd2433a4b100ca7f34e34731e0758e13ba9a1ed6e5468cc0f"},
+    {file = "fonttools-4.51.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b2b92381f37b39ba2fc98c3a45a9d6383bfc9916a87d66ccb6553f7bdd129097"},
+    {file = "fonttools-4.51.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:5f6bc991d1610f5c3bbe997b0233cbc234b8e82fa99fc0b2932dc1ca5e5afec0"},
+    {file = "fonttools-4.51.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:9696fe9f3f0c32e9a321d5268208a7cc9205a52f99b89479d1b035ed54c923f1"},
+    {file = "fonttools-4.51.0-cp311-cp311-win32.whl", hash = "sha256:3bee3f3bd9fa1d5ee616ccfd13b27ca605c2b4270e45715bd2883e9504735034"},
+    {file = "fonttools-4.51.0-cp311-cp311-win_amd64.whl", hash = "sha256:0f08c901d3866a8905363619e3741c33f0a83a680d92a9f0e575985c2634fcc1"},
+    {file = "fonttools-4.51.0-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:4060acc2bfa2d8e98117828a238889f13b6f69d59f4f2d5857eece5277b829ba"},
+    {file = "fonttools-4.51.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:1250e818b5f8a679ad79660855528120a8f0288f8f30ec88b83db51515411fcc"},
+    {file = "fonttools-4.51.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:76f1777d8b3386479ffb4a282e74318e730014d86ce60f016908d9801af9ca2a"},
+    {file = "fonttools-4.51.0-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8b5ad456813d93b9c4b7ee55302208db2b45324315129d85275c01f5cb7e61a2"},
+    {file = "fonttools-4.51.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:68b3fb7775a923be73e739f92f7e8a72725fd333eab24834041365d2278c3671"},
+    {file = "fonttools-4.51.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8e2f1a4499e3b5ee82c19b5ee57f0294673125c65b0a1ff3764ea1f9db2f9ef5"},
+    {file = "fonttools-4.51.0-cp312-cp312-win32.whl", hash = "sha256:278e50f6b003c6aed19bae2242b364e575bcb16304b53f2b64f6551b9c000e15"},
+    {file = "fonttools-4.51.0-cp312-cp312-win_amd64.whl", hash = "sha256:b3c61423f22165541b9403ee39874dcae84cd57a9078b82e1dce8cb06b07fa2e"},
+    {file = "fonttools-4.51.0-py3-none-any.whl", hash = "sha256:15c94eeef6b095831067f72c825eb0e2d48bb4cea0647c1b05c981ecba2bf39f"},
+    {file = "fonttools-4.51.0.tar.gz", hash = "sha256:dc0673361331566d7a663d7ce0f6fdcbfbdc1f59c6e3ed1165ad7202ca183c68"},
+]
+
+[[package]]
+name = "humanfriendly"
+version = "10.0"
+requires_python = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*"
+summary = "Human friendly output for text interfaces using Python"
+groups = ["default"]
+dependencies = [
+    "pyreadline3; sys_platform == \"win32\" and python_version >= \"3.8\"",
+]
+files = [
+    {file = "humanfriendly-10.0-py2.py3-none-any.whl", hash = "sha256:1697e1a8a8f550fd43c2865cd84542fc175a61dcb779b6fee18cf6b6ccba1477"},
+    {file = "humanfriendly-10.0.tar.gz", hash = "sha256:6b0b831ce8f15f7300721aa49829fc4e83921a9a301cc7f606be6686a2288ddc"},
+]
+
+[[package]]
+name = "kiwisolver"
+version = "1.4.5"
+requires_python = ">=3.7"
+summary = "A fast implementation of the Cassowary constraint solver"
+groups = ["default"]
+files = [
+    {file = "kiwisolver-1.4.5-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:05703cf211d585109fcd72207a31bb170a0f22144d68298dc5e61b3c946518af"},
+    {file = "kiwisolver-1.4.5-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:146d14bebb7f1dc4d5fbf74f8a6cb15ac42baadee8912eb84ac0b3b2a3dc6ac3"},
+    {file = "kiwisolver-1.4.5-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:6ef7afcd2d281494c0a9101d5c571970708ad911d028137cd558f02b851c08b4"},
+    {file = "kiwisolver-1.4.5-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:9eaa8b117dc8337728e834b9c6e2611f10c79e38f65157c4c38e9400286f5cb1"},
+    {file = "kiwisolver-1.4.5-cp310-cp310-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:ec20916e7b4cbfb1f12380e46486ec4bcbaa91a9c448b97023fde0d5bbf9e4ff"},
+    {file = "kiwisolver-1.4.5-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:39b42c68602539407884cf70d6a480a469b93b81b7701378ba5e2328660c847a"},
+    {file = "kiwisolver-1.4.5-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:aa12042de0171fad672b6c59df69106d20d5596e4f87b5e8f76df757a7c399aa"},
+    {file = "kiwisolver-1.4.5-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2a40773c71d7ccdd3798f6489aaac9eee213d566850a9533f8d26332d626b82c"},
+    {file = "kiwisolver-1.4.5-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:19df6e621f6d8b4b9c4d45f40a66839294ff2bb235e64d2178f7522d9170ac5b"},
+    {file = "kiwisolver-1.4.5-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:83d78376d0d4fd884e2c114d0621624b73d2aba4e2788182d286309ebdeed770"},
+    {file = "kiwisolver-1.4.5-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:e391b1f0a8a5a10ab3b9bb6afcfd74f2175f24f8975fb87ecae700d1503cdee0"},
+    {file = "kiwisolver-1.4.5-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:852542f9481f4a62dbb5dd99e8ab7aedfeb8fb6342349a181d4036877410f525"},
+    {file = "kiwisolver-1.4.5-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:59edc41b24031bc25108e210c0def6f6c2191210492a972d585a06ff246bb79b"},
+    {file = "kiwisolver-1.4.5-cp310-cp310-win32.whl", hash = "sha256:a6aa6315319a052b4ee378aa171959c898a6183f15c1e541821c5c59beaa0238"},
+    {file = "kiwisolver-1.4.5-cp310-cp310-win_amd64.whl", hash = "sha256:d0ef46024e6a3d79c01ff13801cb19d0cad7fd859b15037aec74315540acc276"},
+    {file = "kiwisolver-1.4.5-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:11863aa14a51fd6ec28688d76f1735f8f69ab1fabf388851a595d0721af042f5"},
+    {file = "kiwisolver-1.4.5-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:8ab3919a9997ab7ef2fbbed0cc99bb28d3c13e6d4b1ad36e97e482558a91be90"},
+    {file = "kiwisolver-1.4.5-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:fcc700eadbbccbf6bc1bcb9dbe0786b4b1cb91ca0dcda336eef5c2beed37b797"},
+    {file = "kiwisolver-1.4.5-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:dfdd7c0b105af050eb3d64997809dc21da247cf44e63dc73ff0fd20b96be55a9"},
+    {file = "kiwisolver-1.4.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:76c6a5964640638cdeaa0c359382e5703e9293030fe730018ca06bc2010c4437"},
+    {file = "kiwisolver-1.4.5-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:bbea0db94288e29afcc4c28afbf3a7ccaf2d7e027489c449cf7e8f83c6346eb9"},
+    {file = "kiwisolver-1.4.5-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ceec1a6bc6cab1d6ff5d06592a91a692f90ec7505d6463a88a52cc0eb58545da"},
+    {file = "kiwisolver-1.4.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:040c1aebeda72197ef477a906782b5ab0d387642e93bda547336b8957c61022e"},
+    {file = "kiwisolver-1.4.5-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:f91de7223d4c7b793867797bacd1ee53bfe7359bd70d27b7b58a04efbb9436c8"},
+    {file = "kiwisolver-1.4.5-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:faae4860798c31530dd184046a900e652c95513796ef51a12bc086710c2eec4d"},
+    {file = "kiwisolver-1.4.5-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:b0157420efcb803e71d1b28e2c287518b8808b7cf1ab8af36718fd0a2c453eb0"},
+    {file = "kiwisolver-1.4.5-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:06f54715b7737c2fecdbf140d1afb11a33d59508a47bf11bb38ecf21dc9ab79f"},
+    {file = "kiwisolver-1.4.5-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:fdb7adb641a0d13bdcd4ef48e062363d8a9ad4a182ac7647ec88f695e719ae9f"},
+    {file = "kiwisolver-1.4.5-cp311-cp311-win32.whl", hash = "sha256:bb86433b1cfe686da83ce32a9d3a8dd308e85c76b60896d58f082136f10bffac"},
+    {file = "kiwisolver-1.4.5-cp311-cp311-win_amd64.whl", hash = "sha256:6c08e1312a9cf1074d17b17728d3dfce2a5125b2d791527f33ffbe805200a355"},
+    {file = "kiwisolver-1.4.5-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:32d5cf40c4f7c7b3ca500f8985eb3fb3a7dfc023215e876f207956b5ea26632a"},
+    {file = "kiwisolver-1.4.5-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:f846c260f483d1fd217fe5ed7c173fb109efa6b1fc8381c8b7552c5781756192"},
+    {file = "kiwisolver-1.4.5-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:5ff5cf3571589b6d13bfbfd6bcd7a3f659e42f96b5fd1c4830c4cf21d4f5ef45"},
+    {file = "kiwisolver-1.4.5-cp312-cp312-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7269d9e5f1084a653d575c7ec012ff57f0c042258bf5db0954bf551c158466e7"},
+    {file = "kiwisolver-1.4.5-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:da802a19d6e15dffe4b0c24b38b3af68e6c1a68e6e1d8f30148c83864f3881db"},
+    {file = "kiwisolver-1.4.5-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3aba7311af82e335dd1e36ffff68aaca609ca6290c2cb6d821a39aa075d8e3ff"},
+    {file = "kiwisolver-1.4.5-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:763773d53f07244148ccac5b084da5adb90bfaee39c197554f01b286cf869228"},
+    {file = "kiwisolver-1.4.5-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2270953c0d8cdab5d422bee7d2007f043473f9d2999631c86a223c9db56cbd16"},
+    {file = "kiwisolver-1.4.5-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:d099e745a512f7e3bbe7249ca835f4d357c586d78d79ae8f1dcd4d8adeb9bda9"},
+    {file = "kiwisolver-1.4.5-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:74db36e14a7d1ce0986fa104f7d5637aea5c82ca6326ed0ec5694280942d1162"},
+    {file = "kiwisolver-1.4.5-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:7e5bab140c309cb3a6ce373a9e71eb7e4873c70c2dda01df6820474f9889d6d4"},
+    {file = "kiwisolver-1.4.5-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:0f114aa76dc1b8f636d077979c0ac22e7cd8f3493abbab152f20eb8d3cda71f3"},
+    {file = "kiwisolver-1.4.5-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:88a2df29d4724b9237fc0c6eaf2a1adae0cdc0b3e9f4d8e7dc54b16812d2d81a"},
+    {file = "kiwisolver-1.4.5-cp312-cp312-win32.whl", hash = "sha256:72d40b33e834371fd330fb1472ca19d9b8327acb79a5821d4008391db8e29f20"},
+    {file = "kiwisolver-1.4.5-cp312-cp312-win_amd64.whl", hash = "sha256:2c5674c4e74d939b9d91dda0fae10597ac7521768fec9e399c70a1f27e2ea2d9"},
+    {file = "kiwisolver-1.4.5-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:5c7b3b3a728dc6faf3fc372ef24f21d1e3cee2ac3e9596691d746e5a536de920"},
+    {file = "kiwisolver-1.4.5-pp37-pypy37_pp73-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:620ced262a86244e2be10a676b646f29c34537d0d9cc8eb26c08f53d98013390"},
+    {file = "kiwisolver-1.4.5-pp37-pypy37_pp73-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:378a214a1e3bbf5ac4a8708304318b4f890da88c9e6a07699c4ae7174c09a68d"},
+    {file = "kiwisolver-1.4.5-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:aaf7be1207676ac608a50cd08f102f6742dbfc70e8d60c4db1c6897f62f71523"},
+    {file = "kiwisolver-1.4.5-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:ba55dce0a9b8ff59495ddd050a0225d58bd0983d09f87cfe2b6aec4f2c1234e4"},
+    {file = "kiwisolver-1.4.5-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:fd32ea360bcbb92d28933fc05ed09bffcb1704ba3fc7942e81db0fd4f81a7892"},
+    {file = "kiwisolver-1.4.5-pp38-pypy38_pp73-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:5e7139af55d1688f8b960ee9ad5adafc4ac17c1c473fe07133ac092310d76544"},
+    {file = "kiwisolver-1.4.5-pp38-pypy38_pp73-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:dced8146011d2bc2e883f9bd68618b8247387f4bbec46d7392b3c3b032640126"},
+    {file = "kiwisolver-1.4.5-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c9bf3325c47b11b2e51bca0824ea217c7cd84491d8ac4eefd1e409705ef092bd"},
+    {file = "kiwisolver-1.4.5-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:5794cf59533bc3f1b1c821f7206a3617999db9fbefc345360aafe2e067514929"},
+    {file = "kiwisolver-1.4.5-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:e368f200bbc2e4f905b8e71eb38b3c04333bddaa6a2464a6355487b02bb7fb09"},
+    {file = "kiwisolver-1.4.5-pp39-pypy39_pp73-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e5d706eba36b4c4d5bc6c6377bb6568098765e990cfc21ee16d13963fab7b3e7"},
+    {file = "kiwisolver-1.4.5-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:85267bd1aa8880a9c88a8cb71e18d3d64d2751a790e6ca6c27b8ccc724bcd5ad"},
+    {file = "kiwisolver-1.4.5-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:210ef2c3a1f03272649aff1ef992df2e724748918c4bc2d5a90352849eb40bea"},
+    {file = "kiwisolver-1.4.5-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:11d011a7574eb3b82bcc9c1a1d35c1d7075677fdd15de527d91b46bd35e935ee"},
+    {file = "kiwisolver-1.4.5.tar.gz", hash = "sha256:e57e563a57fb22a142da34f38acc2fc1a5c864bc29ca1517a88abc963e60d6ec"},
+]
+
+[[package]]
+name = "matplotlib"
+version = "3.8.4"
+requires_python = ">=3.9"
+summary = "Python plotting package"
+groups = ["default"]
+dependencies = [
+    "contourpy>=1.0.1",
+    "cycler>=0.10",
+    "fonttools>=4.22.0",
+    "kiwisolver>=1.3.1",
+    "numpy>=1.21",
+    "packaging>=20.0",
+    "pillow>=8",
+    "pyparsing>=2.3.1",
+    "python-dateutil>=2.7",
+]
+files = [
+    {file = "matplotlib-3.8.4-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:abc9d838f93583650c35eca41cfcec65b2e7cb50fd486da6f0c49b5e1ed23014"},
+    {file = "matplotlib-3.8.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:8f65c9f002d281a6e904976007b2d46a1ee2bcea3a68a8c12dda24709ddc9106"},
+    {file = "matplotlib-3.8.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ce1edd9f5383b504dbc26eeea404ed0a00656c526638129028b758fd43fc5f10"},
+    {file = "matplotlib-3.8.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ecd79298550cba13a43c340581a3ec9c707bd895a6a061a78fa2524660482fc0"},
+    {file = "matplotlib-3.8.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:90df07db7b599fe7035d2f74ab7e438b656528c68ba6bb59b7dc46af39ee48ef"},
+    {file = "matplotlib-3.8.4-cp310-cp310-win_amd64.whl", hash = "sha256:ac24233e8f2939ac4fd2919eed1e9c0871eac8057666070e94cbf0b33dd9c338"},
+    {file = "matplotlib-3.8.4-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:72f9322712e4562e792b2961971891b9fbbb0e525011e09ea0d1f416c4645661"},
+    {file = "matplotlib-3.8.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:232ce322bfd020a434caaffbd9a95333f7c2491e59cfc014041d95e38ab90d1c"},
+    {file = "matplotlib-3.8.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6addbd5b488aedb7f9bc19f91cd87ea476206f45d7116fcfe3d31416702a82fa"},
+    {file = "matplotlib-3.8.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cc4ccdc64e3039fc303defd119658148f2349239871db72cd74e2eeaa9b80b71"},
+    {file = "matplotlib-3.8.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:b7a2a253d3b36d90c8993b4620183b55665a429da8357a4f621e78cd48b2b30b"},
+    {file = "matplotlib-3.8.4-cp311-cp311-win_amd64.whl", hash = "sha256:8080d5081a86e690d7688ffa542532e87f224c38a6ed71f8fbed34dd1d9fedae"},
+    {file = "matplotlib-3.8.4-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:6485ac1f2e84676cff22e693eaa4fbed50ef5dc37173ce1f023daef4687df616"},
+    {file = "matplotlib-3.8.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:c89ee9314ef48c72fe92ce55c4e95f2f39d70208f9f1d9db4e64079420d8d732"},
+    {file = "matplotlib-3.8.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:50bac6e4d77e4262c4340d7a985c30912054745ec99756ce213bfbc3cb3808eb"},
+    {file = "matplotlib-3.8.4-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f51c4c869d4b60d769f7b4406eec39596648d9d70246428745a681c327a8ad30"},
+    {file = "matplotlib-3.8.4-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:b12ba985837e4899b762b81f5b2845bd1a28f4fdd1a126d9ace64e9c4eb2fb25"},
+    {file = "matplotlib-3.8.4-cp312-cp312-win_amd64.whl", hash = "sha256:7a6769f58ce51791b4cb8b4d7642489df347697cd3e23d88266aaaee93b41d9a"},
+    {file = "matplotlib-3.8.4-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:c7064120a59ce6f64103c9cefba8ffe6fba87f2c61d67c401186423c9a20fd35"},
+    {file = "matplotlib-3.8.4-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a0e47eda4eb2614300fc7bb4657fced3e83d6334d03da2173b09e447418d499f"},
+    {file = "matplotlib-3.8.4-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:493e9f6aa5819156b58fce42b296ea31969f2aab71c5b680b4ea7a3cb5c07d94"},
+    {file = "matplotlib-3.8.4.tar.gz", hash = "sha256:8aac397d5e9ec158960e31c381c5ffc52ddd52bd9a47717e2a694038167dffea"},
+]
+
+[[package]]
+name = "natsort"
+version = "8.4.0"
+requires_python = ">=3.7"
+summary = "Simple yet flexible natural sorting in Python."
+groups = ["default"]
+files = [
+    {file = "natsort-8.4.0-py3-none-any.whl", hash = "sha256:4732914fb471f56b5cce04d7bae6f164a592c7712e1c85f9ef585e197299521c"},
+    {file = "natsort-8.4.0.tar.gz", hash = "sha256:45312c4a0e5507593da193dedd04abb1469253b601ecaf63445ad80f0a1ea581"},
+]
+
+[[package]]
+name = "networkx"
+version = "3.3"
+requires_python = ">=3.10"
+summary = "Python package for creating and manipulating graphs and networks"
+groups = ["default"]
+files = [
+    {file = "networkx-3.3-py3-none-any.whl", hash = "sha256:28575580c6ebdaf4505b22c6256a2b9de86b316dc63ba9e93abde3d78dfdbcf2"},
+    {file = "networkx-3.3.tar.gz", hash = "sha256:0c127d8b2f4865f59ae9cb8aafcd60b5c70f3241ebd66f7defad7c4ab90126c9"},
+]
+
+[[package]]
+name = "numpy"
+version = "1.26.4"
+requires_python = ">=3.9"
+summary = "Fundamental package for array computing in Python"
+groups = ["default"]
+files = [
+    {file = "numpy-1.26.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:9ff0f4f29c51e2803569d7a51c2304de5554655a60c5d776e35b4a41413830d0"},
+    {file = "numpy-1.26.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:2e4ee3380d6de9c9ec04745830fd9e2eccb3e6cf790d39d7b98ffd19b0dd754a"},
+    {file = "numpy-1.26.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d209d8969599b27ad20994c8e41936ee0964e6da07478d6c35016bc386b66ad4"},
+    {file = "numpy-1.26.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ffa75af20b44f8dba823498024771d5ac50620e6915abac414251bd971b4529f"},
+    {file = "numpy-1.26.4-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:62b8e4b1e28009ef2846b4c7852046736bab361f7aeadeb6a5b89ebec3c7055a"},
+    {file = "numpy-1.26.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:a4abb4f9001ad2858e7ac189089c42178fcce737e4169dc61321660f1a96c7d2"},
+    {file = "numpy-1.26.4-cp310-cp310-win32.whl", hash = "sha256:bfe25acf8b437eb2a8b2d49d443800a5f18508cd811fea3181723922a8a82b07"},
+    {file = "numpy-1.26.4-cp310-cp310-win_amd64.whl", hash = "sha256:b97fe8060236edf3662adfc2c633f56a08ae30560c56310562cb4f95500022d5"},
+    {file = "numpy-1.26.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:4c66707fabe114439db9068ee468c26bbdf909cac0fb58686a42a24de1760c71"},
+    {file = "numpy-1.26.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:edd8b5fe47dab091176d21bb6de568acdd906d1887a4584a15a9a96a1dca06ef"},
+    {file = "numpy-1.26.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7ab55401287bfec946ced39700c053796e7cc0e3acbef09993a9ad2adba6ca6e"},
+    {file = "numpy-1.26.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:666dbfb6ec68962c033a450943ded891bed2d54e6755e35e5835d63f4f6931d5"},
+    {file = "numpy-1.26.4-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:96ff0b2ad353d8f990b63294c8986f1ec3cb19d749234014f4e7eb0112ceba5a"},
+    {file = "numpy-1.26.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:60dedbb91afcbfdc9bc0b1f3f402804070deed7392c23eb7a7f07fa857868e8a"},
+    {file = "numpy-1.26.4-cp311-cp311-win32.whl", hash = "sha256:1af303d6b2210eb850fcf03064d364652b7120803a0b872f5211f5234b399f20"},
+    {file = "numpy-1.26.4-cp311-cp311-win_amd64.whl", hash = "sha256:cd25bcecc4974d09257ffcd1f098ee778f7834c3ad767fe5db785be9a4aa9cb2"},
+    {file = "numpy-1.26.4-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:b3ce300f3644fb06443ee2222c2201dd3a89ea6040541412b8fa189341847218"},
+    {file = "numpy-1.26.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:03a8c78d01d9781b28a6989f6fa1bb2c4f2d51201cf99d3dd875df6fbd96b23b"},
+    {file = "numpy-1.26.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9fad7dcb1aac3c7f0584a5a8133e3a43eeb2fe127f47e3632d43d677c66c102b"},
+    {file = "numpy-1.26.4-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:675d61ffbfa78604709862923189bad94014bef562cc35cf61d3a07bba02a7ed"},
+    {file = "numpy-1.26.4-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:ab47dbe5cc8210f55aa58e4805fe224dac469cde56b9f731a4c098b91917159a"},
+    {file = "numpy-1.26.4-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:1dda2e7b4ec9dd512f84935c5f126c8bd8b9f2fc001e9f54af255e8c5f16b0e0"},
+    {file = "numpy-1.26.4-cp312-cp312-win32.whl", hash = "sha256:50193e430acfc1346175fcbdaa28ffec49947a06918b7b92130744e81e640110"},
+    {file = "numpy-1.26.4-cp312-cp312-win_amd64.whl", hash = "sha256:08beddf13648eb95f8d867350f6a018a4be2e5ad54c8d8caed89ebca558b2818"},
+    {file = "numpy-1.26.4-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:afedb719a9dcfc7eaf2287b839d8198e06dcd4cb5d276a3df279231138e83d30"},
+    {file = "numpy-1.26.4-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:95a7476c59002f2f6c590b9b7b998306fba6a5aa646b1e22ddfeaf8f78c3a29c"},
+    {file = "numpy-1.26.4-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:7e50d0a0cc3189f9cb0aeb3a6a6af18c16f59f004b866cd2be1c14b36134a4a0"},
+    {file = "numpy-1.26.4.tar.gz", hash = "sha256:2a02aba9ed12e4ac4eb3ea9421c420301a0c6460d9830d74a9df87efa4912010"},
+]
+
+[[package]]
+name = "packaging"
+version = "24.0"
+requires_python = ">=3.7"
+summary = "Core utilities for Python packages"
+groups = ["default"]
+files = [
+    {file = "packaging-24.0-py3-none-any.whl", hash = "sha256:2ddfb553fdf02fb784c234c7ba6ccc288296ceabec964ad2eae3777778130bc5"},
+    {file = "packaging-24.0.tar.gz", hash = "sha256:eb82c5e3e56209074766e6885bb04b8c38a0c015d0a30036ebe7ece34c9989e9"},
+]
+
+[[package]]
+name = "pandas"
+version = "2.2.1"
+requires_python = ">=3.9"
+summary = "Powerful data structures for data analysis, time series, and statistics"
+groups = ["default"]
+dependencies = [
+    "numpy<2,>=1.22.4; python_version < \"3.11\"",
+    "numpy<2,>=1.23.2; python_version == \"3.11\"",
+    "numpy<2,>=1.26.0; python_version >= \"3.12\"",
+    "python-dateutil>=2.8.2",
+    "pytz>=2020.1",
+    "tzdata>=2022.7",
+]
+files = [
+    {file = "pandas-2.2.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:8df8612be9cd1c7797c93e1c5df861b2ddda0b48b08f2c3eaa0702cf88fb5f88"},
+    {file = "pandas-2.2.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:0f573ab277252ed9aaf38240f3b54cfc90fff8e5cab70411ee1d03f5d51f3944"},
+    {file = "pandas-2.2.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f02a3a6c83df4026e55b63c1f06476c9aa3ed6af3d89b4f04ea656ccdaaaa359"},
+    {file = "pandas-2.2.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c38ce92cb22a4bea4e3929429aa1067a454dcc9c335799af93ba9be21b6beb51"},
+    {file = "pandas-2.2.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:c2ce852e1cf2509a69e98358e8458775f89599566ac3775e70419b98615f4b06"},
+    {file = "pandas-2.2.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:53680dc9b2519cbf609c62db3ed7c0b499077c7fefda564e330286e619ff0dd9"},
+    {file = "pandas-2.2.1-cp310-cp310-win_amd64.whl", hash = "sha256:94e714a1cca63e4f5939cdce5f29ba8d415d85166be3441165edd427dc9f6bc0"},
+    {file = "pandas-2.2.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:f821213d48f4ab353d20ebc24e4faf94ba40d76680642fb7ce2ea31a3ad94f9b"},
+    {file = "pandas-2.2.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:c70e00c2d894cb230e5c15e4b1e1e6b2b478e09cf27cc593a11ef955b9ecc81a"},
+    {file = "pandas-2.2.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e97fbb5387c69209f134893abc788a6486dbf2f9e511070ca05eed4b930b1b02"},
+    {file = "pandas-2.2.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:101d0eb9c5361aa0146f500773395a03839a5e6ecde4d4b6ced88b7e5a1a6403"},
+    {file = "pandas-2.2.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:7d2ed41c319c9fb4fd454fe25372028dfa417aacb9790f68171b2e3f06eae8cd"},
+    {file = "pandas-2.2.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:af5d3c00557d657c8773ef9ee702c61dd13b9d7426794c9dfeb1dc4a0bf0ebc7"},
+    {file = "pandas-2.2.1-cp311-cp311-win_amd64.whl", hash = "sha256:06cf591dbaefb6da9de8472535b185cba556d0ce2e6ed28e21d919704fef1a9e"},
+    {file = "pandas-2.2.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:88ecb5c01bb9ca927ebc4098136038519aa5d66b44671861ffab754cae75102c"},
+    {file = "pandas-2.2.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:04f6ec3baec203c13e3f8b139fb0f9f86cd8c0b94603ae3ae8ce9a422e9f5bee"},
+    {file = "pandas-2.2.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a935a90a76c44fe170d01e90a3594beef9e9a6220021acfb26053d01426f7dc2"},
+    {file = "pandas-2.2.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c391f594aae2fd9f679d419e9a4d5ba4bce5bb13f6a989195656e7dc4b95c8f0"},
+    {file = "pandas-2.2.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:9d1265545f579edf3f8f0cb6f89f234f5e44ba725a34d86535b1a1d38decbccc"},
+    {file = "pandas-2.2.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:11940e9e3056576ac3244baef2fedade891977bcc1cb7e5cc8f8cc7d603edc89"},
+    {file = "pandas-2.2.1-cp312-cp312-win_amd64.whl", hash = "sha256:4acf681325ee1c7f950d058b05a820441075b0dd9a2adf5c4835b9bc056bf4fb"},
+    {file = "pandas-2.2.1.tar.gz", hash = "sha256:0ab90f87093c13f3e8fa45b48ba9f39181046e8f3317d3aadb2fffbb1b978572"},
+]
+
+[[package]]
+name = "pillow"
+version = "10.3.0"
+requires_python = ">=3.8"
+summary = "Python Imaging Library (Fork)"
+groups = ["default"]
+files = [
+    {file = "pillow-10.3.0-cp310-cp310-macosx_10_10_x86_64.whl", hash = "sha256:90b9e29824800e90c84e4022dd5cc16eb2d9605ee13f05d47641eb183cd73d45"},
+    {file = "pillow-10.3.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:a2c405445c79c3f5a124573a051062300936b0281fee57637e706453e452746c"},
+    {file = "pillow-10.3.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:78618cdbccaa74d3f88d0ad6cb8ac3007f1a6fa5c6f19af64b55ca170bfa1edf"},
+    {file = "pillow-10.3.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:261ddb7ca91fcf71757979534fb4c128448b5b4c55cb6152d280312062f69599"},
+    {file = "pillow-10.3.0-cp310-cp310-manylinux_2_28_aarch64.whl", hash = "sha256:ce49c67f4ea0609933d01c0731b34b8695a7a748d6c8d186f95e7d085d2fe475"},
+    {file = "pillow-10.3.0-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:b14f16f94cbc61215115b9b1236f9c18403c15dd3c52cf629072afa9d54c1cbf"},
+    {file = "pillow-10.3.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:d33891be6df59d93df4d846640f0e46f1a807339f09e79a8040bc887bdcd7ed3"},
+    {file = "pillow-10.3.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:b50811d664d392f02f7761621303eba9d1b056fb1868c8cdf4231279645c25f5"},
+    {file = "pillow-10.3.0-cp310-cp310-win32.whl", hash = "sha256:ca2870d5d10d8726a27396d3ca4cf7976cec0f3cb706debe88e3a5bd4610f7d2"},
+    {file = "pillow-10.3.0-cp310-cp310-win_amd64.whl", hash = "sha256:f0d0591a0aeaefdaf9a5e545e7485f89910c977087e7de2b6c388aec32011e9f"},
+    {file = "pillow-10.3.0-cp310-cp310-win_arm64.whl", hash = "sha256:ccce24b7ad89adb5a1e34a6ba96ac2530046763912806ad4c247356a8f33a67b"},
+    {file = "pillow-10.3.0-cp311-cp311-macosx_10_10_x86_64.whl", hash = "sha256:5f77cf66e96ae734717d341c145c5949c63180842a545c47a0ce7ae52ca83795"},
+    {file = "pillow-10.3.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:e4b878386c4bf293578b48fc570b84ecfe477d3b77ba39a6e87150af77f40c57"},
+    {file = "pillow-10.3.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fdcbb4068117dfd9ce0138d068ac512843c52295ed996ae6dd1faf537b6dbc27"},
+    {file = "pillow-10.3.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9797a6c8fe16f25749b371c02e2ade0efb51155e767a971c61734b1bf6293994"},
+    {file = "pillow-10.3.0-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:9e91179a242bbc99be65e139e30690e081fe6cb91a8e77faf4c409653de39451"},
+    {file = "pillow-10.3.0-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:1b87bd9d81d179bd8ab871603bd80d8645729939f90b71e62914e816a76fc6bd"},
+    {file = "pillow-10.3.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:81d09caa7b27ef4e61cb7d8fbf1714f5aec1c6b6c5270ee53504981e6e9121ad"},
+    {file = "pillow-10.3.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:048ad577748b9fa4a99a0548c64f2cb8d672d5bf2e643a739ac8faff1164238c"},
+    {file = "pillow-10.3.0-cp311-cp311-win32.whl", hash = "sha256:7161ec49ef0800947dc5570f86568a7bb36fa97dd09e9827dc02b718c5643f09"},
+    {file = "pillow-10.3.0-cp311-cp311-win_amd64.whl", hash = "sha256:8eb0908e954d093b02a543dc963984d6e99ad2b5e36503d8a0aaf040505f747d"},
+    {file = "pillow-10.3.0-cp311-cp311-win_arm64.whl", hash = "sha256:4e6f7d1c414191c1199f8996d3f2282b9ebea0945693fb67392c75a3a320941f"},
+    {file = "pillow-10.3.0-cp312-cp312-macosx_10_10_x86_64.whl", hash = "sha256:e46f38133e5a060d46bd630faa4d9fa0202377495df1f068a8299fd78c84de84"},
+    {file = "pillow-10.3.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:50b8eae8f7334ec826d6eeffaeeb00e36b5e24aa0b9df322c247539714c6df19"},
+    {file = "pillow-10.3.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9d3bea1c75f8c53ee4d505c3e67d8c158ad4df0d83170605b50b64025917f338"},
+    {file = "pillow-10.3.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:19aeb96d43902f0a783946a0a87dbdad5c84c936025b8419da0a0cd7724356b1"},
+    {file = "pillow-10.3.0-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:74d28c17412d9caa1066f7a31df8403ec23d5268ba46cd0ad2c50fb82ae40462"},
+    {file = "pillow-10.3.0-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:ff61bfd9253c3915e6d41c651d5f962da23eda633cf02262990094a18a55371a"},
+    {file = "pillow-10.3.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:d886f5d353333b4771d21267c7ecc75b710f1a73d72d03ca06df49b09015a9ef"},
+    {file = "pillow-10.3.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:4b5ec25d8b17217d635f8935dbc1b9aa5907962fae29dff220f2659487891cd3"},
+    {file = "pillow-10.3.0-cp312-cp312-win32.whl", hash = "sha256:51243f1ed5161b9945011a7360e997729776f6e5d7005ba0c6879267d4c5139d"},
+    {file = "pillow-10.3.0-cp312-cp312-win_amd64.whl", hash = "sha256:412444afb8c4c7a6cc11a47dade32982439925537e483be7c0ae0cf96c4f6a0b"},
+    {file = "pillow-10.3.0-cp312-cp312-win_arm64.whl", hash = "sha256:798232c92e7665fe82ac085f9d8e8ca98826f8e27859d9a96b41d519ecd2e49a"},
+    {file = "pillow-10.3.0-pp310-pypy310_pp73-macosx_10_10_x86_64.whl", hash = "sha256:6b02471b72526ab8a18c39cb7967b72d194ec53c1fd0a70b050565a0f366d355"},
+    {file = "pillow-10.3.0-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:8ab74c06ffdab957d7670c2a5a6e1a70181cd10b727cd788c4dd9005b6a8acd9"},
+    {file = "pillow-10.3.0-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:048eeade4c33fdf7e08da40ef402e748df113fd0b4584e32c4af74fe78baaeb2"},
+    {file = "pillow-10.3.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9e2ec1e921fd07c7cda7962bad283acc2f2a9ccc1b971ee4b216b75fad6f0463"},
+    {file = "pillow-10.3.0-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:4c8e73e99da7db1b4cad7f8d682cf6abad7844da39834c288fbfa394a47bbced"},
+    {file = "pillow-10.3.0-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:16563993329b79513f59142a6b02055e10514c1a8e86dca8b48a893e33cf91e3"},
+    {file = "pillow-10.3.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:dd78700f5788ae180b5ee8902c6aea5a5726bac7c364b202b4b3e3ba2d293170"},
+    {file = "pillow-10.3.0-pp39-pypy39_pp73-macosx_10_10_x86_64.whl", hash = "sha256:aff76a55a8aa8364d25400a210a65ff59d0168e0b4285ba6bf2bd83cf675ba32"},
+    {file = "pillow-10.3.0-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:b7bc2176354defba3edc2b9a777744462da2f8e921fbaf61e52acb95bafa9828"},
+    {file = "pillow-10.3.0-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:793b4e24db2e8742ca6423d3fde8396db336698c55cd34b660663ee9e45ed37f"},
+    {file = "pillow-10.3.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d93480005693d247f8346bc8ee28c72a2191bdf1f6b5db469c096c0c867ac015"},
+    {file = "pillow-10.3.0-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:c83341b89884e2b2e55886e8fbbf37c3fa5efd6c8907124aeb72f285ae5696e5"},
+    {file = "pillow-10.3.0-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:1a1d1915db1a4fdb2754b9de292642a39a7fb28f1736699527bb649484fb966a"},
+    {file = "pillow-10.3.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:a0eaa93d054751ee9964afa21c06247779b90440ca41d184aeb5d410f20ff591"},
+    {file = "pillow-10.3.0.tar.gz", hash = "sha256:9d2455fbf44c914840c793e89aa82d0e1763a14253a000743719ae5946814b2d"},
+]
+
+[[package]]
+name = "pyparsing"
+version = "3.1.2"
+requires_python = ">=3.6.8"
+summary = "pyparsing module - Classes and methods to define and execute parsing grammars"
+groups = ["default"]
+files = [
+    {file = "pyparsing-3.1.2-py3-none-any.whl", hash = "sha256:f9db75911801ed778fe61bb643079ff86601aca99fcae6345aa67292038fb742"},
+    {file = "pyparsing-3.1.2.tar.gz", hash = "sha256:a1bac0ce561155ecc3ed78ca94d3c9378656ad4c94c1270de543f621420f94ad"},
+]
+
+[[package]]
+name = "pyreadline3"
+version = "3.4.1"
+summary = "A python implementation of GNU readline."
+groups = ["default"]
+marker = "sys_platform == \"win32\" and python_version >= \"3.8\""
+files = [
+    {file = "pyreadline3-3.4.1-py3-none-any.whl", hash = "sha256:b0efb6516fd4fb07b45949053826a62fa4cb353db5be2bbb4a7aa1fdd1e345fb"},
+    {file = "pyreadline3-3.4.1.tar.gz", hash = "sha256:6f3d1f7b8a31ba32b73917cefc1f28cc660562f39aea8646d30bd6eff21f7bae"},
+]
+
+[[package]]
+name = "python-dateutil"
+version = "2.9.0.post0"
+requires_python = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7"
+summary = "Extensions to the standard Python datetime module"
+groups = ["default"]
+dependencies = [
+    "six>=1.5",
+]
+files = [
+    {file = "python-dateutil-2.9.0.post0.tar.gz", hash = "sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3"},
+    {file = "python_dateutil-2.9.0.post0-py2.py3-none-any.whl", hash = "sha256:a8b2bc7bffae282281c8140a97d3aa9c14da0b136dfe83f850eea9a5f7470427"},
+]
+
+[[package]]
+name = "pytz"
+version = "2024.1"
+summary = "World timezone definitions, modern and historical"
+groups = ["default"]
+files = [
+    {file = "pytz-2024.1-py2.py3-none-any.whl", hash = "sha256:328171f4e3623139da4983451950b28e95ac706e13f3f2630a879749e7a8b319"},
+    {file = "pytz-2024.1.tar.gz", hash = "sha256:2a29735ea9c18baf14b448846bde5a48030ed267578472d8955cd0e7443a9812"},
+]
+
+[[package]]
+name = "six"
+version = "1.16.0"
+requires_python = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*"
+summary = "Python 2 and 3 compatibility utilities"
+groups = ["default"]
+files = [
+    {file = "six-1.16.0-py2.py3-none-any.whl", hash = "sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254"},
+    {file = "six-1.16.0.tar.gz", hash = "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926"},
+]
+
+[[package]]
+name = "tzdata"
+version = "2024.1"
+requires_python = ">=2"
+summary = "Provider of IANA time zone data"
+groups = ["default"]
+files = [
+    {file = "tzdata-2024.1-py2.py3-none-any.whl", hash = "sha256:9068bc196136463f5245e51efda838afa15aaeca9903f49050dfa2679db4d252"},
+    {file = "tzdata-2024.1.tar.gz", hash = "sha256:2674120f8d891909751c38abcdfd386ac0a5a1127954fbc332af6b5ceae07efd"},
+]
diff --git a/pyproject.toml b/pyproject.toml
index e1b938136b844d97778fed6a4aff690f090ee2df..5c10269baa8fe83385e0b78334be1e563276c9f3 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -1,9 +1,18 @@
 [build-system]
-requires = ["setuptools >= 66.0"]
-build-backend = "setuptools.build_meta"
+requires = ["pdm-backend"]
+build-backend = "pdm.backend"
 
 [project]
 name = "processing-pypelines"
+dependencies = [
+    "coloredlogs>=15.0.1",
+    "dynaconf>=3.2.4",
+    "natsort>=8.4.0",
+    "networkx>=3.1",
+    "numpy",
+    "matplotlib",
+    "pandas>=2.1.4",
+]
 authors = [
     { name = "Timothé Jost-Mousseau", email = "timothe.jost-mousseau@pasteur.fr" },
 ]
@@ -14,20 +23,16 @@ description = "Framework to organize processing code outputs to/from disk, proce
 readme = "README.md"
 requires-python = ">=3.10"
 license = { text = "MIT" }
-dynamic = ["version", "dependencies", "optional-dependencies"]
+dynamic = ["version"]
+
+[project.optional-dependencies]
+celery = ["celery>=5.3.5", "alyx_connector>=2.1.5"]
 
 [project.urls]
 homepage = "https://gitlab.pasteur.fr/haisslab/data-management/pypelines"
 repository = "https://gitlab.pasteur.fr/haisslab/data-management/pypelines"
 documentation = "https://gitlab.pasteur.fr/haisslab/data-management/pypelines"
 
-[tool.setuptools.dynamic]
-dependencies = { file = ["requirements.txt"] }
-optional-dependencies = { celery = { file = [
-    "requirements.txt",
-    "requirements-celery.txt",
-] } }
-version = { attr = "pypelines.__version__" }
-
-[tool.setuptools.packages.find]
-where = ["src"]
+[tool.pdm.version]
+source = "file"
+path = "src/pypelines/__init__.py"
diff --git a/requirements-celery.txt b/requirements-celery.txt
deleted file mode 100644
index 324358028bc1135cdfff9eb0a86648978770ac92..0000000000000000000000000000000000000000
--- a/requirements-celery.txt
+++ /dev/null
@@ -1,2 +0,0 @@
-celery>=5.3.5
-alyx_connector>=2.1.5
\ No newline at end of file
diff --git a/requirements.txt b/requirements.txt
deleted file mode 100644
index 41e3d270ebed200cbd2f2de49b588d3bbd825722..0000000000000000000000000000000000000000
--- a/requirements.txt
+++ /dev/null
@@ -1,8 +0,0 @@
-coloredlogs>=15.0.1
-dynaconf>=3.2.4
-matplotlib>=3.8.0
-natsort>=8.4.0
-networkx>=3.1
-numpy>=1.25.2
-pandas>=2.1.4
-setuptools>=68.0.0
diff --git a/src/pypelines/__init__.py b/src/pypelines/__init__.py
index ca8dfbc54b8730fda5b84ddf52044ecd934f0da0..40b96c9f2635f4d47b9843870cfd159acbcee48c 100644
--- a/src/pypelines/__init__.py
+++ b/src/pypelines/__init__.py
@@ -1,4 +1,4 @@
-__version__ = "0.0.55"
+__version__ = "0.0.56"
 
 from . import loggs
 from .pipes import *
@@ -7,6 +7,7 @@ from .steps import *
 from .disk import *
 from .sessions import *
 
+# NOTE:
+# pypelines enables the logging system by default on import
+# (colored logging, session-prefixed logging, and logging to a file in the downloads folder)
 loggs.enable_logging()
-
-# from .versions import *
diff --git a/src/pypelines/accessors.py b/src/pypelines/accessors.py
index 03755e737b4ad96d1f71d18f953ea77dfac7a242..b8af88a7858bc9436fa3509f303d4ea2d4e9e94c 100644
--- a/src/pypelines/accessors.py
+++ b/src/pypelines/accessors.py
@@ -11,11 +11,27 @@ except AttributeError:
 @register_series_accessor("pipeline")
 class SeriesPipelineAcessor:
     def __init__(self, pandas_obj) -> None:
+        """Initializes the class with a pandas object after validating it.
+
+        Args:
+            pandas_obj: A pandas object to be validated and stored.
+
+        Returns:
+            None
+        """
         self._validate(pandas_obj)
         self._obj = pandas_obj
 
     @staticmethod
     def _validate(obj):
+        """Validate if the object has all the required fields.
+
+        Args:
+            obj (pandas.Series): The object to be validated.
+
+        Raises:
+            AttributeError: If the object is missing any of the required fields.
+        """
         required_fields = ["path", "subject", "date", "number"]
         missing_fields = []
         for req_field in required_fields:
@@ -28,14 +44,34 @@ class SeriesPipelineAcessor:
             )
 
     def subject(self):
+        """Return the subject of the object as a string."""
         return str(self._obj.subject)
 
     def number(self, zfill=3):
+        """Return a string representation of the number attribute of the object,
+        optionally zero-filled to a specified length.
+
+        Args:
+            zfill (int): The length to which the number should be zero-filled. Default is 3.
+
+        Returns:
+            str: A string representation of the number attribute, zero-filled if specified.
+        """
         number = str(self._obj.number) if self._obj.number is not None else ""
         number = number if zfill is None or number == "" else number.zfill(zfill)
         return number
 
     def alias(self, separator="_", zfill=3, date_format=None):
+        """Generate an alias based on the subject, date, and number.
+
+        Args:
+            separator (str): The separator to use between the subject, date, and number. Default is "_".
+            zfill (int): The zero padding for the number. Default is 3.
+            date_format (str): The format of the date. If None, the default format is used.
+
+        Returns:
+            str: The generated alias.
+        """
         subject = self.subject()
         date = self.date(date_format)
         number = self.number(zfill)
@@ -43,6 +79,14 @@ class SeriesPipelineAcessor:
         return subject + separator + date + ((separator + number) if number else "")
 
     def date(self, format=None):
+        """Return the date in the specified format if provided, otherwise return the date as a string.
+
+        Args:
+            format (str, optional): The format in which the date should be returned. Defaults to None.
+
+        Returns:
+            str: The date in the specified format or as a string.
+        """
         if format:
             return self._obj.date.strftime(format)
         return str(self._obj.date)
@@ -58,11 +102,27 @@ except AttributeError:
 @register_dataframe_accessor("pipeline")
 class DataFramePipelineAcessor:
     def __init__(self, pandas_obj) -> None:
+        """Initialize the object with a pandas DataFrame or Series.
+
+        Args:
+            pandas_obj: A pandas DataFrame or Series to be validated and stored.
+
+        Returns:
+            None
+        """
         self._validate(pandas_obj)
         self._obj = pandas_obj
 
     @staticmethod
     def _validate(obj):
+        """Validate if the input object has all the required columns.
+
+        Args:
+            obj (pandas.DataFrame): The object to be validated.
+
+        Raises:
+            AttributeError: If the input object is missing any of the required columns.
+        """
         required_columns = ["path", "subject", "date", "number"]
         missing_columns = []
         for req_col in required_columns:
diff --git a/src/pypelines/arguments.py b/src/pypelines/arguments.py
index d6c2fa8f77b3fdc6f1a654e3d1363fbe64d6676c..8906ed444721ee8b495da777a3625e9f0184aa05 100644
--- a/src/pypelines/arguments.py
+++ b/src/pypelines/arguments.py
@@ -25,6 +25,19 @@ def read_json_file(json_file: str):
 
 
 def read_session_arguments_file(session, step, file_suffix="_arguments.json"):
+    """Reads the arguments file for a specific session and step.
+
+    Args:
+        session: The session object containing the path information.
+        step: The step object for which the arguments file needs to be read.
+        file_suffix: The suffix appended to the pipeline name to build the arguments file name (default is "_arguments.json").
+
+    Returns:
+        The contents of the arguments file as a dictionary.
+
+    Raises:
+        FileNotFoundError: If the arguments file for the specified session and step is not found.
+    """
     file_name = step.pipeline.pipeline_name + file_suffix
     try:
         path = os.path.join(session.path, file_name)
@@ -46,7 +59,18 @@ def autoload_arguments(wrapped_function, step):
     """
 
     @wraps(wrapped_function)
-    def wraper(session, *args, **kwargs):
+    def wrapper(session, *args, **kwargs):
+        """Wrapper function that automatically loads arguments from pipelines_arguments.json
+        and overrides them with current call arguments.
+
+        Args:
+            session: The session object.
+            *args: Variable length argument list.
+            **kwargs: Arbitrary keyword arguments.
+
+        Returns:
+            The result of the wrapped function with updated arguments.
+        """
         local_log = getLogger("autoload_arguments")
 
         config_kwargs = get_step_arguments(session, step)
@@ -70,10 +94,24 @@ def autoload_arguments(wrapped_function, step):
         config_kwargs.update(kwargs)
         return wrapped_function(session, *args, **config_kwargs)
 
-    return wraper
+    return wrapper
 
 
 def get_step_arguments(session, step):
+    """Get the arguments for a specific step from the session's arguments file.
+
+    Args:
+        session: The session object whose arguments file is read.
+        step (Step): The step object for which arguments need to be retrieved.
+
+    Returns:
+        dict: The arguments for the specified step.
+
+    Raises:
+        FileNotFoundError: If the session arguments file is not found.
+        KeyError: If the 'functions' key or the key corresponding to step.relative_name
+            is not found in the arguments file.
+    """
     local_log = getLogger("autoload_arguments")
 
     try:
diff --git a/src/pypelines/celery_tasks.py b/src/pypelines/celery_tasks.py
index ae6128f4dad281fb54a149f977ea23c50d54a749..39fc47fc66d6889a679d22512f2ba23c9d16ae92 100644
--- a/src/pypelines/celery_tasks.py
+++ b/src/pypelines/celery_tasks.py
@@ -25,11 +25,31 @@ class CeleryAlyxTaskManager(BaseStepTaskManager):
     step: "BaseStep"
 
     def register_step(self):
+        """Register a step in the backend.
+
+        This method registers a task in the backend using the runner returned by get_runner().
+
+        Returns:
+            None
+        """
         if self.backend:
             # self.backend.app.task(CeleryRunner, name=self.step.complete_name)
             self.backend.app.register_task(self.get_runner())
 
     def start(self, session, extra=None, **kwargs):
+        """Starts a task on a celery cluster.
+
+        Args:
+            session: The session to use for the task.
+            extra: Extra information to pass to the task (default is None).
+            **kwargs: Additional keyword arguments to pass to the task.
+
+        Raises:
+            NotImplementedError: If the pipeline does not have a working celery backend.
+
+        Returns:
+            The created CeleryTaskRecord.
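+
+        Example (hypothetical usage, mirroring multisession.start_tasks):
+            >>> record = step.task.start(session, extra="day1")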
+        """
 
         if not self.backend:
             raise NotImplementedError(
@@ -39,6 +59,17 @@ class CeleryAlyxTaskManager(BaseStepTaskManager):
         return CeleryTaskRecord.create(self, session, extra, **kwargs)
 
     def get_runner(superself):  # type: ignore
+        """Return a CeleryRunner task for executing a step in a pipeline.
+
+        Args:
+            superself: Alias for self; named this way so the manager instance stays
+                accessible inside the nested task class methods.
+
+        Returns:
+            CeleryRunner: A Celery Task object that runs the specified step.
+        """
         from celery import Task
 
         class CeleryRunner(Task):
@@ -87,6 +118,15 @@ class CeleryTaskRecord(dict):
 
     # a class to make dictionnary keys accessible with attribute syntax
     def __init__(self, task_id, task_infos_dict={}, response_handle=None, session=None):
+        """Initialize the Task object.
+
+        Args:
+            task_id (str): The unique identifier for the task.
+            task_infos_dict (dict, optional): A dictionary containing information about the task.
+                If not provided, it will be fetched using the task_id. Defaults to {}.
+            response_handle (Any, optional): A handle for the response. Defaults to None.
+            session (Any, optional): A session object. Defaults to None.
+        """
 
         if not task_infos_dict:
             from one import ONE
@@ -99,6 +139,14 @@ class CeleryTaskRecord(dict):
         self.response = response_handle
 
     def status_from_logs(self, log_object):
+        """Update the status based on the content of the log file.
+
+        Args:
+            log_object: Log object containing the full path to the log file.
+
+        Returns:
+            None
+        """
         with open(log_object.fullpath, "r") as f:
             content = f.read()
 
@@ -116,12 +164,25 @@ class CeleryTaskRecord(dict):
         self["status"] = status
 
     def partial_update(self):
+        """Partially updates a task using the ONE API.
+
+        This function connects to the ONE database in remote mode and performs a partial update on a task
+        using the export data from the current instance.
+
+        Returns:
+            None
+        """
         from one import ONE
 
         connector = ONE(mode="remote", data_access_mode="remote")
         connector.alyx.rest("tasks", "partial_update", **self.export())
 
     def get_session(self):
+        """Retrieve the session object associated with the current instance.
+
+        Returns:
+            The session object.
+        """
         if self.session is None:
             from one import ONE
 
@@ -132,6 +193,14 @@ class CeleryTaskRecord(dict):
         return self.session
 
     def get_application(self):
+        """Return the application associated with the executable stored in the instance.
+
+        Returns:
+            The Celery application object registered under this record's executable name.
+
+        Raises:
+            KeyError: If the application associated with the executable is not found in the APPLICATIONS_STORE.
+        """
         try:
             return APPLICATIONS_STORE[self["executable"]]
         except KeyError:
@@ -139,18 +208,30 @@ class CeleryTaskRecord(dict):
 
     @property
     def pipeline_name(self):
+        """Return the name of the pipeline by splitting the name attribute at '.' and returning the first part."""
         return self["name"].split(".")[0]
 
     @property
     def pipe_name(self):
+        """Return the name of the pipe by splitting the name attribute using '.' and returning the second element.
+
+        Returns:
+            str: The name of the pipe.
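+
+        Example:
+            A record named "my_pipeline.my_pipe.my_step" yields "my_pipe".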
+        """
         return self["name"].split(".")[1]
 
     @property
     def step_name(self):
+        """Return the third element after splitting the 'name' attribute of the object with '.'."""
         return self["name"].split(".")[2]
 
     @property
     def arguments(self):
+        """Retrieve and filter arguments for the current step.
+
+        Returns:
+            dict: Filtered arguments for the current step.
+        """
         # once step arguments control will be done via file, these should take prio over the main step ran's file args
         args = self.get("arguments", {})
         args = args if args else {}
@@ -163,6 +244,16 @@ class CeleryTaskRecord(dict):
 
     @property
     def management_arguments(self):
+        """Returns a dictionary of management arguments based on the default values and any provided arguments.
+
+        Returns:
+            dict: A dictionary containing the management arguments with keys:
+                - "skip": A boolean indicating whether to skip management.
+                - "refresh": A boolean indicating whether to refresh.
+                - "refresh_requirements": A boolean indicating whether to refresh requirements.
+                - "check_requirements": A boolean indicating whether to check requirements.
+                - "save_output": A boolean indicating whether to save output.
+        """
         default_management_args = {
             "skip": True,
             "refresh": False,
@@ -182,17 +273,35 @@ class CeleryTaskRecord(dict):
 
     @property
     def session_path(self) -> str:
+        """Returns the path of the session."""
         return self.session["path"]
 
     @property
     def task_id(self):
+        """Return the task ID."""
         return self["id"]
 
     def export(self):
+        """Export the object as a dictionary with specific keys removed.
+
+        Returns:
+            dict: A dictionary containing the object's id and data with certain keys removed.
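+
+        Example (illustrative shape; "id" and "session_path" are stripped from data):
+            {"id": "abc123", "data": {"name": "pipe.step", "status": 25}}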
+        """
         return {"id": self["id"], "data": {k: v for k, v in self.items() if k not in ["id", "session_path"]}}
 
     @staticmethod
     def create(task_manager: CeleryAlyxTaskManager, session, extra=None, **kwargs):
+        """Creates a new task using the given CeleryAlyxTaskManager and session.
+
+        Args:
+            task_manager (CeleryAlyxTaskManager): The CeleryAlyxTaskManager instance to use.
+            session: The session to associate with the task.
+            extra (optional): Any extra information to include in the task.
+            **kwargs: Additional keyword arguments to pass to the task.
+
+        Returns:
+            CeleryTaskRecord: A CeleryTaskRecord object representing the created task.
+        """
         from one import ONE
 
         connector = ONE(mode="remote", data_access_mode="remote")
@@ -216,6 +325,19 @@ class CeleryTaskRecord(dict):
 
     @staticmethod
     def create_from_task_name(app: "Celery", task_name: str, pipeline_name: str, session, extra=None, **kwargs):
+        """Create a new task from the given task name and pipeline name.
+
+        Args:
+            app (Celery): The Celery application.
+            task_name (str): The name of the task to be created.
+            pipeline_name (str): The name of the pipeline.
+            session: The session object.
+            extra (optional): Extra information for the task.
+            **kwargs: Additional keyword arguments.
+
+        Returns:
+            CeleryTaskRecord: A record of the created Celery task.
+        """
         from one import ONE
 
         connector = ONE(mode="remote", data_access_mode="remote")
@@ -240,6 +362,21 @@ class CeleryTaskRecord(dict):
     def create_from_model(
         app: "Celery", task_model: type, task_name: str, pipeline_name: str, session: object, extra=None, **kwargs
     ):
+        """Create a new task from a given task model and send it to a Celery app.
+
+        Args:
+            app (Celery): The Celery app instance.
+            task_model (type): The task model class to create a new task instance.
+            task_name (str): The name of the task.
+            pipeline_name (str): The name of the pipeline.
+            session (object): The session object.
+            extra (optional): Extra information to pass to the task.
+            **kwargs: Additional keyword arguments to pass to the task.
+
+        Returns:
+            CeleryTaskRecord: A record of the created task with task ID, task information dictionary,
+                response handle, and session.
+        """
 
         new_task = task_model(name=task_name, session=session, arguments=kwargs, status=25, executable=pipeline_name)
         new_task.save()
@@ -259,6 +396,17 @@ class CeleryTaskBackend(BaseTaskBackend):
     task_manager_class = CeleryAlyxTaskManager
 
     def __init__(self, parent: Pipeline, app: "Celery | None" = None):
+        """Initialize the PipelineApp object.
+
+        Args:
+            parent (Pipeline): The parent Pipeline object.
+            app (str): The Celery app associated with the Pipeline, or None if not provided.
+
+        Attributes:
+            parent (Pipeline): The parent Pipeline object.
+            success (bool): Flag indicating if the initialization was successful.
+            app (str): The Celery app associated with the Pipeline.
+        """
         super().__init__(parent)
         self.parent = parent
 
@@ -271,9 +419,18 @@ class CeleryTaskBackend(BaseTaskBackend):
             self.app.pipelines = pipelines
 
     def start(self):
+        """Starts the application."""
         self.app.start()
 
     def create_task_manager(self, step):
+        """Create a task manager for the given step.
+
+        Args:
+            step: The step to be associated with the task manager.
+
+        Returns:
+            Task manager object initialized with the given step.
+        """
         task_manager = self.task_manager_class(step, self)
         task_manager.register_step()
         return task_manager
@@ -285,6 +442,13 @@ class CeleryPipeline(Pipeline):
 
 class LogTask:
     def __init__(self, task_record: CeleryTaskRecord, username=None, level="LOAD"):
+        """Initialize the TaskLogger object.
+
+        Args:
+            task_record (CeleryTaskRecord): The Celery task record.
+            username (str, optional): The username associated with the task. Defaults to None.
+            level (str, optional): The logging level for the task. Defaults to "LOAD".
+        """
         self.path = Path(task_record.session_path) / "logs"
         self.username = username if username is not None else (node() if node() else "unknown")
         self.worker_pk = task_record.task_id
@@ -292,15 +456,36 @@ class LogTask:
         self.level = getattr(logging, level.upper())
 
     def __enter__(self):
+        """Perform necessary setup tasks when entering a context manager.
+
+        Returns:
+            self: The current instance of the context manager.
+        """
         self.path.mkdir(exist_ok=True)
         self.logger = getLogger()
         self.set_handler()
         return self
 
     def __exit__(self, exc_type, exc_val, exc_tb):
+        """Clean up resources when exiting a context manager.
+
+        Args:
+            exc_type: The type of the exception that caused the exit, or None if no exception occurred.
+            exc_val: The exception instance that caused the exit, or None if no exception occurred.
+            exc_tb: The traceback of the exception that caused the exit, or None if no exception occurred.
+        """
         self.remove_handler()
 
     def set_handler(self):
+        """Set up logging handler for the current task.
+
+        This method sets up a logging handler for the current task by creating a log file
+        with a specific filename based on task details.
+        It then configures the file handler with appropriate formatters and filters for colored logging.
+
+        Returns:
+            None
+        """
         self.filename = f"task_log.{self.task_name}.{self.worker_pk}.log"
         self.fullpath = self.path / self.filename
         fh = logging.FileHandler(self.fullpath)
@@ -329,10 +514,22 @@ class LogTask:
         self.logger.addHandler(fh)
 
     def remove_handler(self):
+        """Removes the last handler from the logger."""
         self.logger.removeHandler(self.logger.handlers[-1])
 
 
 def create_celery_app(conf_path, app_name="pypelines", v_host=None) -> "Celery | None":
+    """Create a Celery app with the given configuration.
+
+    Args:
+        conf_path (str): The path to the configuration file.
+        app_name (str): The name of the Celery app. Default is "pypelines".
+        v_host (str, optional): The virtual host for the Celery app. Defaults to None.
+
+    Returns:
+        Celery | None: The created Celery app instance, or None if creation failed.
+    """
 
     failure_message = (
         f"Celery app : {app_name} failed to be created."
@@ -355,6 +552,14 @@ def create_celery_app(conf_path, app_name="pypelines", v_host=None) -> "Celery |
     from types import MethodType
 
     def get_setting_files_path(conf_path) -> List[Path]:
+        """Get the paths of setting files for the given configuration path.
+
+        Args:
+            conf_path (str): The path to the configuration file.
+
+        Returns:
+            List[Path]: A list of Path objects representing the setting files found.
+        """
         conf_path = Path(conf_path)
         if conf_path.is_file():
             conf_path = conf_path.parent
@@ -366,12 +571,28 @@ def create_celery_app(conf_path, app_name="pypelines", v_host=None) -> "Celery |
         return files
 
     def get_signature_as_string(signature):
+        """Return the function signature as a string without the 'session' parameter.
+
+        Args:
+            signature: The signature of the function.
+
+        Returns:
+            str: The function signature as a string without the 'session' parameter.
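+
+        Example (illustrative):
+            >>> from inspect import signature
+            >>> def step(session, rate=30, *, extra=""): ...
+            >>> get_signature_as_string(signature(step))
+            "rate=30, extra=''"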
+        """
         params = [
             param_value for param_name, param_value in signature.parameters.items() if param_name not in ["session"]
         ]
         return str(signature.replace(parameters=params))[1:-1].replace(" *,", "")
 
     def get_type_name(annotation):
+        """Returns the name of the type hint for a given annotation.
+
+        Args:
+            annotation: The annotation for which to determine the type name.
+
+        Returns:
+            str: The name of the type hint.
+        """
         from inspect import Parameter
         from typing import get_args, get_origin
         from types import UnionType
@@ -394,6 +615,17 @@ def create_celery_app(conf_path, app_name="pypelines", v_host=None) -> "Celery |
         return "__unknown__"
 
     def string_to_typehint(string_hint, globalns=None, localns=None):
+        """Converts a string type hint to a valid type hint object.
+
+        Args:
+            string_hint (str): The string representation of the type hint.
+            globalns (dict, optional): Global namespace dictionary. Defaults to None.
+            localns (dict, optional): Local namespace dictionary. Defaults to None.
+
+        Returns:
+            type: The type hint object corresponding to the input string hint,
+                or "__unknown__" if the type hint is not valid.
+        """
         from typing import ForwardRef, _eval_type
 
         try:
@@ -402,6 +634,18 @@ def create_celery_app(conf_path, app_name="pypelines", v_host=None) -> "Celery |
             return "__unknown__"
 
     def get_signature_as_dict(signature):
+        """Return a dictionary containing information about the parameters of a function signature.
+
+        Args:
+            signature: A function signature object.
+
+        Returns:
+            dict: A dictionary where keys are parameter names and values
+                are dictionaries containing the following information:
+                - "typehint": The type hint of the parameter.
+                - "default_value": The default value of the parameter (or "__empty__" if no default value is specified).
+                - "kind": The kind of the parameter (e.g., POSITIONAL_ONLY, KEYWORD_ONLY, etc.).
+        """
         from inspect import Parameter
 
         parameters = signature.parameters
@@ -426,6 +670,15 @@ def create_celery_app(conf_path, app_name="pypelines", v_host=None) -> "Celery |
         name = f"{app_name}.tasks_infos"
 
         def run(self, app_name, selfish=False):
+            """Run the specified app to gather tasks information.
+
+            Args:
+                app_name (str): The name of the app to run.
+                selfish (bool, optional): Flag to indicate whether to include selfish tasks. Defaults to False.
+
+            Returns:
+                dict: A dictionary containing tasks information for the specified app.
+            """
             app = APPLICATIONS_STORE[app_name]
             tasks_dynamic_data = {}
             pipelines = getattr(app, "pipelines", {})
@@ -457,6 +710,11 @@ def create_celery_app(conf_path, app_name="pypelines", v_host=None) -> "Celery |
             return tasks_dynamic_data
 
     def get_remote_tasks(self):
+        """Retrieve information about remote tasks.
+
+        Returns:
+            dict: A dictionary containing information about remote tasks, including workers and task names.
+        """
         registered_tasks = self.control.inspect().registered_tasks()
         workers = []
         task_names = []
@@ -471,6 +729,19 @@ def create_celery_app(conf_path, app_name="pypelines", v_host=None) -> "Celery |
     def get_celery_app_tasks(
         self, refresh=False, auto_refresh=3600 * 24, failed_refresh=60 * 5, initial_timeout=10, refresh_timeout=2
     ):
+        """Get the celery app tasks data with optional refresh mechanism.
+
+        Args:
+            refresh (bool): Flag to force refresh the tasks data. Default is False.
+            auto_refresh (int): Time interval in seconds for auto refresh. Default is 3600 seconds (1 hour).
+            failed_refresh (int): Time interval in seconds for retrying refresh after failure.
+                Default is 300 seconds (5 minutes).
+            initial_timeout (int): Timeout in seconds for initial task data retrieval. Default is 10 seconds.
+            refresh_timeout (int): Timeout in seconds for refreshing task data. Default is 2 seconds.
+
+        Returns:
+            dict: The task data of the celery app if available, otherwise None.
+        """
 
         from datetime import datetime, timedelta
 
@@ -520,6 +791,18 @@ def create_celery_app(conf_path, app_name="pypelines", v_host=None) -> "Celery |
         return app_task_data["task_data"] if app_task_data is not None else None
 
     def launch_named_task_remotely(self, session_id, task_name, task_model=None, extra=None, kwargs={}):
+        """Launches a named task remotely.
+
+        Args:
+            session_id (str): The session ID for the task.
+            task_name (str): The name of the task to be launched.
+            task_model (type, optional): The task model class. Defaults to None.
+            extra (dict, optional): Extra data to be passed to the task. Defaults to None.
+            kwargs (dict, optional): Additional keyword arguments to be passed to the task. Defaults to {}.
+
+        Returns:
+            CeleryTaskRecord: The task record created for the launched task.
+        """
 
         if task_model is None:
             task_record = CeleryTaskRecord.create_from_task_name(
@@ -533,6 +816,11 @@ def create_celery_app(conf_path, app_name="pypelines", v_host=None) -> "Celery |
         return task_record
 
     def is_hand_shaken(self):
+        """Check if a handshake is successful.
+
+        Returns:
+            bool: True if handshake is successful, False otherwise.
+        """
         try:
             result = self.tasks[f"{app_name}.handshake"].delay().get(timeout=1)
             logger.warning(f"Handshake result : {result}")
diff --git a/src/pypelines/disk.py b/src/pypelines/disk.py
index cd534f16539a50c668c7f0cfa15637f62cf0d63e..8b73219171c00437a873c7eeb9b95c879f569f81 100644
--- a/src/pypelines/disk.py
+++ b/src/pypelines/disk.py
@@ -28,6 +28,22 @@ class BaseDiskObject(metaclass=ABCMeta):
     extra: str
 
     def __init__(self, session: Session, step: "BaseStep", extra="") -> None:
+        """Initialize the ShortLivedObject with the given session, step, and optional extra data.
+
+        Args:
+            session (Session): The session object to use.
+            step (BaseStep): The step object to use.
+            extra (str, optional): Extra data to include. Defaults to "".
+
+        Returns:
+            None
+
+        Notes:
+            This object is meant to be short lived. Created, check drive,
+            and quickly take action by saving or loading file according to the procedures defined.
+            The behavior is not meant to be edited after the init so that's why the methods
+            don't take arguments, at the exception of the save method which takes data to save as input.
+        """
         # this object is meant to be short lived. Created, check drive,
         # and quickly take action by saving or loading file according to the procedures defined.
         # The behaviour is not meant to be edited after the init so that's why the methods
@@ -41,14 +57,21 @@ class BaseDiskObject(metaclass=ABCMeta):
 
     @property
     def object_name(self):
+        """Return the full name of the object."""
         return f"{self.step.relative_name}{'.' + self.extra if self.extra else ''}"
 
     @abstractmethod
     def version_deprecated(self) -> bool:
+        """Returns a boolean value indicating whether the version is deprecated."""
         return False
 
     @abstractmethod
     def step_level_too_low(self) -> bool:
+        """Check if the step level is too low.
+
+        Returns:
+            bool: True if the step level is too low, False otherwise.
+        """
         return False
 
     @abstractmethod
@@ -78,6 +101,15 @@ class BaseDiskObject(metaclass=ABCMeta):
 
     @staticmethod
     def multisession_packer(sessions, session_result_dict: dict) -> dict:
+        """Packs the results of multiple sessions into a dictionary with session u_alias as keys.
+
+        Args:
+            sessions: DataFrame containing session information.
+            session_result_dict: Dictionary containing session results with session id as keys.
+
+        Returns:
+            Dictionary with session u_alias as keys and corresponding results as values.
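+
+        Example (illustrative; assumes pandas is available as pd):
+            >>> sessions = pd.DataFrame({"u_alias": ["mouse1_day1"]}, index=["id_0"])
+            >>> BaseDiskObject.multisession_packer(sessions, {"id_0": 42})
+            {'mouse1_day1': 42}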
+        """
         session_result_dict = {
             sessions.loc[key].u_alias: value for key, value in session_result_dict.items()
         }  # replace indices from session id with session u_alias
@@ -86,6 +118,15 @@ class BaseDiskObject(metaclass=ABCMeta):
 
     @staticmethod
     def multisession_unpacker(sessions, datas):
+        """Unpacks data from multiple sessions.
+
+        Args:
+            sessions (list): A list of session identifiers.
+            datas (list): A list of data corresponding to each session.
+
+        Raises:
+            NotImplementedError: This function is not implemented yet.
+        """
         raise NotImplementedError
 
     def disk_step_instance(self) -> "BaseStep | None":
@@ -95,17 +136,38 @@ class BaseDiskObject(metaclass=ABCMeta):
         return None
 
     def is_matching(self):
+        """Check if the object is matching the required criteria.
+
+        Returns:
+            bool: True if the object is matching, False otherwise.
+        """
         if self.is_loadable() and not (self.version_deprecated() or self.step_level_too_low()):
             return True
         return False
 
     def is_loadable(self) -> bool:
+        """Check if the object is loadable.
+
+        Returns:
+            bool: True if the object is loadable, False otherwise.
+        """
         return self.loadable
 
     def get_found_disk_object_description(self) -> str:
+        """Return the description of the found disk object.
+
+        Returns:
+            str: The description of the found disk object.
+        """
         return ""
 
     def get_status_message(self):
+        """Return a status message for the object.
+
+        Returns:
+            str: A message describing the status of the object, including loadability, deprecation, step level,
+                and found disk object description.
+        """
         loadable_disk_message = "A disk object is loadable. " if self.is_loadable() else ""
         deprecated_disk_message = (
             f"This object's version is {'deprecated' if self.version_deprecated() else 'the current one'}. "
@@ -134,19 +196,50 @@ class BaseDiskObject(metaclass=ABCMeta):
 
 class NullDiskObject(BaseDiskObject):
     def version_deprecated(self) -> bool:
+        """Indicates that the version of the function is deprecated.
+
+        Returns:
+            bool: True if the version is deprecated, False otherwise.
+        """
         return True
 
     def step_level_too_low(self) -> bool:
+        """Check if the step level is too low.
+
+        Returns:
+            bool: True if the step level is too low, False otherwise.
+        """
         return True
 
     def check_disk(self) -> bool:
+        """Check the disk status.
+
+        Returns:
+            bool: True if disk is healthy, False otherwise.
+        """
         return False
 
     def save(self, data: OutputData) -> None:
+        """Save the output data to disk.
+
+        Args:
+            data (OutputData): The output data to be saved.
+
+        Returns:
+            None
+        """
         # data is not saved to disk
         pass
 
     def load(self) -> OutputData:
+        """Load the output data.
+
+        Returns:
+            OutputData: The output data object.
+
+        Raises:
+            NotImplementedError: This should never be called as check_disk always returns False.
+        """
         # this should never be called as check_disk always return False
         raise NotImplementedError
 
@@ -156,6 +249,16 @@ _CACHE_STORAGE = {}  # this cache variable is cross instances
 
 class CachedDiskObject(BaseDiskObject):
     def __init__(self, session: Session, step: "BaseStep", extra="") -> None:
+        """Initialize the BaseStepLoader.
+
+        Args:
+            session (Session): The session object.
+            step (BaseStep): The BaseStep object.
+            extra (str, optional): Extra information. Defaults to "".
+
+        Returns:
+            None
+        """
         self.session = session
         self.step = step
         self.extra = extra
@@ -163,6 +266,11 @@ class CachedDiskObject(BaseDiskObject):
         self.loadable = self.check_disk()
 
     def get_cached_storage(self):
+        """Return cached storage for the current step, session, and extra data.
+
+        Returns:
+            dict: A dictionary containing the cached storage for the current step, session, and extra data.
+        """
         if self.step.pipe not in self.storage:
             self.storage[self.step.pipe] = {}
 
@@ -177,9 +285,18 @@ class CachedDiskObject(BaseDiskObject):
         return stored_dict
 
     def load(self):
+        """Load the content from the cached storage."""
         return self.get_cached_storage()["content"]
 
     def save(self, data):
+        """Save the data into the storage dictionary.
+
+        Args:
+            data: The data to be saved.
+
+        Returns:
+            dict: A dictionary containing the version, content, and step name of the saved data.
+        """
         stored_dict = {
             "version": self.step.version,
             "content": data,
@@ -189,6 +306,7 @@ class CachedDiskObject(BaseDiskObject):
         return stored_dict
 
     def check_disk(self):
+        """Check the disk status and return True if the disk content is not None, otherwise return False."""
         stored_cache = self.get_cached_storage()
         self.disk_version = stored_cache["version"]
         self.disk_step = stored_cache["step"]
@@ -199,11 +317,21 @@ class CachedDiskObject(BaseDiskObject):
         return True
 
     def version_deprecated(self):
+        """Check if the version is deprecated.
+
+        Returns:
+            bool: True if the version is deprecated, False otherwise.
+        """
         if self.step.version != self.disk_version:
             return True
         return False
 
     def step_level_too_low(self) -> bool:
+        """Check if the level of the disk step is lower than the current step.
+
+        Returns:
+            bool: True if the level of the disk step is lower than the current step, False otherwise.
+        """
         # we get the step instance that corresponds to the one on the disk
         disk_step = self.disk_step_instance()
 
@@ -214,5 +342,6 @@ class CachedDiskObject(BaseDiskObject):
         return False
 
     def clear_cache(self):
+        """Clears the cache by removing all items stored in the cache."""
         for pipe in list(self.storage.keys()):
             self.storage.pop(pipe)
diff --git a/src/pypelines/examples.py b/src/pypelines/examples.py
index f762fcd1057d24af9ff5c61d4386df13011ccbb3..e7604a2d052e42fb675df060037408fdbe1400ac 100644
--- a/src/pypelines/examples.py
+++ b/src/pypelines/examples.py
@@ -11,6 +11,18 @@ class treated_videos(PicklePipe):
 
     @stepmethod()
     def compress(self, session, video_codec="ffmpeg", extra="", compression_rate=0.5):
+        """Compresses a video using the specified video codec and compression rate.
+
+        Args:
+            session: The session to compress the video for.
+            video_codec (str): The video codec to use for compression. Default is "ffmpeg".
+            extra (str): Any extra information for compression. Default is an empty string.
+            compression_rate (float): The compression rate to apply to the video. Default is 0.5.
+
+        Returns:
+            dict: A dictionary containing the compressed video information including pixels, video_codec,
+                and compression_rate.
+        """
         return {
             "pixels": [1, 2, 5, 7, 18, 8, 9, 8, 21],
             "video_codec": video_codec,
@@ -22,6 +34,15 @@ class treated_videos(PicklePipe):
 class modified_videos(PicklePipe):
     @stepmethod(requires="local_features.templates_new_locations", version="1")
     def draw_templates(self, session, extra=""):
+        """Draws templates on the video.
+
+        Args:
+            session: The session to load the data from.
+            extra: Additional information to specify the data to load (default is "").
+
+        Returns:
+            A dictionary containing the processed video with templates drawn on it.
+        """
         video = self.pipeline.treated_videos.compress.load(session, extra)["pixels"]
         templates = self.pipeline.local_features.templates_new_locations.load(session, extra)
         video = video + templates["processed_data"]
@@ -29,6 +50,16 @@ class modified_videos(PicklePipe):
 
     @stepmethod(requires=["modified_videos.draw_templates", "background_features.detect_buildings"], version="1")
     def draw_godzilla(self, session, roar="grrrr", extra=""):
+        """Draws a Godzilla with a caption and optional extra information.
+
+        Args:
+            session: The session to use for drawing.
+            roar (str): The sound Godzilla makes (default is "grrrr").
+            extra (str): Any extra information to include in the drawing (default is "").
+
+        Returns:
+            dict: A dictionary representing the drawn Godzilla with the caption and extra information.
+        """
         obj = self.object()
         obj["caption"] = roar
         return obj
@@ -38,22 +69,65 @@ class modified_videos(PicklePipe):
 class background_features(PicklePipe):
     @stepmethod(version="1", requires="background_features.enhanced_background")
     def blobs(self, session, argument1, extra="", optionnal_argument2="5"):
+        """Return a blob object with the specified arguments.
+
+        Args:
+            session: The session object.
+            argument1: The first argument.
+            extra (str, optional): Extra information. Defaults to "".
+            optionnal_argument2 (str, optional): Another optional argument. Defaults to "5".
+
+        Returns:
+            dict: A blob object with the specified arguments.
+        """
         obj = self.object()
         obj["optionnal_argument2"] = optionnal_argument2
         return obj
 
     @stepmethod(requires="treated_videos.compress", version="2")
     def enhanced_background(self, session, extra="", clahe_object=None):
+        """Return a dictionary containing the 'clahe_object' parameter.
+
+        Args:
+            session: The session object.
+            extra (str): An optional extra parameter (default is "").
+            clahe_object: An optional CLAHE object (default is None).
+
+        Returns:
+            dict: A dictionary containing the 'clahe_object' parameter.
+        """
         return {"clahe_object": clahe_object}
 
     @stepmethod(requires="treated_videos.compress", version="3")
     def scale_spaces(self, session, scales="0", extra=""):
+        """Scale the spaces based on the provided scales and extra parameters.
+
+        Args:
+            session: The session object.
+            scales (str): The scales to be applied (default is "0").
+            extra (str): Any extra parameters to be considered (default is "").
+
+        Returns:
+            str: The result of scaling the spaces.
+        """
         # obj = self.object()
         # obj.update({"scales" : scales, "argument2" : "i"})
         return "testouillet"  # obj
 
     @stepmethod(requires="treated_videos.compress", version="3")
     def detect_buildings(self, session, scales, extra=""):
+        """Detect buildings using the specified session and scales.
+
+        Args:
+            session: The session to use for detecting buildings.
+            scales: The scales to be used for detection.
+            extra: Additional information (default is an empty string).
+
+        Returns:
+            dict: A dictionary containing the scales and an example second argument.
+        """
         obj = self.object()
         return {"scales": scales, "argument2": "i"}
 
@@ -62,10 +136,31 @@ class background_features(PicklePipe):
 class local_features(PicklePipe):
     @stepmethod(version="1", requires="background_features.scale_spaces")
     def template_matches(self, session, argument1=1, extra="", optionnal_argument2="1"):
+        """Return a dictionary with the values of argument1 and optionnal_argument2.
+
+        Args:
+            session: The session object.
+            argument1: An integer representing the first argument (default is 1).
+            extra: An optional string argument (default is an empty string).
+            optionnal_argument2: An optional string argument (default is "1").
+
+        Returns:
+            A dictionary with the values of argument1 and optionnal_argument2.
+        """
         return {"argument1": argument1, "optionnal_argument2": optionnal_argument2}
 
     @stepmethod(requires=["local_features.template_matches", "background_features.blobs"], version="2")
     def templates_new_locations(self, session, new_locations, extra=""):
+        """Update the object with new locations and processed data.
+
+        Args:
+            session: The session object.
+            new_locations: A list of new locations to be added.
+            extra: An optional extra parameter (default is an empty string).
+
+        Returns:
+            The updated object with new locations and processed data.
+        """
         obj = self.object()  # get previous object version from disk
         obj.update({"new_locations": new_locations, "processed_data": [int(loc) * int(loc) for loc in new_locations]})
         return obj
diff --git a/src/pypelines/graphs.py b/src/pypelines/graphs.py
index 779b57d2fe657b227b2b18542a0f942acf24e3d4..ea74af2de39916e2f29b31bde35dc08aa3898aef 100644
--- a/src/pypelines/graphs.py
+++ b/src/pypelines/graphs.py
@@ -1,6 +1,3 @@
-import numpy as np
-import matplotlib.pyplot as plt
-
 from typing import TYPE_CHECKING
 
 if TYPE_CHECKING:
@@ -12,6 +9,14 @@ class PipelineGraph:
     name_graph: "DiGraph"
 
     def __init__(self, pipeline):
+        """Initialize the PipelineVisualizer object.
+
+        Args:
+            pipeline: The pipeline object to visualize.
+
+        Returns:
+            None
+        """
         from networkx import DiGraph, draw, spring_layout, draw_networkx_labels
 
         self.pipeline = pipeline
@@ -25,6 +30,15 @@ class PipelineGraph:
         self.make_graphs()
 
     def make_graphs(self):
+        """Generates two directed graphs based on the pipeline steps.
+
+        This method creates two directed graphs: callable_graph and display_graph.
+        The callable_graph represents the pipeline steps and their dependencies.
+        The display_graph represents the pipeline steps with their relative names.
+
+        Returns:
+            None
+        """
 
         callable_graph = self.DiGraph()
         display_graph = self.DiGraph()
@@ -52,7 +66,26 @@ class PipelineGraph:
         node_color="orange",
         **kwargs,
     ):
+        """Draws a requirement graph using NetworkX and Matplotlib.
+
+        Args:
+            font_size (int): Font size for node labels (default is 7).
+            layout (str): Layout type for the graph, either "aligned" or "spring" (default is "aligned").
+            ax (matplotlib.axes.Axes): Matplotlib axes to draw the graph on (default is None).
+            figsize (tuple): Figure size for the plot (default is (12, 7)).
+            line_return (bool): Whether to include line return in node labels (default is True).
+            remove_pipe (bool): Whether to remove pipe characters from node labels (default is True).
+            rotation (int): Rotation angle for node labels (default is 18).
+            max_spacing (float): Maximum spacing between nodes (default is 0.28).
+            node_color (str): Color for the nodes (default is "orange").
+            **kwargs: Additional keyword arguments to be passed to NetworkX drawing functions.
+
+        Returns:
+            matplotlib.axes.Axes: The matplotlib axes containing the drawn graph.
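+
+        Example (hypothetical usage):
+            >>> graph = PipelineGraph(my_pipeline)
+            >>> ax = graph.draw(layout="spring", figsize=(10, 6))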
+        """
         if ax is None:
+            import matplotlib.pyplot as plt
+
             _, ax = plt.subplots(figsize=figsize)
         if layout == "aligned":
             pos = self.get_aligned_layout()
@@ -74,6 +107,17 @@ class PipelineGraph:
         return ax
 
     def draw_columns_labels(self, pos, ax, font_size=7, rotation=30):
+        """Draw column labels on the plot.
+
+        Args:
+            pos (dict): A dictionary containing the positions of the columns.
+            ax (matplotlib.axes.Axes): The axes object on which to draw the labels.
+            font_size (int, optional): The font size of the labels. Defaults to 7.
+            rotation (int, optional): The rotation angle of the labels in degrees. Defaults to 30.
+
+        Returns:
+            None
+        """
         unique_pos = {}
         for key, value in pos.items():
             column = key.split(".")[0]
@@ -88,6 +132,16 @@ class PipelineGraph:
             ax.axvline(x, ymin=0.1, ymax=0.85, zorder=-1, lw=0.5, color="gray")
 
     def get_labels(self, line_return=True, remove_pipe=True):
+        """Return formatted labels for nodes in the graph.
+
+        Args:
+            line_return (bool): Whether to replace '.' with a newline in the formatted name. Default is True.
+            remove_pipe (bool): Whether to remove everything before the first '.' in the formatted name.
+                Default is True.
+
+        Returns:
+            dict: A dictionary containing node names as keys and their formatted names as values.
+        """
         labels = {}
         for node_name in self.name_graph.nodes:
             formated_name = node_name
@@ -99,6 +153,11 @@ class PipelineGraph:
         return labels
 
     def get_aligned_layout(self):
+        """Return the layout of nodes in a graph with aligned x-coordinates and negative y-coordinates.
+
+        Returns:
+            dict: A dictionary mapping node names to their (x, y) coordinates in the layout.
+        """
         pipe_x_indices = {pipe.pipe: index for index, pipe in enumerate(self.pipeline.pipes.values())}
         pos = {}
         for node in self.callable_graph.nodes:
@@ -109,6 +168,18 @@ class PipelineGraph:
         return pos
 
     def separate_crowded_levels(self, pos, max_spacing=0.35):
+        """Separate crowded levels by adjusting the x positions of pipes with the same y position.
+
+        Args:
+            pos (dict): A dictionary containing the positions of pipes in the format {pipe_name: (x_pos, y_pos)}.
+            max_spacing (float, optional): The maximum spacing allowed between pipes on the same level.
+                Defaults to 0.35.
+
+        Returns:
+            dict: A dictionary with adjusted positions to separate crowded levels.
+        """
+        from numpy import linspace
+
         treated_pipes = []
         for key, value in pos.items():
             pipe_name = key.split(".")[0]
@@ -120,7 +191,7 @@ class PipelineGraph:
             if len(multi_steps) == 1:
                 continue
             x_min, x_max = x_pos - max_spacing, x_pos + max_spacing
-            new_xs = np.linspace(x_min, x_max, len(multi_steps))
+            new_xs = linspace(x_min, x_max, len(multi_steps))
             for new_x, (k, (x, y)) in zip(new_xs, multi_steps.items()):
                 pos[k] = (new_x, y)
 
diff --git a/src/pypelines/loggs.py b/src/pypelines/loggs.py
index b93c187f97bfd6858353af7554e7ab573b214ba1..a25d927a4ae84804cda64f716265f735091be177 100644
--- a/src/pypelines/loggs.py
+++ b/src/pypelines/loggs.py
@@ -1,70 +1,73 @@
 import logging
 import sys
 import re
-import os
 from functools import wraps
-import coloredlogs
-
-NAMELENGTH = 33  # global variable for fromatting the length of the padding dedicated to name part in a logging record
-LEVELLENGTH = 8  # global variable for fromatting the length of the padding dedicated to levelname part in a record
-
-
-def enable_logging(terminal_level="NOTE", file_level="LOAD", programname="", username=""):
-    """_summary_
+from coloredlogs import (
+    coerce_string,
+    ansi_wrap,
+    Empty,
+    ColoredFormatter,
+    UserNameFilter,
+    ProgramNameFilter,
+    HostNameFilter,
+)
+from pathlib import Path
+
+NAMELENGTH = 33  # global variable for formatting the length of the padding dedicated to name part in a logging record
+LEVELLENGTH = 8  # global variable for formatting the length of the padding dedicated to levelname part in a record
+
+
+def enable_logging(
+    filename: str | None = None,
+    terminal_level: str = "NOTE",
+    file_level: str = "LOAD",
+    programname: str = "",
+    username: str = "",
+):
+    """Enable logging with specified configurations.
 
     Args:
-        terminal_level (str, optional): _description_. Defaults to "INFO".
-        file_level (str, optional): _description_. Defaults to "LOAD".
-        programname (str, optional): _description_. Defaults to "".
-        username (str, optional): _description_. Defaults to "".
+        filename (str, optional): Path to the log file. Defaults to None.
+        terminal_level (str, optional): Logging level for terminal output. Defaults to "NOTE".
+        file_level (str, optional): Logging level for file output. Defaults to "LOAD".
+        programname (str, optional): Name of the program. Defaults to "".
+        username (str, optional): Username for logging. Defaults to "".
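+
+    Example (illustrative; the terminal handler then shows INFO and above while
+    the log file captures LOAD and above):
+        >>> enable_logging(terminal_level="INFO", file_level="LOAD")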
     """
     # Create a filehandler object for file
-    fh = logging.FileHandler("test.log", "w", "utf-8")
+    if filename is None:
+        logs_folder = Path.home() / ".python" / "pypelines_logs"
+        logs_folder.mkdir(parents=True, exist_ok=True)
+        filename = str(logs_folder / "logs.log")
+
+    fh = logging.FileHandler(filename, mode="a", encoding="utf-8")
     f_formater = FileFormatter()
     fh.setFormatter(f_formater)
 
-    coloredlogs.HostNameFilter.install(
-        fmt=f_formater.FORMAT,
-        handler=fh,
-        style=f_formater.STYLE,
-        use_chroot=True,
-    )
-    coloredlogs.ProgramNameFilter.install(
-        fmt=f_formater.FORMAT,
-        handler=fh,
-        programname=programname,
-        style=f_formater.STYLE,
-    )
-    coloredlogs.UserNameFilter.install(
-        fmt=f_formater.FORMAT,
-        handler=fh,
-        username=username,
-        style=f_formater.STYLE,
-    )
-
     # Create a filehandler object for terminal
     ch = logging.StreamHandler(sys.stdout)
     c_formater = TerminalFormatter()
     ch.setFormatter(c_formater)
 
-    coloredlogs.HostNameFilter.install(
-        fmt=c_formater.FORMAT,
-        handler=fh,
-        style=c_formater.STYLE,
-        use_chroot=True,
-    )
-    coloredlogs.ProgramNameFilter.install(
-        fmt=c_formater.FORMAT,
-        handler=fh,
-        programname=programname,
-        style=c_formater.STYLE,
-    )
-    coloredlogs.UserNameFilter.install(
-        fmt=c_formater.FORMAT,
-        handler=fh,
-        username=username,
-        style=c_formater.STYLE,
-    )
+    for handler, formater in zip([fh, ch], [f_formater, c_formater]):
+
+        HostNameFilter.install(
+            fmt=formater.FORMAT,
+            handler=handler,
+            style=formater.STYLE,
+            use_chroot=True,
+        )
+        ProgramNameFilter.install(
+            fmt=formater.FORMAT,
+            handler=handler,
+            programname=programname,
+            style=formater.STYLE,
+        )
+        UserNameFilter.install(
+            fmt=formater.FORMAT,
+            handler=handler,
+            username=username,
+            style=formater.STYLE,
+        )
 
     logger = logging.getLogger()  # root logger
 
@@ -84,13 +87,13 @@ def enable_logging(terminal_level="NOTE", file_level="LOAD", programname="", use
         min(terminal_level, file_level)
     )  # set logger level to the lowest usefull, to be sure we can capture messages necessary in handlers
 
-    fh.setLevel(file_level)
-    logger.addHandler(fh)
-    ch.setLevel(terminal_level)
-    logger.addHandler(ch)
+    for handler, level in zip([fh, ch], [file_level, terminal_level]):
+        handler.setLevel(level)
+        logger.addHandler(handler)
 
 
-class DynamicColoredFormatter(coloredlogs.ColoredFormatter):
+class DynamicColoredFormatter(ColoredFormatter):
     """_summary_"""
 
     # note that only message, name, levelname, pathname, process, thread, lineno, levelno and filename can be dynamic.
@@ -98,15 +101,15 @@ class DynamicColoredFormatter(coloredlogs.ColoredFormatter):
     # as it would be more complex to implement otherwise, and for a small benefit.
 
     def __init__(self, fmt=None, datefmt=None, style="%", level_styles=None, field_styles=None, dynamic_levels=None):
-        """_summary_
+        """Initialize the logging formatter with custom formatting options.
 
         Args:
-            fmt (_type_, optional): _description_. Defaults to None.
-            datefmt (_type_, optional): _description_. Defaults to None.
-            style (str, optional): _description_. Defaults to "%".
-            level_styles (_type_, optional): _description_. Defaults to None.
-            field_styles (_type_, optional): _description_. Defaults to None.
-            dynamic_levels (_type_, optional): _description_. Defaults to None.
+            fmt (str, optional): A format string for the log message. Defaults to None.
+            datefmt (str, optional): A format string for the date/time portion of the log message. Defaults to None.
+            style (str, optional): The style of formatting to use. Defaults to "%".
+            level_styles (dict, optional): A dictionary mapping log levels to custom styles. Defaults to None.
+            field_styles (dict, optional): A dictionary mapping log fields to custom styles. Defaults to None.
+            dynamic_levels (dict, optional): A dictionary mapping dynamic log levels. Defaults to None.
         """
         self.dynamic_levels = dynamic_levels
         self.lenght_pre_formaters = self.get_length_pre_formaters(fmt)
@@ -119,13 +122,13 @@ class DynamicColoredFormatter(coloredlogs.ColoredFormatter):
         )
 
     def get_length_pre_formaters(self, fmt):
-        """_summary_
+        """Get the length of pre-formatters in the given format string.
 
         Args:
-            fmt (_type_): _description_
+            fmt (str): The format string containing pre-formatters.
 
         Returns:
-            _type_: _description_
+            dict: A dictionary containing the length of each pre-formatter.
         """
         pattern = r"%\((?P<part_name>\w+)\)-?(?P<length>\d+)?[sd]?"
         result = re.findall(pattern, fmt)
@@ -134,21 +137,21 @@ class DynamicColoredFormatter(coloredlogs.ColoredFormatter):
         return padding_dict
 
     def format(self, record: logging.LogRecord):
-        """_summary_
+        """Format the log record for display.
 
         Args:
-            record (_type_): _description_
+            record (logging.LogRecord): The log record to be formatted.
 
         Returns:
-            _type_: _description_
+            str: The formatted log message.
         """
         style = self.nn.get(self.level_styles, record.levelname)
         # print(repr(humanfriendly.terminal.ansi_style(**style)))
         record.message = record.getMessage()
         if self.usesTime():
             record.asctime = self.formatTime(record, self.datefmt)
-        if style and coloredlogs.Empty is not None:
-            copy = coloredlogs.Empty()
+        if style and Empty is not None:
+            copy = Empty()
             copy.__class__ = record.__class__
             copy.__dict__.update(record.__dict__)
             for part_name, length in self.lenght_pre_formaters.items():
@@ -159,7 +162,7 @@ class DynamicColoredFormatter(coloredlogs.ColoredFormatter):
                 if part_name in self.dynamic_levels.keys():
                     dyn_keys = self.dynamic_levels[part_name]
                     dynamic_style = {k: v for k, v in style.items() if k in dyn_keys or dyn_keys == "all"}
-                    part = coloredlogs.ansi_wrap(coloredlogs.coerce_string(part), **dynamic_style)
+                    part = ansi_wrap(coerce_string(part), **dynamic_style)
                 part = part + (" " * missing_length)
                 setattr(copy, part_name, part)
             record = copy  # type: ignore
@@ -211,6 +214,19 @@ class SugarColoredFormatter(DynamicColoredFormatter):
     }
 
     def __init__(self, fmt=None, datefmt=None, style=None, level_styles=None, field_styles=None, dynamic_levels=None):
+        """Initializes a custom logging formatter with specified parameters.
+
+        Args:
+            fmt (str): The log message format string.
+            datefmt (str): The date format string.
+            style (str): The log message style.
+            level_styles (dict): Dictionary mapping log levels to custom styles.
+            field_styles (dict): Dictionary mapping log fields to custom styles.
+            dynamic_levels (dict): Dictionary mapping record parts to the style keys allowed to vary by level.
+
+        Returns:
+            None
+        """
         self.STYLE = style if style is not None else self.STYLE
         self.FORMAT = fmt if fmt is not None else self.FORMAT
         self.DATE_FORMAT = datefmt if datefmt is not None else self.DATE_FORMAT
@@ -367,6 +383,17 @@ def loggedmethod(func):
 
 
 def add_all_custom_headers():
+    """Adds custom logging levels to the logging module.
+
+    This function adds custom logging levels "NOTE", "LOAD", "SAVE", "HEADER", "START",
+    and "END" to the logging module with specific integer values relative to existing levels.
+
+    Example:
+        add_all_custom_headers()
+
+    Note:
+        This function should be called before using the custom logging levels in the application.
+    """
     addLoggingLevel("NOTE", logging.INFO - 1, if_exists="keep")
     addLoggingLevel("LOAD", logging.DEBUG + 1, if_exists="keep")
     addLoggingLevel("SAVE", logging.DEBUG + 2, if_exists="keep")
diff --git a/src/pypelines/multisession.py b/src/pypelines/multisession.py
index 09615d0d094b4303d58af1aa389809056a185c1e..daf2ac99a37067d21727075f9604b30a6a2b7cd1 100644
--- a/src/pypelines/multisession.py
+++ b/src/pypelines/multisession.py
@@ -10,11 +10,34 @@ class BaseMultisessionAccessor:
     step: "BaseStep"
 
     def __init__(self, parent):
+        """Initializes a new instance of the class.
+
+        Args:
+            parent: The parent object.
+
+        Attributes:
+            step: The parent step object.
+            _packer: The multisession packer function from the step's pipe disk class.
+            _unpacker: The multisession unpacker function from the step's pipe disk class.
+        """
         self.step = parent
         self._packer = self.step.pipe.disk_class.multisession_packer
         self._unpacker = self.step.pipe.disk_class.multisession_unpacker
 
     def load(self, sessions, extras=None):
+        """Load sessions with optional extras and return packed result.
+
+        Args:
+            sessions (DataFrame): The sessions to load.
+            extras (list or tuple, optional): Extra values to be used during loading. If not provided,
+                the same extra value will be used for all sessions.
+
+        Returns:
+            dict: A dictionary containing the loaded sessions.
+
+        Raises:
+            ValueError: If the number of extra values supplied is different than the number of sessions.
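+
+        Example (hypothetical usage, one extra value per session row):
+            >>> results = accessor.load(sessions, extras=["day1", "day2"])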
+        """
         session_result_dict = {}
 
         if not isinstance(extras, (list, tuple)):
@@ -31,6 +54,20 @@ class BaseMultisessionAccessor:
         return self._packer(sessions, session_result_dict)
 
     def save(self, sessions, datas, extras=None):
+        """Save the data for each session with optional extra values.
+
+        Args:
+            sessions (list): List of session objects.
+            datas (list): List of data objects corresponding to each session.
+            extras (list, tuple, optional): List of extra values to be saved along with each session's data.
+                If not provided, the same extra value will be used for all sessions.
+
+        Raises:
+            ValueError: If the number of extra values supplied is different than the number of sessions.
+
+        Returns:
+            None
+        """
         if not isinstance(extras, (list, tuple)):
             extras = [extras] * len(sessions)
 
@@ -45,6 +82,19 @@ class BaseMultisessionAccessor:
         return None
 
     def generate(self, sessions, *args, extras=None, extra=None, **kwargs):
+        """Generate session results based on provided extras for each session.
+
+        Args:
+            sessions (pandas.DataFrame): The sessions data to generate results for.
+            *args: Additional positional arguments to pass to the generation step.
+            extras (list or None): List of extra values to be used for each session.
+                If None, the same extra value will be used for all sessions.
+            extra: Deprecated argument. Use extras instead.
+            **kwargs: Additional keyword arguments to pass to the generation step.
+
+        Returns:
+            dict: A dictionary containing the generated results for each session.
+        """
         session_result_dict = {}
 
         if extra is not None:
@@ -77,11 +127,30 @@ class BaseMultisessionAccessor:
         return self._packer(sessions, session_result_dict)
 
     def start_tasks(self, sessions):
+        """Starts tasks for each session in the given sessions.
+
+        Args:
+            sessions: A pandas DataFrame containing sessions.
+
+        Returns:
+            None
+        """
         for session in sessions.iterrows():
             self.step.task.start(session)
 
 
 def assert_dataframe(sessions):
+    """Check if the input is a pandas DataFrame.
+
+    Args:
+        sessions (pd.DataFrame): The input to be checked.
+
+    Returns:
+        bool: True if the input is a pandas DataFrame.
+
+    Raises:
+        ValueError: If the input is a pandas Series or not a DataFrame.
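+
+    Example (illustrative):
+        >>> assert_dataframe(pd.DataFrame({"u_alias": []}))
+        True
+        >>> assert_dataframe(pd.Series(dtype=object))  # raises ValueError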
+    """
     if isinstance(sessions, pd.DataFrame):
         return True
     elif isinstance(sessions, pd.Series):
diff --git a/src/pypelines/pickle_backend.py b/src/pypelines/pickle_backend.py
index 76d40e878c03eb655762a59bfc41237c3a3e5855..f66cfd0bbb05625820939b213fb1b075ca8a54c9 100644
--- a/src/pypelines/pickle_backend.py
+++ b/src/pypelines/pickle_backend.py
@@ -16,10 +16,25 @@ class PickleDiskObject(BaseDiskObject):
     is_legacy_format = False
 
     def __init__(self, session, step, extra=""):
+        """Initialize the StepTask object.
+
+        Args:
+            session: The session object for the task.
+            step: The step object for the task.
+            extra: Additional information for the task (default is an empty string).
+        """
         self.file_prefix = step.pipeline.pipeline_name
         super().__init__(session, step, extra)
 
     def version_deprecated(self) -> bool:
+        """Check if the current version is deprecated.
+
+        This method compares the current version with the disk version and returns True if they are different,
+        indicating that the current version is deprecated. Otherwise, it returns False.
+
+        Returns:
+            bool: True if the current version is deprecated, False otherwise.
+        """
         logger = logging.getLogger("pickle.version_deprecated")
 
         # if we didn't find the disk version, we return False.
@@ -39,6 +54,11 @@ class PickleDiskObject(BaseDiskObject):
         return False
 
     def step_level_too_low(self) -> bool:
+        """Check if the level of the disk step is lower than the current step level.
+
+        Returns:
+            bool: True if the disk step level is lower than the current step level, False otherwise.
+        """
         logger = logging.getLogger("pickle.step_level_too_low")
 
         # we get the step instance that corresponds to the one on the disk
@@ -64,9 +84,19 @@ class PickleDiskObject(BaseDiskObject):
 
     @property
     def version(self):
+        """Return the version of the pipeline."""
         return self.step.pipe.version
 
     def parse_extra(self, extra, regexp=False):
+        """Parses the extra string by optionally applying a regular expression pattern.
+
+        Args:
+            extra (str): The extra string to be parsed.
+            regexp (bool): A flag indicating whether to apply regular expression pattern (default is False).
+
+        Returns:
+            str: The parsed extra string.
+        """
         extra = extra.strip(".")
         if regexp:
             extra = extra.replace(".", r"\.")
@@ -76,6 +106,11 @@ class PickleDiskObject(BaseDiskObject):
         return extra
 
     def make_file_name_pattern(self):
+        """Generate a file name pattern based on the steps in the pipeline.
+
+        Returns:
+            str: A regular expression pattern matching file names produced by the steps in the pipeline.
+        """
         steps_patterns = []
 
         for key in sorted(self.step.pipe.steps.keys()):
@@ -93,6 +128,11 @@ class PickleDiskObject(BaseDiskObject):
         return pattern
 
     def get_file_name(self):
+        """Return the file name based on the object attributes.
+
+        Returns:
+            str: The generated file name.
+        """
         extra = self.parse_extra(self.extra, regexp=False)
         version_string = "." + self.version if self.version else ""
         filename = (
@@ -109,6 +149,11 @@ class PickleDiskObject(BaseDiskObject):
         return filename
 
     def check_disk(self):
+        """Check disk for matching files based on specified pattern and expected values.
+
+        Returns:
+            bool: True if a matching file is found, False otherwise.
+        """
         logger = logging.getLogger("pickle.check_disk")
 
         search_path = os.path.join(self.session.path, os.path.sep.join(self.collection))
@@ -167,13 +212,28 @@ class PickleDiskObject(BaseDiskObject):
             return False
 
     def get_found_disk_object_description(self):
+        """Return the description of the found disk object."""
         return str(self.current_disk_file)
 
     def get_full_path(self):
+        """Return the full path of the file by joining the session path, collection, and file name.
+
+        Returns:
+            str: The full path of the file.
+        """
         full_path = os.path.join(self.session.path, os.path.sep.join(self.collection), self.get_file_name())
         return full_path
 
     def save(self, data):
+        """Save data to disk.
+
+        Args:
+            data: Data to be saved to disk. If data is a pandas DataFrame, it is saved with pandas'
+                own pickle serialization; otherwise, it is pickled and saved directly.
+
+        Returns:
+            None
+        """
         logger = logging.getLogger("PickleDiskObject.save")
         new_full_path = self.get_full_path()
         logger.debug(f"Saving to path : {new_full_path}")
@@ -192,6 +252,14 @@ class PickleDiskObject(BaseDiskObject):
         self.current_disk_file = new_full_path
 
     def load(self):
+        """Load data from the current disk file.
+
+        Raises:
+            IOError: If no file was found on disk or 'check_disk()' was not run.
+
+        Returns:
+            The loaded data from the disk file.
+        """
         logger = logging.getLogger("PickleDiskObject.load")
         logger.debug(f"Current disk file status : {self.current_disk_file=}")
         if self.current_disk_file is None:
@@ -219,6 +287,17 @@ class PickleDiskObject(BaseDiskObject):
 
     @staticmethod
     def multisession_packer(sessions, session_result_dict: dict) -> pd.DataFrame | dict:
+        """Packs the results of multiple sessions into a DataFrame
+            if all values in the session_result_dict are DataFrames.
+
+        Args:
+            sessions: List of sessions.
+            session_result_dict (dict): Dictionary containing the results of each session.
+
+        Returns:
+            pd.DataFrame or dict: Returns a DataFrame if all values in session_result_dict are DataFrames,
+                otherwise returns the original session_result_dict.
+        """
         session_result_dict = BaseDiskObject.multisession_packer(sessions, session_result_dict)
 
         are_dataframe = [isinstance(item, pd.core.frame.DataFrame) for item in session_result_dict.values()]
@@ -230,6 +309,15 @@ class PickleDiskObject(BaseDiskObject):
 
     @staticmethod
     def get_multi_session_df(multisession_data_dict: dict, add_session_level: bool = False) -> pd.DataFrame:
+        """Return a pandas DataFrame by combining multiple session dataframes.
+
+        Args:
+            multisession_data_dict (dict): A dictionary containing session names as keys and dataframes as values.
+            add_session_level (bool, optional): Whether to add session level to the index. Defaults to False.
+
+        Returns:
+            pd.DataFrame: A combined dataframe containing data from all sessions.
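+
+        Example (illustrative; `df_a` and `df_b` stand for hypothetical per-session frames):
+
+            combined = PickleDiskObject.get_multi_session_df(
+                {"session_A": df_a, "session_B": df_b}, add_session_level=True
+            )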
+        """
         dataframes = []
         for session_name, dataframe in multisession_data_dict.items():
             level_names = list(dataframe.index.names)
@@ -257,6 +345,15 @@ class PickleDiskObject(BaseDiskObject):
 
     @staticmethod
     def merge_index_element(values: tuple | str | float | int, session_name: str) -> tuple:
+        """Merge the elements of the input values with the session name.
+
+        Args:
+            values (tuple | str | float | int): The values to be merged with the session name.
+            session_name (str): The name of the session to be merged with the values.
+
+        Returns:
+            tuple: A tuple containing the merged values with the session name.
+        """
         if not isinstance(values, tuple):
             values = (values,)
 
@@ -305,6 +402,14 @@ def files(
     output_list = []
 
     def _recursive_search(_input_path):
+        """Recursively search for files and directories in the given input path.
+
+        Args:
+            _input_path (str): The input path to start the recursive search from.
+
+        Returns:
+            None
+        """
         nonlocal current_level
         for subdir in os.listdir(_input_path):
             fullpath = os.path.join(_input_path, subdir)
diff --git a/src/pypelines/pipelines.py b/src/pypelines/pipelines.py
index fc1e1ff655fe2628379b5512bec3b14d6db83ab3..a140325219b1ffb4e2a6cdc049f5c11a2e69c962 100644
--- a/src/pypelines/pipelines.py
+++ b/src/pypelines/pipelines.py
@@ -15,6 +15,19 @@ class Pipeline:
     runner_backend_class = BaseTaskBackend
 
     def __init__(self, name: str, **runner_args):
+        """Initialize the pipeline with the given name and runner arguments.
+
+        Args:
+            name (str): The name of the pipeline.
+            **runner_args: Additional keyword arguments for the runner backend.
+
+        Attributes:
+            pipeline_name (str): The name of the pipeline.
+            pipes (dict): Dictionary to store pipeline components.
+            resolved (bool): Flag to indicate if the pipeline is resolved.
+            runner_backend: The runner backend object created with the provided arguments.
+                If creation fails, it evaluates to False as a boolean.
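+
+        Example (illustrative):
+
+            pipeline = Pipeline("my_analysis")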
+        """
         self.pipeline_name = name
         self.pipes = {}
         self.resolved = False
@@ -25,7 +38,8 @@ class Pipeline:
 
     def register_pipe(self, pipe_class: Type["BasePipe"]) -> Type["BasePipe"]:
         """Wrapper to instanciate and attache a a class inheriting from BasePipe it to the Pipeline instance.
-        The Wraper returns the class without changing it."""
+        The Wraper returns the class without changing it.
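+
+        Example (illustrative; `MyPipe` is a hypothetical BasePipe subclass):
+
+            @pipeline.register_pipe
+            class MyPipe(BasePipe):
+                ...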
+        """
         instance = pipe_class(self)
 
         # attaches the instance itself to the pipeline, and to the dictionnary 'pipes' of the current pipeline
@@ -47,6 +61,17 @@ class Pipeline:
         return pipe_class
 
     def resolve_instance(self, instance_name: str) -> "BaseStep":
+        """Resolve the specified instance name to a BaseStep object.
+
+        Args:
+            instance_name (str): The name of the instance in the format 'pipe_name.step_name'.
+
+        Returns:
+            BaseStep: The BaseStep object corresponding to the instance name.
+
+        Raises:
+            KeyError: If the specified instance name is not found in the pipeline.
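+
+        Example (illustrative; assumes a pipe `my_pipe` exposing a step `my_step`):
+
+            step = pipeline.resolve_instance("my_pipe.my_step")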
+        """
         pipe_name, step_name = instance_name.split(".")
         try:
             pipe = self.pipes[pipe_name]
@@ -139,6 +164,7 @@ class Pipeline:
 
     @property
     def graph(self) -> "PipelineGraph":
+        """Return a PipelineGraph object representing the graph of the pipeline."""
         from .graphs import PipelineGraph
 
         return PipelineGraph(self)
diff --git a/src/pypelines/pipes.py b/src/pypelines/pipes.py
index ebaeb7eb0c5b2c86ea8ebb3b39622acfd8107f21..aca5ce15a7f9f7b41251a2913537683970ff6e68 100644
--- a/src/pypelines/pipes.py
+++ b/src/pypelines/pipes.py
@@ -29,6 +29,34 @@ class BasePipe(metaclass=ABCMeta):
     steps: Dict[str, BaseStep]
 
     def __init__(self, parent_pipeline: "Pipeline") -> None:
+        """Initialize the Pipeline object with the parent pipeline and set up the steps based on the methods decorated
+        with @stepmethod.
+
+        Args:
+            parent_pipeline (Pipeline): The parent pipeline object.
+
+        Raises:
+            ValueError: If no step class is registered with @stepmethod decorator, or if single_step is set to
+                True with more than one step, or if steps are not linked in hierarchical order.
+
+        Notes:
+            - The step methods must inherit from BaseStep.
+            - The steps should be linked in hierarchical order with `requires` specification for at least N-1 steps
+                in a single pipe.
+
+        Syntactic sugar:
+            - If the pipe is a single step, accessing any pipe instance in the pipeline can be done by iterating on
+                pipeline.pipes.pipe.
+
+        Attributes:
+            pipeline (Pipeline): The parent pipeline object.
+            pipe_name (str): The name of the pipe (the class name).
+            steps (Dict[str, BaseStep]): Dictionary containing the step objects.
+            pipe: A reference to the pipeline object.
+
+        Returns:
+            None
+        """
         self.pipeline = parent_pipeline
         self.pipe_name = self.__class__.__name__
 
@@ -76,6 +104,11 @@ class BasePipe(metaclass=ABCMeta):
 
     @property
     def version(self):
+        """Return a hash representing the versions of all steps in the object.
+
+        Returns:
+            str: A 7-character hexadecimal hash representing the versions of all steps.
+        """
         versions = []
         for step in self.steps.values():
             versions.append(str(step.version))
@@ -89,6 +122,18 @@ class BasePipe(metaclass=ABCMeta):
         return version_hash
 
     def get_levels(self, selfish=True):
+        """Get the levels of each step in the pipeline.
+
+        Args:
+            selfish (bool, optional): Flag to indicate if the levels should be calculated selfishly. Defaults to True.
+
+        Returns:
+            dict: A dictionary containing the steps as keys and their corresponding levels as values.
+
+        Raises:
+            ValueError: If there are multiple steps with the same level and the saving backend doesn't
+                support multi-step version identification.
+        """
         levels = {}
         for step in self.steps.values():
             levels[step] = step.get_level(selfish=selfish)
@@ -108,6 +153,11 @@ class BasePipe(metaclass=ABCMeta):
         return levels
 
     def __repr__(self) -> str:
+        """Return a string representation of the PipeObject in the format: "<BaseClassName.pipe_name PipeObject>".
+
+        Returns:
+            str: A string representation of the PipeObject.
+        """
         return f"<{self.__class__.__bases__[0].__name__}.{self.pipe_name} PipeObject>"
 
     # @abstractmethod
@@ -117,14 +167,38 @@ class BasePipe(metaclass=ABCMeta):
     #     return None
 
     def dispatcher(self, function: Callable, dispatcher_type):
+        """Dispatches the given function based on the dispatcher type.
+
+        Args:
+            function (Callable): The function to be dispatched.
+            dispatcher_type: The type of dispatcher to be used.
+
+        Returns:
+            Callable: A wrapped function based on the dispatcher type.
+        """
         # the dispatcher must return a wrapped function
         return function
 
     def pre_run_wrapper(self, function: Callable):
+        """Return a wrapped function by the dispatcher."""
         # the dispatcher must return a wrapped function
         return function
 
     def load(self, session, extra="", which: Literal["lowest", "highest"] = "highest"):
+        """Load a step object for a session with optional extra data.
+
+        Args:
+            session: The session object to load the step for.
+            extra (str, optional): Additional data to pass to the step object. Defaults to "".
+            which (Literal["lowest", "highest"], optional): Determines whether to load the lowest or highest step.
+                Defaults to "highest".
+
+        Returns:
+            The loaded step object.
+
+        Raises:
+            ValueError: If no matching step object is found for the session.
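+
+        Example (illustrative; `session` stands for a hypothetical session object):
+
+            result = pipeline.pipes["my_pipe"].load(session, which="lowest")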
+        """
         if which == "lowest":
             reverse = False
         else:
diff --git a/src/pypelines/sessions.py b/src/pypelines/sessions.py
index 89530c0e370b40d268ada6fc2988a6189873c843..c791a4560f2195f6693713daf78a1fc957d8882c 100644
--- a/src/pypelines/sessions.py
+++ b/src/pypelines/sessions.py
@@ -16,6 +16,22 @@ class Session(pd.Series):
         zfill=3,
         separator="_",
     ):
+        """Create a new series with specified attributes.
+
+        Args:
+            series (pd.Series, optional): The series to be modified. Defaults to None.
+            subject (str, optional): The subject to be added to the series. Defaults to None.
+            date (str, optional): The date to be added to the series. Defaults to None.
+            number (str, optional): The number to be added to the series. Defaults to None.
+            path (str, optional): The path to be added to the series. Defaults to None.
+            auto_path (bool, optional): Whether to automatically generate the path. Defaults to False.
+            date_format (str, optional): The format of the date. Defaults to None.
+            zfill (int, optional): The zero-fill width for number formatting. Defaults to 3.
+            separator (str, optional): The separator for alias generation. Defaults to "_".
+
+        Returns:
+            pd.Series: The modified series with the specified attributes.
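+
+        Example (illustrative):
+
+            session = Session(subject="mouse01", date="2024-05-21", number="1", auto_path=True)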
+        """
         if series is None:
             series = pd.Series()
 
@@ -51,6 +67,17 @@ class Session(pd.Series):
 
 class Sessions(pd.DataFrame):
     def __new__(cls, series_list):
+        """Create a new Sessions dataframe from a list of series.
+
+        Args:
+            series_list (list): A list of series to create the Sessions dataframe from.
+
+        Returns:
+            pd.DataFrame: A new Sessions dataframe created from the provided series list.
+
+        Raises:
+            AttributeError: If the dataframe does not comply with the pipeline accessor.
+        """
         # also works seamlessly if a dataframe is passed and is already a Sessions dataframe.
         df = pd.DataFrame(series_list)
 
diff --git a/src/pypelines/steps.py b/src/pypelines/steps.py
index 87903fab9337863c46fb2aed615a92e332789084..3d7116561c72eedd4aa3234e9494788d7535674f 100644
--- a/src/pypelines/steps.py
+++ b/src/pypelines/steps.py
@@ -42,6 +42,15 @@ def stepmethod(requires=[], version=None, do_dispatch=True, on_save_callbacks=[]
     # It basically just sets an "is_step" stamp on the methods that are defined as steps.
     # This stamp will later be used in the metaclass __new__ to set additional useful attributes on those methods
     def registrate(function: Callable):
+        """Registers a function as a step in a process.
+
+        Args:
+            function (Callable): The function to be registered as a step.
+
+        Returns:
+            Callable: The registered function with additional attributes such as 'requires', 'is_step', 'version',
+                'do_dispatch', 'step_name', and 'callbacks'.
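+
+        Example (illustrative; a method registered through the enclosing @stepmethod decorator,
+            assuming requirement strings of the form "pipe_name.step_name"):
+
+            @stepmethod(requires=["MyPipe.step1"], version="1")
+            def step2(self, session, extra=""):
+                ...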
+        """
         function.requires = [requires] if not isinstance(requires, list) else requires
         function.is_step = True
         function.version = version
@@ -73,6 +82,25 @@ class BaseStep:
         pipe: "BasePipe",
         worker: MethodType,
     ):
+        """Initialize a BaseStep object.
+
+        Args:
+            pipeline (Pipeline): The parent pipeline object.
+            pipe (BasePipe): The parent pipe object.
+            worker (MethodType): The worker method associated with this step.
+
+        Attributes:
+            pipeline (Pipeline): An instance of the parent pipeline.
+            pipe (BasePipe): An instance of the parent pipe.
+            worker (MethodType): An instance of the worker method.
+            do_dispatch: The do_dispatch attribute of the worker method.
+            version: The version attribute of the worker method.
+            requires: The requires attribute of the worker method.
+            step_name: The step_name attribute of the worker method.
+            callbacks: The callbacks attribute of the worker method.
+            multisession: An instance of the multisession class associated with the pipe.
+            task: The task manager created by the runner backend of the pipeline.
+        """
         # save an instantiated access to the pipeline parent
         self.pipeline = pipeline
         # save an instantiated access to the pipe parent
@@ -102,44 +130,61 @@ class BaseStep:
 
     @property
     def requirement_stack(self) -> Callable:
+        """Return a partial function that calls the get_requirement_stack method of the pipeline
+        attribute with the instance set to self.
+        """
         return partial(self.pipeline.get_requirement_stack, instance=self)
 
     @property
     def pipe_name(self) -> str:
+        """Return the name of the pipe."""
         return self.pipe.pipe_name
 
     @property
     def relative_name(self) -> str:
+        """Return the relative name of the object by concatenating the pipe name and step name."""
         return f"{self.pipe_name}.{self.step_name}"
 
     @property
     def pipeline_name(self) -> str:
+        """Return the name of the pipeline."""
         return self.pipe.pipeline.pipeline_name
 
     @property
     def complete_name(self) -> str:
+        """Return the complete name by combining the pipeline name and relative name."""
         return f"{self.pipeline_name}.{self.relative_name}"
 
     def disk_step(self, session, extra=""):
+        """Retrieve the disk object and return the disk step instance."""
         disk_object = self.get_disk_object(session, extra)
         return disk_object.disk_step_instance()
 
     def __call__(self, *args, **kwargs):
+        """Call the worker method with the given arguments and keyword arguments."""
         return self.worker(*args, **kwargs)
 
     def __repr__(self):
+        """Return a string representation of the StepObject in the format: "<pipe_name.step_name StepObject>"."""
         return f"<{self.pipe_name}.{self.step_name} StepObject>"
 
     @property
     def load(self):
+        """Load data using the get_load_wrapped method."""
         return self.get_load_wrapped()
 
     @property
     def save(self):
+        """Save the current state of the object.
+
+        Returns:
+            The saved state of the object.
+        """
         return self.get_save_wrapped()
 
     @property
     def generate(self):
+        """Return the result of calling the get_generate_wrapped method."""
         return self.get_generate_wrapped()
 
     # def make_wrapped_functions(self):
@@ -148,8 +193,31 @@ class BaseStep:
     #     self.generate = self.make_wrapped_generate()
 
     def get_save_wrapped(self):
+        """Returns a wrapped function that saves data using the disk class.
+
+        This function wraps the save method of the disk class with additional functionality.
+
+        Args:
+            session: The session to use for saving the data.
+            data: The data to be saved.
+            extra: Additional information to be used during saving (default is None).
+
+        Returns:
+            The wrapped function that saves the data using the disk class.
+        """
+
         @wraps(self.pipe.disk_class.save)
         def wrapper(session, data, extra=None):
+            """Wrapper function to save data to disk.
+
+            Args:
+                session: The session object.
+                data: The data to be saved.
+                extra: Additional information (default is None).
+
+            Returns:
+                The result of saving the data to disk.
+            """
             if extra is None:
                 extra = self.get_default_extra()
             self.pipeline.resolve()
@@ -161,8 +229,34 @@ class BaseStep:
         return wrapper
 
     def get_load_wrapped(self):
+        """Get a wrapped function for loading disk objects.
+
+        This function wraps the load method of the disk class with the provided session, extra, and strict parameters.
+
+        Args:
+            session: The session to use for loading the disk object.
+            extra: Additional parameters for loading the disk object (default is None).
+            strict: A boolean flag indicating whether to strictly load the disk object (default is False).
+
+        Returns:
+            The wrapped function for loading disk objects.
+        """
+
         @wraps(self.pipe.disk_class.load)
         def wrapper(session, extra=None, strict=False):
+            """Wrapper function to load disk object with session and optional extra parameters.
+
+            Args:
+                session: The session to use for loading the disk object.
+                extra (optional): Extra parameters to be passed for loading the disk object. Defaults to None.
+                strict (bool, optional): Flag to indicate strict loading. Defaults to False.
+
+            Returns:
+                The loaded disk object.
+
+            Raises:
+                ValueError: If the disk object does not match and has a status message.
+            """
             # print("extra in load wrapper : ", extra)
             if extra is None:
                 extra = self.get_default_extra()
@@ -178,6 +272,11 @@ class BaseStep:
         return wrapper
 
     def get_generate_wrapped(self):
+        """Return the wrapped generation mechanism with optional dispatching.
+
+        Returns:
+            The wrapped generation mechanism with optional dispatching.
+        """
         if self.do_dispatch:
             return autoload_arguments(
                 self.pipe.dispatcher(loggedmethod(self.generation_mechanism), "generator"),
@@ -186,16 +285,52 @@ class BaseStep:
         return autoload_arguments(loggedmethod(self.generation_mechanism), self)
 
     def get_level(self, selfish=False) -> int:
+        """Get the level of the step.
+
+        Args:
+            selfish (bool): Whether to calculate the level selfishly. Defaults to False.
+
+        Returns:
+            int: The level of the step.
+        """
         self.pipeline.resolve()
         return StepLevel(self).resolve_level(selfish=selfish)
 
     def get_disk_object(self, session, extra=None):
+        """Return a disk object based on the provided session and optional extra parameters.
+
+        Args:
+            session: The session to use for creating the disk object.
+            extra (optional): Additional parameters to be passed to the disk object. Defaults to None.
+
+        Returns:
+            A disk object (an instance of the pipe's disk_class) created with the provided session and extra.
+        """
         if extra is None:
             extra = self.get_default_extra()
         return self.pipe.disk_class(session, self, extra)
 
     @property
     def generation_mechanism(self):
+        """Generates a wrapper function for the given worker function with additional functionality such as skipping,
+        refreshing, checking requirements, and saving output to file.
+
+        Args:
+            session: The session object.
+            *args: Positional arguments for the worker function.
+            extra: Additional argument for the worker function (default is None).
+            skip: If True, the step doesn't get loaded if found on the drive (default is False).
+            refresh: If True, the step's value gets refreshed instead of used from a file (default is False).
+            refresh_requirements: If True, refreshes all requirements; if list of strings, refreshes specific
+                steps/pipes (default is False).
+            check_requirements: If True, checks requirements with skip=True (default is False).
+            save_output: If False, doesn't save the output to file after calculation (default is True).
+            **kwargs: Additional keyword arguments for the worker function.
+
+        Returns:
+            The wrapper function with extended functionality.
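+
+        Example (illustrative; `step` stands for a BaseStep instance and `session` for a session object):
+
+            result = step.generate(session, skip=True, refresh_requirements=["other_pipe"])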
+        """
+
         @wraps(self.worker)
         def wrapper(
             session,
@@ -435,6 +570,12 @@ class BaseStep:
         return wrapper
 
     def generate_doc(self) -> str:
+        """Generate a new docstring by inserting a chapter about Pipeline Args before the existing
+        docstring of the function.
+        If the existing docstring contains 'Raises' or 'Returns', the new chapter will be inserted before that.
+        If not, it will be inserted at the end of the existing docstring.
+        """
+
         new_doc = ""
         doc = self.worker.__doc__
         if doc is None:
@@ -503,6 +644,11 @@ class BaseStep:
         return param.default
 
     def is_refresh_in_kwargs(self):
+        """Check if the 'refresh' parameter is present in the keyword arguments of the function.
+
+        Returns:
+            bool: True if the 'refresh' parameter is present, False otherwise.
+        """
         sig = inspect.signature(self.worker)
         param = sig.parameters.get("refresh")
         if param is None:
@@ -510,6 +656,19 @@ class BaseStep:
         return True
 
     def load_requirement(self, pipe_name, session, extra=None):
+        """Load the specified requirement step for the given pipe name.
+
+        Args:
+            pipe_name (str): The name of the pipe for which the requirement step needs to be loaded.
+            session: The session to be used for loading the requirement step.
+            extra (optional): Any extra information to be passed while loading the requirement step.
+
+        Returns:
+            The loaded requirement step.
+
+        Raises:
+            IndexError: If the required step with the specified pipe name is not found in the requirement stack.
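+
+        Example (illustrative; `step` is a hypothetical BaseStep whose requirement
+            stack includes a pipe named "trials"):
+
+            trials_data = step.load_requirement("trials", session)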
+        """
         try:
             req_step = [step for step in self.requirement_stack() if step.pipe_name == pipe_name][-1]
         except IndexError as e:
@@ -520,9 +679,27 @@ class BaseStep:
         return req_step.load(session, extra=extra)
 
     def set_arguments(self, session, **arguments):
+        """Set the arguments for the session.
+
+        Args:
+            session: The session to set the arguments for.
+            **arguments: Additional keyword arguments to set.
+
+        Raises:
+            NotImplementedError: This method is not implemented and should be overridden in a subclass.
+        """
         raise NotImplementedError
 
     def get_arguments(self, session):
+        """Get the arguments for the specified session.
+
+        Args:
+            session: The session for which arguments need to be retrieved.
+
+        Raises:
+            NotImplementedError: This method must be implemented in a subclass.
+        """
         raise NotImplementedError
 
 
diff --git a/src/pypelines/tasks.py b/src/pypelines/tasks.py
index c9fe587ebe122ea1273f83139ad53c6239da7a57..4451b7454b2b64c02a12205ceee246f2089d22fc 100644
--- a/src/pypelines/tasks.py
+++ b/src/pypelines/tasks.py
@@ -1,5 +1,7 @@
 from functools import wraps
 from typing import TYPE_CHECKING
+from contextlib import contextmanager
+import builtins
 
 if TYPE_CHECKING:
     from .pipelines import Pipeline
@@ -11,10 +13,26 @@ class BaseStepTaskManager:
     backend: "BaseTaskBackend"
 
     def __init__(self, step, backend):
+        """Initializes the class with the specified step and backend.
+
+        Args:
+            step: The step value to be assigned.
+            backend: The backend value to be assigned.
+        """
         self.step = step
         self.backend = backend
 
     def start(self, session, *args, **kwargs):
+        """Start the session with the given arguments.
+
+        Args:
+            session: The session to start.
+            *args: Variable length argument list.
+            **kwargs: Arbitrary keyword arguments.
+
+        Raises:
+            NotImplementedError: If the backend is not set.
+        """
         if not self.backend:
             raise NotImplementedError
 
@@ -25,10 +43,86 @@ class BaseTaskBackend:
     success: bool = False
 
     def __init__(self, parent: "Pipeline", **kwargs):
+        """Initializes a PipelineNode object.
+
+        Args:
+            parent (Pipeline): The parent Pipeline object.
+            **kwargs: Additional keyword arguments.
+        """
         self.parent = parent
 
     def __bool__(self):
+        """Return the boolean value of the object based on the success attribute."""
         return self.success
 
     def create_task_manager(self, step) -> "BaseStepTaskManager":
+        """Create a task manager for the given step.
+
+        Args:
+            step: The step for which the task manager is created.
+
+        Returns:
+            BaseStepTaskManager: An instance of BaseStepTaskManager for the given step.
+        """
         return self.task_manager_class(step, self)
+
+
+class NoImport:
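+    """Placeholder standing in for a module that failed to import.
+
+    Attribute and item access return the placeholder itself, and assignments
+    are silently ignored, so code touching an optional dependency can run
+    without raising.
+    """
+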
+    def __getattr__(self, name):
+        """Return the value of the specified attribute name."""
+        return self
+
+    def __getitem__(self, index):
+        """Return the item at the specified index."""
+        return self
+
+    def __setattr__(self, name, value):
+        """Set the attribute with the specified name to the given value.
+
+        Args:
+            name (str): The name of the attribute to be set.
+            value (any): The value to be assigned to the attribute.
+        """
+        pass
+
+    def __setitem__(self, index, value):
+        """Set the value at the specified index in the object."""
+        pass
+
+
+@contextmanager
+def mock_failed_imports():
+    """Mocks failed imports by replacing the built-in __import__ function with a custom implementation that returns
+    a NoImport object when an ImportError occurs.
+
+    This function is intended to be used as a context manager; a 'try/finally' block ensures
+    that the original __import__ function is restored once the context exits.
+
+    Example:
+        with mock_failed_imports():
+            # code that may raise ImportError during import statements
+    """
+    original_import = builtins.__import__
+
+    def custom_import(name, *args, **kwargs):
+        """Custom import function that tries to import a module using the original import function.
+        If the import fails, it returns a NoImport object.
+
+        Args:
+            name (str): The name of the module to import.
+            *args: Additional positional arguments to pass to the original import function.
+            **kwargs: Additional keyword arguments to pass to the original import function.
+
+        Returns:
+            The imported module if successful, otherwise a NoImport object.
+        """
+        try:
+            return original_import(name, *args, **kwargs)
+        except ImportError:
+            return NoImport()
+
+    builtins.__import__ = custom_import
+    try:
+        yield
+    finally:
+        builtins.__import__ = original_import
diff --git a/src/pypelines/versions.py b/src/pypelines/versions.py
index 22d1a9a9e70e486644e7cd69eb02d105de56ea9a..9c5a611e0bac66398e797038c349b9b522385afe 100644
--- a/src/pypelines/versions.py
+++ b/src/pypelines/versions.py
@@ -1,126 +1,225 @@
 from dataclasses import dataclass
 import hashlib, random, json, inspect, re
+from abc import ABCMeta, abstractmethod
 
 from typing import Callable, Type, Iterable, Protocol, TYPE_CHECKING
 
+
 if TYPE_CHECKING:
     from .steps import BaseStep
 
+
 @dataclass
 class Version:
-    pipe_name : str
-    id : str
-    detail : dict
+    pipe_name: str
+    id: str
+    detail: dict
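+
+    # Illustrative sketch (hypothetical values): `detail` is expected to carry the
+    # keys read by the properties below, e.g.
+    #   Version(pipe_name="my_pipe", id="a1b2c3d",
+    #           detail={"deprecated": False, "function_hash": "<sha256 hex>",
+    #                   "step_name": "step1", "creation_date": "2024-05-21"})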
 
     @property
-    def deprecated(self) :
+    def deprecated(self):
+        """Return the deprecated status of the object."""
         return self.detail["deprecated"]
 
     @property
-    def function_hash(self) :
+    def function_hash(self):
+        """Return the hash value of the function."""
         return self.detail["function_hash"]
 
     @property
-    def step_name(self) :
+    def step_name(self):
+        """Return the name of the step."""
         return self.detail["step_name"]
 
     @property
-    def creation_date(self) :
+    def creation_date(self):
+        """Return the creation date of the object."""
         return self.detail["creation_date"]
 
     def update_function_hash(self, new_function_hash):
+        """Update the function hash in the detail dictionary.
+
+        Args:
+            new_function_hash: The new function hash to be updated.
+
+        Returns:
+            None
+        """
         self.detail["function_hash"] = new_function_hash
 
     def deprecate(self):
+        """Mark the function as deprecated."""
         self.detail["deprecated"] = True
 
     def __str__(self):
+        """Return a string representation of the object."""
         return self.id
-    
-class BaseVersionHandler:
+
+
+class BaseVersionHandler(metaclass=ABCMeta):
 
     function_hash_remove = ["comments", " ", "\n"]
 
     def __init__(self, pipe, *args, **kwargs):
+        """Initializes the class with the provided pipe.
+
+        Args:
+            pipe: The pipe object to be assigned.
+            *args: Variable length argument list.
+            **kwargs: Arbitrary keyword arguments.
+        """
         self.pipe = pipe
 
     def compare_function_hash(self, step):
-        try :
+        """Compares the function hash of the active version with the hash of the current function.
+
+        Args:
+            step: The step for which the function hash needs to be compared.
+
+        Returns:
+            bool: True if the function hashes match, False otherwise.
+        """
+        try:
             version = self.get_active_version(step)
-        except KeyError :
+        except KeyError:
             return False
         current_hash = self.get_function_hash(step.step)
         return version.function_hash == current_hash
-    
-    def get_function_hash(self, function) -> str :
 
-        def remove_comments(self, source):
+    def get_function_hash(self, function) -> str:
+        """Get the hash value of a function after removing specified elements.
+
+        Args:
+            function: The function for which the hash value needs to be calculated.
+
+        Returns:
+            str: The hash value of the function.
+        """
+
+        def remove_comments(source):
+            """Remove all single-line and multi-line comments from the given source code.
+
+            Args:
+                source (str): The source code containing comments.
+
+            Returns:
+                str: The source code with all comments removed.
+            """
             # remove all occurrences of single-line comments (#comments) from the source
-            source_no_comments = re.sub(r'#[^\n]*', '', source)
+            source_no_comments = re.sub(r"#[^\n]*", "", source)
             # remove all occurrences of multi-line comments ("""comment""") from the source
-            source_no_comments = re.sub(r'\'\'\'.*?\'\'\'', '', source_no_comments, flags=re.DOTALL)
-            source_no_comments = re.sub(r'\"\"\".*?\"\"\"', '', source_no_comments, flags=re.DOTALL)
+            source_no_comments = re.sub(r"\'\'\'.*?\'\'\'", "", source_no_comments, flags=re.DOTALL)
+            source_no_comments = re.sub(r"\"\"\".*?\"\"\"", "", source_no_comments, flags=re.DOTALL)
             return source_no_comments
 
         remove = self.function_hash_remove
         source = inspect.getsource(function)
-    
-        if "comments" in remove :
+
+        if "comments" in remove:
             remove.pop(remove.index("comments"))
             source = remove_comments(source)
-            
-        for rem in remove :
+
+        for rem in remove:
             source = source.replace(rem, "")
-    
+
         return hashlib.sha256(source.encode()).hexdigest()
 
-    def get_new_version_string(self) -> str :
-        ...
+    @abstractmethod
+    def get_new_version_string(self) -> str:
+        """Returns a new version string."""
+
+    @abstractmethod
+    def get_active_version(self, step: "BaseStep") -> Version:
+        """Get the active version for a given step.
+
+        Args:
+            step (BaseStep): The step for which to retrieve the active version.
 
-    def get_active_version(self, step : "BaseStep") -> Version :
-        ...
+        Returns:
+            Version: The active version for the given step.
+        """
+
+    @abstractmethod
+    def apply_changes(self, versions) -> None:
+        """Apply changes to the object based on the given versions.
+
+        Args:
+            versions (list): A list of versions containing the changes to be applied.
+
+        Returns:
+            None
+        """
 
-    def apply_changes(self, versions) -> None :
-        ...
 
 class HashVersionHandler(BaseVersionHandler):
 
     hash_collision_max_attempts = 3
 
-    def __init__(self, pipe, file_path) :
+    def __init__(self, pipe, file_path):
+        """Initializes the class with the provided pipe and file path.
+
+        Args:
+            pipe: The pipe object to be used.
+            file_path: The path to the file containing memory data.
+        """
         super().__init__(pipe)
         self.path = file_path
-        self.memory = json.load(open(file_path,"r"))
-        self.verify_structure()
+        with open(file_path, "r") as f:
+            self.memory = json.load(f)
+        self.verify_structure(pipe.pipeline)
 
-    def get_new_version_string(self) -> str :
+    def get_new_version_string(self) -> str:
+        """Generate a new unique version string by creating a hash and checking for collisions.
+
+        Returns:
+            str: A new unique version string.
+
+        Raises:
+            ValueError: If a unique hash cannot be determined after the maximum attempts.
+        """
         max_attempts = self.hash_collision_max_attempts
-        for i in range(max_attempts):# max no-collision attempts, then raises error
-        
+        for i in range(max_attempts):  # max no-collision attempts, then raises error
+
             m = hashlib.sha256()
             r = str(random.random()).encode()
             m.update(r)
             new_hash = m.hexdigest()[0:7]
-            
+
             if new_hash not in self.memory["versions"].keys():
                 return new_hash
-            
-        raise ValueError("Could not determine a unique hash not colliding with existing values. Please investigate code / step_architecture.json file ?") 
 
-    def apply_changes(self, versions ):
-        if not isinstance(versions, list) :
+        raise ValueError(
+            "Could not determine a unique hash not colliding with existing values. "
+            "Please investigate code / step_architecture.json file ?"
+        )
+
+    def apply_changes(self, versions):
+        """Apply changes to the memory based on the provided versions.
+
+        Args:
+            versions (list or object): A list of versions or a single version object.
+
+        Returns:
+            None
+        """
+        if not isinstance(versions, list):
             versions = [versions]
 
-        for version in versions :
-            try : 
+        for version in versions:
+            try:
                 edited_object = self.memory["versions"][version.id]
             except KeyError:
-                self.steps_dict[version.pipe_name] =  self.steps_dict.get(version.pipe_name,{"versions":{},"step_renamings":{}})
-                edited_object = self.steps_dict[version.pipe_name]["versions"][version.id] = self.steps_dict[version.pipe_name]["versions"].get(version.id,{})
-            edited_object.update(version.detail) 
+                self.steps_dict[version.pipe_name] = self.steps_dict.get(
+                    version.pipe_name, {"versions": {}, "step_renamings": {}}
+                )
+                edited_object = self.steps_dict[version.pipe_name]["versions"][version.id] = self.steps_dict[
+                    version.pipe_name
+                ]["versions"].get(version.id, {})
+            edited_object.update(version.detail)
 
     def verify_structure(self, pipeline):
+        """Verify the structure of the pipeline by iterating through each pipe and step."""
         for pipe_name, pipe in pipeline.pipes.items():
             for step_name, step in pipe.steps.items():
                 pass
-                #in here, check function hash of the current implementation matches the one in the version, or send a warning to user that he may update the version or ignor by updating the function hash and keeping the same version
\ No newline at end of file
+                # in here, check that the function hash of the current implementation matches the one in the
+                # version, or warn the user that they may either update the version, or ignore the change by
+                # updating the function hash while keeping the same version