diff --git a/LICENSE b/LICENSE
index e047d11a0921d8543224f4dce4482b27c6b8057f..1473c0cede580821495a5e623fb05b8985616a6d 100644
--- a/LICENSE
+++ b/LICENSE
@@ -1,6 +1,6 @@
 MIT License
 
-Copyright (c) 2022 François Laurent, Institut Pasteur
+Copyright (c) 2022-2023 François Laurent, Institut Pasteur
 
 Permission is hereby granted, free of charge, to any person obtaining a copy
 of this software and associated documentation files (the "Software"), to deal
diff --git a/pyproject.toml b/pyproject.toml
index 8130550d5205dbdaf5273b28fa59f1af9c3b3d3d..41bb395db8adc236f77df711463bf6e91090e173 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -1,6 +1,6 @@
 [tool.poetry]
 name = "MaggotUBA-adapter"
-version = "0.12.2"
+version = "0.12.3"
 description = "Interface between MaggotUBA and the Nyx tagging UI"
 authors = ["François Laurent"]
 license = "MIT"
@@ -10,11 +10,11 @@ packages = [
 
 [tool.poetry.dependencies]
 python = "^3.8,<3.10"
-maggotuba-core = {git = "https://gitlab.pasteur.fr/nyx/MaggotUBA-core", rev="main"}
+maggotuba-core = {git = "https://gitlab.pasteur.fr/nyx/MaggotUBA-core", tag = "v1.0.1"}
 torch = "^1.11.0"
 numpy = "^1.19.3"
 protobuf = "3.9.2"
-taggingbackends = {git = "https://gitlab.pasteur.fr/nyx/TaggingBackends", rev="main"}
+taggingbackends = {git = "https://gitlab.pasteur.fr/nyx/TaggingBackends", tag = "v0.12.3"}
 
 [build-system]
 requires = ["poetry-core>=1.0.0"]
diff --git a/src/maggotuba/models/modules.py b/src/maggotuba/models/modules.py
index a598e01eff69e95ee964142fb14d9a2f921e1b6b..19912ad45dcace6404c2c42c0e63b27df2aa14ee 100644
--- a/src/maggotuba/models/modules.py
+++ b/src/maggotuba/models/modules.py
@@ -106,7 +106,9 @@ class MaggotModule(nn.Module):
             self.config[entry] = self.path_for_config(Path(self.config[entry]))
 
     def path_for_config(self, path):
-        if self.root_dir and path.is_absolute():
+        if path.name.endswith('.pt'):
+            path = Path(path.name)  # keep a Path, not a str, for downstream use
+        elif self.root_dir and path.is_absolute():
             # Path.is_relative_to available from Python >= 3.9 only;
             # we want the present code to run on Python >= 3.8
             relativepath = path.relative_to(self.root_dir)
@@ -182,7 +184,7 @@ class MaggotEncoder(MaggotModule):
         # if state file not found or config option "load_state" is False,
         # (re-)initialize the model's weights
         if _reason:
-            logging.debug(f"initializing the encoder ({_reason})")
+            logging.info(f"initializing the encoder ({_reason})")
             _init, _bias = config.get('init', None), config.get('bias', None)
             for child in encoder.children():
                 if isinstance(child,
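Review note: the new branch in `path_for_config` strips the directory part from any `.pt` weight file referenced in a config, so configs stop embedding absolute paths from the training machine. A standalone sketch of the intended behavior; the free function below and its `root_dir` default are illustrative, not the actual class method:

```python
from pathlib import Path

def path_for_config(path, root_dir=Path("/models/instance")):
    # illustrative mirror of MaggotModule.path_for_config
    if path.name.endswith('.pt'):
        # weight files are resolved by name; keep only the file name
        return Path(path.name)
    elif root_dir and path.is_absolute():
        # as before, make absolute paths relative to the model instance directory
        return path.relative_to(root_dir)
    return path

print(path_for_config(Path("/somewhere/else/encoder.pt")))        # encoder.pt
print(path_for_config(Path("/models/instance/clf_config.json")))  # clf_config.json
```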
diff --git a/src/maggotuba/models/train_model.py b/src/maggotuba/models/train_model.py
index aedc21506816c90214ec0c7737c083461d32d366..c9665388f06b0710f3f30e24e791ad4e8cc46654 100644
--- a/src/maggotuba/models/train_model.py
+++ b/src/maggotuba/models/train_model.py
@@ -1,6 +1,6 @@
 from taggingbackends.data.labels import Labels
 from taggingbackends.data.dataset import LarvaDataset
-from maggotuba.models.trainers import make_trainer, new_generator
+from maggotuba.models.trainers import make_trainer, new_generator, enforce_reproducibility
 import glob
 
 def train_model(backend, layers=1, pretrained_model_instance="default",
@@ -12,6 +12,15 @@ def train_model(backend, layers=1, pretrained_model_instance="default",
     larva_dataset_file = glob.glob(str(backend.interim_data_dir() / "larva_dataset_*.hdf5")) # this other one is not recursive
     assert len(larva_dataset_file) == 1
 
+    # argument `rng_seed` predates `seed`; accept `seed` as an alias, but let `rng_seed` win
+    try:
+        seed = kwargs.pop('seed')
+    except KeyError:
+        pass
+    else:
+        if rng_seed is None:
+            rng_seed = seed
+
     # instantiate a LarvaDataset object, which is similar to a PyTorch DataLoader
     # and can initialize a Labels object
     # note: subsets=(1, 0, 0) => all data are training data; no validation or test subsets
@@ -25,6 +34,9 @@ def train_model(backend, layers=1, pretrained_model_instance="default",
     # the labels may be bytes objects; convert to str
     labels = labels if isinstance(labels[0], str) else [s.decode() for s in labels]
 
+    # could be moved into `make_trainer`, but here we have direct access to the dataset's generator
+    enforce_reproducibility(dataset.generator)
+
     # copy and load the pretrained model into the model instance directory
     model = make_trainer(backend, pretrained_model_instance, labels, layers, iterations)
 
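Review note: the new block implements a keyword alias, with `seed` accepted as input but the pre-existing `rng_seed` taking precedence when both are supplied. A minimal self-contained sketch of the same pattern, assuming `train_model` ends its signature with `rng_seed=None, **kwargs` (the full signature is outside this hunk):

```python
def resolve_rng_seed(rng_seed=None, **kwargs):
    # mirror of the aliasing logic added to train_model
    try:
        seed = kwargs.pop('seed')
    except KeyError:
        pass
    else:
        if rng_seed is None:
            rng_seed = seed
    return rng_seed

assert resolve_rng_seed(seed=42) == 42             # `seed` fills in for `rng_seed`
assert resolve_rng_seed(rng_seed=7, seed=42) == 7  # explicit `rng_seed` wins
assert resolve_rng_seed() is None                  # neither given
```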
diff --git a/src/maggotuba/models/trainers.py b/src/maggotuba/models/trainers.py
index 00f3040f6629716ee3955873b5b3fb2a170ea1f7..c0fae21cb459d1c2ac2211617d8199b6c2e23a28 100644
--- a/src/maggotuba/models/trainers.py
+++ b/src/maggotuba/models/trainers.py
@@ -2,7 +2,6 @@ import numpy as np
 import torch
 import torch.nn as nn
 from behavior_model.models.neural_nets import device
-#import behavior_model.data.utils as data_utils
 from maggotuba.models.modules import SupervisedMaggot, MultiscaleSupervisedMaggot, MaggotBag
 from taggingbackends.features.skeleton import interpolate
 from taggingbackends.explorer import BackendExplorer, check_permissions
@@ -252,6 +251,20 @@ def new_generator(seed=None):
     if seed is None: seed = 0b11010111001001101001110
     return generator.manual_seed(seed)
 
+def enforce_reproducibility(generator=None):
+    import random
+    if generator is None:
+        seed = 0b11010111001001101001110
+    else:
+        seed = generator.initial_seed()
+    # see https://pytorch.org/docs/1.13/notes/randomness.html
+    torch.use_deterministic_algorithms(True)
+    # torch.backends.cudnn.deterministic = True
+    torch.manual_seed(seed)
+    seed = seed % 2**32  # np.random.seed requires a seed in [0, 2**32)
+    np.random.seed(seed)
+    random.seed(seed)
+
 
 class MultiscaleMaggotTrainer(MaggotTrainer):
     def __init__(self, cfgfilepath, behaviors=[], n_layers=1, n_iterations=None,
@@ -314,6 +327,7 @@ def make_trainer(first_arg, *args, **kwargs):
 
     else:
         config_file = first_arg
+        # enforce_reproducibility()
 
         # the type criterion does not fail in the case of unimplemented bagging,
         # as config files are listed in a pretrained_models subdirectory.
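Review note: `enforce_reproducibility` seeds torch, NumPy and Python's `random` from a single `torch.Generator`, and turns on `torch.use_deterministic_algorithms(True)`. Per the linked PyTorch notes, that flag raises at runtime for ops with no deterministic implementation, and CUDA users may also need to set `CUBLAS_WORKSPACE_CONFIG=:4096:8`. A hedged usage sketch (the call site is illustrative):

```python
import torch
from maggotuba.models.trainers import new_generator, enforce_reproducibility

generator = new_generator()         # seeded with the default 0b11010111001001101001110
enforce_reproducibility(generator)  # torch, NumPy and random now derive from that seed

# subsequent sampling is reproducible across runs
sample = torch.randn(3, generator=generator)
```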