Commit 17fa2603 authored by François Laurent

Merge branch 'dev' into 20230311

parents 66e86120 3e8bc2e6
LICENSE

 MIT License
-Copyright (c) 2022 François Laurent, Institut Pasteur
+Copyright (c) 2022-2023 François Laurent, Institut Pasteur
 Permission is hereby granted, free of charge, to any person obtaining a copy
 of this software and associated documentation files (the "Software"), to deal
......
pyproject.toml

 [tool.poetry]
 name = "MaggotUBA-adapter"
-version = "0.12.2"
+version = "0.12.3"
 description = "Interface between MaggotUBA and the Nyx tagging UI"
 authors = ["François Laurent"]
 license = "MIT"
@@ -10,11 +10,11 @@ packages = [
 [tool.poetry.dependencies]
 python = "^3.8,<3.10"
-maggotuba-core = {git = "https://gitlab.pasteur.fr/nyx/MaggotUBA-core", rev="main"}
+maggotuba-core = {git = "https://gitlab.pasteur.fr/nyx/MaggotUBA-core", tag = "v1.0.1"}
 torch = "^1.11.0"
 numpy = "^1.19.3"
 protobuf = "3.9.2"
-taggingbackends = {git = "https://gitlab.pasteur.fr/nyx/TaggingBackends", rev="main"}
+taggingbackends = {git = "https://gitlab.pasteur.fr/nyx/TaggingBackends", tag = "v0.12.3"}

 [build-system]
 requires = ["poetry-core>=1.0.0"]
......
maggotuba/models/modules.py

@@ -106,7 +106,9 @@ class MaggotModule(nn.Module):
         self.config[entry] = self.path_for_config(Path(self.config[entry]))

     def path_for_config(self, path):
-        if self.root_dir and path.is_absolute():
+        if path.name.endswith('.pt'):
+            path = path.name
+        elif self.root_dir and path.is_absolute():
             # Path.is_relative_to available from Python >= 3.9 only;
             # we want the present code to run on Python >= 3.8
             relativepath = path.relative_to(self.root_dir)
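
The hunk above makes model state files location-independent: a path ending in .pt is reduced to its basename, while other absolute paths are still made relative to root_dir. A minimal sketch of the two branches, with hypothetical example paths:

    from pathlib import Path

    # Hypothetical paths; only the branching behavior is illustrated.
    weights = Path("/some/old/clone/models/example/trained_state.pt")
    config = Path("/current/clone/models/example/config.json")
    root_dir = Path("/current/clone")

    # New first branch: *.pt entries keep only their basename, so stale
    # absolute paths recorded in a copied model directory no longer break loading.
    assert weights.name.endswith('.pt')
    print(weights.name)                  # trained_state.pt

    # Unchanged second branch: other absolute paths under root_dir are
    # rewritten relative to it.
    print(config.relative_to(root_dir))  # models/example/config.json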
@@ -182,7 +184,7 @@ class MaggotEncoder(MaggotModule):
         # if state file not found or config option "load_state" is False,
         # (re-)initialize the model's weights
         if _reason:
-            logging.debug(f"initializing the encoder ({_reason})")
+            logging.info(f"initializing the encoder ({_reason})")
         _init, _bias = config.get('init', None), config.get('bias', None)
         for child in encoder.children():
             if isinstance(child,
......
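
Raising the message from debug to info makes encoder (re-)initialization visible in typical runs. Note that Python's root logger defaults to the WARNING level, so INFO output still has to be enabled by the caller; a minimal sketch:

    import logging

    # Enable INFO-level output; the root logger's default WARNING threshold
    # would otherwise still hide "initializing the encoder (...)".
    logging.basicConfig(level=logging.INFO)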
maggotuba/models/train_model.py

 from taggingbackends.data.labels import Labels
 from taggingbackends.data.dataset import LarvaDataset
-from maggotuba.models.trainers import make_trainer, new_generator
+from maggotuba.models.trainers import make_trainer, new_generator, enforce_reproducibility
 import glob

 def train_model(backend, layers=1, pretrained_model_instance="default",
@@ -12,6 +12,15 @@ def train_model(backend, layers=1, pretrained_model_instance="default",
     larva_dataset_file = glob.glob(str(backend.interim_data_dir() / "larva_dataset_*.hdf5")) # this other one is not recursive
     assert len(larva_dataset_file) == 1

+    # argument `rng_seed` predates `seed`
+    try:
+        seed = kwargs.pop('seed')
+    except KeyError:
+        pass
+    else:
+        if rng_seed is None:
+            rng_seed = seed
+
     # instantiate a LarvaDataset object, that is similar to a PyTorch DataLoader
     # and can initialize a Labels object
     # note: subsets=(1, 0, 0) => all data are training data; no validation or test subsets
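
The added block accepts seed as an alias for the pre-existing rng_seed keyword, with rng_seed taking precedence when both are given. Hypothetical call sites, assuming rng_seed and **kwargs are among train_model's parameters as the code above implies:

    # Both spellings now select the training RNG seed:
    train_model(backend, rng_seed=42)         # original keyword
    train_model(backend, seed=42)             # newer alias, mapped onto rng_seed
    train_model(backend, rng_seed=1, seed=2)  # rng_seed wins; seed=2 is ignored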
@@ -25,6 +34,9 @@ def train_model(backend, layers=1, pretrained_model_instance="default",
     # the labels may be bytes objects; convert to str
     labels = labels if isinstance(labels[0], str) else [s.decode() for s in labels]

+    # could be moved into `make_trainer`, but we need access to the generator here
+    enforce_reproducibility(dataset.generator)
+
     # copy and load the pretrained model into the model instance directory
     model = make_trainer(backend, pretrained_model_instance, labels, layers, iterations)
......
maggotuba/models/trainers.py

@@ -2,7 +2,6 @@ import numpy as np
 import torch
 import torch.nn as nn
 from behavior_model.models.neural_nets import device
-#import behavior_model.data.utils as data_utils
 from maggotuba.models.modules import SupervisedMaggot, MultiscaleSupervisedMaggot, MaggotBag
 from taggingbackends.features.skeleton import interpolate
 from taggingbackends.explorer import BackendExplorer, check_permissions
@@ -252,6 +251,20 @@ def new_generator(seed=None):
     if seed is None: seed = 0b11010111001001101001110
     return generator.manual_seed(seed)

+def enforce_reproducibility(generator=None):
+    import random
+    if generator is None:
+        seed = 0b11010111001001101001110
+    else:
+        seed = generator.initial_seed()
+    # see https://pytorch.org/docs/1.13/notes/randomness.html
+    torch.use_deterministic_algorithms(True)
+    # torch.backends.cudnn.deterministic = True
+    torch.manual_seed(seed)
+    seed = seed % 2**32
+    np.random.seed(seed)
+    random.seed(seed)
+
 class MultiscaleMaggotTrainer(MaggotTrainer):
     def __init__(self, cfgfilepath, behaviors=[], n_layers=1, n_iterations=None,
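
The new enforce_reproducibility helper seeds all three RNG sources (PyTorch, NumPy, and Python's random module) from a single torch.Generator, falling back to the same hard-coded default as new_generator; the modulo folds the seed into the 32-bit range that numpy.random.seed requires. A usage sketch mirroring the call added in train_model:

    # Seed every RNG from the generator that also drives data shuffling.
    generator = new_generator()          # torch.Generator with the default seed
    enforce_reproducibility(generator)   # torch, numpy and random now agree

Be aware that torch.use_deterministic_algorithms(True) raises an error on operations that have no deterministic implementation, and on CUDA some cuBLAS operations additionally require the CUBLAS_WORKSPACE_CONFIG environment variable to be set (see the randomness notes linked in the code).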
@@ -314,6 +327,7 @@ def make_trainer(first_arg, *args, **kwargs):
     else:
         config_file = first_arg

+    #enforce_reproducibility()
+
     # the type criterion does not fail in the case of unimplemented bagging,
     # as config files are listed in a pretrained_models subdirectory.
......