Skip to content
Snippets Groups Projects
Commit cad434b5 authored by François  LAURENT's avatar François LAURENT
Browse files

debug-level log messages

parent 7b62019f
No related branches found
No related tags found
1 merge request!8debug-level log messages
[tool.poetry] [tool.poetry]
name = "MaggotUBA-adapter" name = "MaggotUBA-adapter"
version = "0.16.3" version = "0.16.4"
description = "Interface between MaggotUBA and the Nyx tagging UI" description = "Interface between MaggotUBA and the Nyx tagging UI"
authors = ["François Laurent"] authors = ["François Laurent"]
license = "MIT" license = "MIT"
...@@ -14,7 +14,7 @@ maggotuba-core = {git = "https://gitlab.pasteur.fr/nyx/MaggotUBA-core", tag = "v ...@@ -14,7 +14,7 @@ maggotuba-core = {git = "https://gitlab.pasteur.fr/nyx/MaggotUBA-core", tag = "v
torch = "^1.11.0" torch = "^1.11.0"
numpy = "^1.19.3" numpy = "^1.19.3"
protobuf = "3.9.2" protobuf = "3.9.2"
taggingbackends = {git = "https://gitlab.pasteur.fr/nyx/TaggingBackends", tag = "v0.15.2"} taggingbackends = {git = "https://gitlab.pasteur.fr/nyx/TaggingBackends", tag = "v0.15.3"}
[build-system] [build-system]
requires = ["poetry-core>=1.0.0"] requires = ["poetry-core>=1.0.0"]
......
import logging import logging
import os import os
from pathlib import Path from pathlib import Path
import numpy as np
import torch import torch
from torch import nn from torch import nn
import json import json
...@@ -29,6 +30,7 @@ class MaggotModule(nn.Module): ...@@ -29,6 +30,7 @@ class MaggotModule(nn.Module):
@classmethod
def load_config(cls, path):
    """Read the JSON configuration file at *path* and return the parsed object.

    Emits a debug-level log line once the file has been opened.
    """
    with open(path, "r") as fp:
        logging.debug(f"loading config file: {path}")
        config = json.load(fp)
    return config
@property @property
...@@ -73,6 +75,7 @@ class MaggotModule(nn.Module): ...@@ -73,6 +75,7 @@ class MaggotModule(nn.Module):
def save_config(self, cfgfile=None): def save_config(self, cfgfile=None):
if cfgfile is None: cfgfile = self.cfgfile if cfgfile is None: cfgfile = self.cfgfile
path = self.path / cfgfile path = self.path / cfgfile
logging.debug(f"saving config to file: {path}")
with open(path, "w") as f: with open(path, "w") as f:
json.dump(self.config, f, indent=2) json.dump(self.config, f, indent=2)
check_permissions(path) check_permissions(path)
...@@ -81,6 +84,7 @@ class MaggotModule(nn.Module): ...@@ -81,6 +84,7 @@ class MaggotModule(nn.Module):
def save_model(self, ptfile=None):
    """Persist the wrapped network's state dict under the module's directory.

    Parameters
    ----------
    ptfile : str or Path, optional
        File name for the checkpoint; defaults to ``self.ptfile``.

    Returns
    -------
    Path
        The full path the state dict was written to.
    """
    if ptfile is None:
        ptfile = self.ptfile
    target = self.path / ptfile
    logging.debug(f"saving neural network state to file: {target}")
    torch.save(self.model.state_dict(), target)
    # apply the project's file-permission policy to the new file
    check_permissions(target)
    return target
...@@ -179,6 +183,8 @@ class MaggotEncoder(MaggotModule): ...@@ -179,6 +183,8 @@ class MaggotEncoder(MaggotModule):
except Exception as e: except Exception as e:
_reason = e _reason = e
config['load_state'] = False # for `was_pretrained` to properly work config['load_state'] = False # for `was_pretrained` to properly work
else:
logging.debug(f"loading neural network state: {path}")
else: else:
_reason = '"load_state" is set to false' _reason = '"load_state" is set to false'
# if state file not found or config option "load_state" is False, # if state file not found or config option "load_state" is False,
...@@ -330,9 +336,11 @@ class DeepLinear(nn.Module): ...@@ -330,9 +336,11 @@ class DeepLinear(nn.Module):
return self.layers(x) return self.layers(x)
def load(self, path):
    """Restore this module's parameters from the checkpoint at *path*."""
    logging.debug(f"loading neural network state: {path}")
    state = torch.load(path)
    self.load_state_dict(state)
def save(self, path):
    """Serialize this module's parameters to *path*."""
    logging.debug(f"saving neural network state to file: {path}")
    state = self.state_dict()
    torch.save(state, path)
    # apply the project's file-permission policy to the new file
    check_permissions(path)
......
...@@ -386,17 +386,20 @@ def import_pretrained_model(backend, pretrained_model_instance): ...@@ -386,17 +386,20 @@ def import_pretrained_model(backend, pretrained_model_instance):
for file in pretrained_autoencoder_dir.iterdir(): for file in pretrained_autoencoder_dir.iterdir():
if not file.is_file(): if not file.is_file():
continue continue
logging.debug(f"copying file: {file}")
dst = backend.model_dir() / file.name dst = backend.model_dir() / file.name
if file.name.endswith("config.json"): if file.name.endswith("config.json"):
with open(file) as f: with open(file) as f:
config = json.load(f) config = json.load(f)
dir = backend.model_dir().relative_to(backend.project_dir) dir = backend.model_dir().relative_to(backend.project_dir)
config["log_dir"] = str(dir) config["log_dir"] = str(dir)
logging.debug(f"log_dir: \"{config['log_dir']}\"")
with open(dst, "w") as f: with open(dst, "w") as f:
json.dump(config, f, indent=2) json.dump(config, f, indent=2)
assert config_file is None assert config_file is None
config_file = dst config_file = dst
else: else:
assert file.name != 'trained_classifier.pt'
with open(file, "rb") as i: with open(file, "rb") as i:
with open(dst, "wb") as o: with open(dst, "wb") as o:
o.write(i.read()) o.write(i.read())
......
0% Loading or .
You are about to add 0 people to the discussion. Proceed with caution.
Please register or sign in to comment