Skip to content
Snippets Groups Projects

debug-level log messages

Merged François LAURENT requested to merge debug into dev
4 files changed: +13 −1
Compare changes
  • Side-by-side
  • Inline
Files
4
@@ -30,6 +30,7 @@ class MaggotModule(nn.Module):
@classmethod
def load_config(cls, path):
    """Read a JSON configuration file and return its parsed content.

    Parameters
    ----------
    path : str or pathlib.Path
        Location of the JSON configuration file.

    Returns
    -------
    dict
        The deserialized configuration.
    """
    # JSON is UTF-8 by specification; don't rely on the platform default.
    with open(path, "r", encoding="utf-8") as f:
        # Lazy %-style args: the message is only formatted when DEBUG
        # logging is actually enabled (f-strings format unconditionally).
        logging.debug("loading config file: %s", path)
        return json.load(f)
@property
@@ -74,6 +75,7 @@ class MaggotModule(nn.Module):
# Serialize self.config as indented JSON to `cfgfile` (defaults to
# self.cfgfile) under self.path, then normalize the written file's
# permissions via the module-level check_permissions helper.
# NOTE(review): the diff hunk ends here; the full method may continue
# (e.g. `return path`, as save_model does) — confirm against full source.
def save_config(self, cfgfile=None):
if cfgfile is None: cfgfile = self.cfgfile
# self.path is a pathlib.Path (the `/` operator joins path components)
path = self.path / cfgfile
logging.debug(f"saving config to file: {path}")
with open(path, "w") as f:
json.dump(self.config, f, indent=2)
check_permissions(path)
@@ -82,6 +84,7 @@ class MaggotModule(nn.Module):
def save_model(self, ptfile=None):
    """Serialize the wrapped model's state dict under ``self.path``.

    Parameters
    ----------
    ptfile : str or None
        Target file name; defaults to ``self.ptfile``.

    Returns
    -------
    pathlib.Path
        The path the state dict was written to.
    """
    if ptfile is None:
        ptfile = self.ptfile
    path = self.path / ptfile
    # Lazy %-style args: only formatted when DEBUG logging is enabled.
    logging.debug("saving neural network state to file: %s", path)
    torch.save(self.model.state_dict(), path)
    # check_permissions is a module-level helper defined elsewhere in file.
    check_permissions(path)
    return path
@@ -180,6 +183,8 @@ class MaggotEncoder(MaggotModule):
except Exception as e:
_reason = e
config['load_state'] = False # for `was_pretrained` to properly work
else:
logging.debug(f"loading neural network state: {path}")
else:
_reason = '"load_state" is set to false'
# if state file not found or config option "load_state" is False,
@@ -331,9 +336,11 @@ class DeepLinear(nn.Module):
return self.layers(x)
def load(self, path):
    """Load network weights from *path* into this module, in place.

    Parameters
    ----------
    path : str or pathlib.Path
        File previously written with ``torch.save(state_dict, path)``.
    """
    # Lazy %-style args: only formatted when DEBUG logging is enabled.
    logging.debug("loading neural network state: %s", path)
    self.load_state_dict(torch.load(path))
def save(self, path):
    """Write this module's state dict to *path* and fix its permissions.

    Parameters
    ----------
    path : str or pathlib.Path
        Destination file for the serialized state dict.
    """
    # Lazy %-style args: only formatted when DEBUG logging is enabled.
    logging.debug("saving neural network state to file: %s", path)
    torch.save(self.state_dict(), path)
    # check_permissions is a module-level helper defined elsewhere in file.
    check_permissions(path)
Loading