diff --git a/.coverage b/.coverage
index ff754ea8b04db0e4bc7db8aee37126477feee56f..7f4cc21f0600cbe15bb35deaaf856e3f4f26e781 100644
Binary files a/.coverage and b/.coverage differ
diff --git a/pypelines/__pycache__/__init__.cpython-311.pyc b/pypelines/__pycache__/__init__.cpython-311.pyc
index b7aa15ca9688d5596858b845778a2b66ee45ae48..8550abbac6f7ee0b29e5f4d402f453d9c6cdd4e9 100644
Binary files a/pypelines/__pycache__/__init__.cpython-311.pyc and b/pypelines/__pycache__/__init__.cpython-311.pyc differ
diff --git a/pypelines/__pycache__/examples.cpython-311.pyc b/pypelines/__pycache__/examples.cpython-311.pyc
index 40848c1fb7dc11c2603a36056b8407268dd92f5e..780df463f25f93e2172633060d028bf9f416db06 100644
Binary files a/pypelines/__pycache__/examples.cpython-311.pyc and b/pypelines/__pycache__/examples.cpython-311.pyc differ
diff --git a/pypelines/__pycache__/loggs.cpython-311.pyc b/pypelines/__pycache__/loggs.cpython-311.pyc
index 25a687dceb042dab4bdeec568adea40eaabdaa34..59a5b8beef301a67600a51883a78eafa02fc3586 100644
Binary files a/pypelines/__pycache__/loggs.cpython-311.pyc and b/pypelines/__pycache__/loggs.cpython-311.pyc differ
diff --git a/pypelines/__pycache__/multisession.cpython-311.pyc b/pypelines/__pycache__/multisession.cpython-311.pyc
index a024fa8bec35026803c235c6a3f33f5577435152..8f189ac499e82d61dfcc199f78f3844e6fe17e9a 100644
Binary files a/pypelines/__pycache__/multisession.cpython-311.pyc and b/pypelines/__pycache__/multisession.cpython-311.pyc differ
diff --git a/pypelines/__pycache__/pickle_backend.cpython-311.pyc b/pypelines/__pycache__/pickle_backend.cpython-311.pyc
index 1c2d1ffebae72841ab748a80da5580b7f0e6b5e5..3b724cfb7ee20e8dbeae3727c4c62d0942f3a241 100644
Binary files a/pypelines/__pycache__/pickle_backend.cpython-311.pyc and b/pypelines/__pycache__/pickle_backend.cpython-311.pyc differ
diff --git a/pypelines/__pycache__/pipe.cpython-311.pyc b/pypelines/__pycache__/pipe.cpython-311.pyc
index d02f820c147218d4c27578979870148cf51a4628..245faedfbd79e61c050d0ce15c0ff7af4844816c 100644
Binary files a/pypelines/__pycache__/pipe.cpython-311.pyc and b/pypelines/__pycache__/pipe.cpython-311.pyc differ
diff --git a/pypelines/__pycache__/pipeline.cpython-311.pyc b/pypelines/__pycache__/pipeline.cpython-311.pyc
index 854f3a0b5c624ba6e57eb6f8036e18b925426a02..caf0bc1b20115b3044272f07dc526daaa7680c37 100644
Binary files a/pypelines/__pycache__/pipeline.cpython-311.pyc and b/pypelines/__pycache__/pipeline.cpython-311.pyc differ
diff --git a/pypelines/__pycache__/sessions.cpython-311.pyc b/pypelines/__pycache__/sessions.cpython-311.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..124a23a8ee19395a2d1085bb47e2163257425ff2
Binary files /dev/null and b/pypelines/__pycache__/sessions.cpython-311.pyc differ
diff --git a/pypelines/__pycache__/step.cpython-311.pyc b/pypelines/__pycache__/step.cpython-311.pyc
index e61d01184b005bb1eb8e4676f5a31a89bbba6c4c..c4d11c73dbea1bb430c7db0000641c7f854d66ed 100644
Binary files a/pypelines/__pycache__/step.cpython-311.pyc and b/pypelines/__pycache__/step.cpython-311.pyc differ
diff --git a/pypelines/__pycache__/versions.cpython-311.pyc b/pypelines/__pycache__/versions.cpython-311.pyc
index 71ec6deb30e45d6dcaea734234554b8054ce6f31..5fbe64984c9967bf7beedd4b37399cafcfd2eebc 100644
Binary files a/pypelines/__pycache__/versions.cpython-311.pyc and b/pypelines/__pycache__/versions.cpython-311.pyc differ
diff --git a/pypelines/disk.py b/pypelines/disk.py
new file mode 100644
index 0000000000000000000000000000000000000000..cfd1cfb2da31ca788553abdb9a272d9d34179e82
--- /dev/null
+++ b/pypelines/disk.py
@@ -0,0 +1,132 @@
+import os
+from . sessions import Session
+
+from typing import Callable, Type, Iterable, Protocol, TYPE_CHECKING
+
+if TYPE_CHECKING:
+    from .step import BaseStep
+
+class OutputData(Protocol):
+    """Can be a mapping, iterable, single element, or None.
+
+    This class is defined for typehints, and is not a real class usable at runtime"""
+
+class BaseDiskObject :
+
+    disk_version = None
+    disk_step = None
+
+    def __init__(self, session : Session, step : "BaseStep", extra = "") -> None :
+
+        self.step = None
+        self.session = session
+        self.step = step
+        self.extra = extra
+
+        self.check_disk()
+
+    def check_disk(self):
+        """sets self.disk_version and self.disk_step"""
+        ...
+
+    def save(self, object):
+        ...
+
+    def load(self) -> OutputData:
+        ...
+
+    def step_exist(self, session : Session):
+        """returns True if the file(s) found had a stamp corresponding to the current step. False otherwise"""
+        return self.step == self.disk_step
+
+    def version_exist(self, session : Session):
+        """returns True if the file found had a stamp for that step corresponding to the current version. False otherwise"""
+        return self.step.version == self.disk_version
+
+
+class PickleObject(BaseDiskObject) :
+
+    collection = ["preprocessing_saves"] # collection a.k.a. subfolders in the session.path before the file itself
+    file_prefix = "preproc_data"
+    extension = "pickle"
+    current_suffixes = ""
+
+    def make_file_prefix_path(self):
+        prefix_path = self.file_prefix + "." + self.step.pipe_name
+        rigid_pattern = self.file_prefix
+
+        pattern = ""
+
+        if self.step.pipe.single_step :
+            pass
+
+        if self.step.use_version :
+            pass
+
+        flexible_pattern = self.f
+
+    def check_disk(self):
+        # collection is a list of subfolder names, so it must be unpacked into os.path.join
+        search_path = os.path.join(self.session.path, *self.collection)
+
+    def save(self, object):
+        ...
+
+    def load(self) -> OutputData:
+        ...
+
+
+import natsort
+from . import extract
+
+def files(input_path, re_pattern = None, relative = False, levels = -1, get = "files", parts = "all", sort = True):
+    """
+    Get the full paths of files from all folders under the ``input_path`` (including itself).
+    Can return specific files matching optional conditions.
+    Args:
+        input_path (str): A valid path to a folder.
+            This folder is used as the root to return the files found
+            (optional filtering with ``re_pattern``: only paths matching the regular expression are kept).
+    Returns:
+        list: List of the file fullpaths found under the ``input_path`` folder and subfolders.
+    """
+    # if levels == -1, we get everything whatever the depth (at least up to 32767 subfolders, but this should be fine...)
+
+    if levels == -1 :
+        levels = 32767
+    current_level = 0
+    output_list = []
+
+    def _recursive_search(_input_path):
+        nonlocal current_level
+        for subdir in os.listdir(_input_path):
+            fullpath = os.path.join(_input_path, subdir)
+            if os.path.isfile(fullpath):
+                if (get == "all" or get == "files") and (re_pattern is None or extract.qregexp(re_pattern, fullpath)):
+                    output_list.append(os.path.normpath(fullpath))
+
+            else :
+                if (get == "all" or get == "dirs" or get == "folders") and (re_pattern is None or extract.qregexp(re_pattern, fullpath)):
+                    output_list.append(os.path.normpath(fullpath))
+                if current_level < levels:
+                    current_level += 1
+                    _recursive_search(fullpath)
+                    current_level -= 1
+
+    if os.path.isfile(input_path):
+        raise ValueError(f"Can only list files in a directory. A file was given : {input_path}")
+
+    _recursive_search(input_path)
+
+    if relative :
+        output_list = [os.path.relpath(file, start = input_path) for file in output_list]
+    if parts == "name" :
+        output_list = [os.path.basename(file) for file in output_list]
+    if sort :
+        output_list = natsort.natsorted(output_list)
+    return output_list
+
+
\ No newline at end of file
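The `files()` helper above walks a directory tree with an optional regex filter, a depth limit, relative output and natural sorting. Below is a minimal, self-contained sketch of the same idea (files only), written against `os.walk` instead of the module's recursive helper; the real implementation also relies on `pypelines.extract.qregexp`, which is assumed here to behave like `re.search`:

```python
import os
import re

import natsort


def list_files(input_path, re_pattern=None, relative=False, levels=-1, sort=True):
    """Standalone approximation of pypelines.disk.files, restricted to files."""
    if os.path.isfile(input_path):
        raise ValueError(f"Can only list files in a directory. A file was given : {input_path}")

    max_depth = 32767 if levels == -1 else levels  # -1 means "no practical limit"
    root_depth = os.path.normpath(input_path).count(os.sep)
    found = []

    for dirpath, dirnames, filenames in os.walk(input_path):
        depth = os.path.normpath(dirpath).count(os.sep) - root_depth
        if depth >= max_depth:
            dirnames[:] = []  # prune: do not descend below the requested level
        for name in filenames:
            fullpath = os.path.normpath(os.path.join(dirpath, name))
            if re_pattern is None or re.search(re_pattern, fullpath):
                found.append(fullpath)

    if relative:
        found = [os.path.relpath(f, start=input_path) for f in found]
    return natsort.natsorted(found) if sort else found


# e.g. list_files("/data/session_01", re_pattern=r"\.pickle$", relative=True)
```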
A file was given : {input_path}") + + _recursive_search(input_path) + + if relative : + output_list = [os.path.relpath(file,start = input_path) for file in output_list] + if parts == "name" : + output_list = [os.path.basename(file) for file in output_list] + if sort : + output_list = natsort.natsorted(output_list) + return output_list + + + + \ No newline at end of file diff --git a/pypelines/examples.py b/pypelines/examples.py index eb6d392ae851db3cd680e7520f021fe9edd326fd..bc46c657bb224d1c83468f08c6e62c340f8053e9 100644 --- a/pypelines/examples.py +++ b/pypelines/examples.py @@ -11,10 +11,10 @@ example_pipeline = ExamplePipeline() @example_pipeline.register_pipe class ExamplePipe(PicklePipe): - @stepmethod() - def example_step1(self, argument1, optionnal_argument2 = "23"): + @stepmethod(version = "1") + def example_step1(self, session, argument1, optionnal_argument2 = "23"): return {"argument1" : argument1, "optionnal_argument2" : optionnal_argument2} - @stepmethod(requires = [example_step1]) - def example_step2(self, argument1, argument2): - return {"argument1" : argument1, "argument2" : argument2} + @stepmethod(requires = "ExamplePipe.example_step1") + def example_step2(self, session, argument1, argument2): + return {"argument1" : argument1, "argument2" : argument2} \ No newline at end of file diff --git a/pypelines/pipe.py b/pypelines/pipe.py index 89620f9eac394076597d421df7c838ac40a0edb4..8934883fd49ac1ca9230dc1b88b027c1ab853fab 100644 --- a/pypelines/pipe.py +++ b/pypelines/pipe.py @@ -1,5 +1,6 @@ from . step import BaseStep from . multisession import BaseMultisessionAccessor +from . sessions import Session from functools import wraps import inspect @@ -9,39 +10,23 @@ from typing import Callable, Type, Iterable, Protocol, TYPE_CHECKING if TYPE_CHECKING: from .pipeline import BasePipeline -class OutputData(Protocol): - """Can be a mapping, iterable, single element, or None. - - This class is defined for typehints, and is not a real class useable at runtime""" - class PipeMetaclass(type): def __new__(cls : Type, pipe_name : str, bases : Iterable[Type], attributes : dict) -> Type: - print(pipe_name, attributes) - attributes["pipe_name"] = pipe_name - - steps = {} - # this loop allows to populate cls.steps from the unistanciated the step methods of the cls. 
-        for name, attribute in attributes.items():
-            if getattr(attribute, "is_step", False):
-                steps[name] = PipeMetaclass.make_step_attributes(attribute , pipe_name , name)
-
-        attributes["steps"] = steps
-
-        if len(attributes["steps"]) > 1 and attributes["single_step"]:
-            raise ValueError(f"Cannot set single_step to True if you registered more than one step inside {pipe_name} class")
-
         return super().__new__(cls, pipe_name, bases, attributes)
 
     def __init__(cls : Type, pipe_name : str, bases : Iterable[Type], attributes : dict) -> None:
-        print(f"init of {pipe_name}")
-        print(cls.__dict__)
+        steps = getattr(cls, "steps", {})
+        for name, attribute in attributes.items():
+            if getattr(attribute, "is_step", False):
+                steps[name] = PipeMetaclass.step_with_attributes(attribute , pipe_name , name)
+        setattr(cls, "steps", steps)
 
     @staticmethod
-    def make_step_attributes(step : Callable, pipe_name : str, step_name : str) -> Callable:
+    def step_with_attributes(step : BaseStep, pipe_name : str, step_name : str) -> BaseStep:
 
         setattr(step, "pipe_name", pipe_name)
         setattr(step, "step_name", step_name)
@@ -62,14 +47,16 @@ class BasePipe(metaclass = PipeMetaclass):
         self.multisession = self.multisession_class(self)
         self.pipeline = parent_pipeline
 
+        self.pipe_name = self.__class__.__name__
+        print(self.pipe_name)
+
+        if len(self.steps) > 1 and self.single_step:
+            raise ValueError(f"Cannot set single_step to True if you registered more than one step inside {self.pipe_name} class")
+
         # this loop allows to populate self.steps from the now instanciated version of the step method.
         # Using only instanciated version is important to be able to use self into it later,
         # without confusing ourselved with uninstanciated versions in the steps dict
-        methods = inspect.getmembers(self, predicate= inspect.ismethod)
-        print(methods)
-
         for step_name, _ in self.steps.items():
             step = getattr(self , step_name) # get the instanciated step method from name.
             step = self.step_class(self.pipeline, self, step, step_name)
@@ -85,50 +72,47 @@ class BasePipe(metaclass = PipeMetaclass):
             self.pipeline.pipes[self.pipe_name] = self
             setattr(self.pipeline, self.pipe_name, self)
 
-        self._make_wrapped_functions()
-
-    def _make_wrapped_functions(self):
-        self.make_wrapped_save()
-        self.make_wrapped_load()
-
     def __repr__(self) -> str:
         return f"<{self.__class__.__bases__[0].__name__}.{self.pipe_name} PipeObject>"
 
-    def make_wrapped_save(self):
-        self.save = self.dispatcher(self.file_saver)
-
-    def make_wrapped_load(self):
-        self.load = self.dispatcher(self.file_loader)
-
-    def file_getter(self, session, extra, version) -> OutputData :
-        #finds file, opens it, and return data.
-        #if it cannot find the file, it returns a IOError
-        ...
-        #it will get
-
     def _check_version(self, step_name , found_version):
         #checks the found_version of the file is above or equal in the requirement order, to the step we are looking for
-        #TODO
+        #TODO: think about how to implement this so that it stays easily modular
        self.pipeline.get_requirement_stack(step_name)
 
-    def step_version(self, step):
-        #simply returns the current string of the version that is in .
-        ...
+    def identify_disk_version(self, version_string : str) -> BaseStep :
+        """It will be called if the current version and the disk version do not match.
 
-    def disk_version(self, session, extra) -> str :
-        #simply returns the version string of the file(s) that it found.
+        This function returns True if the step found on disk is valid and can be loaded,
+        or False if it is not a valid version, in which case the requires chain must be relaunched to regenerate from the last step that was in an OK version.
+
+        If the disk version is a non-deprecated version :
+            - check if that step is above the current version, and in that case return True, else return False.
+
+        If the disk version is a deprecated version :
+            - directly rerun the requires chain list.
+
+        """
+
+    def disk_step(self, session : Session, extra = "") -> BaseStep :
+        # simply returns the pipe's step instance (most recent in the step requirement order) that corresponds to the step found on the disk
+        return None
         ...
 
-    def file_saver(self, session, dumped_object, extra, version ):
+    def file_saver(self, session : Session, dumped_object, step : BaseStep, extra = "") -> None:
         ...
 
-    def file_loader(self, session, extra, version ):
+    def file_loader(self, session : Session, step : BaseStep, extra = "") :
+        """Loads a file that corresponds to the pipe, the step and the eventual version
+        required (if step.use_version is True and step.version == the version of the file on the drive)
+        If the version is not """
         ...
-
-    def file_checker(self, session):
-        ...
-
-    def dispatcher(self, function):
+
+    def dispatcher(self, function : Callable):
         # the dispatcher must be return a wrapped function
-        ...
+        return function
+
+    def pre_run_wrapper(self, function : Callable):
+        # must return a wrapped function
+        return function
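The step-collection logic now lives in `PipeMetaclass.__init__`: any attribute stamped with `is_step` is tagged with its pipe and step name and gathered into a class-level `steps` dict. Below is a standalone toy (hypothetical `ToyPipe*` names, not the library's classes) illustrating that registration pattern:

```python
from typing import Callable, Dict


def stepmethod(func: Callable) -> Callable:
    # Stamp the function so the metaclass can recognise it as a step.
    func.is_step = True
    return func


class ToyPipeMetaclass(type):
    def __init__(cls, name: str, bases: tuple, attributes: dict) -> None:
        super().__init__(name, bases, attributes)
        # Start from any steps inherited from base classes, then add this class's own.
        steps: Dict[str, Callable] = dict(getattr(cls, "steps", {}))
        for attr_name, attr in attributes.items():
            if getattr(attr, "is_step", False):
                attr.pipe_name = name       # mirrors PipeMetaclass.step_with_attributes
                attr.step_name = attr_name
                steps[attr_name] = attr
        cls.steps = steps


class ToyPipe(metaclass=ToyPipeMetaclass):
    @stepmethod
    def step_a(self):
        return "a"


print(ToyPipe.steps)                       # {'step_a': <function ToyPipe.step_a ...>}
print(ToyPipe.steps["step_a"].pipe_name)   # 'ToyPipe'
```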
diff --git a/pypelines/pipeline.py b/pypelines/pipeline.py
index ec6abf881a3719f77c0b3d2be5a3f8a36b624b27..01af955e2068bfba923d8060c49f21a9a66c406b 100644
--- a/pypelines/pipeline.py
+++ b/pypelines/pipeline.py
@@ -15,9 +15,6 @@ class BasePipeline:
             pipe_class(self)
             return pipe_class
 
-    def __init__(self, versions = None):
-        self.versions = versions
-
     def resolve(self, instance_name : str) :
         pipe_name , step_name = instance_name.split(".")
diff --git a/pypelines/sessions.py b/pypelines/sessions.py
new file mode 100644
index 0000000000000000000000000000000000000000..fcb92f83a0d583a17946fed7d2046bb0a8070fda
--- /dev/null
+++ b/pypelines/sessions.py
@@ -0,0 +1,120 @@
+import pandas as pd, os
+
+@pd.api.extensions.register_series_accessor("pipeline")
+class SeriesPipelineAcessor:
+    def __init__(self, pandas_obj) -> None:
+        self._validate(pandas_obj)
+        self._obj = pandas_obj
+
+    @staticmethod
+    def _validate(obj):
+        required_fields = ["path", "subject", "date", "number"]
+        missing_fields = []
+        for req_field in required_fields:
+            if req_field not in obj.index:
+                missing_fields.append(req_field)
+        if len(missing_fields):
+            raise AttributeError(
+                f"The series must have some required fields to use the pipeline accessor. This object is missing fields : {','.join(missing_fields)}"
+            )
+
+    def subject(self):
+        return str(self._obj.subject)
+
+    def number(self, zfill = 3):
+        number = str(self._obj.number) if self._obj.number is not None else ""
+        number = (
+            number
+            if zfill is None or number == ""
+            else number.zfill(zfill)
+        )
+        return number
+
+    def alias(self, separator = "_" , zfill = 3 , date_format = None):
+
+        subject = self.subject()
+        date = self.date(date_format)
+        number = self.number(zfill)
+
+        return (
+            subject
+            + separator
+            + date
+            + ((separator + number) if number else "")
+        )
+
+    def date(self, format = None):
+        if format :
+            return self._obj.date.strftime(format)
+        return str(self._obj.date)
+
+@pd.api.extensions.register_dataframe_accessor("pipeline")
+class DataFramePipelineAcessor:
+    def __init__(self, pandas_obj) -> None:
+        self._validate(pandas_obj)
+        self._obj = pandas_obj
+
+    @staticmethod
+    def _validate(obj):
+        required_columns = ["path", "subject", "date", "number"]
+        missing_columns = []
+        for req_col in required_columns:
+            if req_col not in obj.columns:
+                missing_columns.append(req_col)
+        if len(missing_columns):
+            raise AttributeError(
+                f"The dataframe must have some required columns to use the pipeline accessor. This object is missing columns : {','.join(missing_columns)}"
+            )
+
+class Session(pd.Series):
+    def __new__(
+        cls,
+        series=None,
+        *,
+        subject=None,
+        date=None,
+        number=None,
+        path=None,
+        auto_path=False,
+        date_format = None,
+        zfill = 3,
+        separator = "_"
+    ):
+        if series is None:
+            series = pd.Series()
+
+        if subject is not None:
+            series["subject"] = subject
+        if date is not None:
+            series["date"] = date
+        if number is not None or "number" not in series.index:
+            series["number"] = number
+        if path is not None:
+            series["path"] = path
+
+        series.pipeline # verify the series complies with the pipeline accessor
+
+        if auto_path:
+            series["path"] = os.path.normpath(os.path.join(
+                series["path"],
+                series.pipeline.subject(),
+                series.pipeline.date(date_format),
+                series.pipeline.number(zfill)
+            ))
+
+        if series.name is None:
+            series.name = series.pipeline.alias(separator = separator, zfill = zfill , date_format = date_format)
+
+        if "alias" not in series.index:
+            series["alias"] = series.pipeline.alias(separator = separator, zfill = zfill , date_format = date_format)
+
+        return series
+
+class Sessions(pd.DataFrame):
+    def __new__(cls, series_list):
+        # also works seamlessly if a dataframe is passed and is already a Sessions dataframe.
+        df = pd.DataFrame(series_list)
+
+        df.pipeline # verify the df complies with the pipeline accessor, then return it
+
+        return df
\ No newline at end of file
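A hedged usage sketch for the new `Session` / `Sessions` helpers, assuming the package is importable; the subject, date and path values are illustrative:

```python
from pypelines.sessions import Session, Sessions

# Building a session record; auto_path composes path/subject/date/number.
session = Session(subject="mouse12", date="2023-10-10", number=3,
                  path="D:/data", auto_path=True)

print(session["path"])   # D:/data/mouse12/2023-10-10/003 (os.path.normpath applied, separators depend on the OS)
print(session.name)      # mouse12_2023-10-10_003
print(session["alias"])  # same alias, also stored as a field

# Several session records can be stacked into a DataFrame sharing the same accessor.
sessions = Sessions([session])
sessions.pipeline  # accessing the accessor validates that the required columns are present
```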
diff --git a/pypelines/step.py b/pypelines/step.py
index 18bc5ff98ec6557a533cd1df1506f96b92c0921e..e2caa58741605bdfe800c01cf7db3d6bb3f98169 100644
--- a/pypelines/step.py
+++ b/pypelines/step.py
@@ -3,27 +3,35 @@ from .loggs import loggedmethod
 import logging
 from typing import Callable
 
-def stepmethod(requires = []):
+from typing import Callable, Type, Iterable, Protocol, TYPE_CHECKING
+
+if TYPE_CHECKING:
+    from .pipeline import BasePipeline
+    from .pipe import BasePipe
+
+def stepmethod(requires = [], version = None):
     # This method allows to register class methods inheriting of BasePipe as steps.
     # It basically just step an "is_step" stamp on the method that are defined as steps.
     # This stamp will later be used in the metaclass __new__ to set additionnal usefull attributes to those methods
-    if not isinstance(requires, list):
-        requires = [requires]
-
     def registrate(function):
-        function.requires = requires
+
+        function.requires = [requires] if not isinstance(requires, list) else requires
         function.is_step = True
+        function.use_version = False if version is None else True
+        function.version = version
         return function
 
     return registrate
 
 class BaseStep:
-
-    def __init__(self, pipeline, pipe, step, step_name):
+
+    def __init__(self, pipeline : "BasePipeline", pipe : "BasePipe", step : "BaseStep", step_name : str):
 
         self.pipeline = pipeline # save an instanciated access to the pipeline parent
         self.pipe = pipe # save an instanciated access to the pipe parent
         self.step = step # save an instanciated access to the step function (undecorated)
 
         self.pipe_name = pipe.pipe_name
         self.step_name = step_name
 
+        self.use_version = self.step.use_version
+        self.version = self.step.version
+
         self.single_step = self.pipe.single_step
         self.requires = self.step.requires
@@ -41,32 +49,23 @@ class BaseStep:
     def __repr__(self):
         return f"<{self.pipe_name}.{self.step_name} StepObject>"
 
-    def _saving_wrapper(self, function):
-        return self.pipe.dispatcher(self._version_wrapper(function, self.pipe.step_version))
-
-    def _loading_wrapper(self, function):
-        return self.pipe.dispatcher(self._version_wrapper(function, self.pipe.step_version))
-
-    def _generating_wrapper(self, function):
-        return
-
     def _make_wrapped_functions(self):
         self.make_wrapped_save()
         self.make_wrapped_load()
         self.make_wrapped_generate()
 
     def make_wrapped_save(self):
-        self.save = self._saving_wrapper(self.pipe.file_saver)
+        self.save = self.pipe.dispatcher(self._version_wrapper(self.pipe.file_saver))
 
     def make_wrapped_load(self):
-        self.load = self._loading_wrapper(self.pipe.file_loader)
+        self.load = self.pipe.dispatcher(self._version_wrapper(self.pipe.file_loader))
 
     def make_wrapped_generate(self):
         self.generate = loggedmethod(
             self._version_wrapper(
                 self.pipe.dispatcher(
-                    self._loading_wrapper(
-                        self._saving_wrapper(
+                    self._load_or_generate_wrapper(
+                        self._save_after_generate_wrapper(
                             self.pipe.pre_run_wrapper(self.step)
                         )
                     )
@@ -74,20 +73,26 @@
             )
         )
 
-
-    def _version_wrapper(self, function_to_wrap, version_getter):
-        @wraps(function_to_wrap)
+
+
+    def step_current_version(self) -> str:
+        # simply returns the current string of the version that is in the config file.
+        return "version"
+        ...
+
+    def _version_wrapper(self, function):
+        @wraps(function)
         def wrapper(*args, **kwargs):
-            version = version_getter(self)
-            return function_to_wrap(*args, version=version, **kwargs)
+            version = self.step_current_version()
+            return function(*args, version=version, **kwargs)
         return wrapper
 
-    def _loading_wrapper(self, func: Callable):
+    def _load_or_generate_wrapper(self, function: Callable):
         """
         Decorator to load instead of calculating if not refreshing and saved data exists
         """
 
-        @wraps(func)
+        @wraps(function)
         def wrap(session_details, *args, **kwargs):
             """
             Decorator function
@@ -108,7 +113,8 @@
             logger = logging.getLogger("load_pipeline")
 
             kwargs = kwargs.copy()
-            extra = kwargs.get("extra", None)
+            extra = kwargs.get("extra", "")
+            version = kwargs.get("version", "")
             skipping = kwargs.pop("skip", False) # we raise if file not found only if skipping is True
             refresh = kwargs.get("refresh", False)
@@ -130,14 +136,14 @@
             )
 
             if not refresh:
-                if skipping and self.pipe.file_checker(session_details, extra):
+                if skipping and self.pipe.file_checker(session_details, extra=extra, version=version):
                     logger.load_info(
                         f"File exists for {self.pipe_name}{'.' + extra if extra else ''}. Loading and processing have been skipped"
                     )
                     return None
                 logger.debug(f"Trying to load saved data")
                 try:
-                    result = self.pipe.file_loader(session_details, extra=extra)
+                    result = self.pipe.file_loader(session_details, extra=extra, version=version)
                     logger.load_info(
                         f"Found and loaded {self.pipe_name}{'.' + extra if extra else ''} file. Processing has been skipped "
                     )
@@ -150,31 +156,31 @@
             logger.load_info(
                 f"Performing the computation to generate {self.pipe_name}{'.' + extra if extra else ''}. Hold tight."
             )
-            return func(session_details, *args, **kwargs)
+            return function(session_details, *args, **kwargs)
 
         return wrap
 
-    def _saving_wrapper(self, func: Callable):
+    def _save_after_generate_wrapper(self, function: Callable):
         # decorator to load instead of calculating if not refreshing and saved data exists
-        @wraps(func)
-        def wrap(session_details, *args, **kwargs):
+        @wraps(function)
+        def wrap(session, *args, **kwargs):
             logger = logging.getLogger("save_pipeline")
 
             kwargs = kwargs.copy()
             extra = kwargs.get("extra", "")
+            version = kwargs.get("version", "")
             save_pipeline = kwargs.pop("save_pipeline", True)
-
-            result = func(session_details, *args, **kwargs)
-            if session_details is not None:
+            result = function(session, *args, **kwargs)
+            if session is not None:
                 if save_pipeline:
                     # we overwrite inside saver, if file exists and save_pipeline is True
-                    self.pipe.file_checker(result, session_details, extra=extra)
+                    self.pipe.file_saver(session, result, extra=extra, version=version)
                 else:
                     logger.warning(
                         f"Cannot guess data saving location for {self.pipe_name}: 'session_details' argument must be supplied."
                     )
             return result
 
-        return wrap
+        return wrap
\ No newline at end of file
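`_version_wrapper` now resolves the step's current version itself and injects it as a `version` keyword argument into the wrapped save / load / generate callables. A self-contained toy of that pattern (toy names, no pypelines imports):

```python
from functools import wraps


class ToyStep:
    version = "2"

    def step_current_version(self) -> str:
        # In the library this would come from a config file; hard-coded here.
        return self.version

    def _version_wrapper(self, function):
        @wraps(function)
        def wrapper(*args, **kwargs):
            # Resolve the version at call time and forward it to the wrapped callable.
            version = self.step_current_version()
            return function(*args, version=version, **kwargs)
        return wrapper


def file_loader(session, extra="", version=""):
    # Stand-in for the pipe's real file_loader.
    return f"loading {session} (extra={extra!r}, version={version!r})"


step = ToyStep()
load = step._version_wrapper(file_loader)
print(load("mouse12_2023-10-10_003"))  # -> loading mouse12_2023-10-10_003 (extra='', version='2')
```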
diff --git a/pypelines/versions.py b/pypelines/versions.py
index 02cfdaed30a8a30a582252a809df33054d61aaf6..63face024554f611c9daeec1ebba5e41a3b8552d 100644
--- a/pypelines/versions.py
+++ b/pypelines/versions.py
@@ -1,6 +1,11 @@
 from dataclasses import dataclass
 import hashlib, random, json, inspect, re
+from typing import Callable, Type, Iterable, Protocol, TYPE_CHECKING
+
+if TYPE_CHECKING:
+    from . step import BaseStep
+
 
 @dataclass
 class Version:
     pipe_name : str
@@ -72,7 +77,7 @@ class BaseVersionHandler:
     def get_new_version_string(self) -> str :
         ...
 
-    def get_active_version(self, step) -> Version :
+    def get_active_version(self, step : "BaseStep") -> Version :
         ...
 
     def apply_changes(self, versions) -> None :
diff --git a/setup.py b/setup.py
index 2046f522bc9d8888293fe6637f3d68d81cf5999f..96e1e4573b722da250b1c2e2d7033eb5bd7f1717 100644
--- a/setup.py
+++ b/setup.py
@@ -25,7 +25,7 @@ setup(
     license= 'MIT',
     author= 'Timothé Jost-MOUSSEAU',
     author_email= 'timothe.jost-mousseau@pasteur.com',
-    description= 'Image and video management for research use, in an unified easy to use API',
+    description= 'Framework to organize processing file outputs.',
     classifiers=[
         'Development Status :: 3 - Alpha',
         'Intended Audience :: Developers',
diff --git a/tests/__pycache__/tests.cpython-311.pyc b/tests/__pycache__/tests.cpython-311.pyc
index afa9a4a52152bf6dc1c5436b35c23f22c8904aad..b7fd70566d32d16f28c1d314b77bf84174b64f1a 100644
Binary files a/tests/__pycache__/tests.cpython-311.pyc and b/tests/__pycache__/tests.cpython-311.pyc differ
diff --git a/tests/instances.py b/tests/instances.py
deleted file mode 100644
index cda334b1a9597aeb5ec9172ab57cd7f8133ecd3a..0000000000000000000000000000000000000000
--- a/tests/instances.py
+++ /dev/null
@@ -1,9 +0,0 @@
-
-import pypelines
-
-pipeline_test_instance = pypelines.BasePipeline()
-
-
-@pipeline_test_instance.register_pipe
-class TestPipe(pypelines.BasePipe):
-    
\ No newline at end of file
diff --git a/tests/tests.py b/tests/tests.py
index 97dc509076f6d5875daaf53b0254af3e09d70d45..3afaa61b44770b21bd2e2a01d819c6e5dd3ab685 100644
--- a/tests/tests.py
+++ b/tests/tests.py
@@ -2,19 +2,19 @@ import unittest, sys, os
 
 sys.path.append(os.path.abspath(os.path.join(os.path.dirname(__file__), '../..')))
 
-import pypelines
-
 from pypelines import examples
+from pypelines.sessions import Session
 
 class TestVersions(unittest.TestCase):
 
     def setUp(self):
         self.pipeline = examples.example_pipeline
+        self.session = Session(subject = "test_subject", date = "2023-10-10", number = 0, path = "C:/test", auto_path = True)
+        print(self.session.alias)
 
     def test_pipeline_generate(self):
-        self.assertEqual(self.pipeline.ExamplePipe.example_step1.generate("bonjour"), {"argument1" : "bonjour", "optionnal_argument2" : 23})
-
-if __name__ == '__main__':
-    unittest.main()
+        self.assertEqual(self.pipeline.ExamplePipe.example_step1.generate(self.session, "bonjour"), {"argument1" : "bonjour", "optionnal_argument2" : 23})
 
+if __name__ == '__main__':
+    unittest.main()
\ No newline at end of file
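Step requirements are now declared as "PipeName.step_name" strings (see pypelines/examples.py above) and resolved by `BasePipeline.resolve`, which splits on the dot and looks the step up among the registered pipes. A minimal standalone sketch of that lookup rule, with a toy dict standing in for the real pipe/step registry:

```python
# Toy registry standing in for pipeline.pipes and each pipe's steps (names are illustrative).
registry = {
    "ExamplePipe": {
        "example_step1": "<step object 1>",
        "example_step2": "<step object 2>",
    }
}


def resolve(instance_name: str):
    # Mirrors BasePipeline.resolve: "PipeName.step_name" -> the registered step object.
    pipe_name, step_name = instance_name.split(".")
    return registry[pipe_name][step_name]


print(resolve("ExamplePipe.example_step1"))  # '<step object 1>'
```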