Commit 05c1996f authored by amichaut's avatar amichaut
Browse files

added trackmate ready option

parent c2f8d44f
Pipeline #76437 passed with stages
in 22 seconds
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
......@@ -59,6 +59,7 @@ setup(name='track_analyzer',
'console_scripts': [
'traj_analysis=track_analyzer.scripts.analyze_tracks:main',
'map_analysis=track_analyzer.scripts.analyze_maps:main',
'TA_config=track_analyzer.scripts.make_default_config:main',
]
}
)
......
......@@ -652,7 +652,7 @@ def get_info(data_dir):
def get_data(data_dir, df=None, refresh=False, split_traj=False, set_origin_=False, image=None, reset_dim=['x', 'y'],
invert_axes=[], custom_var={}):
invert_axes=[], custom_var={}, trackmate_input=False):
"""
Main function to import data and perform the initial processing (scaling and computing of time derivatives).
It saves the database as a pickle object.
......@@ -678,6 +678,8 @@ def get_data(data_dir, df=None, refresh=False, split_traj=False, set_origin_=Fal
:type invert_axes: list
:param custom_var: dict of custom variables: {'var_i':{'name':'var_name','unit':'var_unit'}}
:type custom_var: dict
:param trackmate_input: True if input file is a trackmate (v7) csv file
:type trackmate_input: bool
:return: dict with dataframe and key info
:rtype: dict
"""
......@@ -700,7 +702,16 @@ def get_data(data_dir, df=None, refresh=False, split_traj=False, set_origin_=Fal
data_file = osp.join(data_dir, 'positions.csv')
sep = info["separator"] if "separator" in info.keys() else ',' # by default comma separated
sep = '\t' if sep == 'tab' else sep
df = pd.read_csv(data_file, sep=sep) # columns must be ['x','y','z','frame','track']
if osp.exists(data_file):
df = pd.read_csv(data_file, sep=sep) # columns must be ['x','y','z','frame','track']
else:
raise Exception("""data file does not exist, it must be named 'positions.csv' or 'positions.txt'""")
# trackmate specific case
if trackmate_input:
df = df[['POSITION_X','POSITION_Y','POSITION_Z','FRAME','TRACK_ID']]
df.columns = ['x', 'y', 'z', 'frame', 'track']
df = df.loc[3:]
# check data type
df = df.apply(pd.to_numeric, errors='ignore')
......@@ -1495,6 +1506,7 @@ def make_data_config(data_dir=None, export_config=True):
'set_origin_': False, # to reset origin of coordinates with dict with dimensions as key and coordinates as values
'reset_dim': ['x', 'y'], # list of dimensions to reset, as set_origin_ contains coordinates along all dimensions
'invert_axes': [], # list of dimensions to invert (change sign)
'trackmate_input': False, # if the input file is a trackmate (v7) csv file
}
if export_config:
......
......@@ -53,7 +53,8 @@ def map_analysis(data_dir, data=None, image=None, refresh=False, parallelize=Fal
data_config[k] = data_config_default[k]
if data is None:
data = tpr.get_data(data_dir, refresh=refresh, split_traj=data_config["split_traj"], set_origin_=data_config["set_origin_"],
image=image, reset_dim=data_config["reset_dim"], invert_axes=data_config["invert_axes"])
image=image, reset_dim=data_config["reset_dim"], invert_axes=data_config["invert_axes"],
trackmate_input=data_config["trackmate_input"])
df = data['df']
dimensions = data['dimensions']
......@@ -242,13 +243,9 @@ def parse_args(args=None):
parse arguments for main()
"""
# description = """Analyze trajectories
# Argument :
# -
# """
# parser = argparse.ArgumentParser(formatter_class=argparse.RawDescriptionHelpFormatter, description=description)
parser = argparse.ArgumentParser()
parser = argparse.ArgumentParser(description="""
Run the map analysis module in the data_dir directory
""")
parser.add_argument('data_dir',
help='path of the data directory')
......
......@@ -53,7 +53,8 @@ def traj_analysis(data_dir, data=None, image=None, refresh=False, parallelize=Fa
data_config[k] = data_config_default[k]
if data is None:
data = tpr.get_data(data_dir, refresh=refresh, split_traj=data_config["split_traj"], set_origin_=data_config["set_origin_"],
image=image, reset_dim=data_config["reset_dim"], invert_axes=data_config["invert_axes"])
image=image, reset_dim=data_config["reset_dim"], invert_axes=data_config["invert_axes"],
trackmate_input=data_config["trackmate_input"])
df = data['df']
dim = data['dim']
......@@ -290,13 +291,9 @@ def parse_args(args=None):
parse arguments for main()
"""
# description = """Analyze trajectories
# Argument :
# -
# """
# parser = argparse.ArgumentParser(formatter_class=argparse.RawDescriptionHelpFormatter, description=description)
parser = argparse.ArgumentParser()
parser = argparse.ArgumentParser(description="""
Run the trajectory analysis module in the data_dir directory
""")
parser.add_argument('data_dir',
help='path of the data directory')
......
##########################################################################
# Track Analyzer - Quantification and visualization of tracking data #
# Authors: Arthur Michaut #
# Copyright 2016-2019 Harvard Medical School and Brigham and #
# Women's Hospital #
# Copyright 2019-2022 Institut Pasteur and CNRS–UMR3738 #
# See the COPYRIGHT file for details #
# #
# This file is part of Track Analyzer package. #
# #
# Track Analyzer is free software: you can redistribute it and/or modify #
# it under the terms of the GNU General Public License as published by #
# the Free Software Foundation, either version 3 of the License, or #
# (at your option) any later version. #
# #
# Track Analyzer is distributed in the hope that it will be useful, #
# but WITHOUT ANY WARRANTY; without even the implied warranty of #
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the #
# GNU General Public License for more details . #
# #
# You should have received a copy of the GNU General Public License #
# along with Track Analyzer (COPYING). #
# If not, see <https://www.gnu.org/licenses/>. #
##########################################################################
import os.path as osp
import sys
import argparse
from track_analyzer import prepare as tpr
from track_analyzer import plotting as tpl
from track_analyzer import calculate as tca
def make_default_config(data_dir):
    """Generate every default config section and export each one to data_dir/config.

    :param data_dir: path of the data directory the configs are written to
    :return: dict holding the four config sections ('data_config', 'plot_config',
             'traj_config', 'map_config')
    """
    # make sure the export destination exists before any section is written
    tpr.safe_mkdir(osp.join(data_dir, 'config'))

    # each make_*_config call both builds the defaults and exports them to disk
    return {
        'data_config': tpr.make_data_config(data_dir=data_dir, export_config=True),
        'plot_config': tpl.make_plot_config(data_dir=data_dir, export_config=True),
        'traj_config': tpr.make_traj_config(data_dir=data_dir, export_config=True),
        'map_config': tpr.make_map_config(data_dir=data_dir, export_config=True),
    }
def parse_args(args=None):
    """Parse the command-line arguments for main().

    :param args: list of argument strings (argparse falls back to sys.argv if None)
    :return: argparse.Namespace with the parsed 'data_dir' attribute
    """
    arg_parser = argparse.ArgumentParser(description="""
Make a default set of config files and save it to data_dir
""")
    arg_parser.add_argument('data_dir', help='path of the data directory')
    return arg_parser.parse_args(args)
def main(args=None):
    """Command-line entry point: validate data_dir, then export default configs there.

    :param args: optional list of CLI argument strings; defaults to sys.argv[1:]
    :raises FileNotFoundError: if the passed data directory does not exist
    :raises NotADirectoryError: if the passed path exists but is not a directory
    """
    args = sys.argv[1:] if args is None else args
    parsed_args = parse_args(args)

    data_dir = osp.realpath(parsed_args.data_dir)
    # raise specific built-in exceptions (both Exception subclasses, so callers
    # catching Exception still work) instead of a generic Exception
    if not osp.exists(data_dir):
        raise FileNotFoundError("ERROR: the passed data directory does not exist. Aborting...")
    if not osp.isdir(data_dir):
        raise NotADirectoryError("ERROR: the passed data directory is not a directory. Aborting...")

    # make config
    make_default_config(data_dir)


if __name__ == '__main__':
    main()
Supports Markdown
0% or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment