diff --git a/src/nd2_combine/__init__.py b/src/nd2_combine/__init__.py
deleted file mode 100644
index e69de29bb2d1d6434b8b29ae775ad8c2e48c5391..0000000000000000000000000000000000000000
diff --git a/src/nd2_combine/__main__.py b/src/nd2_combine/__main__.py
deleted file mode 100644
index d32e597e5c083528d0c1922cfe5bbc07d3bf3f87..0000000000000000000000000000000000000000
--- a/src/nd2_combine/__main__.py
+++ /dev/null
@@ -1,7 +0,0 @@
-import logging
-from nd2_combine.tools import main
-
-logging.basicConfig(level=logging.INFO)
-
-if __name__ == "__main__":
-    main()
diff --git a/src/nd2_combine/tools.py b/src/nd2_combine/tools.py
deleted file mode 100644
index f9f35c957d3c783dc7044a0640ba399fad2c4582..0000000000000000000000000000000000000000
--- a/src/nd2_combine/tools.py
+++ /dev/null
@@ -1,130 +0,0 @@
-import logging
-import os
-import sys
-from glob import glob
-import numpy as np
-import pims_nd2 as nd
-from nd2tif.transform import Well
-from tqdm import tqdm
-
-logger = logging.getLogger(__name__)
-
-
-def get_paths(path):
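-    """Collect all *.nd2 files one directory level below `path`."""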
-    files = glob(os.path.join(path, "*", "*.nd2"))
-    return files
-
-
-def get_conditions(files):
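-    """Return the sorted, de-duplicated condition names (nd2 file stems)."""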
-    return sorted({f.split(os.path.sep)[-1].split(".")[0] for f in files})
-
-
-def group_input_paths(path, conditions):
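-    """Map each condition to the sorted list of its nd2 paths across subfolders."""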
-    groups = {
-        c: {"inputs": sorted(glob(os.path.join(path, "*", f"{c}.nd2")))}
-        for c in conditions
-    }
-    logger.info(f"Groups of {len(groups)} datasets: {groups}")
-    return groups
-
-
-def gen_out_folder_names(path, conditions, subname):
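-    """Return {condition: <path>/<subname>/<condition>} output folder paths."""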
-    return {c: os.path.join(path, subname, c) for c in conditions}
-
-
-def create_out_folder(path, condition, subname="Combined"):
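-    """Create (or reuse) the output folder <path>/<subname>/<condition> and return its path."""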
-    subpath = os.path.join(path, subname)
-    if not os.path.exists(subpath):
-        os.mkdir(subpath)
-    out_dir = os.path.join(subpath, condition)
-    try:
-        os.mkdir(out_dir)
-    except FileExistsError:
-        logger.warning(f"Output folder already exists, reusing: {out_dir}")
-    return out_dir
-
-
-def read_nd2(path: str, bundle_axes="yx", pos_limit=None):
-    """
-    Read an nd2 file with pims_nd2.ND2_Reader.
-    Yields one dictionary per multipoint position ('m'):
-    {
-        "well_index": position index,
-        "well": single well frame,
-        "order": bundle_axes,
-        "calibration_um": pixel size in micrometers,
-    }
-    """
-    logger.debug(f"read_nd2: open {path}")
-    with nd.ND2_Reader(path) as frames:
-        logger.debug(frames.sizes)
-        # logger.debug(frames.metadata)
-        px_size_um = frames.calibration
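-        # iterate over multipoint positions; each frame bundles the requested axes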
-        frames.iter_axes = "m"
-        frames.bundle_axes = bundle_axes
-        for index, well in enumerate(frames[:pos_limit]):
-            yield {
-                "well_index": index,
-                "well": well,
-                "order": bundle_axes,
-                "calibration_um": px_size_um,
-            }
-
-
-def combine_nd2(*paths, out_folder):
-    """
-    Open a reader per input file, iterate their positions ('m') in
-    lockstep, stack matching frames into one time series per position
-    and save it as a tif.
-    """
-    readers = [read_nd2(p) for p in paths]
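-    # read_nd2 is a lazy generator; zip() below advances all readers in lockstep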
-
-    logger.info(f'Saving tifs to {os.path.join(out_folder, "Pos_XXX.tif")}')
-
-    for i, images in tqdm(enumerate(zip(*readers))):
-        time_series = np.array([im["well"] for im in images], dtype="uint16")
-        well = Well(time_series, "tyx", images[0]["calibration_um"])
-        logger.debug(time_series.shape)
-        path = os.path.join(out_folder, f"Pos_{i:03d}.tif")
-        logger.debug(f"saving to {path}")
-        well.save_tif(path)
-
-
-def main():
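-    """Group the nd2 files under the given folder by condition and combine each group."""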
-
-    subname = "Combined"
-
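-    # the data folder is taken from the last command-line argument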
-    path = sys.argv[-1]
-    logger.info(f"processing {path}")
-
-    files = get_paths(path)
-    logger.info(f"found {len(files)} datasets: \n{files}")
-
-    conditions = get_conditions(files)
-    logger.info(f"Found {len(conditions)} conditions: \n{conditions}")
-
-    inputs = group_input_paths(path, conditions)
-
-    def process_condition(cond):
-        logger.info(f"Condition: {cond}")
-        out_folder = create_out_folder(path, cond, subname)
-        combine_nd2(*inputs[cond]["inputs"], out_folder=out_folder)
-
-    for cond in conditions:
-        process_condition(cond)
-    logger.info("Done processing")
-
-    sys.exit(0)
-
-
-if __name__ == "__main__":
-    main()
diff --git a/src/tests/test_multiwell.py b/src/tests/test_multiwell.py
deleted file mode 100644
index 590d42dddf3cc21670ca08d0f1f034f8e281cc64..0000000000000000000000000000000000000000
--- a/src/tests/test_multiwell.py
+++ /dev/null
@@ -1,14 +0,0 @@
-from droplet_growth import multiwell
-import pytest
-
-
-# def test_number_of_wells(
-#     expected=500,
-#     test_path=r'Z:\Andrey\data\Salome\20200227_4ngml_2x10-3_Elissa.nd2'
-# ):
-#     bf = multiwell.read_stitched_nd2(test_path, bundle='zyx', channel=0, time_limit=None)
-#     wells = multiwell.detect_wells(bf)
-#     assert wells['count'] == expected
-
-# if __name__ == "__main__":
-#     pytest.main()
diff --git a/src/tests/test_write.py b/src/tests/test_write.py
deleted file mode 100755
index 9f0f60851736091f4359b1fae4af5e53ea2829c0..0000000000000000000000000000000000000000
--- a/src/tests/test_write.py
+++ /dev/null
@@ -1,19 +0,0 @@
-import os
-
-import numpy
-
-from nd2tif import read, save
-
-
-def test_save_zeros():
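-    # random 12-bit data (not zeros, despite the file names) so the read-back check is meaningful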
-    arr = numpy.random.randint(0, 2 ** 12, (20, 256, 256), dtype="uint16")
-
-    _ = save.tiff("zeros.tif", arr)
-    _ = save.tiff("zeros_cal.tif", arr, calibration_um=0.1)
-
-    assert os.path.exists("zeros.tif")
-    assert os.path.exists("zeros_cal.tif")
-
-    tif = read.tiff("zeros.tif")
-    numpy.testing.assert_array_equal(arr, tif)