diff --git a/notebooks/membrane_processing.ipynb b/notebooks/membrane_processing.ipynb
index fd18205b22febf9ecfc865b31fafe0f9fadbcc73..8dd582b93241a126c48e25588b5d35c4437a6bcf 100644
--- a/notebooks/membrane_processing.ipynb
+++ b/notebooks/membrane_processing.ipynb
@@ -4,7 +4,7 @@
"cell_type": "markdown",
"metadata": {},
"source": [
- "# Membrane surface reconstruction from full tomograms\n",
+ "# Membrane surface reconstruction\n",
"\n",
"This notebook is used to\n",
"- reconstruct the different surfaces segmented earlier\n",
@@ -61,7 +61,11 @@
"source": [
"# indicate where data is stored\n",
"data_dir = Path(\"/Volumes/Eirene/Segmentation/membrain_seg_original_pixel_spacing/Auto_Seg_curation\")\n",
- "out_dir = Path(\"/Volumes/Eirene/Segmentation/membrain_seg_original_pixel_spacing/Auto_Seg_curation/surface_morphology\")\n",
+ "tomogram_dir = Path(\"/Volumes/Eirene/Segmentation\")\n",
+ "\n",
+ "out_dir = Path(\"/Volumes/Eirene/Segmentation/membrain_seg_original_pixel_spacing/Auto_Seg_curation/surface_morphology_spacing_corr\")\n",
+ "if not os.path.exists(out_dir):\n",
+ " os.makedirs(out_dir)\n",
"\n",
"# use pathlib\n",
"files = sorted(list(data_dir.glob(\"*_cor.tif\")))"
@@ -135,10 +139,21 @@
"\n",
"for i, file in enumerate(files):\n",
"\n",
- " tmpout = out_dir / (file.stem + f\"_surf_{3}.obj\")\n",
- " if os.path.exists(tmpout):\n",
- " print('skipping', tmpout)\n",
- " continue\n",
+ " # # restrict to subset of tomos\n",
+ " # tomos_to_process = [\n",
+ " # 'TS_01',\n",
+ " # 'TS_10',\n",
+ " # 'TS_23',\n",
+ " # ]\n",
+ "\n",
+ " # if not max([t in file.stem for t in tomos_to_process]):\n",
+ " # continue\n",
+ "\n",
+ " # # don't process if already processed\n",
+ " # tmpout = out_dir / (file.stem + f\"_surf_{3}.obj\")\n",
+ " # if os.path.exists(tmpout):\n",
+ " # print('skipping', tmpout)\n",
+ " # continue\n",
"\n",
" print(f\"Processing file {i+1} of {len(files)}\")\n",
"\n",
@@ -149,7 +164,8 @@
" seg = ndimage.binary_erosion(seg, iterations=2)\n",
" im = seg * im\n",
"\n",
- " spacing = 1\n",
+ " tomogram_path = tomogram_dir / (file.stem.split('_MemBrain')[0] + \".mrc\")\n",
+ " spacing = float(mrcfile.mmap(tomogram_path).voxel_size.x) / 10.\n",
" k_dec = 50\n",
"\n",
" surfs = []\n",
@@ -186,14 +202,13 @@
" pts_mesh = surf.points\n",
" tree = cKDTree(pts)\n",
" distances, indices = tree.query(pts_mesh)\n",
- " surf, ridx = surf.remove_points(distances > 10)\n",
+ " surf, ridx = surf.remove_points(distances > 10, mode='all')\n",
"\n",
" # smooth\n",
" surf = surf.smooth_taubin(n_iter=50)\n",
- "\n",
" # creates wedges, so try to be more conservative\n",
" surf = surf.smooth(n_iter=1000, boundary_smoothing=True, relaxation_factor=.1)\n",
- "\n",
+ " \n",
" surfs.append(surf)\n",
"\n",
" # save cloud points\n",
@@ -233,7 +248,6 @@
"df = []\n",
"for i, file in enumerate(files):\n",
" d = np.load(out_dir / (file.stem + \"_distances.npy\"))\n",
- " # c = np.load(out_dir / (file.stem + \"_curvature.npy\"))\n",
" \n",
" tmpdf = pd.DataFrame({\n",
" 'distance': d,\n",
@@ -304,7 +318,7 @@
"\n",
"\n",
"distance_clim = [np.percentile(df.distance, p) for p in [5, 95]]\n",
- "distance_clim = None\n",
+ "# distance_clim = None\n",
"\n",
"# output file suffix\n",
"# suffix = \"_distances\"\n",
@@ -312,6 +326,18 @@
"\n",
"for i, file in enumerate(files[:]):\n",
"\n",
+ " print('processing', file)\n",
+ "\n",
+ " # # restrict to subset of tomos\n",
+ " # tomos_to_process = [\n",
+ " # 'TS_01',\n",
+ " # 'TS_10',\n",
+ " # 'TS_23',\n",
+ " # ]\n",
+ "\n",
+ " # if not max([t in file.stem for t in tomos_to_process]):\n",
+ " # continue\n",
+ "\n",
" print(f\"Processing file {i+1} of {len(files)}\")\n",
"\n",
" orig_surfs = []\n",
@@ -322,10 +348,13 @@
" surf = pv.reader.OBJReader(out_file).read()\n",
" orig_surfs.append(surf)\n",
"\n",
- " if '057B30G2_TS_20_bin2_tiltcor_rec_corrected_flatcrop' in file.stem:\n",
+ " # for full tomograms\n",
+ " smoothen_borders = False\n",
+ " \n",
+ " if not smoothen_borders:\n",
" s = surf\n",
" else:\n",
- " # smooth borders\n",
+ " # smoothen borders in case of cropouts\n",
" pts = np.load(out_dir / (file.stem + f\"_pts_{object_id}.npy\"))\n",
" tree = cKDTree(pts)\n",
" s = surf.subdivide(2, subfilter='linear')\n",
@@ -334,10 +363,10 @@
" while 1:\n",
" pts_mesh = s.points\n",
" distances, indices = tree.query(pts_mesh)\n",
- " points_to_remove = np.where(distances > 5)[0]\n",
+ " points_to_remove = np.where(distances > 6)[0]\n",
"\n",
" border_indices = get_border_vertex_inds(s)\n",
- " points_to_remove = np.where((distances > 5) & np.isin(np.arange(len(pts_mesh)), border_indices))[0]\n",
+ " points_to_remove = np.where((distances > 6) & np.isin(np.arange(len(pts_mesh)), border_indices))[0]\n",
" \n",
" s, ridx = s.remove_points(points_to_remove)\n",
"\n",