44from typing import Any , Tuple , Union
55import weakref
66import zipfile
7+ from mapmanagercore .lazy_geo_pd_images .loader .base import Position
78import numpy as np
89import pandas as pd
910
2425from mapmanagercore .analysis_params import AnalysisParams
2526from mapmanagercore .logger import logger
2627
28+
2729class AnnotationsBase (LazyImagesGeoPandas ):
2830 _images : ImageLoader
2931
@@ -32,7 +34,8 @@ def __init__(self,
3234 lineSegments : Union [str , pd .DataFrame ] = pd .DataFrame (),
3335 points : Union [str , pd .DataFrame ] = pd .DataFrame (),
3436 analysisParams : AnalysisParams = AnalysisParams (),
35- path : str = None ):
37+ path : str = None ,
38+ version : int = None ):
3639
3740 super ().__init__ (loader )
3841
@@ -49,35 +52,43 @@ def __init__(self,
4952
5053 self ._segments = LazyGeoFrame (
5154 Segment , data = lineSegments , store = weakref .ref (self ))
52- self ._points = LazyGeoFrame (Spine , data = points , store = weakref .ref (self ))
55+ self ._points = LazyGeoFrame (
56+ Spine , data = points , store = weakref .ref (self ))
5357
5458 self .loader = loader
5559 self .path = path
60+
61+ # To invalidate columns that were mis-computed in a previous version,
62+ # we can conditionally check the version number:
63+ # if version == 0:
64+ # then we can invalidate the affected columns by name:
65+ # self._segments.invalidateColumns([... columns ...])
66+
5667
5768 # abb
5869 def __str__ (self ):
5970 """Print info about the map.
60-
71+
6172 See: _SingleTimePointAnnotationsBase()
6273 """
6374 numTimepoints = len (self ._images .timePoints ())
6475 numPnts = len (self .points ._rootDf )
6576 numSegments = len (self .segments ._rootDf )
6677
6778 return f't:{ numTimepoints } , points:{ numPnts } segments:{ numSegments } loader:{ self .loader } '
68-
79+
6980 @property
7081 def segments (self ) -> LazyGeoFrame :
7182 return self ._segments
7283
7384 @property
7485 def points (self ) -> LazyGeoFrame :
7586 return self ._points
76-
87+
7788 @property
7889 def analysisParams (self ) -> AnalysisParams :
7990 return self ._analysisParams
80-
91+
8192 def filterPoints (self , filter : Any ):
8293 """
8394 Filters the points.
@@ -100,7 +111,7 @@ def getTimePoint(self, time: int):
100111 """
101112 from .single_time_point import SingleTimePointAnnotations
102113 return SingleTimePointAnnotations (self , time )
103-
114+
104115 def getPixels (self , time : int , channel : int , zRange : Tuple [int , int ] = None , z : int = None , zSpread : int = 0 ) -> ImageSlice :
105116 """
106117 Loads the image data for a slice.
@@ -152,7 +163,7 @@ def checkFile(cls, path: str, lazy=True, verbose=False) -> bool:
152163 store = zarr .DirectoryStore (path )
153164 else :
154165 store = zarr .ZipStore (path , mode = "r" )
155-
166+
156167 group = zarr .group (store = store )
157168
158169 if verbose :
@@ -191,7 +202,8 @@ def checkFile(cls, path: str, lazy=True, verbose=False) -> bool:
191202
192203 # (2) points
193204 try :
194- _points = group ["points" ] # zarr.core.Array '/points' (255865,) uint8
205+ # zarr.core.Array '/points' (255865,) uint8
206+ _points = group ["points" ]
195207 except (KeyError ) as e :
196208 logger .error ('did not find group "points"' )
197209 logger .error (f' { e } ' )
@@ -216,7 +228,8 @@ def checkFile(cls, path: str, lazy=True, verbose=False) -> bool:
216228 _errors += 1
217229 finally :
218230 try :
219- _lineSegments = pd .read_pickle (BytesIO (_lineSegments [:].tobytes ()))
231+ _lineSegments = pd .read_pickle (
232+ BytesIO (_lineSegments [:].tobytes ()))
220233 if verbose :
221234 logger .info (f'lineSegments: { len (_lineSegments )} ' )
222235 # print(_lineSegments.head())
@@ -244,24 +257,33 @@ def checkFile(cls, path: str, lazy=True, verbose=False) -> bool:
244257 logger .info (f'encountered { _errors } errors while inspecting { path } ' )
245258
246259 return _errors == 0
247-
260+
261+ def merge (self , loader : ImageLoader ):
262+ self .loader .merge (loader )
263+
248264 @classmethod
249265 def load (cls , path : Union [str , None ], lazy = False ):
250266 loader = ZarrLoader (path , lazy = lazy )
251- points = pd .read_pickle (BytesIO (loader .group ["points" ][:].tobytes ()))
252- points = gp .GeoDataFrame (points , geometry = "point" )
253- lineSegments = pd .read_pickle (
254- BytesIO (loader .group ["lineSegments" ][:].tobytes ()))
255- lineSegments = gp .GeoDataFrame (lineSegments , geometry = "segment" )
256267
257- # abb analysisparams
258- _analysisParams_json = loader .group .attrs ['analysisParams' ] # json str
259- analysisParams = AnalysisParams (loadJson = _analysisParams_json )
268+ if "points" in loader .group :
269+ points = pd .read_pickle (
270+ BytesIO (loader .group ["points" ][:].tobytes ()))
271+ points = gp .GeoDataFrame (points , geometry = "point" )
272+ else :
273+ points = gp .GeoDataFrame ()
260274
275+ if "lineSegments" in loader .group :
276+ lineSegments = pd .read_pickle (
277+ BytesIO (loader .group ["lineSegments" ][:].tobytes ()))
278+ lineSegments = gp .GeoDataFrame (lineSegments , geometry = "segment" )
279+ else :
280+ lineSegments = gp .GeoDataFrame ()
281+
282+ analysisParams = loader .analysisParams ()
261283
262284 return cls (loader , lineSegments , points , analysisParams , path )
263285
264- def save (self , path : str = None , compression = zipfile .ZIP_STORED ):
286+ def save (self , path : str = None , compression = zipfile .ZIP_STORED ):
265287 if path is None :
266288 path = self .path
267289
@@ -279,7 +301,8 @@ def save(self, path: str=None, compression=zipfile.ZIP_STORED):
279301
280302 with fs as store :
281303 group = zarr .group (store = store )
282- self ._images .saveTo (group )
304+ images = group .create_group ("images" );
305+ self ._images .saveTo (images )
283306 group .create_dataset (
284307 "points" , data = self .points .toBytes (), dtype = np .uint8 )
285308 group .create_dataset (
0 commit comments