From be799749d0616ece7412c0b123aa34f2945648fa Mon Sep 17 00:00:00 2001 From: Pat Gunn Date: Thu, 7 Apr 2022 16:55:29 -0400 Subject: [PATCH 1/8] Bump version for new in-tree (dev) --- VERSION | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/VERSION b/VERSION index 6ae756c47..f0a2883d6 100644 --- a/VERSION +++ b/VERSION @@ -1 +1 @@ -1.9.9 +1.9.10 From 9651108f8a96e2c43d60274ba5383b827378dfe8 Mon Sep 17 00:00:00 2001 From: EricThomson Date: Mon, 11 Apr 2022 21:55:47 -0400 Subject: [PATCH 2/8] fixed nb viewer (hv, nb, and nb3d) check on empty R --- caiman/source_extraction/cnmf/estimates.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/caiman/source_extraction/cnmf/estimates.py b/caiman/source_extraction/cnmf/estimates.py index f3cdc3dc7..9dd2062d5 100644 --- a/caiman/source_extraction/cnmf/estimates.py +++ b/caiman/source_extraction/cnmf/estimates.py @@ -381,7 +381,7 @@ def nb_view_components(self, Yr=None, img=None, idx=None, plt.ion() nr, T = self.C.shape - if self.R is None: + if self.R is None or self.R == b'NoneType': self.R = self.YrA if self.R.shape != [nr, T]: if self.YrA is None: @@ -433,7 +433,7 @@ def hv_view_components(self, Yr=None, img=None, idx=None, plt.ion() nr, T = self.C.shape - if self.R is None: + if self.R is None or self.R == b'NoneType': self.R = self.YrA if self.R.shape != [nr, T]: if self.YrA is None: @@ -500,7 +500,7 @@ def nb_view_components_3d(self, Yr=None, image_type='mean', dims=None, dims = self.dims plt.ion() nr, T = self.C.shape - if self.R is None: + if self.R is None or self.R == b'NoneType': self.R = self.YrA if self.R.shape != [nr, T]: if self.YrA is None: From 4afbd709b103e21d0cb43c2d3b37ae0d037b1c4f Mon Sep 17 00:00:00 2001 From: Pat Gunn Date: Fri, 15 Apr 2022 16:03:53 -0400 Subject: [PATCH 3/8] Update docs and environment files - py38-py310 are now the supported versions --- CHANGELOG.txt | 9 +++++++++ environment-minimal.yml | 2 +- environment.yml | 2 +- 3 files changed, 11 
insertions(+), 2 deletions(-) diff --git a/CHANGELOG.txt b/CHANGELOG.txt index b62d16a0a..0fc4478c0 100644 --- a/CHANGELOG.txt +++ b/CHANGELOG.txt @@ -1,5 +1,14 @@ Please check this file after new releases; it will cover (only) major changes or changes that will impact how CaImAn runs, and will not generally cover new features or minor changes (see the version history on Github for that). Most recent changes are at the top. +1.9.10 +------ +Python 3.8-3.10 are supported versions of Caiman. 3.7 is no longer supported. + +1.9.9 +----- +Dependencies have updated; internal hdf5 file semantics use is changed, and both tensorflow and h5py +use now is compatible with modern versions of those libraries (tensorflow 2.4.x+ is now required) + 1.9.0 ----- This implements a storage layer that, if enabled, will try not to write files diff --git a/environment-minimal.yml b/environment-minimal.yml index 642a44290..4dd2e2d0c 100644 --- a/environment-minimal.yml +++ b/environment-minimal.yml @@ -1,7 +1,7 @@ channels: - conda-forge dependencies: -- python >=3.9 +- python >=3.10 - cython - future - h5py diff --git a/environment.yml b/environment.yml index 1fe8e349e..f244be6ee 100644 --- a/environment.yml +++ b/environment.yml @@ -1,7 +1,7 @@ channels: - conda-forge dependencies: -- python >=3.9 +- python >=3.10 - bokeh - coverage - cython From 627ec0cc85f8dc4f00b598095263d48272cd33bb Mon Sep 17 00:00:00 2001 From: Pat Gunn Date: Fri, 15 Apr 2022 16:24:26 -0400 Subject: [PATCH 4/8] more f-strings --- caiman/cluster.py | 4 ++-- caiman/components_evaluation.py | 4 ++-- caiman/motion_correction.py | 12 ++++++------ 3 files changed, 10 insertions(+), 10 deletions(-) diff --git a/caiman/cluster.py b/caiman/cluster.py index 5a59ccde7..1330def99 100644 --- a/caiman/cluster.py +++ b/caiman/cluster.py @@ -223,9 +223,9 @@ def start_server(slurm_script: str = None, ipcluster: str = "ipcluster", ncpus: if slurm_script is None: if ipcluster == "ipcluster": - subprocess.Popen("ipcluster start -n 
{0}".format(ncpus), shell=True, close_fds=(os.name != 'nt')) + subprocess.Popen(f"ipcluster start -n {ncpus}", shell=True, close_fds=(os.name != 'nt')) else: - subprocess.Popen(shlex.split("{0} start -n {1}".format(ipcluster, ncpus)), + subprocess.Popen(shlex.split(f"{ipcluster} start -n {ncpus}"), shell=True, close_fds=(os.name != 'nt')) time.sleep(1.5) diff --git a/caiman/components_evaluation.py b/caiman/components_evaluation.py index cacec6a87..0f17e6122 100644 --- a/caiman/components_evaluation.py +++ b/caiman/components_evaluation.py @@ -299,7 +299,7 @@ def evaluate_components_CNN(A, model_file = model_name + ".json" model_weights = model_name + ".h5" else: - raise FileNotFoundError("File for requested model {} not found".format(model_name)) + raise FileNotFoundError(f"File for requested model {model_name} not found") with open(model_file, 'r') as json_file: print('USING MODEL:' + model_file) loaded_model_json = json_file.read() @@ -313,7 +313,7 @@ def evaluate_components_CNN(A, elif os.path.isfile(model_name + ".h5.pb"): model_file = model_name + ".h5.pb" else: - raise FileNotFoundError("File for requested model {} not found".format(model_name)) + raise FileNotFoundError(f"File for requested model {model_name} not found") loaded_model = load_graph(model_file) logging.debug("Loaded model from disk") diff --git a/caiman/motion_correction.py b/caiman/motion_correction.py index 795ae2161..86084e73c 100644 --- a/caiman/motion_correction.py +++ b/caiman/motion_correction.py @@ -438,8 +438,8 @@ def apply_shifts_movie(self, fname, rigid_shifts:bool=None, save_memmap:bool=Fal if rigid_shifts is not None: logging.warning('The rigid_shifts flag is deprecated and it is ' + 'being ignored. 
The value is read directly from' + - ' mc.pw_rigid and is current set to the opposite' + - ' of {}'.format(self.pw_rigid)) + ' mc.pw_rigid and is currently set to the opposite' + + f' of {self.pw_rigid}') if self.pw_rigid is False: if self.is3D: @@ -3097,7 +3097,7 @@ def motion_correction_piecewise(fname, splits, strides, overlaps, add_to_movie=0 dims, T = cm.source_extraction.cnmf.utilities.get_file_size(fname, var_name_hdf5=var_name_hdf5) z = np.zeros(dims) dims = z[indices].shape - logging.debug('Number of Splits: {}'.format(splits)) + logging.debug(f'Number of Splits: {splits}') if isinstance(splits, int): if subidx is None: rng = range(T) @@ -3110,7 +3110,7 @@ def motion_correction_piecewise(fname, splits, strides, overlaps, add_to_movie=0 idxs = splits save_movie = False if template is None: - raise Exception('Not implemented') + raise Exception('motion_correction_piecewise(): Templateless not implemented') shape_mov = (np.prod(dims), T) # if is3D: @@ -3133,13 +3133,13 @@ def motion_correction_piecewise(fname, splits, strides, overlaps, add_to_movie=0 np.memmap(fname_tot, mode='w+', dtype=np.float32, shape=prepare_shape(shape_mov), order=order) - logging.info('Saving file as {}'.format(fname_tot)) + logging.info(f'Saving file as {fname_tot}') else: fname_tot = None pars = [] for idx in idxs: - logging.debug('Processing: frames: {}'.format(idx)) + logging.debug(f'Processing: frames: {idx}') pars.append([fname, fname_tot, idx, shape_mov, template, strides, overlaps, max_shifts, np.array( add_to_movie, dtype=np.float32), max_deviation_rigid, upsample_factor_grid, newoverlaps, newstrides, shifts_opencv, nonneg_movie, gSig_filt, is_fiji, From 6c5d97ec9b4720e35081700d83b7bba4e939e07e Mon Sep 17 00:00:00 2001 From: Pat Gunn Date: Tue, 10 May 2022 17:00:53 -0400 Subject: [PATCH 5/8] Fix an old typo that somehow survived --- caiman/source_extraction/cnmf/params.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/caiman/source_extraction/cnmf/params.py 
b/caiman/source_extraction/cnmf/params.py index d36af73af..cca725ff6 100644 --- a/caiman/source_extraction/cnmf/params.py +++ b/caiman/source_extraction/cnmf/params.py @@ -1007,7 +1007,7 @@ def get_group(self, group): def __eq__(self, other): - if not instance(other, CNMFParams): + if not isinstance(other, CNMFParams): return False parent_dict1 = self.to_dict() From 00d32b69a14c5203b4d696f52f33d5fba5f88475 Mon Sep 17 00:00:00 2001 From: Pat Gunn Date: Tue, 17 May 2022 11:57:54 -0400 Subject: [PATCH 6/8] caiman hdf5 logic: make it both more ambitious and cautious --- caiman/base/movies.py | 8 ++++++-- caiman/source_extraction/cnmf/utilities.py | 5 +++-- 2 files changed, 9 insertions(+), 4 deletions(-) diff --git a/caiman/base/movies.py b/caiman/base/movies.py index 6acc57152..bdf95d0a6 100644 --- a/caiman/base/movies.py +++ b/caiman/base/movies.py @@ -1657,9 +1657,13 @@ def rgb2gray(rgb): return movie(**f).astype(outtype) elif extension in ('.hdf5', '.h5', '.nwb'): + # TODO: Merge logic here with utilities.py:get_file_size() with h5py.File(file_name, "r") as f: - fkeys = list(f.keys()) - if len(fkeys) == 1: # If the hdf5 file we're parsing has only one dataset inside it, ignore the arg and pick that dataset + ignore_keys = ['__DATA_TYPES__'] # Known metadata that tools provide, add to this as needed. Sync with utils.py:get_file_size() !! 
+ fkeys = list(filter(lambda x: x not in ignore_keys, f.keys())) + if len(fkeys) == 1 and 'Dataset' in str(type(f[fkeys[0]])): # If the hdf5 file we're parsing has only one dataset inside it, + # ignore the arg and pick that dataset + # TODO: Consider recursing into a group to find a dataset var_name_hdf5 = fkeys[0] if extension == '.nwb': # Apparently nwb files are specially-formatted hdf5 files diff --git a/caiman/source_extraction/cnmf/utilities.py b/caiman/source_extraction/cnmf/utilities.py index 183ca73b1..8266aa35c 100644 --- a/caiman/source_extraction/cnmf/utilities.py +++ b/caiman/source_extraction/cnmf/utilities.py @@ -1014,8 +1014,9 @@ def get_file_size(file_name, var_name_hdf5='mov'): # FIXME this doesn't match the logic in movies.py:load() # Consider pulling a lot of the "data source" code out into one place with h5py.File(file_name, "r") as f: - kk = list(f.keys()) - if len(kk) == 1: + ignore_keys = ['__DATA_TYPES__'] # Known metadata that tools provide, add to this as needed. Sync with movies.py:load() !! + kk = list(filter(lambda x: x not in ignore_keys, f.keys())) + if len(kk) == 1 and 'Dataset' in str(type(f[kk[0]])): # TODO: Consider recursing into a group to find a dataset siz = f[kk[0]].shape elif var_name_hdf5 in f: if extension == '.nwb': From eb4ba348fe118af4f5afbf1e770736ae531098e8 Mon Sep 17 00:00:00 2001 From: Pat Gunn Date: Fri, 20 May 2022 15:24:33 -0400 Subject: [PATCH 7/8] cnmf/hdf5: Improve handling of serialisation of NoneType. Also fix some typos. 
--- caiman/source_extraction/cnmf/estimates.py | 10 +++++----- caiman/source_extraction/cnmf/spatial.py | 2 +- caiman/utils/utils.py | 7 ++++++- 3 files changed, 12 insertions(+), 7 deletions(-) diff --git a/caiman/source_extraction/cnmf/estimates.py b/caiman/source_extraction/cnmf/estimates.py index 9dd2062d5..ad4fcf4f5 100644 --- a/caiman/source_extraction/cnmf/estimates.py +++ b/caiman/source_extraction/cnmf/estimates.py @@ -1421,7 +1421,7 @@ def remove_small_large_neurons(self, min_size_neuro, max_size_neuro, indeces of components with size within the acceptable range ''' if self.A_thr is None: - raise Exception('You need to compute thresolded components before calling remove_duplicates: use the threshold_components method') + raise Exception('You need to compute thresholded components before calling remove_duplicates: use the threshold_components method') A_gt_thr_bin = self.A_thr.toarray() > 0 size_neurons_gt = A_gt_thr_bin.sum(0) @@ -1451,7 +1451,7 @@ def remove_duplicates(self, predictions=None, r_values=None, dist_thr=0.1, plot_duplicates ''' if self.A_thr is None: - raise Exception('You need to compute thresolded components before calling remove_duplicates: use the threshold_components method') + raise Exception('You need to compute thresholded components before calling remove_duplicates: use the threshold_components method') A_gt_thr_bin = (self.A_thr.toarray() > 0).reshape([self.dims[0], self.dims[1], -1], order='F').transpose([2, 0, 1]) * 1. 
@@ -1492,7 +1492,7 @@ def masks_2_neurofinder(self, dataset_name): """ if self.A_thr is None: raise Exception( - 'You need to compute thresolded components before calling this method: use the threshold_components method') + 'You need to compute thresholded components before calling this method: use the threshold_components method') bin_masks = self.A_thr.reshape([self.dims[0], self.dims[1], -1], order='F').transpose([2, 0, 1]) return nf_masks_to_neurof_dict(bin_masks, dataset_name) @@ -1711,10 +1711,10 @@ def compare_components(estimate_gt, estimate_cmp, Cn=None, thresh_cost=.8, min_ labels=['GT', 'CMP'], plot_results=False): if estimate_gt.A_thr is None: raise Exception( - 'You need to compute thresolded components for first argument before calling remove_duplicates: use the threshold_components method') + 'You need to compute thresholded components for first argument before calling remove_duplicates: use the threshold_components method') if estimate_cmp.A_thr is None: raise Exception( - 'You need to compute thresolded components for second argument before calling remove_duplicates: use the threshold_components method') + 'You need to compute thresholded components for second argument before calling remove_duplicates: use the threshold_components method') if plot_results: plt.figure(figsize=(20, 10)) diff --git a/caiman/source_extraction/cnmf/spatial.py b/caiman/source_extraction/cnmf/spatial.py index b6943ca7c..6d0dd4c4a 100644 --- a/caiman/source_extraction/cnmf/spatial.py +++ b/caiman/source_extraction/cnmf/spatial.py @@ -455,7 +455,7 @@ def construct_ellipse_parallel(pars): return np.sqrt(np.sum([old_div((dist_cm * V[:, k]) ** 2, dkk[k]) for k in range(len(dkk))], 0)) <= dist def threshold_components(A, dims, medw=None, thr_method='max', maxthr=0.1, nrgthr=0.9999, extract_cc=True, - se=None, ss=None, dview=None): + se=None, ss=None, dview=None) -> np.ndarray: """ Post-processing of spatial components which includes the following steps diff --git 
a/caiman/utils/utils.py b/caiman/utils/utils.py index b9a0c26b9..c5e7d16ee 100644 --- a/caiman/utils/utils.py +++ b/caiman/utils/utils.py @@ -544,6 +544,10 @@ def recursively_load_dict_contents_from_group(h5file:h5py.File, path:str) -> Dic Starting with Caiman 1.9.9 we started saving strings as attributes rather than independent datasets, which gets us a better syntax and less damage to the strings, at the cost of scanning properly for them being a little more involved. In future versions of Caiman we may store all scalars as attributes. + + There's some special casing here that should be solved in a more general way; anything serialised into + hdf5 and then deserialised should probably go back through the class constructor, and revalidated + so all the fields end up with appropriate data types. ''' ans:Dict = {} @@ -560,7 +564,6 @@ def recursively_load_dict_contents_from_group(h5file:h5py.File, path:str) -> Dic ans[key] = item[()] elif key in ['dims', 'medw', 'sigma_smooth_snmf', 'dxy', 'max_shifts', 'strides', 'overlaps']: - if isinstance(item[()], np.ndarray): ans[key] = tuple(item[()]) else: @@ -570,6 +573,8 @@ def recursively_load_dict_contents_from_group(h5file:h5py.File, path:str) -> Dic ans[key] = bool(item[()]) else: ans[key] = item[()] + if isinstance(ans[key], bytes) and ans[key] == b'NoneType': + ans[key] = None elif isinstance(item, h5py._hl.group.Group): if key in ('A', 'W', 'Ab', 'downscale_matrix', 'upscale_matrix'): From 7c3f4374f384bcbb5c7a60f3f914ecb469d38e6c Mon Sep 17 00:00:00 2001 From: kushalkolar Date: Tue, 24 May 2022 00:14:32 -0400 Subject: [PATCH 8/8] fixes #977, dir is prefixed onto base_name for offline mcorr --- caiman/motion_correction.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/caiman/motion_correction.py b/caiman/motion_correction.py index 795ae2161..0d636d95d 100644 --- a/caiman/motion_correction.py +++ b/caiman/motion_correction.py @@ -3125,6 +3125,8 @@ def motion_correction_piecewise(fname, splits, strides, overlaps, 
add_to_movie=0 if save_movie: if base_name is None: base_name = os.path.split(fname)[1][:-4] + base_name = caiman.paths.fn_relocated(base_name) + fname_tot:Optional[str] = caiman.paths.memmap_frames_filename(base_name, dims, T, order) if isinstance(fname, tuple): fname_tot = os.path.join(os.path.split(fname[0])[0], fname_tot)