diff --git a/doc/get_started/quickstart.rst b/doc/get_started/quickstart.rst
index bfa4335ac1..1d532c9387 100644
--- a/doc/get_started/quickstart.rst
+++ b/doc/get_started/quickstart.rst
@@ -336,7 +336,7 @@ Alternatively we can pass a full dictionary containing the parameters:

     # parameters set by params dictionary
     sorting_TDC_2 = ss.run_sorter(
-        sorter_name="tridesclous", recording=recording_preprocessed, output_folder="tdc_output2", **other_params
+        sorter_name="tridesclous", recording=recording_preprocessed, folder="tdc_output2", **other_params
     )
     print(sorting_TDC_2)

diff --git a/doc/how_to/analyze_neuropixels.rst b/doc/how_to/analyze_neuropixels.rst
index 1fe741ea48..04a9736b80 100644
--- a/doc/how_to/analyze_neuropixels.rst
+++ b/doc/how_to/analyze_neuropixels.rst
@@ -567,7 +567,7 @@ In this example:

     # run kilosort2.5 without drift correction
     params_kilosort2_5 = {'do_correction': False}

-    sorting = si.run_sorter('kilosort2_5', rec, output_folder=base_folder / 'kilosort2.5_output',
+    sorting = si.run_sorter('kilosort2_5', rec, folder=base_folder / 'kilosort2.5_output',
                             docker_image=True, verbose=True, **params_kilosort2_5)

 .. code:: ipython3

diff --git a/doc/how_to/process_by_channel_group.rst b/doc/how_to/process_by_channel_group.rst
index 334f83b247..0e6ae49d37 100644
--- a/doc/how_to/process_by_channel_group.rst
+++ b/doc/how_to/process_by_channel_group.rst
@@ -160,7 +160,7 @@ sorting objects in a dictionary for later use.
         sorting = run_sorter(
             sorter_name='kilosort2',
             recording=split_preprocessed_recording,
-            output_folder=f"folder_KS2_group{group}"
+            folder=f"folder_KS2_group{group}"
         )
         sortings[group] = sorting
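For callers migrating their own scripts, the rename applied throughout these docs is purely mechanical. A minimal sketch (assuming `recording` is an existing BaseRecording and the Tridesclous sorter is installed)::

    from spikeinterface.sorters import run_sorter

    # `folder` (formerly `output_folder`) is where the sorter writes its working files
    sorting = run_sorter(sorter_name="tridesclous", recording=recording, folder="results_TDC")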
diff --git a/doc/modules/sorters.rst b/doc/modules/sorters.rst
index d8a4708236..6bf3a60e46 100644
--- a/doc/modules/sorters.rst
+++ b/doc/modules/sorters.rst
@@ -55,15 +55,15 @@ to easily run spike sorters:
     from spikeinterface.sorters import run_sorter

     # run Tridesclous
-    sorting_TDC = run_sorter(sorter_name="tridesclous", recording=recording, output_folder="/folder_TDC")
+    sorting_TDC = run_sorter(sorter_name="tridesclous", recording=recording, folder="/folder_TDC")
     # run Kilosort2.5
-    sorting_KS2_5 = run_sorter(sorter_name="kilosort2_5", recording=recording, output_folder="/folder_KS2_5")
+    sorting_KS2_5 = run_sorter(sorter_name="kilosort2_5", recording=recording, folder="/folder_KS2_5")
     # run IronClust
-    sorting_IC = run_sorter(sorter_name="ironclust", recording=recording, output_folder="/folder_IC")
+    sorting_IC = run_sorter(sorter_name="ironclust", recording=recording, folder="/folder_IC")
     # run pyKilosort
-    sorting_pyKS = run_sorter(sorter_name="pykilosort", recording=recording, output_folder="/folder_pyKS")
+    sorting_pyKS = run_sorter(sorter_name="pykilosort", recording=recording, folder="/folder_pyKS")
     # run SpykingCircus
-    sorting_SC = run_sorter(sorter_name="spykingcircus", recording=recording, output_folder="/folder_SC")
+    sorting_SC = run_sorter(sorter_name="spykingcircus", recording=recording, folder="/folder_SC")

 Then the output, which is a :py:class:`~spikeinterface.core.BaseSorting` object, can be easily

@@ -87,10 +87,10 @@ Spike-sorter-specific parameters can be controlled directly from the

 .. code-block:: python

-    sorting_TDC = run_sorter(sorter_name='tridesclous', recording=recording, output_folder="/folder_TDC",
+    sorting_TDC = run_sorter(sorter_name='tridesclous', recording=recording, folder="/folder_TDC",
                              detect_threshold=8.)

-    sorting_KS2_5 = run_sorter(sorter_name="kilosort2_5", recording=recording, output_folder="/folder_KS2_5"
+    sorting_KS2_5 = run_sorter(sorter_name="kilosort2_5", recording=recording, folder="/folder_KS2_5",
                                do_correction=False, preclust_threshold=6, freq_min=200.)

@@ -193,7 +193,7 @@ The following code creates a test recording and runs a containerized spike sorte

     sorting = ss.run_sorter(sorter_name='kilosort3',
                             recording=test_recording,
-                            output_folder="kilosort3",
+                            folder="kilosort3",
                             singularity_image=True)
     print(sorting)

@@ -208,7 +208,7 @@ To run in Docker instead of Singularity, use ``docker_image=True``.

 .. code-block:: python

     sorting = run_sorter(sorter_name='kilosort3', recording=test_recording,
-                         output_folder="/tmp/kilosort3", docker_image=True)
+                         folder="/tmp/kilosort3", docker_image=True)

 To use a specific image, set either ``docker_image`` or ``singularity_image`` to a string,
 e.g. ``singularity_image="spikeinterface/kilosort3-compiled-base:0.1.0"``.

@@ -217,7 +217,7 @@

     sorting = run_sorter(sorter_name="kilosort3",
                          recording=test_recording,
-                         output_folder="kilosort3",
+                         folder="kilosort3",
                          singularity_image="spikeinterface/kilosort3-compiled-base:0.1.0")


@@ -301,10 +301,10 @@ an :code:`engine` that supports parallel processing (such as :code:`joblib` or :
     another_recording = ...

     job_list = [
-        {'sorter_name': 'tridesclous', 'recording': recording, 'output_folder': 'folder1','detect_threshold': 5.},
-        {'sorter_name': 'tridesclous', 'recording': another_recording, 'output_folder': 'folder2', 'detect_threshold': 5.},
-        {'sorter_name': 'herdingspikes', 'recording': recording, 'output_folder': 'folder3', 'clustering_bandwidth': 8., 'docker_image': True},
-        {'sorter_name': 'herdingspikes', 'recording': another_recording, 'output_folder': 'folder4', 'clustering_bandwidth': 8., 'docker_image': True},
+        {'sorter_name': 'tridesclous', 'recording': recording, 'folder': 'folder1', 'detect_threshold': 5.},
+        {'sorter_name': 'tridesclous', 'recording': another_recording, 'folder': 'folder2', 'detect_threshold': 5.},
+        {'sorter_name': 'herdingspikes', 'recording': recording, 'folder': 'folder3', 'clustering_bandwidth': 8., 'docker_image': True},
+        {'sorter_name': 'herdingspikes', 'recording': another_recording, 'folder': 'folder4', 'clustering_bandwidth': 8., 'docker_image': True},
     ]

     # run in loop

@@ -380,7 +380,7 @@ In this example, we create a 16-channel recording with 4 tetrodes:
     # here the result is a dict of a sorting object
     sortings = {}
     for group, sub_recording in recordings.items():
-        sorting = run_sorter(sorter_name='kilosort2', recording=recording, output_folder=f"folder_KS2_group{group}")
+        sorting = run_sorter(sorter_name='kilosort2', recording=recording, folder=f"folder_KS2_group{group}")
         sortings[group] = sorting

 **Option 2 : Automatic splitting**

@@ -390,7 +390,7 @@ In this example, we create a 16-channel recording with 4 tetrodes:
     # here the result is one sorting that aggregates all sub sorting objects
     aggregate_sorting = run_sorter_by_property(sorter_name='kilosort2', recording=recording_4_tetrodes,
                                                grouping_property='group',
-                                               working_folder='working_path')
+                                               folder='working_path')


 Handling multi-segment recordings

@@ -546,7 +546,7 @@ From the user's perspective, they behave exactly like the external sorters:

 .. code-block:: python

-    sorting = run_sorter(sorter_name="spykingcircus2", recording=recording, output_folder="/tmp/folder")
+    sorting = run_sorter(sorter_name="spykingcircus2", recording=recording, folder="/tmp/folder")


 Contributing
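As a companion to the job-list change above, a hypothetical sketch of launching several jobs with the renamed 'folder' key (assumes `recording` and `another_recording` already exist; `return_output=True` is taken from the current `run_sorter_jobs` signature)::

    from spikeinterface.sorters import run_sorter_jobs

    job_list = [
        # each dict holds the keyword arguments of one run_sorter() call
        {"sorter_name": "tridesclous", "recording": recording, "folder": "folder1"},
        {"sorter_name": "tridesclous", "recording": another_recording, "folder": "folder2"},
    ]
    # engine="joblib" runs jobs in parallel; engine="loop" runs them one by one
    sortings = run_sorter_jobs(job_list, engine="joblib", return_output=True)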
diff --git a/examples/tutorials/core/plot_1_recording_extractor.py b/examples/tutorials/core/plot_1_recording_extractor.py
index e7d773e9e6..39520f2195 100644
--- a/examples/tutorials/core/plot_1_recording_extractor.py
+++ b/examples/tutorials/core/plot_1_recording_extractor.py
@@ -122,7 +122,7 @@

 ##############################################################################
 # You can also get a recording with a subset of channels (i.e. a channel slice):

-recording4 = recording3.channel_slice(channel_ids=["a", "c", "e"])
+recording4 = recording3.select_channels(channel_ids=["a", "c", "e"])
 print(recording4)
 print(recording4.get_channel_ids())

diff --git a/examples/tutorials/qualitymetrics/plot_3_quality_metrics.py b/examples/tutorials/qualitymetrics/plot_3_quality_metrics.py
index bfa6880cb0..a6b0da67ac 100644
--- a/examples/tutorials/qualitymetrics/plot_3_quality_metrics.py
+++ b/examples/tutorials/qualitymetrics/plot_3_quality_metrics.py
@@ -9,12 +9,10 @@

 import spikeinterface.core as si
 import spikeinterface.extractors as se
-from spikeinterface.postprocessing import compute_principal_components
 from spikeinterface.qualitymetrics import (
     compute_snrs,
     compute_firing_rates,
     compute_isi_violations,
-    calculate_pc_metrics,
     compute_quality_metrics,
 )

@@ -70,7 +68,7 @@

 ##############################################################################
-# Some metrics are based on the principal component scores, so the exwtension
-# need to be computed before. For instance:
+# Some metrics are based on the principal component scores, so the extension
+# must be computed before. For instance:

 analyzer.compute("principal_components", n_components=3, mode="by_channel_global", whiten=True)

diff --git a/installation_tips/check_your_install.py b/installation_tips/check_your_install.py
index f3f80961e8..92bafd3d55 100644
--- a/installation_tips/check_your_install.py
+++ b/installation_tips/check_your_install.py
@@ -21,7 +21,7 @@ def _run_one_sorter_and_analyzer(sorter_name):
     job_kwargs = dict(n_jobs=-1, progress_bar=True, chunk_duration="1s")
     import spikeinterface.full as si
     recording = si.load_extractor('./toy_example_recording')
-    sorting = si.run_sorter(sorter_name, recording, output_folder=f'./sorter_with_{sorter_name}', verbose=False)
+    sorting = si.run_sorter(sorter_name, recording, folder=f'./sorter_with_{sorter_name}', verbose=False)

     sorting_analyzer = si.create_sorting_analyzer(sorting, recording,
                                                   format="binary_folder", folder=f"./analyzer_with_{sorter_name}",

diff --git a/src/spikeinterface/benchmark/benchmark_motion_interpolation.py b/src/spikeinterface/benchmark/benchmark_motion_interpolation.py
index ab72a1f9bd..c0969ed32a 100644
--- a/src/spikeinterface/benchmark/benchmark_motion_interpolation.py
+++ b/src/spikeinterface/benchmark/benchmark_motion_interpolation.py
@@ -53,7 +53,7 @@ def run(self, **job_kwargs):
             sorting = run_sorter(
                 sorter_name,
                 recording,
-                output_folder=self.sorter_folder,
+                folder=self.sorter_folder,
                 **sorter_params,
                 delete_output_folder=False,
             )
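The tutorial hunk above reflects the replacement of `channel_slice` by `select_channels`. A minimal sketch on a generated toy recording (generator parameters are only illustrative)::

    import spikeinterface.full as si

    recording = si.generate_recording(num_channels=4, durations=[10.0])
    # select_channels() keeps a subset of channels; it replaces the removed channel_slice()
    sub_recording = recording.select_channels(channel_ids=recording.channel_ids[:2])
    print(sub_recording.get_channel_ids())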
- """Checks if the recording has scaled traces - - Returns - ------- - bool - True if the recording has scaled traces, False otherwise - """ - warnings.warn( - "`has_scaled_traces` is deprecated and will be removed in 0.103.0. Use has_scaleable_traces() instead", - category=DeprecationWarning, - stacklevel=2, - ) - return self.has_scaled() - def get_time_info(self, segment_index=None) -> dict: """ Retrieves the timing attributes for a given segment index. As with @@ -725,17 +710,6 @@ def rename_channels(self, new_channel_ids: list | np.array | tuple) -> "BaseReco return ChannelSliceRecording(self, renamed_channel_ids=new_channel_ids) - def _channel_slice(self, channel_ids, renamed_channel_ids=None): - from .channelslice import ChannelSliceRecording - - warnings.warn( - "Recording.channel_slice will be removed in version 0.103, use `select_channels` or `rename_channels` instead.", - DeprecationWarning, - stacklevel=2, - ) - sub_recording = ChannelSliceRecording(self, channel_ids, renamed_channel_ids=renamed_channel_ids) - return sub_recording - def _remove_channels(self, remove_channel_ids): from .channelslice import ChannelSliceRecording @@ -878,8 +852,6 @@ def binary_compatible_with( time_axis=None, file_paths_length=None, file_offset=None, - file_suffix=None, - file_paths_lenght=None, ): """ Check is the recording is binary compatible with some constrain on @@ -891,14 +863,6 @@ def binary_compatible_with( * file_suffix """ - # spelling typo need to fix - if file_paths_lenght is not None: - warnings.warn( - "`file_paths_lenght` is deprecated and will be removed in 0.103.0 please use `file_paths_length`" - ) - if file_paths_length is None: - file_paths_length = file_paths_lenght - if not self.is_binary_compatible(): return False diff --git a/src/spikeinterface/core/baserecordingsnippets.py b/src/spikeinterface/core/baserecordingsnippets.py index ea1f9c4542..6be1766dbc 100644 --- a/src/spikeinterface/core/baserecordingsnippets.py +++ b/src/spikeinterface/core/baserecordingsnippets.py @@ -51,14 +51,6 @@ def has_scaleable_traces(self) -> bool: else: return True - def has_scaled(self): - warn( - "`has_scaled` has been deprecated and will be removed in 0.103.0. Please use `has_scaleable_traces()`", - category=DeprecationWarning, - stacklevel=2, - ) - return self.has_scaleable_traces() - def has_probe(self) -> bool: return "contact_vector" in self.get_property_keys() @@ -69,9 +61,6 @@ def is_filtered(self): # the is_filtered is handle with annotation return self._annotations.get("is_filtered", False) - def _channel_slice(self, channel_ids, renamed_channel_ids=None): - raise NotImplementedError - def set_probe(self, probe, group_mode="by_probe", in_place=False): """ Attach a list of Probe object to a recording. @@ -234,21 +223,6 @@ def _set_probes(self, probe_or_probegroup, group_mode="by_probe", in_place=False return sub_recording - def set_probes(self, probe_or_probegroup, group_mode="by_probe", in_place=False): - - warning_msg = ( - "`set_probes` is now a private function and the public function will be " - "removed in 0.103.0. 
diff --git a/src/spikeinterface/core/baserecordingsnippets.py b/src/spikeinterface/core/baserecordingsnippets.py
index ea1f9c4542..6be1766dbc 100644
--- a/src/spikeinterface/core/baserecordingsnippets.py
+++ b/src/spikeinterface/core/baserecordingsnippets.py
@@ -51,14 +51,6 @@ def has_scaleable_traces(self) -> bool:
         else:
             return True

-    def has_scaled(self):
-        warn(
-            "`has_scaled` has been deprecated and will be removed in 0.103.0. Please use `has_scaleable_traces()`",
-            category=DeprecationWarning,
-            stacklevel=2,
-        )
-        return self.has_scaleable_traces()
-
     def has_probe(self) -> bool:
         return "contact_vector" in self.get_property_keys()

@@ -69,9 +61,6 @@ def is_filtered(self):
         # the is_filtered is handle with annotation
         return self._annotations.get("is_filtered", False)

-    def _channel_slice(self, channel_ids, renamed_channel_ids=None):
-        raise NotImplementedError
-
     def set_probe(self, probe, group_mode="by_probe", in_place=False):
         """
         Attach a list of Probe object to a recording.

@@ -234,21 +223,6 @@ def _set_probes(self, probe_or_probegroup, group_mode="by_probe", in_place=False
         return sub_recording

-    def set_probes(self, probe_or_probegroup, group_mode="by_probe", in_place=False):
-
-        warning_msg = (
-            "`set_probes` is now a private function and the public function will be "
-            "removed in 0.103.0. Please use `set_probe` or `set_probegroup` instead"
-        )
-
-        warn(warning_msg, category=DeprecationWarning, stacklevel=2)
-
-        sub_recording = self._set_probes(
-            probe_or_probegroup=probe_or_probegroup, group_mode=group_mode, in_place=in_place
-        )
-
-        return sub_recording
-
     def get_probe(self):
         probes = self.get_probes()
         assert len(probes) == 1, "there are several probe use .get_probes() or get_probegroup()"

@@ -441,25 +415,6 @@ def planarize(self, axes: str = "xy"):
         return recording2d

-    # utils
-    def channel_slice(self, channel_ids, renamed_channel_ids=None):
-        """
-        Returns a new object with sliced channels.
-
-        Parameters
-        ----------
-        channel_ids : np.array or list
-            The list of channels to keep
-        renamed_channel_ids : np.array or list, default: None
-            A list of renamed channels
-
-        Returns
-        -------
-        BaseRecordingSnippets
-            The object with sliced channels
-        """
-        return self._channel_slice(channel_ids, renamed_channel_ids=renamed_channel_ids)
-
     def select_channels(self, channel_ids):
         """
         Returns a new object with sliced channels.

diff --git a/src/spikeinterface/core/basesnippets.py b/src/spikeinterface/core/basesnippets.py
index 872f3fa8e1..e20fe09e11 100644
--- a/src/spikeinterface/core/basesnippets.py
+++ b/src/spikeinterface/core/basesnippets.py
@@ -79,14 +79,6 @@ def is_aligned(self):
     def get_num_segments(self):
         return len(self._snippets_segments)

-    def has_scaled_snippets(self):
-        warn(
-            "`has_scaled_snippets` is deprecated and will be removed in version 0.103.0. Please use `has_scaleable_traces()` instead",
-            category=DeprecationWarning,
-            stacklevel=2,
-        )
-        return self.has_scaleable_traces()
-
     def get_frames(self, indices=None, segment_index: Union[int, None] = None):
         segment_index = self._check_segment_index(segment_index)
         spts = self._snippets_segments[segment_index]
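`channel_slice` covered both sub-selection and renaming; these are now two explicit methods. A minimal sketch (assuming `recording` has five channels labelled "a" to "e")::

    # keep a subset of channels under their existing ids
    sub_recording = recording.select_channels(channel_ids=["a", "c", "e"])
    # relabel all channels (one new id per existing channel)
    renamed_recording = recording.rename_channels(new_channel_ids=["ch0", "ch1", "ch2", "ch3", "ch4"])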
+ Threshold for "snr" and "energy" (in units of noise levels) (in units of amplitude). For the "snr" method, the template amplitude mode is controlled by the "amplitude_mode" argument. amplitude_mode : "extremum" | "at_index" | "peak_to_peak" Mode to compute the amplitude of the templates for the "snr", "amplitude", and "best_channels" methods. @@ -454,33 +452,6 @@ def from_snr( mask[unit_ind, chan_inds] = True return cls(mask, unit_ids, channel_ids) - @classmethod - def from_ptp(cls, templates_or_sorting_analyzer, threshold, noise_levels=None): - """ - Construct sparsity from a thresholds based on template peak-to-peak values. - Use the "threshold" argument to specify the peak-to-peak threshold. - - Parameters - ---------- - templates_or_sorting_analyzer : Templates | SortingAnalyzer - A Templates or a SortingAnalyzer object. - threshold : float - Threshold for "ptp" method (in units of amplitude). - - Returns - ------- - sparsity : ChannelSparsity - The estimated sparsity. - """ - warnings.warn( - "The 'ptp' method is deprecated and will be removed in version 0.103.0. " - "Please use the 'snr' method with the 'peak_to_peak' amplitude mode instead.", - DeprecationWarning, - ) - return cls.from_snr( - templates_or_sorting_analyzer, threshold, amplitude_mode="peak_to_peak", noise_levels=noise_levels - ) - @classmethod def from_amplitude(cls, templates_or_sorting_analyzer, threshold, amplitude_mode="extremum", peak_sign="neg"): """ @@ -635,9 +606,7 @@ def create_dense(cls, sorting_analyzer): def compute_sparsity( templates_or_sorting_analyzer: "Templates | SortingAnalyzer", noise_levels: np.ndarray | None = None, - method: ( - "radius" | "best_channels" | "closest_channels" | "snr" | "amplitude" | "energy" | "by_property" | "ptp" - ) = "radius", + method: "radius" | "best_channels" | "closest_channels" | "snr" | "amplitude" | "energy" | "by_property" = "radius", peak_sign: "neg" | "pos" | "both" = "neg", num_channels: int | None = 5, radius_um: float | None = 100.0, @@ -672,7 +641,13 @@ def compute_sparsity( # to keep backward compatibility templates_or_sorting_analyzer = templates_or_sorting_analyzer.sorting_analyzer - if method in ("best_channels", "closest_channels", "radius", "snr", "amplitude", "ptp"): + if method in ( + "best_channels", + "closest_channels", + "radius", + "snr", + "amplitude", + ): assert isinstance( templates_or_sorting_analyzer, (Templates, SortingAnalyzer) ), f"compute_sparsity(method='{method}') need Templates or SortingAnalyzer" @@ -715,14 +690,6 @@ def compute_sparsity( sparsity = ChannelSparsity.from_property( templates_or_sorting_analyzer.sorting, templates_or_sorting_analyzer.recording, by_property ) - elif method == "ptp": - # TODO: remove after deprecation - assert threshold is not None, "For the 'ptp' method, 'threshold' needs to be given" - sparsity = ChannelSparsity.from_ptp( - templates_or_sorting_analyzer, - threshold, - noise_levels=noise_levels, - ) else: raise ValueError(f"compute_sparsity() method={method} does not exists") @@ -738,7 +705,7 @@ def estimate_sparsity( num_spikes_for_sparsity: int = 100, ms_before: float = 1.0, ms_after: float = 2.5, - method: "radius" | "best_channels" | "closest_channels" | "amplitude" | "snr" | "by_property" | "ptp" = "radius", + method: "radius" | "best_channels" | "closest_channels" | "amplitude" | "snr" | "by_property" = "radius", peak_sign: "neg" | "pos" | "both" = "neg", radius_um: float = 100.0, num_channels: int = 5, @@ -787,9 +754,9 @@ def estimate_sparsity( # Can't be done at module because this is a cyclic 
diff --git a/src/spikeinterface/core/tests/test_sortinganalyzer.py b/src/spikeinterface/core/tests/test_sortinganalyzer.py
index c5c4e9db63..7074c054b5 100644
--- a/src/spikeinterface/core/tests/test_sortinganalyzer.py
+++ b/src/spikeinterface/core/tests/test_sortinganalyzer.py
@@ -250,7 +250,7 @@ def test_SortingAnalyzer_tmp_recording(dataset):
     assert not sorting_analyzer_saved.has_temporary_recording()
     assert isinstance(sorting_analyzer_saved.recording, type(recording))

-    recording_sliced = recording.channel_slice(recording.channel_ids[:-1])
+    recording_sliced = recording.select_channels(recording.channel_ids[:-1])

     # wrong channels
     with pytest.raises(ValueError):

diff --git a/src/spikeinterface/core/tests/test_sparsity.py b/src/spikeinterface/core/tests/test_sparsity.py
index 6ed311b5d8..c865068e4a 100644
--- a/src/spikeinterface/core/tests/test_sparsity.py
+++ b/src/spikeinterface/core/tests/test_sparsity.py
@@ -268,24 +268,8 @@ def test_estimate_sparsity():
         progress_bar=True,
         n_jobs=1,
     )
-    # ptp: just run it
     print(noise_levels)
-    with pytest.warns(DeprecationWarning):
-        sparsity = estimate_sparsity(
-            sorting,
-            recording,
-            num_spikes_for_sparsity=50,
-            ms_before=1.0,
-            ms_after=2.0,
-            method="ptp",
-            threshold=5,
-            noise_levels=noise_levels,
-            chunk_duration="1s",
-            progress_bar=True,
-            n_jobs=1,
-        )
-

 def test_compute_sparsity():
     recording, sorting = get_dataset()

@@ -310,8 +294,6 @@
     sparsity = compute_sparsity(sorting_analyzer, method="amplitude", threshold=5, amplitude_mode="peak_to_peak")
     sparsity = compute_sparsity(sorting_analyzer, method="energy", threshold=5)
     sparsity = compute_sparsity(sorting_analyzer, method="by_property", by_property="group")
-    with pytest.warns(DeprecationWarning):
-        sparsity = compute_sparsity(sorting_analyzer, method="ptp", threshold=5)

     # using object Templates
     templates = sorting_analyzer.get_extension("templates").get_data(outputs="Templates")

@@ -322,9 +304,6 @@
     sparsity = compute_sparsity(templates, method="amplitude", threshold=5, amplitude_mode="peak_to_peak")
     sparsity = compute_sparsity(templates, method="closest_channels", num_channels=2)

-    with pytest.warns(DeprecationWarning):
-        sparsity = compute_sparsity(templates, method="ptp", noise_levels=noise_levels, threshold=5)
-

 if __name__ == "__main__":
     # test_ChannelSparsity()
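The deleted test exercised `estimate_sparsity(method="ptp")`; the same call now goes through 'snr'. A minimal sketch reusing the surrounding test's placeholders (`sorting`, `recording`, `noise_levels`)::

    from spikeinterface.core import estimate_sparsity

    sparsity = estimate_sparsity(
        sorting,
        recording,
        num_spikes_for_sparsity=50,
        method="snr",
        threshold=5,
        amplitude_mode="peak_to_peak",
        noise_levels=noise_levels,
    )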
diff --git a/src/spikeinterface/extractors/nwbextractors.py b/src/spikeinterface/extractors/nwbextractors.py
index 65125efbcc..8006eb4d7f 100644
--- a/src/spikeinterface/extractors/nwbextractors.py
+++ b/src/spikeinterface/extractors/nwbextractors.py
@@ -53,16 +53,6 @@ def read_file_from_backend(
         else:
             raise RuntimeError(f"{file_path} is not a valid HDF5 file!")

-    elif stream_mode == "ros3":
-        import h5py
-
-        assert file_path is not None, "file_path must be specified when using stream_mode='ros3'"
-
-        drivers = h5py.registered_drivers()
-        assertion_msg = "ROS3 support not enbabled, use: install -c conda-forge h5py>=3.2 to enable streaming"
-        assert "ros3" in drivers, assertion_msg
-        open_file = h5py.File(name=file_path, mode="r", driver="ros3")
-
     elif stream_mode == "remfile":
         import remfile
         import h5py

@@ -535,13 +525,6 @@ def __init__(
         use_pynwb: bool = False,
     ):

-        if stream_mode == "ros3":
-            warnings.warn(
-                "The 'ros3' stream_mode is deprecated and will be removed in version 0.103.0. "
-                "Use 'fsspec' stream_mode instead.",
-                DeprecationWarning,
-            )
-
         if file_path is not None and file is not None:
             raise ValueError("Provide either file_path or file, not both")
         if file_path is None and file is None:

@@ -1062,13 +1045,6 @@ def __init__(
         use_pynwb: bool = False,
     ):

-        if stream_mode == "ros3":
-            warnings.warn(
-                "The 'ros3' stream_mode is deprecated and will be removed in version 0.103.0. "
-                "Use 'fsspec' stream_mode instead.",
-                DeprecationWarning,
-            )
-
         self.stream_mode = stream_mode
         self.stream_cache_path = stream_cache_path
         self.electrical_series_path = electrical_series_path

diff --git a/src/spikeinterface/extractors/tests/test_nwbextractors_streaming.py b/src/spikeinterface/extractors/tests/test_nwbextractors_streaming.py
index 84ae3c03bf..404a598713 100644
--- a/src/spikeinterface/extractors/tests/test_nwbextractors_streaming.py
+++ b/src/spikeinterface/extractors/tests/test_nwbextractors_streaming.py
@@ -73,7 +73,7 @@ def test_recording_s3_nwb_remfile():
     assert full_traces.shape == (num_frames, num_chans)
     assert full_traces.dtype == dtype

-    if rec.has_scaled():
+    if rec.has_scaleable_traces():
         trace_scaled = rec.get_traces(segment_index=segment_index, return_scaled=True, end_frame=2)
         assert trace_scaled.dtype == "float32"

@@ -103,7 +103,7 @@ def test_recording_s3_nwb_remfile_file_like(tmp_path):
     assert full_traces.shape == (num_frames, num_chans)
     assert full_traces.dtype == dtype

-    if rec.has_scaled():
+    if rec.has_scaleable_traces():
         trace_scaled = rec.get_traces(segment_index=segment_index, return_scaled=True, end_frame=2)
         assert trace_scaled.dtype == "float32"
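With the 'ros3' driver removed, remote NWB files are streamed via 'fsspec' or 'remfile'. A minimal sketch (the URL is a placeholder)::

    import spikeinterface.extractors as se

    # stream a remote NWB file instead of downloading it ("remfile" also works here)
    recording = se.read_nwb_recording(
        file_path="https://dandiarchive.s3.amazonaws.com/example.nwb",  # placeholder URL
        stream_mode="fsspec",
    )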
diff --git a/src/spikeinterface/qualitymetrics/pca_metrics.py b/src/spikeinterface/qualitymetrics/pca_metrics.py
index f4e36b24c0..9b8618f990 100644
--- a/src/spikeinterface/qualitymetrics/pca_metrics.py
+++ b/src/spikeinterface/qualitymetrics/pca_metrics.py
@@ -229,28 +229,6 @@ def compute_pc_metrics(
     return pc_metrics


-def calculate_pc_metrics(
-    sorting_analyzer, metric_names=None, metric_params=None, unit_ids=None, seed=None, n_jobs=1, progress_bar=False
-):
-    warnings.warn(
-        "The `calculate_pc_metrics` function is deprecated and will be removed in 0.103.0. Please use compute_pc_metrics instead",
-        category=DeprecationWarning,
-        stacklevel=2,
-    )
-
-    pc_metrics = compute_pc_metrics(
-        sorting_analyzer,
-        metric_names=metric_names,
-        metric_params=metric_params,
-        unit_ids=unit_ids,
-        seed=seed,
-        n_jobs=n_jobs,
-        progress_bar=progress_bar,
-    )
-
-    return pc_metrics
-
-
 #################################################################
 # Code from spikemetrics

diff --git a/src/spikeinterface/qualitymetrics/quality_metric_list.py b/src/spikeinterface/qualitymetrics/quality_metric_list.py
index 23b781eb9d..f7411f6376 100644
--- a/src/spikeinterface/qualitymetrics/quality_metric_list.py
+++ b/src/spikeinterface/qualitymetrics/quality_metric_list.py
@@ -22,7 +22,6 @@

 from .pca_metrics import (
     compute_pc_metrics,
-    calculate_pc_metrics,  # remove after 0.103.0
     mahalanobis_metrics,
     lda_metrics,
     nearest_neighbors_metrics,

diff --git a/src/spikeinterface/qualitymetrics/tests/test_pca_metrics.py b/src/spikeinterface/qualitymetrics/tests/test_pca_metrics.py
index 287439a4f7..1491b9eac1 100644
--- a/src/spikeinterface/qualitymetrics/tests/test_pca_metrics.py
+++ b/src/spikeinterface/qualitymetrics/tests/test_pca_metrics.py
@@ -4,7 +4,7 @@
 from spikeinterface.qualitymetrics import compute_pc_metrics, get_quality_pca_metric_list


-def test_calculate_pc_metrics(small_sorting_analyzer):
+def test_compute_pc_metrics(small_sorting_analyzer):
     import pandas as pd

     sorting_analyzer = small_sorting_analyzer
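`calculate_pc_metrics` was a thin wrapper that forwarded its arguments unchanged, so callers can switch names one-for-one. A minimal sketch (assuming `sorting_analyzer` already has the "principal_components" extension computed)::

    from spikeinterface.qualitymetrics import compute_pc_metrics

    # the removed wrapper forwarded these same keyword arguments
    pc_metrics = compute_pc_metrics(
        sorting_analyzer, metric_names=["isolation_distance", "l_ratio"], n_jobs=1, progress_bar=False
    )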
diff --git a/src/spikeinterface/sorters/launcher.py b/src/spikeinterface/sorters/launcher.py
index 137ff98cdb..a6b049c182 100644
--- a/src/spikeinterface/sorters/launcher.py
+++ b/src/spikeinterface/sorters/launcher.py
@@ -239,7 +239,6 @@ def run_sorter_by_property(
     verbose=False,
     docker_image=None,
     singularity_image=None,
-    working_folder: None = None,
     **sorter_params,
 ):
     """

@@ -301,14 +300,6 @@
             stacklevel=2,
         )

-    if working_folder is not None:
-        warnings.warn(
-            "`working_folder` is deprecated and will be removed in 0.103. Please use folder instead",
-            category=DeprecationWarning,
-            stacklevel=2,
-        )
-        folder = working_folder
-
     working_folder = Path(folder).absolute()

     assert grouping_property in recording.get_property_keys(), (
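`run_sorter_by_property` now takes `folder` directly, matching `run_sorter`. A minimal sketch (assuming `recording` carries a "group" property, e.g. one value per tetrode)::

    from spikeinterface.sorters import run_sorter_by_property

    aggregate_sorting = run_sorter_by_property(
        sorter_name="kilosort2",
        recording=recording,
        grouping_property="group",
        folder="sorting_by_group",  # formerly working_folder
    )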
diff --git a/src/spikeinterface/sorters/runsorter.py b/src/spikeinterface/sorters/runsorter.py
index bd5d9b3529..5c44db2d58 100644
--- a/src/spikeinterface/sorters/runsorter.py
+++ b/src/spikeinterface/sorters/runsorter.py
@@ -95,8 +95,6 @@
         If True, the output Sorting is returned as a Sorting
     delete_container_files : bool, default: True
         If True, the container temporary files are deleted after the sorting is done
-    output_folder : None, default: None
-        Do not use. Deprecated output function to be removed in 0.103.
     **sorter_params : keyword args
         Spike sorter specific arguments (they can be retrieved with
         `get_default_sorter_params(sorter_name_or_class)`)

@@ -119,7 +117,6 @@ def run_sorter(
     singularity_image: Optional[Union[bool, str]] = False,
     delete_container_files: bool = True,
     with_output: bool = True,
-    output_folder: None = None,
     **sorter_params,
 ):
     """

@@ -132,13 +129,6 @@
     >>> sorting = run_sorter("tridesclous", recording)
     """

-    if output_folder is not None and folder is None:
-        deprecation_msg = (
-            "`output_folder` is deprecated and will be removed in version 0.103.0 Please use folder instead"
-        )
-        folder = output_folder
-        warn(deprecation_msg, category=DeprecationWarning, stacklevel=2)
-
     common_kwargs = dict(
         sorter_name=sorter_name,
         recording=recording,

@@ -210,7 +200,6 @@ def run_sorter_local(
     verbose=False,
     raise_error=True,
     with_output=True,
-    output_folder=None,
     **sorter_params,
 ):
     """

@@ -235,20 +224,11 @@
         If False, the process continues and the error is logged in the log file
     with_output : bool, default: True
         If True, the output Sorting is returned as a Sorting
-    output_folder : None, default: None
-        Do not use. Deprecated output function to be removed in 0.103.
     **sorter_params : keyword args
     """

     if isinstance(recording, list):
         raise Exception("If you want to run several sorters/recordings use run_sorter_jobs(...)")

-    if output_folder is not None and folder is None:
-        deprecation_msg = (
-            "`output_folder` is deprecated and will be removed in version 0.103.0 Please use folder instead"
-        )
-        folder = output_folder
-        warn(deprecation_msg, category=DeprecationWarning, stacklevel=2)
-
     SorterClass = sorter_dict[sorter_name]

     # only classmethod call not instance (stateless at instance level but state is in folder)

@@ -294,7 +274,6 @@ def run_sorter_container(
     installation_mode="auto",
     spikeinterface_version=None,
     spikeinterface_folder_source=None,
-    output_folder: None = None,
     **sorter_params,
 ):
     """

@@ -309,8 +288,6 @@
         The container mode : "docker" or "singularity"
     container_image : str, default: None
         The container image name and tag. If None, the default container image is used
-    output_folder : str, default: None
-        Path to output folder
     remove_existing_folder : bool, default: True
-        If True and output_folder exists yet then delete
+        If True and the folder already exists, it is deleted
     delete_output_folder : bool, default: False

@@ -345,13 +322,6 @@
     """
     assert installation_mode in ("auto", "pypi", "github", "folder", "dev", "no-install")

-    if output_folder is not None and folder is None:
-        deprecation_msg = (
-            "`output_folder` is deprecated and will be removed in version 0.103.0 Please use folder instead"
-        )
-        folder = output_folder
-        warn(deprecation_msg, category=DeprecationWarning, stacklevel=2)
-
     spikeinterface_version = spikeinterface_version or si_version

     if extra_requirements is None:

diff --git a/src/spikeinterface/sorters/tests/test_runsorter.py b/src/spikeinterface/sorters/tests/test_runsorter.py
index b995520f26..1f2ec373a9 100644
--- a/src/spikeinterface/sorters/tests/test_runsorter.py
+++ b/src/spikeinterface/sorters/tests/test_runsorter.py
@@ -34,7 +34,7 @@ def test_run_sorter_local(generate_recording, create_cache_folder):
     sorting = run_sorter(
         "tridesclous2",
         recording,
-        output_folder=cache_folder / "sorting_tdc_local",
+        folder=cache_folder / "sorting_tdc_local",
         remove_existing_folder=True,
         delete_output_folder=False,
         verbose=True,

@@ -61,7 +61,7 @@ def test_run_sorter_docker(generate_recording, create_cache_folder):
     sorting = run_sorter(
         "tridesclous",
         recording,
-        output_folder=output_folder,
+        folder=output_folder,
         remove_existing_folder=True,
         delete_output_folder=False,
         verbose=True,

@@ -96,7 +96,7 @@ def test_run_sorter_singularity(generate_recording, create_cache_folder):
     sorting = run_sorter(
         "tridesclous",
         recording,
-        output_folder=output_folder,
+        folder=output_folder,
         remove_existing_folder=True,
         delete_output_folder=False,
         verbose=True,