Skip to content

Commit 8c2fd1a

Browse files
authored
Merge pull request #3993 from zm711/deps
First pass of deprecation removals for 0.103.0
2 parents 1d21a8d + d1d8fc8 commit 8c2fd1a

22 files changed

+44
-283
lines changed

doc/get_started/quickstart.rst

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -336,7 +336,7 @@ Alternatively we can pass a full dictionary containing the parameters:
336336
337337
# parameters set by params dictionary
338338
sorting_TDC_2 = ss.run_sorter(
339-
sorter_name="tridesclous", recording=recording_preprocessed, output_folder="tdc_output2", **other_params
339+
sorter_name="tridesclous", recording=recording_preprocessed, folder="tdc_output2", **other_params
340340
)
341341
print(sorting_TDC_2)
342342

doc/how_to/analyze_neuropixels.rst

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -567,7 +567,7 @@ In this example:
567567
# run kilosort2.5 without drift correction
568568
params_kilosort2_5 = {'do_correction': False}
569569
570-
sorting = si.run_sorter('kilosort2_5', rec, output_folder=base_folder / 'kilosort2.5_output',
570+
sorting = si.run_sorter('kilosort2_5', rec, folder=base_folder / 'kilosort2.5_output',
571571
docker_image=True, verbose=True, **params_kilosort2_5)
572572
573573
.. code:: ipython3

doc/how_to/process_by_channel_group.rst

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -160,7 +160,7 @@ sorting objects in a dictionary for later use.
160160
sorting = run_sorter(
161161
sorter_name='kilosort2',
162162
recording=split_preprocessed_recording,
163-
output_folder=f"folder_KS2_group{group}"
163+
folder=f"folder_KS2_group{group}"
164164
)
165165
sortings[group] = sorting
166166

doc/modules/sorters.rst

Lines changed: 17 additions & 17 deletions
Original file line numberDiff line numberDiff line change
@@ -55,15 +55,15 @@ to easily run spike sorters:
5555
from spikeinterface.sorters import run_sorter
5656
5757
# run Tridesclous
58-
sorting_TDC = run_sorter(sorter_name="tridesclous", recording=recording, output_folder="/folder_TDC")
58+
sorting_TDC = run_sorter(sorter_name="tridesclous", recording=recording, folder="/folder_TDC")
5959
# run Kilosort2.5
60-
sorting_KS2_5 = run_sorter(sorter_name="kilosort2_5", recording=recording, output_folder="/folder_KS2_5")
60+
sorting_KS2_5 = run_sorter(sorter_name="kilosort2_5", recording=recording, folder="/folder_KS2_5")
6161
# run IronClust
62-
sorting_IC = run_sorter(sorter_name="ironclust", recording=recording, output_folder="/folder_IC")
62+
sorting_IC = run_sorter(sorter_name="ironclust", recording=recording, folder="/folder_IC")
6363
# run pyKilosort
64-
sorting_pyKS = run_sorter(sorter_name="pykilosort", recording=recording, output_folder="/folder_pyKS")
64+
sorting_pyKS = run_sorter(sorter_name="pykilosort", recording=recording, folder="/folder_pyKS")
6565
# run SpykingCircus
66-
sorting_SC = run_sorter(sorter_name="spykingcircus", recording=recording, output_folder="/folder_SC")
66+
sorting_SC = run_sorter(sorter_name="spykingcircus", recording=recording, folder="/folder_SC")
6767
6868
6969
Then the output, which is a :py:class:`~spikeinterface.core.BaseSorting` object, can be easily
@@ -87,10 +87,10 @@ Spike-sorter-specific parameters can be controlled directly from the
8787

8888
.. code-block:: python
8989
90-
sorting_TDC = run_sorter(sorter_name='tridesclous', recording=recording, output_folder="/folder_TDC",
90+
sorting_TDC = run_sorter(sorter_name='tridesclous', recording=recording, folder="/folder_TDC",
9191
detect_threshold=8.)
9292
93-
sorting_KS2_5 = run_sorter(sorter_name="kilosort2_5", recording=recording, output_folder="/folder_KS2_5"
93+
sorting_KS2_5 = run_sorter(sorter_name="kilosort2_5", recording=recording, folder="/folder_KS2_5"
9494
do_correction=False, preclust_threshold=6, freq_min=200.)
9595
9696
@@ -193,7 +193,7 @@ The following code creates a test recording and runs a containerized spike sorte
193193
194194
sorting = ss.run_sorter(sorter_name='kilosort3',
195195
recording=test_recording,
196-
output_folder="kilosort3",
196+
folder="kilosort3",
197197
singularity_image=True)
198198
199199
print(sorting)
@@ -208,7 +208,7 @@ To run in Docker instead of Singularity, use ``docker_image=True``.
208208
.. code-block:: python
209209
210210
sorting = run_sorter(sorter_name='kilosort3', recording=test_recording,
211-
output_folder="/tmp/kilosort3", docker_image=True)
211+
folder="/tmp/kilosort3", docker_image=True)
212212
213213
To use a specific image, set either ``docker_image`` or ``singularity_image`` to a string,
214214
e.g. ``singularity_image="spikeinterface/kilosort3-compiled-base:0.1.0"``.
@@ -217,7 +217,7 @@ e.g. ``singularity_image="spikeinterface/kilosort3-compiled-base:0.1.0"``.
217217
218218
sorting = run_sorter(sorter_name="kilosort3",
219219
recording=test_recording,
220-
output_folder="kilosort3",
220+
folder="kilosort3",
221221
singularity_image="spikeinterface/kilosort3-compiled-base:0.1.0")
222222
223223
@@ -301,10 +301,10 @@ an :code:`engine` that supports parallel processing (such as :code:`joblib` or :
301301
another_recording = ...
302302
303303
job_list = [
304-
{'sorter_name': 'tridesclous', 'recording': recording, 'output_folder': 'folder1','detect_threshold': 5.},
305-
{'sorter_name': 'tridesclous', 'recording': another_recording, 'output_folder': 'folder2', 'detect_threshold': 5.},
306-
{'sorter_name': 'herdingspikes', 'recording': recording, 'output_folder': 'folder3', 'clustering_bandwidth': 8., 'docker_image': True},
307-
{'sorter_name': 'herdingspikes', 'recording': another_recording, 'output_folder': 'folder4', 'clustering_bandwidth': 8., 'docker_image': True},
304+
{'sorter_name': 'tridesclous', 'recording': recording, 'folder': 'folder1','detect_threshold': 5.},
305+
{'sorter_name': 'tridesclous', 'recording': another_recording, 'folder': 'folder2', 'detect_threshold': 5.},
306+
{'sorter_name': 'herdingspikes', 'recording': recording, 'folder': 'folder3', 'clustering_bandwidth': 8., 'docker_image': True},
307+
{'sorter_name': 'herdingspikes', 'recording': another_recording, 'folder': 'folder4', 'clustering_bandwidth': 8., 'docker_image': True},
308308
]
309309
310310
# run in loop
@@ -380,7 +380,7 @@ In this example, we create a 16-channel recording with 4 tetrodes:
380380
# here the result is a dict of a sorting object
381381
sortings = {}
382382
for group, sub_recording in recordings.items():
383-
sorting = run_sorter(sorter_name='kilosort2', recording=recording, output_folder=f"folder_KS2_group{group}")
383+
sorting = run_sorter(sorter_name='kilosort2', recording=recording, folder=f"folder_KS2_group{group}")
384384
sortings[group] = sorting
385385
386386
**Option 2 : Automatic splitting**
@@ -390,7 +390,7 @@ In this example, we create a 16-channel recording with 4 tetrodes:
390390
# here the result is one sorting that aggregates all sub sorting objects
391391
aggregate_sorting = run_sorter_by_property(sorter_name='kilosort2', recording=recording_4_tetrodes,
392392
grouping_property='group',
393-
working_folder='working_path')
393+
folder='working_path')
394394
395395
396396
Handling multi-segment recordings
@@ -546,7 +546,7 @@ From the user's perspective, they behave exactly like the external sorters:
546546

547547
.. code-block:: python
548548
549-
sorting = run_sorter(sorter_name="spykingcircus2", recording=recording, output_folder="/tmp/folder")
549+
sorting = run_sorter(sorter_name="spykingcircus2", recording=recording, folder="/tmp/folder")
550550
551551
552552
Contributing

examples/tutorials/core/plot_1_recording_extractor.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -122,7 +122,7 @@
122122
##############################################################################
123123
# You can also get a recording with a subset of channels (i.e. a channel slice):
124124

125-
recording4 = recording3.channel_slice(channel_ids=["a", "c", "e"])
125+
recording4 = recording3.select_channels(channel_ids=["a", "c", "e"])
126126
print(recording4)
127127
print(recording4.get_channel_ids())
128128

examples/tutorials/qualitymetrics/plot_3_quality_metrics.py

Lines changed: 1 addition & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -9,12 +9,10 @@
99

1010
import spikeinterface.core as si
1111
import spikeinterface.extractors as se
12-
from spikeinterface.postprocessing import compute_principal_components
1312
from spikeinterface.qualitymetrics import (
1413
compute_snrs,
1514
compute_firing_rates,
1615
compute_isi_violations,
17-
calculate_pc_metrics,
1816
compute_quality_metrics,
1917
)
2018

@@ -70,7 +68,7 @@
7068

7169
##############################################################################
7270
# Some metrics are based on the principal component scores, so the extension
73-
# need to be computed before. For instance:
71+
# must be computed before. For instance:
7472

7573
analyzer.compute("principal_components", n_components=3, mode="by_channel_global", whiten=True)
7674

installation_tips/check_your_install.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -21,7 +21,7 @@ def _run_one_sorter_and_analyzer(sorter_name):
2121
job_kwargs = dict(n_jobs=-1, progress_bar=True, chunk_duration="1s")
2222
import spikeinterface.full as si
2323
recording = si.load_extractor('./toy_example_recording')
24-
sorting = si.run_sorter(sorter_name, recording, output_folder=f'./sorter_with_{sorter_name}', verbose=False)
24+
sorting = si.run_sorter(sorter_name, recording, folder=f'./sorter_with_{sorter_name}', verbose=False)
2525

2626
sorting_analyzer = si.create_sorting_analyzer(sorting, recording,
2727
format="binary_folder", folder=f"./analyzer_with_{sorter_name}",

src/spikeinterface/benchmark/benchmark_motion_interpolation.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -53,7 +53,7 @@ def run(self, **job_kwargs):
5353
sorting = run_sorter(
5454
sorter_name,
5555
recording,
56-
output_folder=self.sorter_folder,
56+
folder=self.sorter_folder,
5757
**sorter_params,
5858
delete_output_folder=False,
5959
)

src/spikeinterface/core/baserecording.py

Lines changed: 0 additions & 36 deletions
Original file line numberDiff line numberDiff line change
@@ -369,21 +369,6 @@ def get_traces(
369369
traces = traces.astype("float32", copy=False) * gains + offsets
370370
return traces
371371

372-
def has_scaled_traces(self) -> bool:
373-
"""Checks if the recording has scaled traces
374-
375-
Returns
376-
-------
377-
bool
378-
True if the recording has scaled traces, False otherwise
379-
"""
380-
warnings.warn(
381-
"`has_scaled_traces` is deprecated and will be removed in 0.103.0. Use has_scaleable_traces() instead",
382-
category=DeprecationWarning,
383-
stacklevel=2,
384-
)
385-
return self.has_scaled()
386-
387372
def get_time_info(self, segment_index=None) -> dict:
388373
"""
389374
Retrieves the timing attributes for a given segment index. As with
@@ -725,17 +710,6 @@ def rename_channels(self, new_channel_ids: list | np.array | tuple) -> "BaseReco
725710

726711
return ChannelSliceRecording(self, renamed_channel_ids=new_channel_ids)
727712

728-
def _channel_slice(self, channel_ids, renamed_channel_ids=None):
729-
from .channelslice import ChannelSliceRecording
730-
731-
warnings.warn(
732-
"Recording.channel_slice will be removed in version 0.103, use `select_channels` or `rename_channels` instead.",
733-
DeprecationWarning,
734-
stacklevel=2,
735-
)
736-
sub_recording = ChannelSliceRecording(self, channel_ids, renamed_channel_ids=renamed_channel_ids)
737-
return sub_recording
738-
739713
def _remove_channels(self, remove_channel_ids):
740714
from .channelslice import ChannelSliceRecording
741715

@@ -878,8 +852,6 @@ def binary_compatible_with(
878852
time_axis=None,
879853
file_paths_length=None,
880854
file_offset=None,
881-
file_suffix=None,
882-
file_paths_lenght=None,
883855
):
884856
"""
885857
Check if the recording is binary compatible with some constraints on
@@ -891,14 +863,6 @@ def binary_compatible_with(
891863
* file_suffix
892864
"""
893865

894-
# spelling typo need to fix
895-
if file_paths_lenght is not None:
896-
warnings.warn(
897-
"`file_paths_lenght` is deprecated and will be removed in 0.103.0 please use `file_paths_length`"
898-
)
899-
if file_paths_length is None:
900-
file_paths_length = file_paths_lenght
901-
902866
if not self.is_binary_compatible():
903867
return False
904868

src/spikeinterface/core/baserecordingsnippets.py

Lines changed: 0 additions & 45 deletions
Original file line numberDiff line numberDiff line change
@@ -51,14 +51,6 @@ def has_scaleable_traces(self) -> bool:
5151
else:
5252
return True
5353

54-
def has_scaled(self):
55-
warn(
56-
"`has_scaled` has been deprecated and will be removed in 0.103.0. Please use `has_scaleable_traces()`",
57-
category=DeprecationWarning,
58-
stacklevel=2,
59-
)
60-
return self.has_scaleable_traces()
61-
6254
def has_probe(self) -> bool:
6355
return "contact_vector" in self.get_property_keys()
6456

@@ -69,9 +61,6 @@ def is_filtered(self):
6961
# the is_filtered is handle with annotation
7062
return self._annotations.get("is_filtered", False)
7163

72-
def _channel_slice(self, channel_ids, renamed_channel_ids=None):
73-
raise NotImplementedError
74-
7564
def set_probe(self, probe, group_mode="by_probe", in_place=False):
7665
"""
7766
Attach a list of Probe object to a recording.
@@ -234,21 +223,6 @@ def _set_probes(self, probe_or_probegroup, group_mode="by_probe", in_place=False
234223

235224
return sub_recording
236225

237-
def set_probes(self, probe_or_probegroup, group_mode="by_probe", in_place=False):
238-
239-
warning_msg = (
240-
"`set_probes` is now a private function and the public function will be "
241-
"removed in 0.103.0. Please use `set_probe` or `set_probegroup` instead"
242-
)
243-
244-
warn(warning_msg, category=DeprecationWarning, stacklevel=2)
245-
246-
sub_recording = self._set_probes(
247-
probe_or_probegroup=probe_or_probegroup, group_mode=group_mode, in_place=in_place
248-
)
249-
250-
return sub_recording
251-
252226
def get_probe(self):
253227
probes = self.get_probes()
254228
assert len(probes) == 1, "there are several probe use .get_probes() or get_probegroup()"
@@ -441,25 +415,6 @@ def planarize(self, axes: str = "xy"):
441415

442416
return recording2d
443417

444-
# utils
445-
def channel_slice(self, channel_ids, renamed_channel_ids=None):
446-
"""
447-
Returns a new object with sliced channels.
448-
449-
Parameters
450-
----------
451-
channel_ids : np.array or list
452-
The list of channels to keep
453-
renamed_channel_ids : np.array or list, default: None
454-
A list of renamed channels
455-
456-
Returns
457-
-------
458-
BaseRecordingSnippets
459-
The object with sliced channels
460-
"""
461-
return self._channel_slice(channel_ids, renamed_channel_ids=renamed_channel_ids)
462-
463418
def select_channels(self, channel_ids):
464419
"""
465420
Returns a new object with sliced channels.

0 commit comments

Comments
 (0)