Skip to content

Commit f62a600

Browse files
authored
Merge branch 'main' into fix-export-to-phy-unitrefine
2 parents d091a1b + c52424f commit f62a600

File tree

19 files changed

+369
-113
lines changed

19 files changed

+369
-113
lines changed

doc/development/development.rst

Lines changed: 2 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -433,12 +433,13 @@ Where the start date is the date of the last release and the end date is the cur
433433

434434
The date of the last release can be found on `PyPI <https://pypi.org/project/spikeinterface/>`_.
435435

436-
437436
As a specific example:
437+
438438
.. code-block:: bash
439439
440440
bash auto-release-notes.sh 2025-02-19 2025-03-24
441441
442+
* Add the release to `doc/whatisnew.rst`, by adding it to the list of releases and providing a short description of the release.
442443
* Finish the release notes and merge
443444
* Locally tag the main branch with the newly merged release notes with the new version
444445
* Push the tag to the remote repository which will trigger the release action (.github/workflows/publish-to-pypi.yml)

doc/releases/0.103.1.rst

Lines changed: 97 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,97 @@
1+
.. _release0.103.1:
2+
3+
SpikeInterface 0.103.1 release notes
4+
------------------------------------
5+
6+
November 3rd 2025
7+
8+
Minor release with bug fixes
9+
10+
11+
core:
12+
13+
* faster write_binary_recording() (#4171)
14+
* Add RMS method to `get_noise_levels` (#4130)
15+
16+
extractors:
17+
18+
* Update `get_neuropixels_sample_shifts_from_probe` to use `adc_sample_order` (#4184)
19+
* Add `experiment_name` argument to `OpenEphysBinaryRecordingExtractor` (#4177)
20+
* Updates for probeinterface 0.3.0 (#4092)
21+
22+
sorters:
23+
24+
* Add gpu requirements to rtsort (#4143)
25+
26+
postprocessing:
27+
28+
* Patch for memory usage in `template_similarity` (#4152)
29+
30+
qualitymetrics:
31+
32+
* Alter how quality metrics check extension dependencies (#4080)
33+
34+
curation:
35+
36+
* Future-proof hyperparameter search space for auto-curation (#4174)
37+
38+
widgets:
39+
40+
* Metric list in plot (#4137)
41+
* Fix `plot_traces` with `t_start` and no times (#4135)
42+
* Fix `plot_amplitudes` for sortingview (#4097)
43+
44+
generators:
45+
46+
* Fix bug in waveform generation + adapt parameters (#4167)
47+
48+
sortingcomponents:
49+
50+
* Reducing memory footprint for Overlaps during matching (#4157)
51+
* Big clean in components and reorganize in folder (#4140)
52+
* Improve tdc-clustering with a merging step (#4122)
53+
* Implement isosplit6 into spikeinterface in pure numpy/numba (#4113)
54+
* Improvement of TDC2, SC2 and ISOSPLIT (#4182)
55+
56+
motion correction:
57+
58+
* dredge: Do more things in place with the larger matrices (#4119)
59+
60+
documentation:
61+
62+
* Improve docs for curation module and model (#4138)
63+
* Fix typos and buggy code in modules docs (#4105)
64+
* Improve assertion error message of SortingAnalyzer job kwargs (#4178)
65+
66+
continuous integration:
67+
68+
* Check install ci - use uv (#4123)
69+
70+
packaging:
71+
72+
* Update deprecated cuda import (#4111)
73+
74+
testing:
75+
76+
* Fix size comparison in sorting tools test case (#4112)
77+
* Fix ks4 tests (#4187)
78+
79+
deprecations:
80+
81+
* Update deprecated cuda import (#4111)
82+
* Deprecated `spikeinterface.comparison.hybrid` module (#4093)
83+
* More return_scaled > return_in_uV (#4090)
84+
85+
Contributors:
86+
87+
* @alejoe91
88+
* @chrishalcrow
89+
* @cwindolf
90+
* @emmanuel-ferdman
91+
* @h-mayorquin
92+
* @lochhh
93+
* @nwatters01
94+
* @samuelgarcia
95+
* @yger
96+
* @zm711
97+
* @MGAMZ

doc/whatisnew.rst

Lines changed: 6 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -8,6 +8,7 @@ Release notes
88
.. toctree::
99
:maxdepth: 1
1010

11+
releases/0.103.1.rst
1112
releases/0.103.0.rst
1213
releases/0.102.3.rst
1314
releases/0.102.2.rst
@@ -50,6 +51,11 @@ Release notes
5051
releases/0.9.1.rst
5152

5253

54+
Version 0.103.1
55+
===============
56+
57+
* Minor release with bug fixes
58+
5359
Version 0.103.0
5460
===============
5561

pyproject.toml

Lines changed: 10 additions & 10 deletions
Original file line numberDiff line numberDiff line change
@@ -25,8 +25,8 @@ dependencies = [
2525
"threadpoolctl>=3.0.0",
2626
"tqdm",
2727
"zarr>=2.18,<3",
28-
"neo>=0.14.1",
29-
"probeinterface>=0.3.0",
28+
"neo>=0.14.3",
29+
"probeinterface>=0.3.1",
3030
"packaging",
3131
"pydantic",
3232
"numcodecs<0.16.0", # For supporting zarr < 3
@@ -127,8 +127,8 @@ test_core = [
127127

128128
# for github test : probeinterface and neo from master
129129
# for release we need pypi, so this need to be commented
130-
"probeinterface @ git+https://github.com/SpikeInterface/probeinterface.git",
131-
"neo @ git+https://github.com/NeuralEnsemble/python-neo.git",
130+
# "probeinterface @ git+https://github.com/SpikeInterface/probeinterface.git",
131+
# "neo @ git+https://github.com/NeuralEnsemble/python-neo.git",
132132

133133
# for slurm jobs,
134134
"pytest-mock"
@@ -139,8 +139,8 @@ test_extractors = [
139139
"pooch>=1.8.2",
140140
"datalad>=1.0.2",
141141
# Commenting out for release
142-
"probeinterface @ git+https://github.com/SpikeInterface/probeinterface.git",
143-
"neo @ git+https://github.com/NeuralEnsemble/python-neo.git",
142+
# "probeinterface @ git+https://github.com/SpikeInterface/probeinterface.git",
143+
# "neo @ git+https://github.com/NeuralEnsemble/python-neo.git",
144144
]
145145

146146
test_preprocessing = [
@@ -185,8 +185,8 @@ test = [
185185

186186
# for github test : probeinterface and neo from master
187187
# for release we need pypi, so this need to be commented
188-
"probeinterface @ git+https://github.com/SpikeInterface/probeinterface.git",
189-
"neo @ git+https://github.com/NeuralEnsemble/python-neo.git",
188+
# "probeinterface @ git+https://github.com/SpikeInterface/probeinterface.git",
189+
# "neo @ git+https://github.com/NeuralEnsemble/python-neo.git",
190190

191191
# for slurm jobs
192192
"pytest-mock",
@@ -213,8 +213,8 @@ docs = [
213213
"huggingface_hub", # For automated curation
214214

215215
# for release we need pypi, so this needs to be commented
216-
"probeinterface @ git+https://github.com/SpikeInterface/probeinterface.git", # We always build from the latest version
217-
"neo @ git+https://github.com/NeuralEnsemble/python-neo.git", # We always build from the latest version
216+
# "probeinterface @ git+https://github.com/SpikeInterface/probeinterface.git", # We always build from the latest version
217+
# "neo @ git+https://github.com/NeuralEnsemble/python-neo.git", # We always build from the latest version
218218
]
219219

220220
dev = [

src/spikeinterface/__init__.py

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -27,5 +27,5 @@
2727
# This flag must be set to False for release
2828
# This avoids using versioning that contains ".dev0" (and this is a better choice)
2929
# This is mainly useful when using run_sorter in a container and spikeinterface install
30-
DEV_MODE = True
31-
# DEV_MODE = False
30+
# DEV_MODE = True
31+
DEV_MODE = False

src/spikeinterface/benchmark/benchmark_plot_tools.py

Lines changed: 3 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -478,8 +478,9 @@ def _plot_performances_vs_metric(
478478
.get_performance()[performance_name]
479479
.to_numpy(dtype="float64")
480480
)
481-
all_xs.append(x)
482-
all_ys.append(y)
481+
mask = ~np.isnan(x) & ~np.isnan(y)
482+
all_xs.append(x[mask])
483+
all_ys.append(y[mask])
483484

484485
if with_sigmoid_fit:
485486
max_snr = max(np.max(x) for x in all_xs)

src/spikeinterface/postprocessing/template_similarity.py

Lines changed: 13 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -94,7 +94,7 @@ def _merge_extension_data(
9494
new_sorting_analyzer.sparsity.mask[keep, :], new_unit_ids, new_sorting_analyzer.channel_ids
9595
)
9696

97-
new_similarity = compute_similarity_with_templates_array(
97+
new_similarity, _ = compute_similarity_with_templates_array(
9898
new_templates_array,
9999
all_templates_array,
100100
method=self.params["method"],
@@ -146,7 +146,7 @@ def _split_extension_data(self, split_units, new_unit_ids, new_sorting_analyzer,
146146
new_sorting_analyzer.sparsity.mask[keep, :], new_unit_ids_f, new_sorting_analyzer.channel_ids
147147
)
148148

149-
new_similarity = compute_similarity_with_templates_array(
149+
new_similarity, _ = compute_similarity_with_templates_array(
150150
new_templates_array,
151151
all_templates_array,
152152
method=self.params["method"],
@@ -188,7 +188,7 @@ def _run(self, verbose=False):
188188
self.sorting_analyzer, return_in_uV=self.sorting_analyzer.return_in_uV
189189
)
190190
sparsity = self.sorting_analyzer.sparsity
191-
similarity = compute_similarity_with_templates_array(
191+
similarity, _ = compute_similarity_with_templates_array(
192192
templates_array,
193193
templates_array,
194194
method=self.params["method"],
@@ -393,7 +393,13 @@ def get_overlapping_mask_for_one_template(template_index, sparsity, other_sparsi
393393

394394

395395
def compute_similarity_with_templates_array(
396-
templates_array, other_templates_array, method, support="union", num_shifts=0, sparsity=None, other_sparsity=None
396+
templates_array,
397+
other_templates_array,
398+
method,
399+
support="union",
400+
num_shifts=0,
401+
sparsity=None,
402+
other_sparsity=None,
397403
):
398404

399405
if method == "cosine_similarity":
@@ -432,10 +438,11 @@ def compute_similarity_with_templates_array(
432438
templates_array, other_templates_array, num_shifts, method, sparsity_mask, other_sparsity_mask, support=support
433439
)
434440

441+
lags = np.argmin(distances, axis=0) - num_shifts
435442
distances = np.min(distances, axis=0)
436443
similarity = 1 - distances
437444

438-
return similarity
445+
return similarity, lags
439446

440447

441448
def compute_template_similarity_by_pair(
@@ -445,7 +452,7 @@ def compute_template_similarity_by_pair(
445452
templates_array_2 = get_dense_templates_array(sorting_analyzer_2, return_in_uV=True)
446453
sparsity_1 = sorting_analyzer_1.sparsity
447454
sparsity_2 = sorting_analyzer_2.sparsity
448-
similarity = compute_similarity_with_templates_array(
455+
similarity, _ = compute_similarity_with_templates_array(
449456
templates_array_1,
450457
templates_array_2,
451458
method=method,

src/spikeinterface/postprocessing/tests/test_template_similarity.py

Lines changed: 4 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -82,8 +82,9 @@ def test_compute_similarity_with_templates_array(params):
8282
templates_array = rng.random(size=(2, 20, 5))
8383
other_templates_array = rng.random(size=(4, 20, 5))
8484

85-
similarity = compute_similarity_with_templates_array(templates_array, other_templates_array, **params)
85+
similarity, lags = compute_similarity_with_templates_array(templates_array, other_templates_array, **params)
8686
print(similarity.shape)
87+
print(lags)
8788

8889

8990
pytest.mark.skipif(not HAVE_NUMBA, reason="Numba not available")
@@ -141,5 +142,5 @@ def test_equal_results_numba(params):
141142
test.cache_folder = Path("./cache_folder")
142143
test.test_extension(params=dict(method="l2"))
143144

144-
# params = dict(method="cosine", num_shifts=8)
145-
# test_compute_similarity_with_templates_array(params)
145+
params = dict(method="cosine", num_shifts=8)
146+
test_compute_similarity_with_templates_array(params)

0 commit comments

Comments
 (0)