Skip to content

Commit 18bb2f7

Browse files
committed
fix conflicts
2 parents 55adef6 + acf848f commit 18bb2f7

File tree

170 files changed

+9100
-1465
lines changed

Some content is hidden

Large Commits have some content hidden by default. Use the searchbox below for content that may be hidden.

170 files changed

+9100
-1465
lines changed

.github/actions/build-test-environment/action.yml

Lines changed: 1 addition & 8 deletions
Original file line numberDiff line numberDiff line change
@@ -15,14 +15,7 @@ runs:
1515
shell: bash
1616
run: |
1717
pip install datalad-installer
18-
wget https://downloads.kitenet.net/git-annex/linux/current/git-annex-standalone-amd64.tar.gz
19-
mkdir /home/runner/work/installation
20-
mv git-annex-standalone-amd64.tar.gz /home/runner/work/installation/
21-
workdir=$(pwd)
22-
cd /home/runner/work/installation
23-
tar xvzf git-annex-standalone-amd64.tar.gz
24-
echo "$(pwd)/git-annex.linux" >> $GITHUB_PATH
25-
cd $workdir
18+
datalad-installer --sudo ok git-annex --method datalad/packages
2619
git config --global filter.annex.process "git-annex filter-process" # recommended for efficiency
2720
- name: Force installation of latest dev from key-packages when running dev (not release)
2821
run: |

.github/scripts/test_kilosort4_ci.py

Lines changed: 72 additions & 33 deletions
Original file line numberDiff line numberDiff line change
@@ -70,7 +70,6 @@
7070
"nearest_chans": 8,
7171
"nearest_templates": 35,
7272
"max_channel_distance": 5,
73-
"templates_from_data": False,
7473
"n_templates": 10,
7574
"n_pcs": 3,
7675
"Th_single_ch": 4,
@@ -109,6 +108,14 @@
109108
# max_peels is not affecting the results in this short dataset
110109
PARAMETERS_NOT_AFFECTING_RESULTS.append("max_peels")
111110

111+
if parse(kilosort.__version__) >= parse("4.0.33"):
112+
PARAMS_TO_TEST_DICT.update({"cluster_neighbors": 11})
113+
PARAMETERS_NOT_AFFECTING_RESULTS.append("cluster_neighbors")
114+
115+
if parse(kilosort.__version__) >= parse("4.0.37"):
116+
PARAMS_TO_TEST_DICT.update({"max_cluster_subset": 20})
117+
PARAMETERS_NOT_AFFECTING_RESULTS.append("max_cluster_subset")
118+
112119

113120
PARAMS_TO_TEST = list(PARAMS_TO_TEST_DICT.keys())
114121

@@ -178,11 +185,11 @@ def _save_ground_truth_recording(self, recording, tmp_path):
178185
"""
179186
paths = {
180187
"session_scope_tmp_path": tmp_path,
181-
"recording_path": tmp_path / "my_test_recording",
188+
"recording_path": tmp_path / "my_test_recording" / "traces_cached_seg0.raw",
182189
"probe_path": tmp_path / "my_test_probe.prb",
183190
}
184191

185-
recording.save(folder=paths["recording_path"], overwrite=True)
192+
recording.save(folder=paths["recording_path"].parent, overwrite=True)
186193

187194
probegroup = recording.get_probegroup()
188195
write_prb(paths["probe_path"].as_posix(), probegroup)
@@ -214,7 +221,7 @@ def test_default_settings_all_represented(self):
214221
tested_keys += additional_non_tested_keys
215222

216223
for param_key in DEFAULT_SETTINGS:
217-
if param_key not in ["n_chan_bin", "fs", "tmin", "tmax"]:
224+
if param_key not in ["n_chan_bin", "fs", "tmin", "tmax", "templates_from_data"]:
218225
assert param_key in tested_keys, f"param: {param_key} in DEFAULT SETTINGS but not tested."
219226

220227
def test_spikeinterface_defaults_against_kilsort(self):
@@ -234,8 +241,11 @@ def test_spikeinterface_defaults_against_kilsort(self):
234241

235242
# Testing Arguments ###
236243
def test_set_files_arguments(self):
244+
expected_arguments = ["settings", "filename", "probe", "probe_name", "data_dir", "results_dir", "bad_channels"]
245+
if parse(kilosort.__version__) >= parse("4.0.34"):
246+
expected_arguments += ["shank_idx"]
237247
self._check_arguments(
238-
set_files, ["settings", "filename", "probe", "probe_name", "data_dir", "results_dir", "bad_channels"]
248+
set_files, expected_arguments
239249
)
240250

241251
def test_initialize_ops_arguments(self):
@@ -248,6 +258,8 @@ def test_initialize_ops_arguments(self):
248258
"device",
249259
"save_preprocessed_copy",
250260
]
261+
if parse(kilosort.__version__) >= parse("4.0.37"):
262+
expected_arguments += ["gui_mode"]
251263

252264
self._check_arguments(
253265
initialize_ops,
@@ -533,33 +545,60 @@ def test_kilosort4_skip_preprocessing_correction(self, tmp_path, monkeypatch, pa
533545
kilosort_output_dir = tmp_path / "kilosort_output_dir"
534546
spikeinterface_output_dir = tmp_path / "spikeinterface_output_dir"
535547

536-
def monkeypatch_filter_function(self, X, ops=None, ibatch=None):
537-
"""
538-
This is a direct copy of the kilosort io.BinaryFiltered.filter
539-
function, with hp_filter and whitening matrix code sections, and
540-
comments removed. This is the easiest way to monkeypatch (tried a few approaches)
541-
"""
542-
if self.chan_map is not None:
543-
X = X[self.chan_map]
544-
545-
if self.invert_sign:
546-
X = X * -1
547-
548-
X = X - X.mean(1).unsqueeze(1)
549-
if self.do_CAR:
550-
X = X - torch.median(X, 0)[0]
551-
552-
if self.hp_filter is not None:
553-
pass
554-
555-
if self.artifact_threshold < np.inf:
556-
if torch.any(torch.abs(X) >= self.artifact_threshold):
557-
return torch.zeros_like(X)
558-
559-
if self.whiten_mat is not None:
560-
pass
561-
return X
562-
548+
if parse(kilosort.__version__) >= parse("4.0.33"):
549+
def monkeypatch_filter_function(self, X, ops=None, ibatch=None, skip_preproc=False):
550+
"""
551+
This is a direct copy of the kilosort io.BinaryFiltered.filter
552+
function, with hp_filter and whitening matrix code sections, and
553+
comments removed. This is the easiest way to monkeypatch (tried a few approaches)
554+
"""
555+
if self.chan_map is not None:
556+
X = X[self.chan_map]
557+
558+
if self.invert_sign:
559+
X = X * -1
560+
561+
X = X - X.mean(1).unsqueeze(1)
562+
if self.do_CAR:
563+
X = X - torch.median(X, 0)[0]
564+
565+
if self.hp_filter is not None:
566+
pass
567+
568+
if self.artifact_threshold < np.inf:
569+
if torch.any(torch.abs(X) >= self.artifact_threshold):
570+
return torch.zeros_like(X)
571+
572+
if self.whiten_mat is not None:
573+
pass
574+
return X
575+
else:
576+
def monkeypatch_filter_function(self, X, ops=None, ibatch=None):
577+
"""
578+
This is a direct copy of the kilosort io.BinaryFiltered.filter
579+
function, with hp_filter and whitening matrix code sections, and
580+
comments removed. This is the easiest way to monkeypatch (tried a few approaches)
581+
"""
582+
if self.chan_map is not None:
583+
X = X[self.chan_map]
584+
585+
if self.invert_sign:
586+
X = X * -1
587+
588+
X = X - X.mean(1).unsqueeze(1)
589+
if self.do_CAR:
590+
X = X - torch.median(X, 0)[0]
591+
592+
if self.hp_filter is not None:
593+
pass
594+
595+
if self.artifact_threshold < np.inf:
596+
if torch.any(torch.abs(X) >= self.artifact_threshold):
597+
return torch.zeros_like(X)
598+
599+
if self.whiten_mat is not None:
600+
pass
601+
return X
563602
monkeypatch.setattr("kilosort.io.BinaryFiltered.filter", monkeypatch_filter_function)
564603

565604
ks_settings, _, ks_format_probe = self._get_kilosort_native_settings(recording, paths, param_key, param_value)
@@ -620,7 +659,7 @@ def _get_kilosort_native_settings(self, recording, paths, param_key, param_value
620659
are through the function, these are split here.
621660
"""
622661
settings = {
623-
"data_dir": paths["recording_path"],
662+
"filename": paths["recording_path"],
624663
"n_chan_bin": recording.get_num_channels(),
625664
"fs": recording.get_sampling_frequency(),
626665
}

.github/workflows/all-tests.yml

Lines changed: 1 addition & 8 deletions
Original file line numberDiff line numberDiff line change
@@ -107,14 +107,7 @@ jobs:
107107
run: |
108108
pip install datalad-installer
109109
if [ ${{ runner.os }} = 'Linux' ]; then
110-
wget https://downloads.kitenet.net/git-annex/linux/current/git-annex-standalone-amd64.tar.gz
111-
mkdir /home/runner/work/installation
112-
mv git-annex-standalone-amd64.tar.gz /home/runner/work/installation/
113-
workdir=$(pwd)
114-
cd /home/runner/work/installation
115-
tar xvzf git-annex-standalone-amd64.tar.gz
116-
echo "$(pwd)/git-annex.linux" >> $GITHUB_PATH
117-
cd $workdir
110+
datalad-installer --sudo ok git-annex --method datalad/packages
118111
elif [ ${{ runner.os }} = 'macOS' ]; then
119112
datalad-installer --sudo ok git-annex --method brew
120113
elif [ ${{ runner.os }} = 'Windows' ]; then

README.md

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -134,4 +134,4 @@ If you find SpikeInterface useful in your research, please cite:
134134
```
135135

136136
Please also cite other relevant papers for the specific components you use.
137-
For a ful list of references, please check the [references](https://spikeinterface.readthedocs.io/en/latest/references.html) page.
137+
For a full list of references, please check the [references](https://spikeinterface.readthedocs.io/en/latest/references.html) page.

doc/api.rst

Lines changed: 2 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -153,6 +153,7 @@ Non-NEO-based
153153
.. autofunction:: toy_example
154154
.. autofunction:: read_tridesclous
155155
.. autofunction:: read_waveclus
156+
.. autofunction:: read_whitematter
156157
.. autofunction:: read_yass
157158

158159

@@ -336,6 +337,7 @@ spikeinterface.exporters
336337
.. automodule:: spikeinterface.exporters
337338

338339
.. autofunction:: export_to_phy
340+
.. autofunction:: export_to_ibl_gui
339341
.. autofunction:: export_report
340342

341343

doc/development/development.rst

Lines changed: 49 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -397,3 +397,52 @@ After this you need to add a block in `Install Sorters <https://github.com/Spike
397397
to describe your sorter.
398398

399399
Finally, make a pull request so we can review the code and incorporate into the sorters module of SpikeInterface!
400+
401+
402+
403+
How to make a release
404+
---------------------
405+
406+
Checklist
407+
^^^^^^^^^
408+
* pyproject.toml: check that the version is ahead of current release. Also, comment out the @ (git dependencies)
409+
* In the top level ``__init__`` (located at ``src/spikeinterface/__init__.py``) set ``DEV_MODE`` to ``False`` (this is used for the docker installations)
410+
* Create a new release note for the appropriate version in ``doc/releases/`` (named after the new version tag).
411+
412+
There can be large releases like:
413+
414+
``doc/releases/0.101.0.rst``
415+
416+
Which contain a section called "Main Changes" and minor releases which include only bug fixes like:
417+
418+
``doc/releases/0.101.2.rst``
419+
420+
To collect all the PRs and bug fixes we have a script in:
421+
``doc/scripts/``
422+
called ``auto-release-notes.sh``. Run it with ``bash auto-release-notes.sh`` and it will create the release notes for the module specific changes.
423+
424+
The first time you run the script, GitHub will guide you through an authorization process if you've not already done so.
425+
426+
The signature of the script is:
427+
428+
.. code-block:: bash
429+
430+
bash auto-release-notes.sh <start_date> <end_date>
431+
432+
Where the start date is the date of the last release and the end date is the current date. Dates are in YYYY-MM-DD format.
433+
434+
The date of the last release can be found on `PyPI <https://pypi.org/project/spikeinterface/>`_.
435+
436+
437+
As a specific example:
438+
.. code-block:: bash
439+
440+
bash auto-release-notes.sh 2025-02-19 2025-03-24
441+
442+
* Finish the release notes and merge
443+
* Locally tag the main branch with the newly merged release notes with the new version
444+
* Push the tag to the remote repository which will trigger the release action (.github/workflows/publish-to-pypi.yml)
445+
* Do an after-release `PR <https://github.com/SpikeInterface/spikeinterface/pull/3828/files>`_:
446+
- Uncomment the git installs in pyproject
447+
- Set ``DEV_MODE`` to ``True`` in the top level ``__init__`` (located at ``src/spikeinterface/__init__.py``)
448+
- Update the `pyproject.toml` version one patch ahead, or one minor version ahead if the next release will be a larger one.

doc/how_to/combine_recordings.rst

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -4,7 +4,7 @@ Combine recordings in SpikeInterface
44
In this tutorial we will walk through combining multiple recording objects. Sometimes this occurs due to hardware
55
settings (e.g. Intan software has a default setting of new files every 1 minute) or the experimenter decides to
66
split their recording into multiple files for different experimental conditions. If the probe has not been moved,
7-
however, then during sorting it would likely make sense to combine these individual reocrding objects into one
7+
however, then during sorting it would likely make sense to combine these individual recording objects into one
88
recording object.
99

1010
**Why Combine?**

0 commit comments

Comments
 (0)