Commit

Merge branch 'sc2_recording_slices' of github.com:yger/spikeinterface into sc2_recording_slices
yger committed Dec 19, 2024
2 parents b9af739 + 06711d1 commit d18c2ef
Showing 8 changed files with 22 additions and 29 deletions.
35 changes: 12 additions & 23 deletions .github/actions/build-test-environment/action.yml
@@ -1,41 +1,20 @@
name: Install packages
description: This action installs the package and its dependencies for testing

inputs:
python-version:
description: 'Python version to set up'
required: false
os:
description: 'Operating system to set up'
required: false

runs:
using: "composite"
steps:
- name: Install dependencies
run: |
sudo apt install git
git config --global user.email "CI@example.com"
git config --global user.name "CI Almighty"
python -m venv ${{ github.workspace }}/test_env # Environment used in the caching step
python -m pip install -U pip # Official recommended way
source ${{ github.workspace }}/test_env/bin/activate
pip install tabulate # This produces summaries at the end
pip install -e .[test,extractors,streaming_extractors,test_extractors,full]
shell: bash
- name: Force installation of latest dev from key-packages when running dev (not release)
run: |
source ${{ github.workspace }}/test_env/bin/activate
spikeinterface_is_dev_version=$(python -c "import spikeinterface; print(spikeinterface.DEV_MODE)")
if [ $spikeinterface_is_dev_version = "True" ]; then
echo "Running spikeinterface dev version"
pip install --no-cache-dir git+https://github.com/NeuralEnsemble/python-neo
pip install --no-cache-dir git+https://github.com/SpikeInterface/probeinterface
fi
echo "Running tests for release, using pyproject.toml versions of neo and probeinterface"
- name: Install git-annex
shell: bash
- name: git-annex install
run: |
pip install datalad-installer
wget https://downloads.kitenet.net/git-annex/linux/current/git-annex-standalone-amd64.tar.gz
mkdir /home/runner/work/installation
mv git-annex-standalone-amd64.tar.gz /home/runner/work/installation/
@@ -44,4 +23,14 @@ runs:
tar xvzf git-annex-standalone-amd64.tar.gz
echo "$(pwd)/git-annex.linux" >> $GITHUB_PATH
cd $workdir
git config --global filter.annex.process "git-annex filter-process" # recommended for efficiency
- name: Force installation of latest dev from key-packages when running dev (not release)
run: |
spikeinterface_is_dev_version=$(python -c "import spikeinterface; print(spikeinterface.DEV_MODE)")
if [ $spikeinterface_is_dev_version = "True" ]; then
echo "Running spikeinterface dev version"
pip install --no-cache-dir git+https://github.com/NeuralEnsemble/python-neo
pip install --no-cache-dir git+https://github.com/SpikeInterface/probeinterface
fi
echo "Running tests for release, using pyproject.toml versions of neo and probeinterface"
shell: bash
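
For reference, the dev-install logic of the step above corresponds roughly to this Python sketch (illustrative only, not part of the commit; it assumes spikeinterface is importable and pip is on the PATH):

import subprocess
import spikeinterface

if spikeinterface.DEV_MODE:  # True for a .dev install, False for a release
    print("Running spikeinterface dev version")
    for url in (
        "git+https://github.com/NeuralEnsemble/python-neo",
        "git+https://github.com/SpikeInterface/probeinterface",
    ):
        subprocess.run(["pip", "install", "--no-cache-dir", url], check=True)
else:
    print("Running tests for release, using pyproject.toml versions of neo and probeinterface")
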
2 changes: 1 addition & 1 deletion .github/workflows/all-tests.yml
@@ -47,7 +47,7 @@ jobs:
echo "$file was changed"
done
- name: Set testing environment # This decides which tests are run and whether to install especial dependencies
- name: Set testing environment # This decides which tests are run and whether to install special dependencies
shell: bash
run: |
changed_files="${{ steps.changed-files.outputs.all_changed_files }}"
1 change: 0 additions & 1 deletion .github/workflows/full-test-with-codecov.yml
@@ -45,7 +45,6 @@ jobs:
env:
HDF5_PLUGIN_PATH: ${{ github.workspace }}/hdf5_plugin_path_maxwell
run: |
source ${{ github.workspace }}/test_env/bin/activate
pytest -m "not sorters_external" --cov=./ --cov-report xml:./coverage.xml -vv -ra --durations=0 | tee report_full.txt; test ${PIPESTATUS[0]} -eq 0 || exit 1
echo "# Timing profile of full tests" >> $GITHUB_STEP_SUMMARY
python ./.github/scripts/build_job_summary.py report_full.txt >> $GITHUB_STEP_SUMMARY
2 changes: 1 addition & 1 deletion pyproject.toml
@@ -73,7 +73,7 @@ extractors = [
]

streaming_extractors = [
"ONE-api>=2.7.0", # alf sorter and streaming IBL
"ONE-api>=2.7.0,<2.10.0", # alf sorter and streaming IBL
"ibllib>=2.36.0", # streaming IBL
# Following dependencies are for streaming with nwb files
"pynwb>=2.6.0",
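
One quick, illustrative way to check that an installed ONE-api release satisfies the new upper bound (assumes the packaging and ONE-api distributions are present; not part of the commit):

from importlib.metadata import version
from packaging.specifiers import SpecifierSet

# Compare the installed ONE-api release against the pin added in pyproject.toml.
installed = version("ONE-api")
print(installed, installed in SpecifierSet(">=2.7.0,<2.10.0"))
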
4 changes: 4 additions & 0 deletions src/spikeinterface/extractors/nwbextractors.py
@@ -599,6 +599,8 @@ def __init__(
else:
gains, offsets, locations, groups = self._fetch_main_properties_backend()
self.extra_requirements.append("h5py")
if stream_mode is not None:
self.extra_requirements.append(stream_mode)
self.set_channel_gains(gains)
self.set_channel_offsets(offsets)
if locations is not None:
@@ -1100,6 +1102,8 @@ def __init__(
for property_name, property_values in properties.items():
values = [x.decode("utf-8") if isinstance(x, bytes) else x for x in property_values]
self.set_property(property_name, values)
if stream_mode is not None:
self.extra_requirements.append(stream_mode)

if stream_mode is None and file_path is not None:
file_path = str(Path(file_path).resolve())
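
Illustrative usage of the change above: when an NWB recording is opened with a streaming backend, that backend name should now appear in extra_requirements. The S3 URL and the remfile backend below are placeholders, not values from the commit.

from spikeinterface.extractors import NwbRecordingExtractor

# Placeholder remote file; any stream_mode supported by the extractor behaves the same way.
recording = NwbRecordingExtractor("s3://example-bucket/session.nwb", stream_mode="remfile")
print(recording.extra_requirements)  # expected to include "remfile" alongside "h5py"
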
2 changes: 1 addition & 1 deletion src/spikeinterface/postprocessing/unit_locations.py
@@ -26,7 +26,7 @@ class ComputeUnitLocations(AnalyzerExtension):
----------
sorting_analyzer : SortingAnalyzer
A SortingAnalyzer object
method : "center_of_mass" | "monopolar_triangulation" | "grid_convolution", default: "center_of_mass"
method : "monopolar_triangulation" | "center_of_mass" | "grid_convolution", default: "monopolar_triangulation"
The method to use for localization
**method_kwargs : dict, default: {}
Kwargs which are passed to the method function. These can be found in the docstrings of `compute_center_of_mass`, `compute_grid_convolution` and `compute_monopolar_triangulation`.
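
Hedged usage sketch for the new default (the toy-data helpers are only there to make the snippet self-contained):

from spikeinterface.core import generate_ground_truth_recording, create_sorting_analyzer

# Toy recording/sorting pair; any SortingAnalyzer with templates computed works the same way.
recording, sorting = generate_ground_truth_recording(durations=[10.0], num_units=5, seed=0)
analyzer = create_sorting_analyzer(sorting, recording)
analyzer.compute(["random_spikes", "templates"])

analyzer.compute("unit_locations")                           # now monopolar triangulation
analyzer.compute("unit_locations", method="center_of_mass")  # previous default, still available
print(analyzer.get_extension("unit_locations").get_data().shape)
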
3 changes: 2 additions & 1 deletion src/spikeinterface/preprocessing/silence_periods.py
@@ -97,7 +97,8 @@ def __init__(self, recording, list_periods, mode="zeros", noise_levels=None, see
rec_segment = SilencedPeriodsRecordingSegment(parent_segment, periods, mode, noise_generator, seg_index)
self.add_recording_segment(rec_segment)

self._kwargs = dict(recording=recording, list_periods=list_periods, mode=mode, noise_generator=noise_generator)
self._kwargs = dict(recording=recording, list_periods=list_periods, mode=mode, seed=seed)
self._kwargs.update(random_chunk_kwargs)


class SilencedPeriodsRecordingSegment(BasePreprocessorSegment):
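
Hedged usage sketch of the fix: only seed and the random-chunk kwargs are stored now, so the silenced recording can be serialized and re-instantiated. The periods below are arbitrary examples.

from spikeinterface.core import generate_recording
from spikeinterface.preprocessing import silence_periods

recording = generate_recording(durations=[10.0], seed=0)
fs = recording.get_sampling_frequency()
# One list of (start_frame, end_frame) periods per segment.
list_periods = [[(int(1.0 * fs), int(1.5 * fs)), (int(4.0 * fs), int(4.2 * fs))]]
silenced = silence_periods(recording, list_periods, mode="noise", seed=42)
print(silenced.to_dict() is not None)  # kwargs round-trip without the noise generator object
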
2 changes: 1 addition & 1 deletion src/spikeinterface/widgets/unit_waveforms.py
@@ -565,7 +565,7 @@ def _update_plot(self, change):
channel_locations = self.sorting_analyzer.get_channel_locations()
else:
unit_indices = [list(self.templates.unit_ids).index(unit_id) for unit_id in unit_ids]
templates = self.templates.templates_array[unit_indices]
templates = self.templates.get_dense_templates()[unit_indices]
templates_shadings = None
channel_locations = self.templates.get_channel_locations()

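
Hedged sketch of why the widget densifies first: for a sparse Templates object, templates_array holds only each unit's local channels, whereas get_dense_templates() always returns an array of shape (num_units, num_samples, num_channels), which is what per-channel plotting expects. The get_data(outputs="Templates") accessor below is an assumption about the templates extension API; the rest mirrors the changed lines.

from spikeinterface.core import generate_ground_truth_recording, create_sorting_analyzer

recording, sorting = generate_ground_truth_recording(durations=[5.0], num_channels=8, num_units=4, seed=0)
analyzer = create_sorting_analyzer(sorting, recording, sparse=True)
analyzer.compute(["random_spikes", "templates"])
templates = analyzer.get_extension("templates").get_data(outputs="Templates")  # assumed accessor

unit_ids = templates.unit_ids[:2]
unit_indices = [list(templates.unit_ids).index(unit_id) for unit_id in unit_ids]
dense = templates.get_dense_templates()[unit_indices]  # dense indexing works even when sparse
print(dense.shape)
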
