Merge branch 'release/2.29.0'
oliche committed Jan 26, 2024
2 parents 68c69df + 043441f commit f1d3a10
Showing 7 changed files with 86 additions and 9 deletions.
19 changes: 19 additions & 0 deletions .github/workflows/ibllib_ci.yml
@@ -10,9 +10,28 @@ on:
branches: [ master, develop ]

jobs:
detect-outstanding-prs: # Don't run builds for push events if associated with PR
runs-on: ubuntu-latest
env:
GH_TOKEN: ${{ github.token }}
outputs:
abort: ${{ steps.debounce.outputs.abort }}
steps:
- name: Debounce
if: github.event_name == 'push'
id: debounce
run: |
pr_branches=$(gh pr list --json headRefName --repo $GITHUB_REPOSITORY)
if [[ $(echo "$pr_branches" | jq -r --arg GITHUB_REF '.[].headRefName | select(. == $GITHUB_REF)') ]]; then
echo "This push is associated with a pull request. Skipping the job."
echo "abort=true" >> "$GITHUB_OUTPUT"
fi
build:
name: build (${{ matrix.python-version }}, ${{ matrix.os }})
runs-on: ${{ matrix.os }}
needs: debounce
if: needs.debounce.outputs.abort != 'true'
strategy:
fail-fast: false # Whether to stop execution of other instances
max-parallel: 2
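
Aside (not part of the diff): the debounce step above skips push builds for branches that already have an open pull request. Below is a rough stand-alone Python sketch of the same check, assuming the GitHub CLI (gh) is installed and authenticated; the fallback repository and branch values are placeholders.

# Hypothetical re-implementation of the workflow's debounce check in Python.
import json
import os
import subprocess


def push_has_open_pr(repo: str, branch: str) -> bool:
    """Return True if `branch` is the head of an open pull request in `repo`."""
    out = subprocess.run(
        ['gh', 'pr', 'list', '--json', 'headRefName', '--repo', repo],
        check=True, capture_output=True, text=True,
    ).stdout
    head_refs = {pr['headRefName'] for pr in json.loads(out)}
    return branch in head_refs


if __name__ == '__main__':
    # GITHUB_REPOSITORY and GITHUB_REF_NAME are provided by the Actions runner
    repo = os.environ.get('GITHUB_REPOSITORY', 'int-brain-lab/ibllib')
    branch = os.environ.get('GITHUB_REF_NAME', 'develop')
    if push_has_open_pr(repo, branch):
        print('This push is associated with a pull request. Skipping the job.')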
51 changes: 50 additions & 1 deletion brainbox/io/one.py
@@ -28,6 +28,7 @@
from ibllib.plots import vertical_lines

import brainbox.plot
from brainbox.io.spikeglx import Streamer
from brainbox.ephys_plots import plot_brain_regions
from brainbox.metrics.single_units import quick_unit_metrics
from brainbox.behavior.wheel import interpolate_position, velocity_filtered
@@ -793,6 +794,7 @@ class SpikeSortingLoader:
datasets: list = None # list of all datasets belonging to the session
# the following properties are the outcome of a reading function
files: dict = None
raw_data_files: list = None # list of raw ap and lf files corresponding to the recording
collection: str = ''
histology: str = '' # 'alf', 'resolved', 'aligned' or 'traced'
spike_sorter: str = 'pykilosort'
@@ -829,6 +831,7 @@ def __post_init__(self):
if self.atlas is None:
self.atlas = AllenAtlas()
self.files = {}
self.raw_data_files = []

def _load_object(self, *args, **kwargs):
"""
@@ -881,6 +884,11 @@ def load_spike_sorting_object(self, obj, *args, **kwargs):
self.download_spike_sorting_object(obj, *args, **kwargs)
return self._load_object(self.files[obj])

def get_version(self, spike_sorter='pykilosort'):
collection = self._get_spike_sorting_collection(spike_sorter=spike_sorter)
dset = self.one.alyx.rest('datasets', 'list', session=self.eid, collection=collection, name='spikes.times.npy')
return dset[0]['version'] if len(dset) else 'unknown'

def download_spike_sorting_object(self, obj, spike_sorter='pykilosort', dataset_types=None, collection=None,
missing='raise', **kwargs):
"""
@@ -919,6 +927,46 @@ def download_spike_sorting(self, **kwargs):
self.download_spike_sorting_object(obj=obj, **kwargs)
self.spike_sorting_path = self.files['spikes'][0].parent

def download_raw_electrophysiology(self, band='ap'):
"""
Downloads the raw electrophysiology data files to the local disk.
:param band: "ap" (default) or "lf" for the LFP band
:return: list of full paths to the raw data files (ch, meta and cbin files)
"""
raw_data_files = []
for suffix in [f'*.{band}.ch', f'*.{band}.meta', f'*.{band}.cbin']:
try:
# FIXME: this will fail if multiple LFP segments are found
raw_data_files.append(self.one.load_dataset(
self.eid,
download_only=True,
collection=f'raw_ephys_data/{self.pname}',
dataset=suffix,
check_hash=False,
))
except ALFObjectNotFound:
_logger.debug(f"{self.session_path} can't locate raw data collection raw_ephys_data/{self.pname}, file {suffix}")
self.raw_data_files = list(set(self.raw_data_files + raw_data_files))
return raw_data_files

def raw_electrophysiology(self, stream=True, band='ap', **kwargs):
"""
Returns a reader for the raw electrophysiology data.
By default this is a streamer object; if stream is False, the raw data is downloaded to disk if necessary
and a spikeglx.Reader on the local cbin file is returned instead.
:param stream: if True (default), return a Streamer object; if False, return a spikeglx.Reader on local files
:param band: "ap" (default) or "lf" for the LFP band
:param kwargs: additional keyword arguments passed to the Streamer constructor
:return: Streamer or spikeglx.Reader instance (None if no cbin file is found when stream is False)
"""
if stream:
return Streamer(pid=self.pid, one=self.one, typ=band, **kwargs)
else:
raw_data_files = self.download_raw_electrophysiology(band=band)
cbin_file = next(filter(lambda f: f.name.endswith(f'.{band}.cbin'), raw_data_files), None)
if cbin_file is not None:
return spikeglx.Reader(cbin_file)

def load_channels(self, **kwargs):
"""
Loads channels
@@ -1282,7 +1330,8 @@ def load_trials(self):
"""
# itiDuration frequently has a mismatched dimension, and we don't need it, exclude using regex
self.one.wildcards = False
self.trials = self.one.load_object(self.eid, 'trials', collection='alf', attribute=r'(?!itiDuration).*').to_df()
self.trials = self.one.load_object(
self.eid, 'trials', collection='alf', attribute=r'(?!itiDuration).*').to_df()
self.one.wildcards = True
self.data_info.loc[self.data_info['name'] == 'trials', 'is_loaded'] = True
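
Aside (not part of the diff): a minimal usage sketch of the raw-data loaders added to SpikeSortingLoader in this file, assuming ONE credentials are configured; the insertion id below is a placeholder.

# Hypothetical usage of the new SpikeSortingLoader raw electrophysiology methods.
from one.api import ONE
from brainbox.io.one import SpikeSortingLoader

one = ONE()
ssl = SpikeSortingLoader(pid='00000000-0000-0000-0000-000000000000', one=one)  # placeholder insertion id

# Stream the action-potential band from the remote store (default behaviour)
sr_ap = ssl.raw_electrophysiology(band='ap', stream=True)

# Or download the LFP band locally and get a spikeglx.Reader on the cbin file
sr_lf = ssl.raw_electrophysiology(band='lf', stream=False)

# Spike sorter version recorded for this insertion, 'unknown' if not found
print(ssl.get_version())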

3 changes: 2 additions & 1 deletion ibllib/__init__.py
@@ -2,7 +2,8 @@
import logging
import warnings

__version__ = '2.28.2'
__version__ = '2.29.0'

warnings.filterwarnings('always', category=DeprecationWarning, module='ibllib')

# if this becomes a full-blown library we should let the logging configuration to the discretion of the dev
2 changes: 1 addition & 1 deletion ibllib/atlas/genes.py
@@ -1,6 +1,6 @@
"""Gene expression maps."""

from iblatlas import genes
from iblatlas.genomics import genes
from ibllib.atlas import deprecated_decorator


5 changes: 4 additions & 1 deletion ibllib/io/session_params.py
@@ -318,7 +318,10 @@ def get_task_protocol(sess_params, task_collection=None):
"""
collections = get_collections({'tasks': sess_params.get('tasks')})
if task_collection is None:
return set(collections.keys()) # Return all protocols
if len(collections) == 0:
return None
else:
return set(collections.keys()) # Return all protocols
else:
return next((k for k, v in collections.items() if v == task_collection), None)
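
Aside (not part of the diff): with this change an experiment description without task entries yields None rather than an empty set. A short illustrative sketch follows; the description dict is a made-up example of the experiment description format.

# Hypothetical illustration of get_task_protocol after this change.
from ibllib.io import session_params

description = {
    'tasks': [
        {'_iblrig_tasks_ephysChoiceWorld': {'collection': 'raw_task_data_00'}},
    ],
}

print(session_params.get_task_protocol(description))                      # all protocols, e.g. {'_iblrig_tasks_ephysChoiceWorld'}
print(session_params.get_task_protocol(description, 'raw_task_data_00'))  # protocol name matching the collection
print(session_params.get_task_protocol({'tasks': None}))                  # no tasks defined -> None (previously an empty set)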

5 changes: 5 additions & 0 deletions release_notes.md
@@ -1,3 +1,8 @@
## Release Notes 2.29

### features
- Added raw data loaders and synchronisation tools in brainbox.io.one.SpikeSortingLoader, method `ssl.raw_electrophysiology()`

## Release Notes 2.28

### features
10 changes: 5 additions & 5 deletions requirements.txt
@@ -5,13 +5,11 @@ flake8>=3.7.8
globus-sdk
graphviz
matplotlib>=3.0.3
mtscomp>=1.0.1
numba>=0.56
numpy>=1.18
nptdms
opencv-python-headless
pandas
phylib>=2.4
pyarrow
pynrrd>=0.4.0
pytest
@@ -23,11 +21,13 @@ sparse
seaborn>=0.9.0
tqdm>=4.32.1
# ibl libraries
iblatlas>=0.4.0
ibl-neuropixel>=0.8.1
iblutil>=1.7.0
labcams # widefield extractor
ONE-api>=2.5
mtscomp>=1.0.1
ONE-api>=2.6
phylib>=2.4
psychofit
slidingRP>=1.0.0 # steinmetz lab refractory period metrics
wfield==0.3.7 # widefield extractor frozen for now (2023/07/15) until Joao fixes latest version
psychofit
iblatlas
