6 changes: 3 additions & 3 deletions .github/workflows/ibllib_ci.yml
@@ -35,12 +35,12 @@ jobs:
       - name: Install deps
         run: |
           python -m pip install --upgrade pip
-          python -m pip install flake8 pytest flake8-docstrings
+          python -m pip install flake8 ruff flake8-docstrings
           pip install -r requirements.txt
           pip install -e .
-      - name: Flake8
+      - name: Flake8 & ruff
         run: |
-          python -m flake8
+          python -m ruff check .
           python -m flake8 --select D --ignore E ibllib/qc/camera.py ibllib/qc/task_metrics.py
       - name: Brainbox tests
         run: |
4 changes: 2 additions & 2 deletions brainbox/metrics/single_units.py
@@ -105,11 +105,11 @@ def unit_stability(units_b, units=None, feat_names=['amps'], dist='norm', test='
"""

# Get units.
if not (units is None): # we're using a subset of all units
if units is not None: # we're using a subset of all units
unit_list = list(units_b[feat_names[0]].keys())
# for each `feat` and unit in `unit_list`, remove unit from `units_b` if not in `units`
for feat in feat_names:
[units_b[feat].pop(unit) for unit in unit_list if not (int(unit) in units)]
[units_b[feat].pop(unit) for unit in unit_list if int(unit) not in units]
unit_list = list(units_b[feat_names[0]].keys()) # get new `unit_list` after removing units

# Initialize `p_vals` and `variances`.
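
Reviewer note: the two `brainbox` hunks are pure style fixes for pycodestyle E714 ("use `is not`") and E713 ("use `not in`"); behaviour is unchanged, and the same rewrite appears in the `brainbox/plot.py` hunk below. A minimal, self-contained sketch of the pattern, using made-up data rather than brainbox's real bunch objects:

```python
# Hypothetical data; `units_b` here is not brainbox's real bunch object.
units_b = {'amps': {'0': [1.0], '1': [2.0], '2': [3.0]}}
units = [0, 2]

if units is not None:  # equivalent to the old `if not (units is None):`
    unit_list = list(units_b['amps'].keys())
    # `int(unit) not in units` behaves exactly like `not (int(unit) in units)`,
    # it is simply the PEP 8 / linter-preferred spelling.
    [units_b['amps'].pop(unit) for unit in unit_list if int(unit) not in units]

assert list(units_b['amps']) == ['0', '2']
```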
4 changes: 2 additions & 2 deletions brainbox/plot.py
@@ -77,10 +77,10 @@ def feat_vars(units_b, units=None, feat_name='amps', dist='norm', test='ks', cma
     '''

     # Get units.
-    if not (units is None):  # we're using a subset of all units
+    if units is not None:  # we're using a subset of all units
         unit_list = list(units_b['depths'].keys())
         # For each unit in `unit_list`, remove unit from `units_b` if not in `units`.
-        [units_b['depths'].pop(unit) for unit in unit_list if not (int(unit) in units)]
+        [units_b['depths'].pop(unit) for unit in unit_list if int(unit) not in units]
         unit_list = list(units_b['depths'].keys())  # get new `unit_list` after removing unit

     # Calculate coefficients of variation for all units
8 changes: 2 additions & 6 deletions ibllib/__init__.py
@@ -1,18 +1,14 @@
"""Library implementing the International Brain Laboratory data pipeline."""
import logging
import warnings
import os

__version__ = '3.4.1'
warnings.filterwarnings('always', category=DeprecationWarning, module='ibllib')

# if this becomes a full-blown library we should let the logging configuration to the discretion of the dev
# who uses the library. However since it can also be provided as an app, the end-users should be provided
# with a useful default logging in standard output without messing with the complex python logging system
USE_LOGGING = True
#%(asctime)s,%(msecs)d
if USE_LOGGING:
if os.environ.get('IBLLIB_USE_LOGGING', '1').casefold() in ('1', 'true', 'yes'):
from iblutil.util import setup_logger
setup_logger(name='ibllib', level=logging.INFO)
else:
# deactivate all log calls for use as a library
logging.getLogger('ibllib').addHandler(logging.NullHandler())
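
Reviewer note: the hard-coded `USE_LOGGING` flag becomes an opt-out controlled by the `IBLLIB_USE_LOGGING` environment variable, so apps keep the default console logging while library consumers can silence it. A usage sketch (the variable name and accepted values are taken from the diff above; the assertion is illustrative only):

```python
import os

# Any value other than '1' / 'true' / 'yes' (case-insensitive) skips the
# default logger setup and attaches a NullHandler to the 'ibllib' logger.
os.environ['IBLLIB_USE_LOGGING'] = '0'

import ibllib   # must be imported *after* the variable is set
import logging

assert any(isinstance(h, logging.NullHandler)
           for h in logging.getLogger('ibllib').handlers)
```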
4 changes: 2 additions & 2 deletions ibllib/io/extractors/camera.py
@@ -683,13 +683,13 @@ def groom_pin_state(gpio, ttl, ts, tolerance=2., display=False, take='first', mi
         _logger.warning('Some onsets but not offsets (or vice versa) were not assigned; '
                         'this may be a sign of faulty wiring or clock drift')
         # Find indices of GPIO upticks where only the downtick was marked for removal
-        orphaned_onsets, = np.where(~to_remove.reshape(-1, 2)[:, 0] & orphaned)
+        orphaned_onsets, = np.where(~to_remove.reshape(-1, 2)[:, 0] & orphaned)
         # The onsets_ array already has the other TTLs removed (same size as to_remove ==
         # False) so subtract the number of removed elements from index.
         for i, v in enumerate(orphaned_onsets):
             orphaned_onsets[i] -= to_remove.reshape(-1, 2)[:v, 0].sum()
         # Same for offsets...
-        orphaned_offsets, = np.where(~to_remove.reshape(-1, 2)[:, 1] & orphaned)
+        orphaned_offsets, = np.where(~to_remove.reshape(-1, 2)[:, 1] & orphaned)
         for i, v in enumerate(orphaned_offsets):
             orphaned_offsets[i] -= to_remove.reshape(-1, 2)[:v, 1].sum()
         # Remove orphaned ttl onsets and offsets
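
Reviewer note: the index bookkeeping around these lines is easy to misread in isolation. A worked toy example, assuming `to_remove` flags TTL edges stored as (onset, offset) pairs and `orphaned` marks pairs where exactly one edge was flagged (the names mirror the hunk; the data is invented):

```python
import numpy as np

# Hypothetical data: 4 GPIO pulses, i.e. 8 edges as (onset, offset) pairs.
# True means the edge was marked for removal.
to_remove = np.array([True, True,     # pulse 0: both edges dropped
                      False, True,    # pulse 1: offset dropped -> orphaned onset
                      False, False,   # pulse 2: fully kept
                      True, False])   # pulse 3: onset dropped -> orphaned offset
pairs = to_remove.reshape(-1, 2)
orphaned = pairs[:, 0] ^ pairs[:, 1]          # exactly one edge of the pair removed

# Pulses whose onset survives but whose offset was removed.
orphaned_onsets, = np.where(~pairs[:, 0] & orphaned)   # -> array([1])

# The surviving onsets array has already had the removed onsets dropped, so
# each index is shifted down by the number of onsets removed before it.
for i, v in enumerate(orphaned_onsets):
    orphaned_onsets[i] -= pairs[:v, 0].sum()

print(orphaned_onsets)  # [0]: pulse 1's onset is element 0 of the kept onsets
```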