Skip to content

Commit

Permalink
Hotfix/2.34.1 (#758)
Browse files Browse the repository at this point in the history
* Resolves iblenv#364

* Resolves #757

* Bump version

* Bump version

* Pre-generated sequences extraction bugfix

* Download required ap.meta files when building pipeline for task_qc command

---------

Co-authored-by: olivier <[email protected]>
  • Loading branch information
k1o0 and oliche authored Apr 23, 2024
1 parent 462e8fb commit d63e62b
Show file tree
Hide file tree
Showing 9 changed files with 149 additions and 74 deletions.
3 changes: 2 additions & 1 deletion .github/workflows/ibllib_ci.yml
Original file line number Diff line number Diff line change
Expand Up @@ -33,12 +33,13 @@ jobs:
- name: Install deps
run: |
python -m pip install --upgrade pip
python -m pip install flake8 pytest
python -m pip install flake8 pytest flake8-docstrings
pip install -r requirements.txt
pip install -e .
- name: Flake8
run: |
python -m flake8
python -m flake8 --select D --ignore E ibllib/qc/camera.py
- name: Brainbox tests
run: |
cd brainbox
Expand Down
2 changes: 1 addition & 1 deletion ibllib/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -2,7 +2,7 @@
import logging
import warnings

__version__ = '2.34.0'
__version__ = '2.34.1'
warnings.filterwarnings('always', category=DeprecationWarning, module='ibllib')

# if this becomes a full-blown library we should let the logging configuration to the discretion of the dev
Expand Down
7 changes: 4 additions & 3 deletions ibllib/io/extractors/biased_trials.py
Original file line number Diff line number Diff line change
Expand Up @@ -45,9 +45,10 @@ def _extract(self, **kwargs):

@staticmethod
def get_pregenerated_events(bpod_trials, settings):
num = settings.get("PRELOADED_SESSION_NUM", None)
if num is None:
num = settings.get("PREGENERATED_SESSION_NUM", None)
for k in ['PRELOADED_SESSION_NUM', 'PREGENERATED_SESSION_NUM', 'SESSION_TEMPLATE_ID']:
num = settings.get(k, None)
if num is not None:
break
if num is None:
fn = settings.get('SESSION_LOADED_FILE_PATH', '')
fn = PureWindowsPath(fn).name
Expand Down
5 changes: 2 additions & 3 deletions ibllib/oneibl/data_handlers.py
Original file line number Diff line number Diff line change
Expand Up @@ -14,7 +14,7 @@

from one.api import ONE
from one.webclient import AlyxClient
from one.util import filter_datasets
from one.util import filter_datasets, ensure_list
from one.alf.files import add_uuid_string, session_path_parts
from ibllib.oneibl.registration import register_dataset, get_lab, get_local_data_repository
from ibllib.oneibl.patcher import FTPPatcher, SDSCPatcher, SDSC_ROOT_PATH, SDSC_PATCH_PATH
Expand Down Expand Up @@ -140,8 +140,7 @@ def uploadData(self, outputs, version, clobber=False, **kwargs):
versions = super().uploadData(outputs, version)
data_repo = get_local_data_repository(self.one.alyx)
# If clobber = False, do not re-upload the outputs that have already been processed
if not isinstance(outputs, list):
outputs = [outputs]
outputs = ensure_list(outputs)
to_upload = list(filter(None if clobber else lambda x: x not in self.processed, outputs))
records = register_dataset(to_upload, one=self.one, versions=versions, repository=data_repo, **kwargs) or []
if kwargs.get('dry', False):
Expand Down
3 changes: 3 additions & 0 deletions ibllib/pipes/dynamic_pipeline.py
Original file line number Diff line number Diff line change
Expand Up @@ -474,6 +474,9 @@ def get_trials_tasks(session_path, one=None):
# Check for an experiment.description file; ensure downloaded if possible
if one and one.to_eid(session_path): # to_eid returns None if session not registered
one.load_datasets(session_path, ['_ibl_experiment.description'], download_only=True, assert_present=False)
# NB: meta files only required to build neuropixel tasks in make_pipeline
if meta_files := one.list_datasets(session_path, '*.ap.meta', collection='raw_ephys_data*'):
one.load_datasets(session_path, meta_files, download_only=True, assert_present=False)
experiment_description = sess_params.read_params(session_path)

# If experiment description file then use this to make the pipeline
Expand Down
2 changes: 1 addition & 1 deletion ibllib/pipes/mesoscope_tasks.py
Original file line number Diff line number Diff line change
Expand Up @@ -578,7 +578,7 @@ def _run(self, run_suite2p=True, rename_files=True, use_badframes=True, **kwargs
""" Bad frames """
qc_paths = (self.session_path.joinpath(f[1], 'exptQC.mat')
for f in self.input_files if f[0] == 'exptQC.mat')
qc_paths = map(str, filter(Path.exists, qc_paths))
qc_paths = sorted(map(str, filter(Path.exists, qc_paths)))
exptQC = [loadmat(p, squeeze_me=True, simplify_cells=True) for p in qc_paths]
if len(exptQC) > 0:
frameQC, frameQC_names, bad_frames = self._consolidate_exptQC(exptQC)
Expand Down
21 changes: 13 additions & 8 deletions ibllib/qc/base.py
Original file line number Diff line number Diff line change
Expand Up @@ -12,10 +12,12 @@


class QC:
"""A base class for data quality control"""
"""A base class for data quality control."""

def __init__(self, endpoint_id, one=None, log=None, endpoint='sessions'):
"""
A base class for data quality control.
:param endpoint_id: Eid for endpoint. If using sessions can also be a session path
:param log: A logging.Logger instance, if None the 'ibllib' logger is used
:param one: An ONE instance for fetching and setting the QC on Alyx
Expand All @@ -38,15 +40,17 @@ def __init__(self, endpoint_id, one=None, log=None, endpoint='sessions'):

@abstractmethod
def run(self):
    """Run the QC tests and return the outcome.

    :return: One of "CRITICAL", "FAIL", "WARNING" or "PASS"
    """
    pass

@abstractmethod
def load_data(self):
    """Load the data required to compute the QC.

    Subclasses may implement this for loading raw data.
    """
    pass

Expand Down Expand Up @@ -85,7 +89,8 @@ def overall_outcome(outcomes: iter, agg=max) -> spec.QC:
return agg(map(spec.QC.validate, outcomes))

def _set_eid_or_path(self, session_path_or_eid):
"""Parse a given eID or session path
"""Parse a given eID or session path.
If a session UUID is given, resolves and stores the local path and vice versa
:param session_path_or_eid: A session eid or path
:return:
Expand Down Expand Up @@ -215,16 +220,16 @@ def update_extended_qc(self, data):
return out

def compute_outcome_from_extended_qc(self) -> str:
    """Return the session outcome computed from aggregating the extended QC.

    :return: Aggregate outcome over all extended QC values whose key does not
        start with an underscore.
    """
    # clobber=True bypasses the local REST cache so the latest record is fetched
    details = self.one.alyx.get(f'/{self.endpoint}/{self.eid}', clobber=True)
    # some endpoints nest the extended QC map under the record's 'json' field
    extended_qc = details['json']['extended_qc'] if self.json else details['extended_qc']
    # keys beginning with '_' are excluded from the aggregate outcome
    return self.overall_outcome(v for k, v in extended_qc.items() or {} if k[0] != '_')


def sign_off_dict(exp_dec, sign_off_categories=None):
"""
Create sign off dictionary.
Creates a dict containing 'sign off' keys for each device and task protocol in the provided
experiment description.
Expand Down
Loading

0 comments on commit d63e62b

Please sign in to comment.