Protected view #750

Merged · 9 commits · Apr 4, 2024
ibllib/oneibl/patcher.py (1 addition, 1 deletion)

@@ -115,7 +115,7 @@ def _patch_dataset(self, path, dset_id=None, revision=None, dry=False, ftp=False
         assert is_uuid_string(dset_id)
         # If the revision is not None then we need to add the revision into the path. Note the moving of the file
         # is handled by one registration client
-        if revision is not None:
+        if revision is not None and f'#{revision}' not in str(path):
             path = path.parent.joinpath(f'#{revision}#', path.name)
         assert path.exists()
         dset = self.one.alyx.rest('datasets', 'read', id=dset_id)
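The added condition matters when a file has already been moved into its revision folder. A minimal sketch (hypothetical path) of the duplication the guard avoids:

from pathlib import Path

def add_revision(path, revision):
    # Mirrors the patched logic: only nest the file in a '#revision#' folder
    # if that revision is not already part of the path.
    if revision is not None and f'#{revision}' not in str(path):
        path = path.parent.joinpath(f'#{revision}#', path.name)
    return path

p = Path('alf/#2024-04-01#/spikes.times.npy')
# The old behaviour would have produced 'alf/#2024-04-01#/#2024-04-01#/spikes.times.npy'
assert add_revision(p, '2024-04-01') == p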
ibllib/oneibl/registration.py (20 additions, 0 deletions)

@@ -12,6 +12,7 @@
 from one.converters import ConversionMixin
 import one.alf.exceptions as alferr
 from one.util import datasets2records, ensure_list
+from one.api import ONE

 import ibllib
 import ibllib.io.extractors.base
@@ -78,6 +79,25 @@ def register_dataset(file_list, one=None, exists=False, versions=None, **kwargs)
     assert all(Path(f).exists() for f in file_list)

     client = IBLRegistrationClient(one)
+
+    # Check for protected datasets
+    # Account for cases where we are connected to the cortex lab database
+    if one.alyx.base_url == 'https://alyx.cortexlab.net':
+        protected_status = IBLRegistrationClient(
+            ONE(base_url='https://alyx.internationalbrainlab.org', mode='remote')).check_protected_files(file_list)
+    else:
+        protected_status = client.check_protected_files(file_list)
+
+    if isinstance(protected_status, list):
+        protected = any(d['status_code'] == 403 for d in protected_status)
+    else:
+        protected = protected_status['status_code'] == 403
+
+    # If we find a protected dataset, and we don't have a force=True flag, raise an error
+    if protected and not kwargs.pop('force', False):
+        raise FileExistsError('Protected datasets were found in the file list. To force the registration of datasets '
+                              'add the force=True argument.')
+
     # If the repository is specified then for the registration client we want server_only=True to
     # make sure we don't make any other repositories for the lab
     if kwargs.get('repository') and not kwargs.get('server_only', False):
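A hedged usage sketch of the new flow (file path hypothetical; `one` is assumed to be an authenticated one.api.ONE instance): registering a dataset whose Alyx record is protected now raises FileExistsError unless force=True is passed.

from ibllib.oneibl import registration

files = ['/data/subject/2024-01-01/001/alf/spikes.times.npy']  # hypothetical
try:
    registration.register_dataset(file_list=files, one=one)
except FileExistsError:
    # The protected-files check returned HTTP 403; re-register explicitly.
    registration.register_dataset(file_list=files, one=one, force=True)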
ibllib/pipes/histology.py (23 additions, 16 deletions)

@@ -237,24 +237,22 @@ def register_chronic_track(chronic_id, picks=None, one=None, overwrite=False, ch
     :return:
     """
     assert one
-    brain_locations, insertion_histology = register_track(chronic_id, picks=picks, one=one, overwrite=overwrite,
-                                                          channels=channels, brain_atlas=brain_atlas,
-                                                          endpoint='chronic-insertions')

     # Update all the associated probe insertions with the relevant QC and xyz_picks
     chronic = one.alyx.rest('chronic-insertions', 'list', id=chronic_id)[0]
     for probe_id in chronic['probe_insertion']:
         pid = probe_id['id']
-        if picks is None or picks.size == 0:
-            hist_qc = base.QC(pid, one=one, endpoint='insertions')
-            hist_qc.update_extended_qc({'tracing_exists': False})
-            hist_qc.update('CRITICAL', namespace='tracing')
-        else:
-            one.alyx.json_field_update(endpoint='insertions', uuid=pid, field_name='json',
-                                       data={'xyz_picks': np.int32(picks * 1e6).tolist()})
-            # Update the insertion qc to register tracing exists
-            hist_qc = base.QC(pid, one=one, endpoint='insertions')
-            hist_qc.update_extended_qc({'tracing_exists': True})
+        brain_locations, insertion_histology = register_track(pid, picks=picks, one=one, overwrite=overwrite,
+                                                              channels=channels, brain_atlas=brain_atlas)

+    if picks is None or picks.size == 0:
+        hist_qc = base.QC(chronic_id, one=one, endpoint='chronic-insertions')
+        hist_qc.update_extended_qc({'tracing_exists': False})
+        hist_qc.update('CRITICAL', namespace='tracing')
+    else:
+        one.alyx.json_field_update(endpoint='chronic-insertions', uuid=chronic_id, field_name='json',
+                                   data={'xyz_picks': np.int32(picks * 1e6).tolist()})
+        # Update the insertion qc to register tracing exists
+        hist_qc = base.QC(chronic_id, one=one, endpoint='chronic-insertions')
+        hist_qc.update_extended_qc({'tracing_exists': True})

     return brain_locations, insertion_histology
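Both branches store the picks via np.int32(picks * 1e6).tolist(), i.e. coordinates in metres converted to integer micrometres for the Alyx JSON field. A minimal illustration (values hypothetical):

import numpy as np

picks = np.array([[0.001, -0.0005, 0.004]])   # xyz picks in metres
json_picks = np.int32(picks * 1e6).tolist()   # -> [[1000, -500, 4000]], integer micrometres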

@@ -291,7 +289,14 @@ def register_track(probe_id, picks=None, one=None, overwrite=False, channels=Tru
         insertion_histology = None
         # Here need to change the track qc to critical and also extended qc to zero
     else:
-        brain_locations, insertion_histology = get_brain_regions(picks, brain_atlas=brain_atlas)
+        try:
+            eid, pname = one.pid2eid(probe_id)
+            chan_pos = one.load_dataset(eid, 'channels.localCoordinates.npy', collection=f'alf/{pname}/pykilosort')
+        except Exception:
+            chan_pos = None
+
+        brain_locations, insertion_histology = get_brain_regions(picks, channels_positions=chan_pos,
+                                                                 brain_atlas=brain_atlas)
     # 1) update the alyx models, first put the picked points in the insertion json
     one.alyx.json_field_update(endpoint=endpoint, uuid=probe_id, field_name='json',
                                data={'xyz_picks': np.int32(picks * 1e6).tolist()})
@@ -391,8 +396,10 @@ def create_trajectory_dict(probe_id, insertion, provenance, endpoint='insertions
     }
     if endpoint == 'chronic-insertions':
         tdict['chronic_insertion'] = probe_id
+        tdict['probe_insertion'] = None
     else:
         tdict['probe_insertion'] = probe_id
+        tdict['chronic_insertion'] = None

     return tdict
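With both keys now always present, a trajectory payload references exactly one insertion type and explicitly clears the other, e.g. (ID and remaining fields hypothetical):

tdict = {
    'chronic_insertion': 'f8a1c3e0-0000-0000-0000-000000000000',  # hypothetical UUID
    'probe_insertion': None,   # explicitly cleared for the other insertion type
    'provenance': 'Histology track',
    # ... x, y, z, depth, theta, phi, roll
}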
ibllib/plots/figures.py (5 additions, 1 deletion)

@@ -785,7 +785,11 @@ def dlc_qc_plot(session_path, one=None, device_collection='raw_video_data',
     # Load session level data
     for alf_object in ['trials', 'wheel', 'licks']:
         try:
-            data[f'{alf_object}'] = alfio.load_object(session_path.joinpath(trials_collection), alf_object)  # load locally
+            if alf_object == 'licks':
+                data[f'{alf_object}'] = alfio.load_object(session_path.joinpath('alf'),
+                                                          alf_object)  # load locally
+            else:
+                data[f'{alf_object}'] = alfio.load_object(session_path.joinpath(trials_collection), alf_object)  # load locally
             continue
         except ALFObjectNotFound:
             pass
ibllib/qc/alignment_qc.py (1 addition, 1 deletion)

@@ -316,7 +316,7 @@ def upload_channels(self, alignment_key, upload_alyx, upload_flatiron):
         ephys_traj = self.one.alyx.get(f'/trajectories?&probe_insertion={self.eid}'
                                        '&provenance=Ephys aligned histology track',
                                        clobber=True)
-        patch_dict = {'json': self.alignments}
+        patch_dict = {'probe_insertion': self.eid, 'json': self.alignments}
         self.one.alyx.rest('trajectories', 'partial_update', id=ephys_traj[0]['id'],
                            data=patch_dict)
ibllib/tests/qc/test_alignment_qc.py (9 additions, 6 deletions)

@@ -186,6 +186,7 @@ def _02_one_alignment(self):
                           self.alignments['2020-06-26T16:40:14_Karolina_Socha']}
         trajectory = copy.deepcopy(self.trajectory)
         trajectory.update({'json': alignments})
+        trajectory.update({'chronic_insertion': None})
         _ = one.alyx.rest('trajectories', 'create', data=trajectory)
         align_qc = AlignmentQC(self.probe_id, one=one, brain_atlas=brain_atlas, channels=False)
         align_qc.run(update=True, upload_alyx=True, upload_flatiron=False)
@@ -199,8 +200,8 @@ def _03_alignments_disagree(self):
                       self.alignments['2020-06-26T16:40:14_Karolina_Socha'],
                       '2020-06-12T00:39:15_nate': self.alignments['2020-06-12T00:39:15_nate']}
         trajectory = copy.deepcopy(self.trajectory)
-        trajectory.update({'json': alignments})
-        traj = one.alyx.rest('trajectories', 'update', id=self.prev_traj_id, data=trajectory)
+        trajectory.update({'probe_insertion': self.probe_id, 'json': alignments})
+        traj = one.alyx.rest('trajectories', 'partial_update', id=self.prev_traj_id, data=trajectory)
         align_qc = AlignmentQC(self.probe_id, one=one, brain_atlas=brain_atlas, channels=False)
         align_qc.load_data(prev_alignments=traj['json'], xyz_picks=np.array(self.xyz_picks) / 1e6,
                            cluster_chns=self.cluster_chns, depths=SITES_COORDINATES[:, 1],
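These tests also switch from 'update' to 'partial_update'. Assuming the standard Django REST Framework semantics behind Alyx, that is PUT versus PATCH; a sketch (names taken from the surrounding test, payload illustrative):

# PUT ('update') replaces the whole record, so every required field must be supplied;
# PATCH ('partial_update') modifies only the fields present in `data`.
traj = one.alyx.rest('trajectories', 'partial_update', id=prev_traj_id,
                     data={'probe_insertion': probe_id, 'json': alignments})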
@@ -216,8 +217,8 @@ def _04_alignments_agree(self):
                       self.alignments['2020-06-19T10:52:36_noam.roth'],
                       '2020-06-12T00:39:15_nate': self.alignments['2020-06-12T00:39:15_nate']}
         trajectory = copy.deepcopy(self.trajectory)
-        trajectory.update({'json': alignments})
-        traj = one.alyx.rest('trajectories', 'update', id=self.prev_traj_id, data=trajectory)
+        trajectory.update({'probe_insertion': self.probe_id, 'json': alignments})
+        traj = one.alyx.rest('trajectories', 'partial_update', id=self.prev_traj_id, data=trajectory)
         self.assertEqual(self.prev_traj_id, traj['id'])
         align_qc = AlignmentQC(self.probe_id, one=one, brain_atlas=brain_atlas, channels=False)
         align_qc.load_data(cluster_chns=self.cluster_chns, depths=SITES_COORDINATES[:, 1],
@@ -230,8 +231,8 @@
     def _05_not_latest_alignments_agree(self):
         alignments = copy.deepcopy(self.alignments)
         trajectory = copy.deepcopy(self.trajectory)
-        trajectory.update({'json': alignments})
-        traj = one.alyx.rest('trajectories', 'update', id=self.prev_traj_id, data=trajectory)
+        trajectory.update({'probe_insertion': self.probe_id, 'json': alignments})
+        traj = one.alyx.rest('trajectories', 'partial_update', id=self.prev_traj_id, data=trajectory)
         self.assertEqual(self.prev_traj_id, traj['id'])
         align_qc = AlignmentQC(self.probe_id, one=one, brain_atlas=brain_atlas, channels=False)
         align_qc.load_data(prev_alignments=traj['json'], xyz_picks=np.array(self.xyz_picks) / 1e6,
@@ -277,6 +278,7 @@ def setUpClass(cls) -> None:
         cls.probe_id = probe_insertion['id']
         cls.trajectory = data['trajectory'].tolist()
         cls.trajectory.update({'probe_insertion': cls.probe_id})
+        cls.trajectory.update({'chronic_insertion': None})
         cls.trajectory.update({'json': cls.alignments})
         cls.traj = one.alyx.rest('trajectories', 'create', data=cls.trajectory)

@@ -415,6 +417,7 @@ def setUpClass(cls) -> None:
         cls.probe_name = probe_insertion['name']
         cls.trajectory = data['trajectory'].tolist()
         cls.trajectory.update({'probe_insertion': cls.probe_id})
+        cls.trajectory.update({'chronic_insertion': None})
         cls.trajectory.update({'json': cls.alignments})
         cls.traj = one.alyx.rest('trajectories', 'create', data=cls.trajectory)
ibllib/tests/qc/test_task_metrics.py (1 addition, 1 deletion)

@@ -17,7 +17,7 @@ class TestAggregateOutcome(unittest.TestCase):
     def test_deprecation_warning(self):
         """Remove TaskQC.compute_session_status_from_dict after 2024-04-01."""
         from datetime import datetime
-        self.assertFalse(datetime.now() > datetime(2024, 4, 1), 'remove TaskQC.compute_session_status_from_dict method.')
+        self.assertFalse(datetime.now() > datetime(2024, 4, 10), 'remove TaskQC.compute_session_status_from_dict method.')
         qc_dict = {'_task_iti_delays': .99}
         with self.assertWarns(DeprecationWarning), self.assertLogs(qcmetrics.__name__, 'WARNING'):
             out = qcmetrics.TaskQC.compute_session_status_from_dict(qc_dict)
ibllib/tests/test_oneibl.py (10 additions, 5 deletions)

@@ -126,7 +126,7 @@ def test_patch_datasets(self):

         # Mock the post method of AlyxClient and assert that it was called during registration
         with mock.patch.object(self.one.alyx, 'post') as rest_mock:
-            rest_mock.side_effect = responses
+            rest_mock.side_effect = [[r] for r in responses]
             self.globus_patcher.patch_datasets(file_list)
             self.assertEqual(rest_mock.call_count, 2)
             for call, file in zip(rest_mock.call_args_list, file_list):
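The fix wraps each mocked response in a list, suggesting the patched code now expects a list of records from each post call. For reference, an iterable side_effect in unittest.mock hands out one element per invocation:

from unittest import mock

m = mock.MagicMock(side_effect=[['first'], ['second']])
assert m() == ['first']   # first call consumes the first element
assert m() == ['second']  # second call consumes the next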
@@ -332,9 +332,14 @@ def test_registration_datasets(self):
             self.one.alyx.rest('datasets', 'partial_update',
                                id=d['url'][-36:], data={'tags': [self.tag['name']]})

+        # Check that an exception is raised unless force=True
+        flist = list(self.rev_path.glob('*.npy'))
+        with self.assertRaises(FileExistsError):
+            registration.register_dataset(file_list=flist, one=self.one)
+
         # Test registering with a revision already in the file path, should use this rather than create one with today's date
         flist = list(self.rev_path.glob('*.npy'))
-        r = registration.register_dataset(file_list=flist, one=self.one)
+        r = registration.register_dataset(file_list=flist, one=self.one, force=True)
         self.assertTrue(all(d['revision'] == self.revision for d in r))
         self.assertTrue(all(d['default'] for d in r))
         self.assertTrue(all(d['collection'] == 'alf' for d in r))
@@ -348,7 +353,7 @@
         # Register again with revision in file path, it should register to self.revision + a
         flist = list(self.rev_path.glob('*.npy'))

-        r = registration.register_dataset(file_list=flist, one=self.one)
+        r = registration.register_dataset(file_list=flist, one=self.one, force=True)
         self.assertTrue(all(d['revision'] == f'{self.revision}a' for d in r))
         self.assertTrue(self.alf_path.joinpath(f'#{self.revision}a#', 'spikes.times.npy').exists())
         self.assertTrue(self.alf_path.joinpath(f'#{self.revision}a#', 'spikes.amps.npy').exists())
@@ -357,7 +362,7 @@

         # When we re-register the original it should move them into revision with today's date
         flist = list(self.alf_path.glob('*.npy'))
-        r = registration.register_dataset(file_list=flist, one=self.one)
+        r = registration.register_dataset(file_list=flist, one=self.one, force=True)
         self.assertTrue(all(d['revision'] == self.today_revision for d in r))
         self.assertTrue(self.alf_path.joinpath(f'#{self.today_revision}#', 'spikes.times.npy').exists())
         self.assertTrue(self.alf_path.joinpath(f'#{self.today_revision}#', 'spikes.amps.npy').exists())
@@ -375,7 +380,7 @@
         np.save(self.alf_path.joinpath('spikes.times.npy'), np.random.random(500))
         np.save(self.alf_path.joinpath('spikes.amps.npy'), np.random.random(500))
         flist = list(self.alf_path.glob('*.npy'))
-        r = registration.register_dataset(file_list=flist, one=self.one)
+        r = registration.register_dataset(file_list=flist, one=self.one, force=True)
         self.assertTrue(all(d['revision'] == self.today_revision + 'a' for d in r))

     def _write_settings_file(self):
requirements.txt (1 addition, 1 deletion)

@@ -15,7 +15,7 @@ pynrrd>=0.4.0
 pytest
 requests>=2.22.0
 scikit-learn>=0.22.1
-scipy>=1.7.0
+scipy>=1.7.0,<1.13 # scipy gaussian missing April 2024
 scikit-image # this is a widefield requirement missing as of July 2023, we may remove it once wfield has this figured out
 sparse
 seaborn>=0.9.0
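The <1.13 pin most likely works around the removal of scipy.signal.gaussian in SciPy 1.13, where window functions are only available from the scipy.signal.windows namespace; if so, the long-term fix is to migrate the import:

# SciPy >= 1.13 removed the scipy.signal.gaussian alias; the window function
# lives in scipy.signal.windows instead.
from scipy.signal.windows import gaussian

w = gaussian(M=11, std=2.0)  # 11-point Gaussian window with sigma of 2 samples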