Commit: Minor fixes
k1o0 committed Oct 30, 2024
1 parent ba625e1 commit 85b4e1a
Showing 2 changed files with 13 additions and 16 deletions.
one/api.py (6 changes: 1 addition & 5 deletions)
@@ -150,13 +150,10 @@ def load_cache(self, tables_dir=None, **kwargs):
 
             self._cache[table] = cache
 
-        if len(self._cache) == 1:
+        if meta['loaded_time'] is None:
             # No tables present
             meta['expired'] = True
             meta['raw'] = {}
-            self._cache.update({
-                'datasets': pd.DataFrame(columns=DATASETS_COLUMNS).set_index(['eid', 'id']),
-                'sessions': pd.DataFrame(columns=SESSIONS_COLUMNS).set_index('id')})
             if self.offline:  # In online mode, the cache tables should be downloaded later
                 warnings.warn(f'No cache tables found in {self._tables_dir}')
         created = [datetime.fromisoformat(x['date_created'])
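
For context, the removed lines built empty placeholder tables that still carry the expected index schema. A minimal runnable sketch of that pandas pattern, with hypothetical column tuples standing in for the module's DATASETS_COLUMNS and SESSIONS_COLUMNS constants:

import pandas as pd

# Hypothetical stand-ins for the module-level column constants
DATASETS_COLUMNS = ('eid', 'id', 'rel_path', 'file_size', 'exists')
SESSIONS_COLUMNS = ('id', 'lab', 'subject', 'date', 'number')

# Even when empty, the frames keep their (Multi)Index schema after set_index
datasets = pd.DataFrame(columns=DATASETS_COLUMNS).set_index(['eid', 'id'])
sessions = pd.DataFrame(columns=SESSIONS_COLUMNS).set_index('id')
print(datasets.index.names)  # ['eid', 'id']
print(sessions.index.name)   # 'id'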
@@ -502,7 +499,6 @@ def sort_fcn(itm):
         search_terms = self.search_terms(query_type='local')
         queries = {util.autocomplete(k, search_terms): v for k, v in kwargs.items()}
         for key, value in sorted(queries.items(), key=sort_fcn):
-            # key = util.autocomplete(key)  # Validate and get full name
             # No matches; short circuit
             if sessions.size == 0:
                 return ([], None) if details else []
one/tests/test_one.py (23 changes: 12 additions & 11 deletions)
@@ -53,6 +53,7 @@
 import one.params
 import one.alf.exceptions as alferr
 from one.alf import spec
+from one.alf.files import get_alf_path
 from . import util
 from . import OFFLINE_ONLY, TEST_DB_1, TEST_DB_2  # 1 = TestAlyx; 2 = OpenAlyx

@@ -1659,18 +1660,16 @@ def setUp(self) -> None:
             new=partial(util.get_file, self.tempdir.name))
         self.patch.start()
         self.one = OneAlyx(**TEST_DB_2, cache_dir=self.tempdir.name, mode='auto')
-        self.fid = '17ab5b57-aaf6-4016-9251-66daadc200c7'  # File record of channels.brainLocation
+        self.fid = '6f175a7a-e20b-4622-81fc-08947a4fd1d3'  # File record of wiring.json
         self.eid = 'aad23144-0e52-4eac-80c5-c4ee2decb198'
+        self.did = 'd693fbf9-2f90-4123-839e-41474c44742d'
 
     def test_download_datasets(self):
         """Test OneAlyx._download_dataset, _download_file and _dset2url."""
         det = self.one.get_details(self.eid, True)
-        rec = next(x for x in det['data_dataset_session_related']
-                   if 'channels.brainLocation' in x['dataset_type'])
+        rec = next(x for x in det['data_dataset_session_related'] if x['id'] == self.did)
         # FIXME hack because data_url may be AWS
-        from one.alf.files import get_alf_path
         rec['data_url'] = self.one.alyx.rel_path2url(get_alf_path(rec['data_url']))
-        # FIXME order may not be stable, this only works
         file = self.one._download_dataset(rec)
         self.assertIsInstance(file, Path)
         self.assertTrue(file.exists())
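
The data_url fix-up above assumes get_alf_path strips everything before the lab/Subjects portion of a path or URL, after which AlyxClient.rel_path2url rebuilds an HTTP URL on the FlatIron data server. A rough sketch under those assumptions (the URL is illustrative):

from one.alf.files import get_alf_path

# Hypothetical AWS-style URL for one dataset
url = ('https://bucket.s3.amazonaws.com/data/mainenlab/Subjects/ZM_1150/'
       '2019-05-07/001/raw_ephys_data/probe00/_spikeglx_ephysData_g0_t0.imec0.wiring.json')
rel_path = get_alf_path(url)
# Expected to be the session-relative part, roughly:
# 'mainenlab/Subjects/ZM_1150/2019-05-07/001/raw_ephys_data/probe00/...'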
@@ -1709,18 +1708,18 @@ def test_download_datasets(self):
         if fi != 0:
             rec['file_records'] = [rec['file_records'].pop(fi), *rec['file_records']]
         file = self.one._download_dataset(rec, keep_uuid=True)
-        self.assertEqual(str(file).split('.')[2], rec['url'].split('/')[-1])
+        self.assertEqual(file.stem.split('.')[-1], rec['url'].split('/')[-1])
 
         # Check list input
-        recs = [rec, sorted(det['data_dataset_session_related'], key=lambda x: x['file_size'])[0]]
+        recs = [rec, sorted(det['data_dataset_session_related'], key=lambda x: x['file_size'])[1]]
         files = self.one._download_dataset(recs)
         self.assertIsInstance(files, list)
         self.assertTrue(all(isinstance(x, Path) for x in files))
 
         # Check Series input
         r_ = datasets2records(rec).squeeze()
         file = self.one._download_dataset(r_)
-        self.assertIn('channels.brainLocation', file.as_posix())
+        self.assertIn('imec0.wiring.json', file.name)
 
         # Check behaviour when URL invalid
         did = rec['url'].split('/')[-1]
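
The first assertion change above swaps a brittle fixed split index for the last dot-separated token of the file stem; with keep_uuid=True the dataset UUID is appended to the stem, so that token is the UUID regardless of how many dots the rest of the path contains. A small illustration with a hypothetical filename:

from pathlib import Path

uuid = 'd693fbf9-2f90-4123-839e-41474c44742d'
file = Path('/data/subject/2020-01-01/001/raw_ephys_data/probe00/'
            f'_spikeglx_ephysData_g0_t0.imec0.wiring.{uuid}.json')

print(str(file).split('.')[2])           # 'wiring' -- index depends on the whole path
print(file.stem.split('.')[-1] == uuid)  # True -- robust to extra dots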
@@ -1742,10 +1741,11 @@ def test_download_datasets(self):
         file = self.one._download_dataset(path)
         self.assertIsNone(file)
 
+        # Check data frame record
         rec = self.one.list_datasets(self.eid, details=True)
-        rec = rec[rec.rel_path.str.contains('00/pykilosort/channels.brainLocation')]
-        rec['exists_aws'] = False  # Ensure we use FlatIron for this
-        rec = pd.concat({str(self.eid): rec}, names=['eid'])
+        rec = rec[rec.rel_path.str.contains('00/_spikeglx_ephysData_g0_t0.imec0.wiring')]
+        rec.loc[self.did, 'exists_aws'] = False  # Ensure we use FlatIron for this
+        rec = pd.concat({str(self.eid): rec}, names=['eid'])  # Add back eid index
 
         files = self.one._download_datasets(rec)
         self.assertFalse(None in files)
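
For context, wrapping a frame in a single-key dict and concatenating is the pandas idiom used here to add back the eid level of the index. A runnable sketch with hypothetical values:

import pandas as pd

# A single-session datasets frame indexed by dataset id only
rec = pd.DataFrame(
    {'rel_path': ['raw_ephys_data/probe00/_spikeglx_ephysData_g0_t0.imec0.wiring.json'],
     'exists_aws': [True]},
    index=pd.Index(['d693fbf9-2f90-4123-839e-41474c44742d'], name='id'))

eid = 'aad23144-0e52-4eac-80c5-c4ee2decb198'
rec = pd.concat({eid: rec}, names=['eid'])  # prepends an 'eid' index level
print(rec.index.names)  # ['eid', 'id']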
@@ -1760,6 +1760,7 @@ def test_download_aws(self):
         dsets = self.one.list_datasets(
             self.eid, filename='*wiring.json', collection='raw_ephys_data/probe??', details=True)
         dsets = pd.concat({str(self.eid): dsets}, names=['eid'])
+        assert len(dsets) == 2
 
         file = self.one.eid2path(self.eid) / dsets['rel_path'].values[0]
         with mock.patch('one.remote.aws.get_s3_from_alyx', return_value=(None, None)), \
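
The new assertion expects exactly two datasets because the collection filter uses Unix shell-style wildcards, where ? matches a single character, so probe?? matches both probe00 and probe01. A quick check of that matching (assuming fnmatch semantics, which ONE's wildcard filters follow):

from fnmatch import fnmatch

for coll in ('raw_ephys_data/probe00', 'raw_ephys_data/probe01'):
    assert fnmatch(coll, 'raw_ephys_data/probe??')
assert fnmatch('_spikeglx_ephysData_g0_t0.imec0.wiring.json', '*wiring.json')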
