Resolves #141 (#142)
* Resolves #141
* Update changelog
k1o0 authored Oct 11, 2024
1 parent a55b006 commit 20aaa20
Showing 3 changed files with 83 additions and 2 deletions.
1 change: 1 addition & 0 deletions CHANGELOG.md
@@ -13,6 +13,7 @@ This version improves behaviour of loading revisions and loading datasets from l
- bugfix in one.params.setup: remove all extraneous parameters (i.e. TOKEN) when running setup in silent mode
- warn user to reauthenticate when password is None in silent mode
- always force authentication when password passed, even when token cached
- bugfix: negative indexing of paginated response objects now functions correctly

### Added

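The changelog entry above concerns the lazy, list-like object that the web client returns when a REST query spans multiple pages. A minimal usage sketch of the behaviour being fixed, assuming an authenticated AlyxClient and an endpoint whose result set is large enough to be paginated (the endpoint name and variable names are illustrative, not part of this commit):

from one.webclient import AlyxClient

alyx = AlyxClient()  # illustrative; assumes parameters/credentials are already set up
sessions = alyx.rest('sessions', 'list')  # lazy paginated response for large result sets

last = sessions[-1]   # before this fix, a negative index could be passed straight to the
                      # page-fetching logic, yielding a wrong or invalid page offset
tail = sessions[-5:]  # negative slices are likewise resolved to positive indices before fetching
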
77 changes: 77 additions & 0 deletions one/tests/test_alyxclient.py
@@ -1,6 +1,7 @@
"""Unit tests for the one.webclient module"""
import unittest
from unittest import mock
import urllib.parse
import random
import os
import one.webclient as wc
@@ -479,6 +480,82 @@ def test_cache_dir_setter(self):
        finally:
            ac._par = ac._par.set('CACHE_DIR', prev_path)

    def test_paginated_response(self):
        """Test the _PaginatedResponse class."""
        alyx = mock.Mock(spec_set=ac)
        N, lim = 2000, 250  # 2000 results, 250 records per page
        url = ac.base_url + f'/?foo=bar&offset={lim}&limit={lim}'
        res = {'count': N, 'next': url, 'previous': None, 'results': []}
        res['results'] = [{'id': i} for i in range(lim)]
        alyx._generic_request.return_value = res
        # Check initialization
        pg = wc._PaginatedResponse(alyx, res, cache_args=dict(clobber=True))
        self.assertEqual(pg.count, N)
        self.assertEqual(len(pg), N)
        self.assertEqual(pg.limit, lim)
        self.assertEqual(len(pg._cache), N)
        self.assertEqual(pg._cache[:lim], res['results'])
        self.assertTrue(not any(pg._cache[lim:]))
        self.assertIs(pg.alyx, alyx)

        # Check fetching cached item with +ve int
        self.assertEqual({'id': 1}, pg[1])
        alyx._generic_request.assert_not_called()
        # Check fetching cached item with +ve slice
        self.assertEqual([{'id': 1}, {'id': 2}], pg[1:3])
        alyx._generic_request.assert_not_called()
        # Check fetching cached item with -ve int
        self.assertEqual({'id': 100}, pg[-1900])
        alyx._generic_request.assert_not_called()
        # Check fetching cached item with -ve slice
        self.assertEqual([{'id': 100}, {'id': 101}], pg[-1900:-1898])
        alyx._generic_request.assert_not_called()
        # Check fetching uncached item with +ve int
        n = offset = lim
        res['results'] = [{'id': i} for i in range(offset, offset + lim)]
        assert not any(pg._cache[offset:offset + lim])
        self.assertEqual({'id': lim}, pg[n])
        self.assertEqual(res['results'], pg._cache[offset:offset + lim])
        alyx._generic_request.assert_called_once_with(requests.get, mock.ANY, clobber=True)
        self._check_get_query(alyx._generic_request.call_args, lim, offset)
        # Check fetching uncached item with -ve int
        offset = lim * 3
        res['results'] = [{'id': i} for i in range(offset, offset + lim)]
        n = offset - N + 2
        assert not any(pg._cache[offset:offset + lim])
        self.assertEqual({'id': N + n}, pg[n])
        self.assertEqual(res['results'], pg._cache[offset:offset + lim])
        alyx._generic_request.assert_called_with(requests.get, mock.ANY, clobber=True)
        self._check_get_query(alyx._generic_request.call_args, lim, offset)
        # Check fetching uncached item with +ve slice
        offset = lim * 5
        res['results'] = [{'id': i} for i in range(offset, offset + lim)]
        n = offset + 20
        assert not any(pg._cache[offset:offset + lim])
        self.assertEqual([{'id': n}, {'id': n + 1}], pg[n:n + 2])
        self.assertEqual(res['results'], pg._cache[offset:offset + lim])
        alyx._generic_request.assert_called_with(requests.get, mock.ANY, clobber=True)
        self._check_get_query(alyx._generic_request.call_args, lim, offset)
        # Check fetching uncached item with -ve slice
        offset = N - lim
        res['results'] = [{'id': i} for i in range(offset, offset + lim)]
        assert not any(pg._cache[offset:offset + lim])
        self.assertEqual([{'id': N - 2}, {'id': N - 1}], pg[-2:])
        self.assertEqual(res['results'], pg._cache[offset:offset + lim])
        alyx._generic_request.assert_called_with(requests.get, mock.ANY, clobber=True)
        self._check_get_query(alyx._generic_request.call_args, lim, offset)
        # Five pages (the initial one plus the four fetched above) should now be
        # populated; the remaining cache entries should still be None
        self.assertEqual(expected_calls := 4, alyx._generic_request.call_count)
        self.assertEqual((expected_calls + 1) * lim, sum(list(map(bool, pg._cache))))

    def _check_get_query(self, call_args, limit, offset):
        """Check URL get query contains the expected limit and offset params."""
        (_, url), _ = call_args
        self.assertTrue(url.startswith(ac.base_url))
        query = urllib.parse.parse_qs(urllib.parse.urlparse(url).query)
        expected = {'foo': ['bar'], 'offset': [str(offset)], 'limit': [str(limit)]}
        self.assertDictEqual(query, expected)


if __name__ == '__main__':
    unittest.main(exit=False, verbosity=2)
7 changes: 5 additions & 2 deletions one/webclient.py
Original file line number Diff line number Diff line change
@@ -213,9 +213,12 @@ def __len__(self):
     def __getitem__(self, item):
         if isinstance(item, slice):
             while None in self._cache[item]:
-                self.populate(item.start + self._cache[item].index(None))
+                # If slice start index is -ve, convert to +ve index
+                i = self.count + item.start if item.start < 0 else item.start
+                self.populate(i + self._cache[item].index(None))
         elif self._cache[item] is None:
-            self.populate(item)
+            # If index is -ve, convert to +ve
+            self.populate(self.count + item if item < 0 else item)
         return self._cache[item]
 
     def populate(self, idx):
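The essence of the fix is that a negative index is converted to its positive equivalent before being passed to populate(); with the old code the raw negative value was used, so the resulting page request could target a wrong or negative offset. A small self-contained sketch of the index arithmetic, using the same numbers as the unit test above (it does not call the library):

count, lim = 2000, 250  # total records and page size, as in the test

# Integer index: pg[-1248] should resolve to record 752 ...
item = -1248
idx = count + item if item < 0 else item
assert idx == 752
assert idx // lim * lim == 750    # ... on the page at offset 750, as asserted in the test

# Slice: pg[-2:] should resolve to records 1998 and 1999 ...
start = -2
i = count + start if start < 0 else start
assert i == 1998
assert i // lim * lim == 1750     # ... on the final page at offset 1750, as asserted in the test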
