From a05621b01c104cb74a46bcb42512b9a79fdbf6b5 Mon Sep 17 00:00:00 2001
From: Miles Wells
Date: Fri, 8 Nov 2024 14:56:34 +0200
Subject: [PATCH] Resolves #156; fix release notes typos and quotation mark
 inconsistencies

---
 CHANGELOG.md          | 22 ++++++++++++++--------
 one/__init__.py       |  2 +-
 one/api.py            |  4 +++-
 one/tests/test_one.py |  5 +++++
 one/webclient.py      | 15 ++++++++-------
 5 files changed, 31 insertions(+), 17 deletions(-)

diff --git a/CHANGELOG.md b/CHANGELOG.md
index 691cf9b9..91c2c6da 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -1,6 +1,12 @@
 # Changelog
-## [Latest](https://github.com/int-brain-lab/ONE/commits/main) [2.11.0]
-This version deprecates one.alf.files in preperation for replacing with one.alf.path in version 3.
+## [Latest](https://github.com/int-brain-lab/ONE/commits/main) [2.11.1]
+
+### Modified
+
+- HOTFIX: consistent behaviour in OneAlyx.list_datasets when keep_eid_index == True
+
+## [2.11.0]
+This version deprecates one.alf.files in preparation for replacing with one.alf.path in version 3.
 
 ### Modified
 
@@ -24,9 +30,9 @@ This version improves behaviour of loading revisions and loading datasets from l
 - bugfix of spurious error raised when loading dataset with a revision provided
 - default_revisions_only parameter in One.list_datasets filters non-default datasets
 - permit data frame input to One.load_datasets and load precise relative paths provided (instead of default revisions)
-- redundent session_path column has been dropped from the datasets cache table
+- redundant session_path column has been dropped from the datasets cache table
 - bugfix in one.params.setup: suggest previous cache dir if available instead of always the default
-- bugfix in one.params.setup: remove all extrenuous parameters (i.e. TOKEN) when running setup in silent mode
+- bugfix in one.params.setup: remove all extraneous parameters (i.e. TOKEN) when running setup in silent mode
 - warn user to reauthenticate when password is None in silent mode
 - always force authentication when password passed, even when token cached
 - bugfix: negative indexing of paginated response objects now functions correctly
@@ -102,7 +108,7 @@ This version of ONE adds support for Alyx 2.0.0 and pandas 3.0.0 with dataset QC
 
 - One.load_dataset
   - add an option to skip computing hash for existing files when loading datasets `check_hash=False`
-  - check filesize before computing hash for performance
+  - check file size before computing hash for performance
 
 ## [2.5.5]
 
@@ -320,7 +326,7 @@ This version of ONE adds support for Alyx 2.0.0 and pandas 3.0.0 with dataset QC
 
 - RegistrationClient.find_files is now itself a generator method (previously returned a generator)
 - exists kwarg in RegistrationClient.register_files
-- support for loading 'table' attribute as dataframe with extra ALF parts
+- support for loading 'table' attribute as data frame with extra ALF parts
 - bugfix: tag assertion should expect list of tags in cache info
 
 ## [1.17.0]
@@ -361,7 +367,7 @@ This version of ONE adds support for Alyx 2.0.0 and pandas 3.0.0 with dataset QC
 
 ### Modified
 
-- squeeze pandas dataframe on csv_read
+- squeeze pandas data frame on csv_read
 - load_cache now public method; option to load specific remote cache with tag arg
 - fix incorrect error message in one.alf.exceptions.ALFMultipleObjectsFound
 
@@ -537,7 +543,7 @@ This version of ONE adds support for Alyx 2.0.0 and pandas 3.0.0 with dataset QC
 
 ### Modified
 
-- rest command loging includes the whole json field on error
+- rest command logging includes the whole json field on error
 - added silent option to instantiate One on local files
 
 ## [1.6.0]
diff --git a/one/__init__.py b/one/__init__.py
index 71fa4429..59a7140d 100644
--- a/one/__init__.py
+++ b/one/__init__.py
@@ -1,2 +1,2 @@
 """The Open Neurophysiology Environment (ONE) API."""
-__version__ = '2.11.0'
+__version__ = '2.11.1'
diff --git a/one/api.py b/one/api.py
index d9f467a5..52fffd50 100644
--- a/one/api.py
+++ b/one/api.py
@@ -1855,8 +1855,10 @@ def list_datasets(
             return self._cache['datasets'].iloc[0:0] if details else []  # Return empty
         assert set(datasets.index.unique('eid')) == {eid}
         del filters['default_revisions_only']
+        if not keep_eid_index and 'eid' in datasets.index.names:
+            datasets = datasets.droplevel('eid')
         datasets = util.filter_datasets(
-            datasets.droplevel('eid'), assert_unique=False, wildcards=self.wildcards, **filters)
+            datasets, assert_unique=False, wildcards=self.wildcards, **filters)
         # Return only the relative path
         return datasets if details else datasets['rel_path'].sort_values().values.tolist()
 
diff --git a/one/tests/test_one.py b/one/tests/test_one.py
index dd15fd38..974f7a53 100644
--- a/one/tests/test_one.py
+++ b/one/tests/test_one.py
@@ -1448,6 +1448,11 @@ def test_list_datasets(self):
         self.assertEqual(183, len(dsets))  # this may change after a BWM release or patch
         self.assertEqual(1, dsets.index.nlevels, 'details data frame should be without eid index')
 
+        # Test keep_eid_index
+        dsets = self.one.list_datasets(
+            self.eid, details=True, query_type='remote', keep_eid_index=True)
+        self.assertEqual(2, dsets.index.nlevels, 'details data frame should be with eid index')
+
         # Test missing eid
         dsets = self.one.list_datasets('FMR019/2021-03-18/008', details=True, query_type='remote')
         self.assertIsInstance(dsets, pd.DataFrame)
diff --git a/one/webclient.py b/one/webclient.py
index d6b749d7..7bfe6fa0 100644
--- a/one/webclient.py
+++ b/one/webclient.py
@@ -589,7 +589,7 @@ def _generic_request(self, reqfunction, rest_query, data=None, files=None):
         rest_query = rest_query.replace(self.base_url, '')
         if not rest_query.startswith('/'):
             rest_query = '/' + rest_query
-        _logger.debug(f"{self.base_url + rest_query}, headers: {self._headers}")
+        _logger.debug(f'{self.base_url + rest_query}, headers: {self._headers}')
         headers = self._headers.copy()
         if files is None:
             data = json.dumps(data) if isinstance(data, dict) or isinstance(data, list) else data
@@ -678,8 +678,8 @@ def authenticate(self, username=None, password=None, cache_token=True, force=Fal
             rep = requests.post(self.base_url + '/auth-token', data=credentials)
         except requests.exceptions.ConnectionError:
             raise ConnectionError(
-                f"Can't connect to {self.base_url}.\n" +
-                "Check your internet connections and Alyx database firewall"
+                f'Can\'t connect to {self.base_url}.\n' +
+                'Check your internet connections and Alyx database firewall'
             )
         # Assign token or raise exception on auth error
         if rep.ok:
@@ -839,6 +839,7 @@ def download_cache_tables(self, source=None, destination=None):
 
     def _validate_file_url(self, url):
         """Asserts that URL matches HTTP_DATA_SERVER parameter.
+
         Currently only one remote HTTP server is supported for a given AlyxClient instance.
 
         If the URL contains only the relative path part, the full URL is returned.
@@ -1069,7 +1070,7 @@ def rest(self, url=None, action=None, id=None, data=None, files=None,
             if 'django' in kwargs.keys():
                 kwargs['django'] = kwargs['django'] + ','
             else:
-                kwargs['django'] = ""
+                kwargs['django'] = ''
             kwargs['django'] = f"{kwargs['django']}pk,{id}"
         # otherwise, look for a dictionary of filter terms
         if kwargs:
@@ -1135,7 +1136,7 @@ def json_field_write(
         # Prepare data to patch
         patch_dict = {field_name: data}
         # Upload new extended_qc to session
-        ret = self.rest(endpoint, "partial_update", id=uuid, data=patch_dict)
+        ret = self.rest(endpoint, 'partial_update', id=uuid, data=patch_dict)
         return ret[field_name]
 
     def json_field_update(
@@ -1181,7 +1182,7 @@ def json_field_update(
 
         if not isinstance(current, dict):
             _logger.warning(
-                f'Current json field {field_name} does not contains a dict, aborting update'
+                f'Current json field "{field_name}" does not contain a dict, aborting update'
             )
             return current
 
@@ -1236,7 +1237,7 @@ def json_field_remove_key(
                 f'{key}: Key not found in endpoint {endpoint} field {field_name}'
            )
             return current
-        _logger.info(f"Removing key from dict: '{key}'")
+        _logger.info(f'Removing key from dict: "{key}"')
         current.pop(key)
         # Re-write contents without removed key
         written = self.json_field_write(
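
Note on the one/api.py hotfix above: the following is a minimal sketch, not part
of the patch, of the behaviour the new keep_eid_index test pins down. The Alyx
URL and the eid are placeholders for any configured database and valid session.

    from one.api import ONE

    # Placeholder database URL; any configured Alyx instance works
    one = ONE(base_url='https://openalyx.internationalbrainlab.org')
    eid = '...'  # placeholder: any valid session UUID

    # Default: once a single session is selected, the now-redundant eid index
    # level is dropped and the frame is indexed by dataset id alone
    dsets = one.list_datasets(eid, details=True, query_type='remote')
    assert dsets.index.nlevels == 1

    # With keep_eid_index=True the (eid, id) MultiIndex is preserved, so remote
    # queries now honour the flag consistently instead of always dropping the level
    dsets = one.list_datasets(
        eid, details=True, query_type='remote', keep_eid_index=True)
    assert dsets.index.nlevels == 2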