-
Notifications
You must be signed in to change notification settings - Fork 22
Commit
This commit does not belong to any branch on this repository, and may belong to a fork outside of the repository.
Merge pull request #423 from int-brain-lab/hotfix/6.6.3
Hotfix/6.6.3
- Loading branch information
Showing
17 changed files
with
233 additions
and
94 deletions.
There are no files selected for viewing
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Loading
Sorry, something went wrong. Reload?
Sorry, we cannot display this file.
Sorry, this file is invalid so it cannot be displayed.
Loading
Sorry, something went wrong. Reload?
Sorry, we cannot display this file.
Sorry, this file is invalid so it cannot be displayed.
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Empty file.
Empty file.
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -1,87 +1,82 @@ | ||
#!/usr/bin/env python | ||
# @Author: Niccolò Bonacchi | ||
# @Creation_Date: Thursday, March 28th 2019, 7:53:44 pm | ||
# @Editor: Michele Fabbri | ||
# @Edit_Date: 2022-02-01 | ||
""" | ||
Purge data from RIG | ||
- Find all files by rglob | ||
- Find all sessions of the found files | ||
- Check Alyx if corresponding datasetTypes have been registered as existing | ||
sessions and files on Flatiron | ||
- Delete local raw file if found on Flatiron | ||
- looks for datasets matching filename pattern | ||
- datasets that exist in ONE cache are removed | ||
""" | ||
import argparse | ||
import logging | ||
from fnmatch import fnmatch | ||
from pathlib import Path | ||
|
||
import one | ||
from one.alf.files import get_session_path | ||
from one.api import ONE | ||
|
||
log = logging.getLogger("iblrig") | ||
log = logging.getLogger('iblrig') | ||
|
||
try:  # Verify ONE-api is at v1.13.0 or greater
    assert tuple(map(int, one.__version__.split('.'))) >= (1, 13, 0)
    from one.alf.cache import iter_datasets, iter_sessions
except (AssertionError, ImportError) as e:
    # BUG FIX: the original `if e is AssertionError:` compared the exception
    # *instance* to the exception *class*, which is always False, so the
    # upgrade hint was never logged.  `isinstance` is the correct check.
    if isinstance(e, AssertionError):
        log.error("The found version of ONE needs to be updated to run this script, please run a 'pip install -U ONE-api' from "
                  "the appropriate anaconda environment")
    raise
|
||
def session_name(path, lab=None) -> str:
    """
    Return the session name (subject/date/number) string for a given session path.

    If *lab* is given, the name is prefixed as lab/Subjects/subject/date/number.
    """
    parts = get_session_path(path).parts[-3:]
    prefix = f'{lab}/Subjects/' if lab else ''
    return prefix + '/'.join(parts)
|
||
def local_alf_paths(root_dir, filename):
    """Yield (session_path, dataset) pairs for local ALF datasets matching *filename*.

    Walks every session folder under *root_dir* and yields each dataset whose
    relative path matches the *filename* glob pattern (fnmatch semantics).
    """
    sessions = iter_sessions(root_dir)
    for session_path in sessions:
        datasets = (ds for ds in iter_datasets(session_path) if fnmatch(ds, filename))
        for dataset in datasets:
            yield session_path, dataset
||
def purge_local_data(local_folder, filename='*', lab=None, dry=False, one=None):
    """
    Delete local raw data files that are already registered in the ONE cache.

    Scans *local_folder* for ALF datasets matching the *filename* glob pattern,
    resolves each parent session to an Alyx experiment id, and removes the local
    file when a matching dataset exists remotely.

    :param local_folder: root iblrig_data folder to scan
    :param filename: glob pattern of dataset file names to consider (default: all)
    :param lab: lab name, in case session names conflict between labs (optional)
    :param dry: if True, only log what would be deleted without unlinking
    :param one: an existing ONE instance to reuse; a new one is created if None
    :return: list of local file Paths selected for removal
    """
    # Figure out datasetType from filename or file path
    local_folder = Path(local_folder)

    # Get matching files that exist in ONE cache
    to_remove = []
    one = one or ONE()
    for session_path, dataset in local_alf_paths(local_folder, filename):
        session = session_name(session_path, lab=lab)
        eid = one.to_eid(session)
        if not eid:
            # Session not registered on Alyx; keep the local copy.
            continue
        matching = one.list_datasets(eid, dataset.as_posix())
        if not matching:
            continue
        # An exact relative dataset path should identify exactly one remote
        # dataset.  FIX: raise explicitly instead of a bare `assert`, which is
        # silently stripped under `python -O`; the exception type is unchanged.
        if len(matching) != 1:
            raise AssertionError(
                f'Expected exactly one remote dataset for {dataset.as_posix()} '
                f'in session {session}; found {len(matching)}')
        to_remove.append(local_folder.joinpath(session_path, dataset))

    log.info(f'Local files to remove: {len(to_remove)}')
    for f in to_remove:
        log.info(f'DELETE: {f}')
        if not dry:
            f.unlink()
    return to_remove
||
if __name__ == '__main__':
    # Command-line entry point: purge registered raw files from a rig data folder.
    ap = argparse.ArgumentParser(description='Delete files from rig')
    ap.add_argument('folder', help='Local iblrig_data folder')
    ap.add_argument('file', help='File name to search and destroy for every session')
    ap.add_argument('-lab', required=False, default=None,
                    help='Lab name, in case sessions conflict between labs. default: None')
    ap.add_argument('--dry', required=False, default=False, action='store_true',
                    help='Dry run? default: False')
    parsed = ap.parse_args()
    purge_local_data(parsed.folder, parsed.file, lab=parsed.lab, dry=parsed.dry)
    print('Done\n')
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,6 @@ | ||
# Connection parameters for the public OpenAlyx database.
# NOTE(review): these look like the publicly documented open-access test
# credentials for the IBL OpenAlyx server rather than a secret — confirm
# before shipping; real credentials must never be committed in source.
OPENALYX_PARAMETERS = {
    "base_url": "https://openalyx.internationalbrainlab.org",
    "username": "intbrainlab",
    "password": "international",
    "silent": True
}
Oops, something went wrong.