diff --git a/deploy/serverpc/crontab/large_jobs.py b/deploy/serverpc/crontab/large_jobs.py
index c88e969..0d69b0c 100644
--- a/deploy/serverpc/crontab/large_jobs.py
+++ b/deploy/serverpc/crontab/large_jobs.py
@@ -2,6 +2,7 @@
 import time
 import logging
 from pathlib import Path
+import multiprocessing
 
 from one.api import ONE
 from ibllib.pipes.local_server import task_queue
@@ -11,21 +12,28 @@
 subjects_path = Path('/mnt/s0/Data/Subjects/')
 sleep_time = 3600
 
-try:
-    one = ONE(cache_rest=None)
-    waiting_tasks = task_queue(mode='large', lab=None, alyx=one.alyx)
-    if len(waiting_tasks) == 0:
-        _logger.info(f'No large tasks in the queue, retrying in {int(sleep_time / 60)} min')
-        # Query again only in 60min if queue is empty
-        time.sleep(sleep_time)
-    else:
-        tdict = waiting_tasks[0]
-        _logger.info(f"Running task {tdict['name']} for session {tdict['session']}")
-        ses = one.alyx.rest('sessions', 'list', django=f"pk,{tdict['session']}")[0]
-        session_path = Path(subjects_path).joinpath(
-            Path(ses['subject'], ses['start_time'][:10], str(ses['number']).zfill(3)))
-        run_alyx_task(tdict=tdict, session_path=session_path, one=one)
-except Exception:
-    _logger.error(f'Error running large task queue \n {traceback.format_exc()}')
-    time.sleep(int(sleep_time / 2))
+def run_large():
+    try:
+        one = ONE(cache_rest=None)
+        waiting_tasks = task_queue(mode='large', lab=None, alyx=one.alyx)
+
+        if len(waiting_tasks) == 0:
+            _logger.info(f"No large tasks in the queue, retrying in {int(sleep_time / 60)} min")
+            # Query again only in 60min if queue is empty
+            time.sleep(sleep_time)
+        else:
+            tdict = waiting_tasks[0]
+            _logger.info(f"Running task {tdict['name']} for session {tdict['session']}")
+            ses = one.alyx.rest('sessions', 'list', django=f"pk,{tdict['session']}")[0]
+            session_path = Path(subjects_path).joinpath(
+                Path(ses['subject'], ses['start_time'][:10], str(ses['number']).zfill(3)))
+            run_alyx_task(tdict=tdict, session_path=session_path, one=one)
+    except BaseException:
+        _logger.error(f"Error running large task queue \n {traceback.format_exc()}")
+        time.sleep(int(sleep_time / 2))
+
+
+if __name__ == '__main__':
+    multiprocessing.freeze_support()
+    run_large()
 
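
For context, the patch above moves module-level cron logic into a run_large() function guarded by if __name__ == '__main__'. The sketch below (not part of the patch) illustrates that pattern with a hypothetical poll_once() standing in for the task-queue code: the module can now be imported or spawned without immediately running the job, and multiprocessing.freeze_support() is a no-op under a normal interpreter, only taking effect when the script is frozen into an executable whose child processes re-import the main module.

import multiprocessing
import time


def poll_once():
    # Hypothetical stand-in for the task-queue polling done in large_jobs.py.
    print('checking queue ...')
    time.sleep(1)


if __name__ == '__main__':
    # No-op under a normal interpreter; only matters for frozen executables
    # that spawn worker processes via multiprocessing.
    multiprocessing.freeze_support()
    poll_once()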