diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
index b34e083f..42db4f23 100644
--- a/.pre-commit-config.yaml
+++ b/.pre-commit-config.yaml
@@ -1,6 +1,6 @@
 repos:
   - repo: https://github.com/pre-commit/pre-commit-hooks
-    rev: v4.5.0
+    rev: v4.6.0
     hooks:
       - id: check-added-large-files
       - id: check-case-conflict
@@ -10,7 +10,7 @@ repos:
       - id: trailing-whitespace
         files: \.py$
   - repo: https://github.com/astral-sh/ruff-pre-commit
-    rev: v0.3.5
+    rev: v0.5.0
     hooks:
       - id: ruff
         args: [--fix]
diff --git a/nazurin/models/file.py b/nazurin/models/file.py
index c88fbc73..2467f46c 100644
--- a/nazurin/models/file.py
+++ b/nazurin/models/file.py
@@ -57,12 +57,10 @@ async def size(self) -> Optional[int]:
         return None

     async def exists(self) -> bool:
-        if (
+        return (
             os.path.exists(self.path)
             and (await aiofiles.os.stat(self.path)).st_size != 0
-        ):
-            return True
-        return False
+        )

     @network_retry
     async def download(self, session: NazurinRequestSession) -> Optional[int]:
diff --git a/nazurin/storage/googledrive.py b/nazurin/storage/googledrive.py
index 42b4eeae..fd26785a 100644
--- a/nazurin/storage/googledrive.py
+++ b/nazurin/storage/googledrive.py
@@ -76,9 +76,7 @@ async def store(self, files: List[File]):
         tasks = [self.create_folders(destination) for destination in destinations]
         logger.info("Creating folders: {}", destinations)
         folder_ids = await asyncio.gather(*tasks)
-        folders = {}
-        for destination, folder_id in zip(destinations, folder_ids):
-            folders[destination] = folder_id
+        folders = dict(zip(destinations, folder_ids))

         tasks = [self.upload(item, folders) for item in files]
         await run_in_pool(tasks, MAX_PARALLEL_UPLOAD)
diff --git a/nazurin/storage/mega.py b/nazurin/storage/mega.py
index 035f5c3a..df93f2ea 100644
--- a/nazurin/storage/mega.py
+++ b/nazurin/storage/mega.py
@@ -92,9 +92,7 @@ async def store(self, files: List[File]):
         tasks = [self.ensure_existence(destination) for destination in destinations]
         logger.info("Creating folders: {}", destinations)
         folder_ids = await asyncio.gather(*tasks)
-        folders = {}
-        for destination, folder_id in zip(destinations, folder_ids):
-            folders[destination] = folder_id
+        folders = dict(zip(destinations, folder_ids))

         tasks = [self.upload(file, folders) for file in files]
         await run_in_pool(tasks, MAX_PARALLEL_UPLOAD)
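For reference, a minimal standalone sketch (not part of the patch; the helper names below are illustrative, and it assumes aiofiles is installed) showing that the dict(zip(...)) one-liner used in both storage backends builds the same mapping as the removed loop, and that the simplified exists() check returns the same boolean as the old if/return True/return False block:

import os

import aiofiles.os


def build_folder_map(destinations, folder_ids):
    # Equivalent to the removed loop:
    #   folders = {}
    #   for destination, folder_id in zip(destinations, folder_ids):
    #       folders[destination] = folder_id
    return dict(zip(destinations, folder_ids))


async def exists(path: str) -> bool:
    # Same truth table as before: True only when the path exists
    # and the file is non-empty.
    return os.path.exists(path) and (await aiofiles.os.stat(path)).st_size != 0


assert build_folder_map(["a", "b"], [1, 2]) == {"a": 1, "b": 2}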