Storage/S3: Add event file import handler
ThiefMaster committed Nov 11, 2024
1 parent 1794b8b commit 1dad919
Showing 4 changed files with 67 additions and 5 deletions.
5 changes: 5 additions & 0 deletions storage_s3/README.md
@@ -15,6 +15,11 @@ to local storage (but it would of course be possible to write a script for this)
 
 ## Changelog
 
+### 3.3.1
+
+- Adapt to Indico 3.3.5 changes
+- Add file import handler for event export/import operations that require copying many files on S3
+
 ### 3.3
 
 - Support (and require) Python 3.12
57 changes: 57 additions & 0 deletions storage_s3/indico_storage_s3/import_event_files.py
@@ -0,0 +1,57 @@
# This file is part of the Indico plugins.
# Copyright (C) 2002 - 2024 CERN
#
# The Indico plugins are free software; you can redistribute
# them and/or modify them under the terms of the MIT License;
# see the LICENSE file for more details.

# XXX: This module is not as unused as it may appear. It can be referenced in
# the `indico event import-files` command when all files are on S3 and one wants
# to do a fast copy using rclone

import subprocess
import sys

import click

from indico.core.storage.backend import Storage, get_storage
from indico.util.console import verbose_iterator

from indico_storage_s3.storage import S3StorageBase


def copy_files(mapping, backends):
    target_backend_names = list({x[1][0] for x in mapping})
    assert len(target_backend_names) == 1
    target_backend: Storage = get_storage(target_backend_names[0])
    click.echo(f'Target backend: {target_backend_names[0]} ({target_backend.name})')
    if not isinstance(target_backend, S3StorageBase):
        click.echo('Target backend is not S3; aborting')
        sys.exit(1)
    if not all(isinstance(x, S3StorageBase) for x in backends.values()):
        click.echo('Not all source backends are S3; aborting')
        sys.exit(1)
    rclone_remote_name = click.prompt('rclone remote name')
    mapping.sort()
    iterator = verbose_iterator(mapping, len(mapping), get_title=lambda x: x[1][1], print_every=1,
                                print_total_time=True)
    for (source_backend_name, source_file_id), (__, target_file_id) in iterator:
        source_backend: S3StorageBase = backends[source_backend_name]
        _run_rclone(
            rclone_remote_name,
            '/'.join(source_backend._parse_file_id(source_file_id)),
            '/'.join(target_backend._parse_file_id(target_file_id)),
        )


def _run_rclone(remote, src, dst):
    subprocess.check_call(
        [
            'rclone',
            '--s3-no-check-bucket',
            '--s3-acl=bucket-owner-full-control',
            'copyto',
            f'{remote}:{src}',
            f'{remote}:{dst}',
        ]
    )
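
A minimal usage sketch (not part of the commit) may help clarify the data shapes `copy_files` expects: each mapping entry pairs (source_backend_name, source_file_id) with (target_backend_name, target_file_id), all targets must use a single S3 backend, and every source backend must be S3 as well. The backend names and file IDs below are hypothetical, and rclone must be installed with a remote configured for the S3 endpoint (the function prompts for its name).

# Illustrative sketch only; backend names and file IDs are made up.
from indico.core.storage.backend import get_storage

from indico_storage_s3.import_event_files import copy_files

# ((source_backend_name, source_file_id), (target_backend_name, target_file_id))
mapping = [
    (('s3-archive', 'event/1/talk.pdf'), ('s3-live', 'event/42/talk.pdf')),
    (('s3-archive', 'event/1/slides.pptx'), ('s3-live', 'event/42/slides.pptx')),
]
# Source backends, keyed by name; copy_files() verifies they are all S3.
backends = {'s3-archive': get_storage('s3-archive')}

# Prompts for the rclone remote name, then runs one `rclone copyto`
# per file using the bucket/key derived from each backend's file ID.
copy_files(mapping, backends)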
8 changes: 4 additions & 4 deletions storage_s3/indico_storage_s3/storage.py
@@ -204,10 +204,10 @@ def _get_current_bucket_name(self):
     def _parse_file_id(self, file_id):
         return self.bucket_name, file_id
 
-    def save(self, name, content_type, filename, fileobj):
+    def save(self, name, content_type, filename, fileobj, *, dry_run=False):
         try:
             bucket = self._get_current_bucket_name()
-            checksum = self._save(bucket, name, content_type, fileobj)
+            checksum = self._save(bucket, name, content_type, fileobj) if not dry_run else None
             return name, checksum
         except Exception as exc:
             raise StorageError(f'Could not save "{name}": {exc}') from exc
@@ -250,10 +250,10 @@ def _replace_bucket_placeholders(self, name, date):
         name = name.replace('<month>', date.strftime('%m'))
         return name.replace('<week>', date.strftime('%W'))
 
-    def save(self, name, content_type, filename, fileobj):
+    def save(self, name, content_type, filename, fileobj, *, dry_run=False):
         try:
             bucket = self._get_current_bucket_name()
-            checksum = self._save(bucket, name, content_type, fileobj)
+            checksum = self._save(bucket, name, content_type, fileobj) if not dry_run else None
             file_id = f'{bucket}//{name}'
             return file_id, checksum
         except Exception as exc:
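
For context, the new dry_run flag lets callers obtain the file ID that a save would produce without uploading anything; the checksum then comes back as None. A small sketch of the intended effect, assuming an S3 backend registered under the hypothetical name 's3':

# Illustrative sketch only; the backend name and file are hypothetical.
from io import BytesIO

from indico.core.storage.backend import get_storage

storage = get_storage('s3')  # assumed to resolve to one of the S3 backends above

# Regular save: uploads the object and returns (file_id, checksum).
file_id, checksum = storage.save('event/42/talk.pdf', 'application/pdf',
                                 'talk.pdf', BytesIO(b'%PDF-...'))

# Dry run: skips the upload; the same file_id is returned, but the
# checksum is None. This lets an import register file metadata while
# the objects themselves are copied out of band (e.g. with the
# rclone-based handler in import_event_files.py).
file_id, checksum = storage.save('event/42/talk.pdf', 'application/pdf',
                                 'talk.pdf', BytesIO(b'%PDF-...'), dry_run=True)
assert checksum is None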
2 changes: 1 addition & 1 deletion storage_s3/pyproject.toml
@@ -2,7 +2,7 @@
 name = 'indico-plugin-storage-s3'
 description = 'S3 storage backend for Indico'
 readme = 'README.md'
-version = '3.3'
+version = '3.3.1'
 license = 'MIT'
 authors = [{ name = 'Indico Team', email = '[email protected]' }]
 classifiers = [
