Add ability to restore schain from local snapshot #738

Merged: 14 commits, Sep 25, 2023
17 changes: 15 additions & 2 deletions node_cli/cli/schains.py
@@ -25,6 +25,7 @@
from node_cli.core.schains import (
    describe,
    get_schain_firewall_rules,
    restore_schain_from_snapshot,
    show_config,
    show_dkg_info,
    show_schains,
@@ -43,8 +44,13 @@ def schains() -> None:


@schains.command(help="List of sChains served by connected node")
def ls() -> None:
    show_schains()
@click.option(
    '--names',
    help='Shows only chain names',
    is_flag=True
)
def ls(names) -> None:
    show_schains(only_names=names)


@schains.command(help="DKG statuses for each sChain on the node")
@@ -95,3 +101,10 @@ def repair(schain_name: str, snapshot_from: Optional[str] = None) -> None:
)
def info_(schain_name: str, json_format: bool) -> None:
    describe(schain_name, raw=json_format)


@schains.command('restore', help='Restore schain from local snapshot')
@click.argument('schain_name')
@click.argument('snapshot_path')
def restore(schain_name: str, snapshot_path: str) -> None:
    restore_schain_from_snapshot(schain_name, snapshot_path)
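The new CLI surface can be exercised through Click's test runner. A minimal sketch, assuming a configured node with the node API reachable; the chain name and snapshot path are placeholders, not values from this PR:

from click.testing import CliRunner

from node_cli.cli.schains import schains

runner = CliRunner()

# New --names flag: print one chain name per line instead of the full table.
result = runner.invoke(schains, ['ls', '--names'])
print(result.output)

# New restore command: wires straight to restore_schain_from_snapshot().
result = runner.invoke(
    schains, ['restore', 'my-chain', '/data/my-chain-snapshot-1512000.tar'])
print(result.exit_code)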
4 changes: 3 additions & 1 deletion node_cli/configs/__init__.py
@@ -41,6 +41,7 @@
SKALE_TMP_DIR = os.path.join(SKALE_DIR, '.tmp')

NODE_DATA_PATH = os.path.join(SKALE_DIR, 'node_data')
NODE_CONFIG_PATH = os.path.join(NODE_DATA_PATH, 'node_config.json')
CONTAINER_CONFIG_PATH = os.path.join(SKALE_DIR, 'config')
CONTAINER_CONFIG_TMP_PATH = os.path.join(SKALE_TMP_DIR, 'config')
CONTRACTS_PATH = os.path.join(SKALE_DIR, 'contracts_info')
@@ -52,7 +53,8 @@
SGX_CERTIFICATES_DIR_NAME = 'sgx_certs'

COMPOSE_PATH = os.path.join(CONTAINER_CONFIG_PATH, 'docker-compose.yml')
STATIC_PARAMS_FILEPATH = os.path.join(CONTAINER_CONFIG_PATH, 'static_params.yaml')
STATIC_PARAMS_FILEPATH = os.path.join(
    CONTAINER_CONFIG_PATH, 'static_params.yaml')
NGINX_TEMPLATE_FILEPATH = os.path.join(CONTAINER_CONFIG_PATH, 'nginx.conf.j2')
NGINX_CONFIG_FILEPATH = os.path.join(NODE_DATA_PATH, 'nginx.conf')
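The new NODE_CONFIG_PATH constant points at node_data/node_config.json, which the restore flow later reads to find the local node id. A minimal sketch of that lookup, using the same read_json helper this PR imports in node_cli/core/schains.py (the file is assumed to contain at least a 'node_id' field):

from node_cli.configs import NODE_CONFIG_PATH
from node_cli.utils.helper import read_json

# node_config.json is expected to carry the local node id, as consumed
# by get_node_id() further down in this PR.
node_id = read_json(NODE_CONFIG_PATH)['node_id']
print(node_id)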

3 changes: 2 additions & 1 deletion node_cli/configs/env.py
@@ -35,7 +35,8 @@
    'DEFAULT_GAS_PRICE_WEI': '',
    'DISABLE_IMA': '',
    'SKIP_DOCKER_CONFIG': '',
    'SKIP_DOCKER_CLEANUP': ''
    'SKIP_DOCKER_CLEANUP': '',
    'NO_CONTAINERS': ''
}


26 changes: 15 additions & 11 deletions node_cli/core/node.py
@@ -357,26 +357,30 @@ def is_base_containers_alive():
    return len(skale_containers) >= BASE_CONTAINERS_AMOUNT


def get_node_info(format):
def get_node_info_plain():
    status, payload = get_request(
        blueprint=BLUEPRINT_NAME,
        method='info'
    )
    if status == 'ok':
        node_info = payload['node_info']
        if format == 'json':
            print(node_info)
        elif node_info['status'] == NodeStatuses.NOT_CREATED.value:
            print(TEXTS['service']['node_not_registered'])
        else:
            print_node_info(
                node_info,
                get_node_status(int(node_info['status']))
            )
        return payload['node_info']
    else:
        error_exit(payload, exit_code=CLIExitCodes.BAD_API_RESPONSE)


def get_node_info(format):
    node_info = get_node_info_plain()
    if format == 'json':
        print(node_info)
    elif node_info['status'] == NodeStatuses.NOT_CREATED.value:
        print(TEXTS['service']['node_not_registered'])
    else:
        print_node_info(
            node_info,
            get_node_status(int(node_info['status']))
        )


def get_node_status(status):
    node_status = NodeStatuses(status).name
    return TEXTS['node']['status'][node_status]
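The refactor splits data retrieval out of the printing logic, so the raw payload can be reused without going through the table view. A minimal sketch of calling the new helper directly, assuming the node API is reachable; the JSON pretty-printing is illustrative:

import json

from node_cli.core.node import get_node_info_plain

# Returns the raw node_info dict, or exits with BAD_API_RESPONSE on failure.
info = get_node_info_plain()
print(json.dumps(info, indent=2))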
93 changes: 90 additions & 3 deletions node_cli/core/schains.py
@@ -1,8 +1,12 @@
import logging
import os
import pprint
import shutil
from pathlib import Path

from typing import Optional
from typing import Dict, Optional

from node_cli.configs import NODE_CONFIG_PATH
from node_cli.utils.helper import get_request, post_request, error_exit
from node_cli.utils.exit_codes import CLIExitCodes
from node_cli.utils.print_formatters import (
@@ -11,6 +15,8 @@
    print_schain_info,
    print_schains
)
from node_cli.utils.helper import read_json, run_cmd
from lvmpy.src.core import mount, volume_mountpoint


logger = logging.getLogger(__name__)
@@ -30,7 +36,7 @@ def get_schain_firewall_rules(schain: str) -> None:
        error_exit(payload, exit_code=CLIExitCodes.BAD_API_RESPONSE)


def show_schains() -> None:
def show_schains(only_names=False) -> None:
    status, payload = get_request(
        blueprint=BLUEPRINT_NAME,
        method='list'
@@ -40,7 +46,11 @@ def show_schains() -> None:
        if not schains:
            print('No sChains found')
            return
        print_schains(schains)
        if only_names:
            names = [schain['name'] for schain in schains]
            print('\n'.join(names))
        else:
            print_schains(schains)
    else:
        error_exit(payload, exit_code=CLIExitCodes.BAD_API_RESPONSE)

@@ -98,3 +108,80 @@ def describe(schain: str, raw=False) -> None:
        print_schain_info(payload, raw=raw)
    else:
        error_exit(payload, exit_code=CLIExitCodes.BAD_API_RESPONSE)


def btrfs_set_readonly_false(subvolume_path: str) -> None:
    run_cmd(['btrfs', 'property', 'set', '-ts', subvolume_path, 'ro', 'false'])


def btrfs_receive_binary(src_path: str, binary_path: str) -> None:
    run_cmd(['btrfs', 'receive', '-f', binary_path, src_path])


def get_block_number_from_path(snapshot_path: str) -> int:
    stem = Path(snapshot_path).stem
    bn = -1
    try:
        bn = int(stem.split('-')[-1])
    except ValueError:
        return -1
    return bn


def get_node_config() -> Dict:
    return read_json(NODE_CONFIG_PATH)


def get_node_id() -> int:
    info = get_node_config()
    return info['node_id']


def migrate_prices_and_blocks(path: str, node_id: int) -> None:
    db_suffix = '.db'
    for sname in os.listdir(path):
        subvolume_path = os.path.join(path, sname)
        logger.debug('Processing %s', sname)
        btrfs_set_readonly_false(subvolume_path)
        if sname.endswith(db_suffix):
            subvolume_path = os.path.join(path, sname)
            dbname = sname.split('_')[0]
            new_path = os.path.join(path, f'{dbname}_{node_id}{db_suffix}')
            logger.debug('New path for %s %s', sname, new_path)
            shutil.move(subvolume_path, new_path)


def make_btrfs_snapshot(src: str, dst: str) -> None:
    run_cmd(['btrfs', 'subvolume', 'snapshot', src, dst])


def fillin_snapshot_folder(src_path: str, block_number: int) -> None:
    snapshots_dirname = 'snapshots'
    snapshot_folder_path = os.path.join(
        src_path, snapshots_dirname, str(block_number))
    os.makedirs(snapshot_folder_path, exist_ok=True)
    for subvolume in os.listdir(src_path):
        if subvolume != snapshots_dirname:
            logger.debug('Copying %s to %s', subvolume, snapshot_folder_path)
            subvolume_path = os.path.join(src_path, subvolume)
            subvolume_snapshot_path = os.path.join(
                snapshot_folder_path, subvolume)
            make_btrfs_snapshot(subvolume_path, subvolume_snapshot_path)


def restore_schain_from_snapshot(schain: str, snapshot_path: str) -> None:
    block_number = get_block_number_from_path(snapshot_path)
    if block_number == -1:
        logger.error('Invalid snapshot path format')
        return
    node_id = get_node_id()

    mount(schain)
    src_path = volume_mountpoint(schain)
    logger.info('Unpacking binary')
    btrfs_receive_binary(src_path, snapshot_path)
    logger.info('Migrating subvolumes')
    migrate_prices_and_blocks(src_path, node_id)
    logger.info('Recreating snapshot folder')
    fillin_snapshot_folder(src_path, block_number)
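Two conventions in the helpers above are easy to miss: the block number comes from the last dash-separated token of the snapshot file stem, and per-node database subvolumes are renamed to the local node id. A short sketch of both; the file names and ids are illustrative assumptions, not values from the PR:

from pathlib import Path

# get_block_number_from_path() parses the block number from the file stem,
# so a snapshot named like this yields 1512000:
snapshot_path = '/data/my-chain-snapshot-1512000.tar'
print(int(Path(snapshot_path).stem.split('-')[-1]))  # 1512000

# migrate_prices_and_blocks() renames '*.db' subvolumes to use the local
# node id, e.g. with node_id = 7:
#   prices_3.db -> prices_7.db
#   blocks_3.db -> blocks_7.db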
8 changes: 4 additions & 4 deletions node_cli/main.py
@@ -40,6 +40,7 @@
from node_cli.utils.helper import safe_load_texts, init_default_logger
from node_cli.configs import LONG_LINE
from node_cli.core.host import init_logs_dir
from node_cli.utils.helper import error_exit

TEXTS = safe_load_texts()

@@ -109,8 +110,7 @@ def handle_exception(exc_type, exc_value, exc_traceback):
    try:
        cmd_collection()
    except Exception as err:
        print(f'Command execution failed with {err}. Recheck your inputs')
        traceback.print_exc()
        logger.exception(f'Command failed with {err}')
        error_exit(err)
    finally:
        logger.debug(f'execution time: {time.time() - start_time} seconds')
        logger.debug('Execution time: %d seconds', time.time() - start_time)
1 change: 1 addition & 0 deletions node_cli/utils/docker_utils.py
@@ -222,6 +222,7 @@ def get_up_compose_cmd(services):

def compose_up(env):
    logger.info('Running base set of containers')
    logger.debug('ENV for docker-compose.yml %s', env)

    if 'SGX_CERTIFICATES_DIR_NAME' not in env:
        env['SGX_CERTIFICATES_DIR_NAME'] = SGX_CERTIFICATES_DIR_NAME