diff --git a/.github/workflows/publish.yml b/.github/workflows/publish.yml
index 41e6a727..bd164016 100644
--- a/.github/workflows/publish.yml
+++ b/.github/workflows/publish.yml
@@ -7,6 +7,7 @@ on:
- develop
- beta
- stable
+ - v*.*.*
jobs:
create_release:
@@ -29,9 +30,11 @@ jobs:
run: |
sudo apt-get update
sudo apt-get install python-setuptools
+
- name: Set Versions
run: |
bash ./scripts/set_versions_ga.sh
+
- name: Set release
run: |
if [[ "$BRANCH" == "stable" ]]; then
@@ -56,7 +59,8 @@ jobs:
run: |
echo "::set-output name=version::$VERSION"
echo "::set-output name=branch::$BRANCH"
- build_and_publish:
+
+ build_and_publish_normal:
if: github.event.pull_request.merged
needs: create_release
name: Build and publish for ${{ matrix.os }}
@@ -81,11 +85,11 @@ jobs:
- name: Checkout submodules
run: git submodule update --init
- - name: Build binary
+ - name: Build normal binary
run: |
- mkdir ./dist
+ mkdir -p ./dist
docker build . -t node-cli-builder
- docker run -v /home/ubuntu/dist:/app/dist node-cli-builder scripts/build.sh ${{ needs.create_release.outputs.version }} ${{ needs.create_release.outputs.branch }}
+ docker run -v /home/ubuntu/dist:/app/dist node-cli-builder scripts/build.sh ${{ needs.create_release.outputs.version }} ${{ needs.create_release.outputs.branch }} normal
ls -altr /home/ubuntu/dist/
docker rm -f $(docker ps -aq)
@@ -114,3 +118,62 @@ jobs:
asset_path: /home/ubuntu/dist/sha512sum
asset_name: ${{ matrix.asset_name }}.sha512
asset_content_type: text/plain
+
+ build_and_publish_sync:
+ if: github.event.pull_request.merged
+ needs: create_release
+ name: Build and publish for ${{ matrix.os }}
+ runs-on: ${{ matrix.os }}
+ strategy:
+ matrix:
+ include:
+ - os: ubuntu-20.04
+ asset_name: skale-${{ needs.create_release.outputs.version }}-Linux-x86_64-sync
+ steps:
+ - uses: actions/checkout@v2
+ - name: Set up Python 3.11
+ uses: actions/setup-python@v1
+ with:
+ python-version: 3.11
+
+ - name: Install ubuntu dependencies
+ if: matrix.os == 'ubuntu-20.04'
+ run: |
+ sudo apt-get update
+
+ - name: Checkout submodules
+ run: git submodule update --init
+
+ - name: Build sync release binary
+ run: |
+ mkdir -p ./dist
+ docker build . -t node-cli-builder
+ docker run -v /home/ubuntu/dist:/app/dist node-cli-builder scripts/build.sh ${{ needs.create_release.outputs.version }} ${{ needs.create_release.outputs.branch }} sync
+ ls -altr /home/ubuntu/dist/
+ docker rm -f $(docker ps -aq)
+
+ - name: Save sha512sum
+ run: |
+ sudo sha512sum /home/ubuntu/dist/${{ matrix.asset_name }} | sudo tee > /dev/null /home/ubuntu/dist/sha512sum
+
+ - name: Upload release sync CLI
+ id: upload-sync-release-asset
+ uses: actions/upload-release-asset@v1
+ env:
+ GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+ with:
+ upload_url: ${{ needs.create_release.outputs.upload_url }}
+ asset_path: /home/ubuntu/dist/${{ matrix.asset_name }}
+ asset_name: ${{ matrix.asset_name }}
+ asset_content_type: application/octet-stream
+
+ - name: Upload release sync CLI checksum
+ id: upload-sync-release-checksum
+ uses: actions/upload-release-asset@v1
+ env:
+ GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+ with:
+ upload_url: ${{ needs.create_release.outputs.upload_url }}
+ asset_path: /home/ubuntu/dist/sha512sum
+ asset_name: ${{ matrix.asset_name }}.sha512
+ asset_content_type: text/plain
diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml
index 3e45e8d8..0b1e24ef 100644
--- a/.github/workflows/test.yml
+++ b/.github/workflows/test.yml
@@ -36,22 +36,38 @@ jobs:
run: |
flake8 .
- - name: Build binary in Ubuntu 18.04 environment
+ - name: Build binary in Ubuntu 18.04 environment - normal
run: |
- mkdir ./dist
+ mkdir -p ./dist
docker build . -t node-cli-builder
- docker run -v /home/ubuntu/dist:/app/dist node-cli-builder scripts/build.sh test test
+ docker run -v /home/ubuntu/dist:/app/dist node-cli-builder scripts/build.sh test test normal
docker rm -f $(docker ps -aq)
- - name: Check build
+ - name: Check build - normal
run: sudo /home/ubuntu/dist/skale-test-Linux-x86_64
- - name: Build binary in Ubuntu 20.04 environment
+ - name: Build binary in Ubuntu 20.04 environment - normal
run: |
- scripts/build.sh test test
+ scripts/build.sh test test normal
- - name: Check build
+ - name: Check build - normal
run: sudo /home/ubuntu/dist/skale-test-Linux-x86_64
+ - name: Build sync binary in Ubuntu 18.04 environment
+ run: |
+ mkdir -p ./dist
+ docker build . -t node-cli-builder
+ docker run -v /home/ubuntu/dist:/app/dist node-cli-builder scripts/build.sh test test sync
+ docker rm -f $(docker ps -aq)
+
+ - name: Check build - sync
+ run: sudo /home/ubuntu/dist/skale-test-Linux-x86_64-sync
+
+ - name: Build sync binary in Ubuntu 20.04 environment
+ run: |
+ scripts/build.sh test test sync
+
+ - name: Check build - sync
+ run: sudo /home/ubuntu/dist/skale-test-Linux-x86_64-sync
- name: Run tests
run: bash ./scripts/run_tests.sh
diff --git a/.gitignore b/.gitignore
index 707381fd..de02cf68 100644
--- a/.gitignore
+++ b/.gitignore
@@ -116,4 +116,7 @@ meta.json
disk_mountpoint.txt
sgx_server_url.txt
resource_allocation.json
-conf.json
\ No newline at end of file
+conf.json
+test-env
+
+nginx.conf
\ No newline at end of file
diff --git a/README.md b/README.md
index 0d9e31e3..e178c238 100644
--- a/README.md
+++ b/README.md
@@ -19,8 +19,11 @@ SKALE Node CLI, part of the SKALE suite of validator tools, is the command line
2.7 [Logs](#logs-commands)
2.8 [Resources allocation](#resources-allocation-commands)
2.9 [Validate](#validate-commands)
-3. [Exit codes](#exit-codes)
-4. [Development](#development)
+3. [Sync CLI usage](#sync-cli-usage)
+ 3.1 [Top level commands](#top-level-commands-sync)
+ 3.2 [Sync node commands](#sync-node-commands)
+4. [Exit codes](#exit-codes)
+5. [Development](#development)
## Installation
@@ -34,10 +37,10 @@ Ensure that the following package is installed: **docker**, **docker-compose** (
VERSION_NUM={put the version number here} && sudo -E bash -c "curl -L https://github.com/skalenetwork/node-cli/releases/download/$VERSION_NUM/skale-$VERSION_NUM-`uname -s`-`uname -m` > /usr/local/bin/skale"
```
-For versions `<1.1.0`:
+For Sync node version:
```shell
-VERSION_NUM=0.0.0 && sudo -E bash -c "curl -L https://skale-cli.sfo2.cdn.digitaloceanspaces.com/skale-$VERSION_NUM-`uname -s`-`uname -m` > /usr/local/bin/skale"
+VERSION_NUM={put the version number here} && sudo -E bash -c "curl -L https://github.com/skalenetwork/node-cli/releases/download/$VERSION_NUM/skale-$VERSION_NUM-`uname -s`-`uname -m`-sync > /usr/local/bin/skale"
```
- Apply executable permissions to the downloaded binary:
@@ -104,7 +107,7 @@ skale node init [ENV_FILE]
Arguments:
-- `ENV_FILE` - path to .env file (required parameters are listed in the `skale init` command)
+- `ENV_FILE` - path to .env file (required parameters are listed in the `skale node init` command)
You should specify the following environment variables:
@@ -112,11 +115,11 @@ You should specify the following environment variables:
- `DISK_MOUNTPOINT` - disk mount point for storing sChains data
- `DOCKER_LVMPY_STREAM` - stream of `docker-lvmpy` to use
- `CONTAINER_CONFIGS_STREAM` - stream of `skale-node` to use
-- `IMA_ENDPOINT` - IMA endpoint to connect
- `ENDPOINT` - RPC endpoint of the node in the network where SKALE Manager is deployed
- `MANAGER_CONTRACTS_ABI_URL` - URL to SKALE Manager contracts ABI and addresses
- `IMA_CONTRACTS_ABI_URL` - URL to IMA contracts ABI and addresses
- `FILEBEAT_URL` - URL to the Filebeat log server
+- `ENV_TYPE` - environment type (mainnet, testnet, etc)
Optional variables:
@@ -136,7 +139,7 @@ skale node restore [BACKUP_PATH] [ENV_FILE]
Arguments:
- `BACKUP_PATH` - path to the archive with backup data generated by `skale node backup` command
-- `ENV_FILE` - path to .env file (required parameters are listed in the `skale init` command)
+- `ENV_FILE` - path to .env file (required parameters are listed in the `skale node init` command)
#### Node backup
@@ -177,7 +180,7 @@ skale node update [ENV_FILEPATH]
Options:
-- `--yes` - remove without additional confirmation
+- `--yes` - update without additional confirmation
Arguments:
@@ -197,7 +200,7 @@ skale node turn-off
Options:
- `--maintenance-on` - set SKALE node into maintenance mode before turning off
-- `--yes` - remove without additional confirmation
+- `--yes` - turn off without additional confirmation
#### Node turn-on
@@ -210,7 +213,7 @@ skale node turn-on [ENV_FILEPATH]
Options:
- `--maintenance-off` - turn off maintenance mode after turning on the node
-- `--yes` - remove without additional confirmation
+- `--yes` - turn on without additional confirmation
Arguments:
@@ -477,7 +480,7 @@ skale resources-allocation generate [ENV_FILE]
Arguments:
-- `ENV_FILE` - path to .env file (required parameters are listed in the `skale init` command)
+- `ENV_FILE` - path to .env file (required parameters are listed in the `skale node init` command)
Options:
@@ -500,6 +503,83 @@ Options:
- `--json` - show validation result in json format
+
+## Sync CLI usage
+
+### Top level commands sync
+
+#### Info
+
+Print build info
+
+```shell
+skale info
+```
+
+#### Version
+
+Print version number
+
+```shell
+skale version
+```
+
+Options:
+
+- `--short` - prints version only, without additional text.
+
+### Sync node commands
+
+> Prefix: `skale sync-node`
+
+#### Sync node initialization
+
+Initialize full sync SKALE node on current machine
+
+```shell
+skale sync-node init [ENV_FILE]
+```
+
+Arguments:
+
+- `ENV_FILE` - path to .env file (required parameters are listed in the `skale sync-node init` command)
+
+You should specify the following environment variables:
+
+- `DISK_MOUNTPOINT` - disk mount point for storing sChains data
+- `DOCKER_LVMPY_STREAM` - stream of `docker-lvmpy` to use
+- `CONTAINER_CONFIGS_STREAM` - stream of `skale-node` to use
+- `ENDPOINT` - RPC endpoint of the node in the network where SKALE Manager is deployed
+- `MANAGER_CONTRACTS_ABI_URL` - URL to SKALE Manager contracts ABI and addresses
+- `IMA_CONTRACTS_ABI_URL` - URL to IMA contracts ABI and addresses
+- `SCHAIN_NAME` - name of the SKALE chain to sync
+- `ENV_TYPE` - environment type (mainnet, testnet, etc)
+
+
+Options:
+
+- `--archive` - Run sync node in archive mode (disables block rotation)
+- `--historic-state` - Enable historic state (works only in pair with --archive flag)
+- `--catchup` - Add a flag to start sync node in catchup mode
+
+#### Sync node update
+
+Update full sync SKALE node on current machine
+
+```shell
+skale sync-node update [ENV_FILEPATH]
+```
+
+Options:
+
+- `--yes` - update without additional confirmation
+
+Arguments:
+
+- `ENV_FILEPATH` - path to env file where parameters are defined
+
+> NOTE: You can just update a file with environment variables used during `skale sync-node init`.
+
## Exit codes
Exit codes conventions for SKALE CLI tools
diff --git a/helper-scripts b/helper-scripts
index dc21eb10..2541831d 160000
--- a/helper-scripts
+++ b/helper-scripts
@@ -1 +1 @@
-Subproject commit dc21eb1005ac0d4b45d29d89a8a7783eba6ecf20
+Subproject commit 2541831d3a8bf6691d994f37f379ac36d760c0a4
diff --git a/lvmpy b/lvmpy
index 8ef6f707..0d796233 160000
--- a/lvmpy
+++ b/lvmpy
@@ -1 +1 @@
-Subproject commit 8ef6f7070f73c1ab1aeb1405d90ededcb9ee4bd4
+Subproject commit 0d7962335f1e60797fdd89d3c9d1e750e0355275
diff --git a/node_cli/cli/__init__.py b/node_cli/cli/__init__.py
index c01e9839..a5b99a0f 100644
--- a/node_cli/cli/__init__.py
+++ b/node_cli/cli/__init__.py
@@ -1,4 +1,4 @@
-__version__ = '2.3.1'
+__version__ = '2.4.0'
if __name__ == "__main__":
print(__version__)
diff --git a/node_cli/cli/node.py b/node_cli/cli/node.py
index e407f8d2..95437b6b 100644
--- a/node_cli/cli/node.py
+++ b/node_cli/cli/node.py
@@ -2,7 +2,7 @@
#
# This file is part of node-cli
#
-# Copyright (C) 2019 SKALE Labs
+# Copyright (C) 2019-Present SKALE Labs
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
diff --git a/node_cli/cli/ssl.py b/node_cli/cli/ssl.py
index 2d0bfdfa..89b81b51 100644
--- a/node_cli/cli/ssl.py
+++ b/node_cli/cli/ssl.py
@@ -21,9 +21,9 @@
from terminaltables import SingleTable
from node_cli.utils.exit_codes import CLIExitCodes
-from node_cli.utils.helper import get_request, safe_load_texts, error_exit
-from node_cli.configs import DEFAULT_SSL_CHECK_PORT, SSL_CERT_FILEPATH, SSL_KEY_FILEPATH
-from node_cli.core.ssl import check_cert, upload_cert
+from node_cli.utils.helper import safe_load_texts, error_exit
+from node_cli.configs.ssl import DEFAULT_SSL_CHECK_PORT, SSL_CERT_FILEPATH, SSL_KEY_FILEPATH
+from node_cli.core.ssl import check_cert, upload_cert, cert_status
TEXTS = safe_load_texts()
@@ -42,10 +42,7 @@ def ssl():
@ssl.command(help="Status of the SSL certificates on the node")
def status():
- status, payload = get_request(
- blueprint=BLUEPRINT_NAME,
- method='status'
- )
+ status, payload = cert_status()
if status == 'ok':
if payload.get('is_empty'):
print(TEXTS['ssl']['no_cert'])
@@ -79,7 +76,7 @@ def upload(key_path, cert_path, force):
if status == 'ok':
print(TEXTS['ssl']['uploaded'])
else:
- error_exit(payload, exit_code=CLIExitCodes.BAD_API_RESPONSE)
+ error_exit(payload, exit_code=CLIExitCodes.FAILURE)
@ssl.command(help="Check certificates")
diff --git a/node_cli/cli/sync_node.py b/node_cli/cli/sync_node.py
new file mode 100644
index 00000000..bca887b6
--- /dev/null
+++ b/node_cli/cli/sync_node.py
@@ -0,0 +1,80 @@
+# -*- coding: utf-8 -*-
+#
+# This file is part of node-cli
+#
+# Copyright (C) 2022 SKALE Labs
+#
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU Affero General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU Affero General Public License for more details.
+#
+# You should have received a copy of the GNU Affero General Public License
+# along with this program. If not, see <https://www.gnu.org/licenses/>.
+
+import click
+
+from node_cli.core.node import init_sync, update_sync
+from node_cli.utils.helper import (
+ abort_if_false,
+ safe_load_texts,
+ streamed_cmd,
+ error_exit
+)
+from node_cli.utils.exit_codes import CLIExitCodes
+
+
+G_TEXTS = safe_load_texts()
+TEXTS = G_TEXTS['sync_node']
+
+
+@click.group()
+def sync_node_cli():
+ pass
+
+
+@sync_node_cli.group(help="SKALE sync node commands")
+def sync_node():
+ pass
+
+
+@sync_node.command('init', help=TEXTS['init']['help'])
+@click.argument('env_file')
+@click.option(
+ '--archive',
+ help=TEXTS['init']['archive'],
+ is_flag=True
+)
+@click.option(
+ '--catchup',
+ help=TEXTS['init']['catchup'],
+ is_flag=True
+)
+@click.option(
+ '--historic-state',
+ help=TEXTS['init']['historic_state'],
+ is_flag=True
+)
+@streamed_cmd
+def _init_sync(env_file, archive, catchup, historic_state):
+ if historic_state and not archive:
+ error_exit(
+ '--historic-state can be used only in combination with --archive',
+ exit_code=CLIExitCodes.FAILURE
+ )
+ init_sync(env_file, archive, catchup, historic_state)
+
+
+@sync_node.command('update', help='Update sync node from .env file')
+@click.option('--yes', is_flag=True, callback=abort_if_false,
+ expose_value=False,
+ prompt='Are you sure you want to update SKALE node software?')
+@click.argument('env_file')
+@streamed_cmd
+def _update_sync(env_file):
+ update_sync(env_file)
diff --git a/node_cli/configs/__init__.py b/node_cli/configs/__init__.py
index 579cbe58..1b43dab5 100644
--- a/node_cli/configs/__init__.py
+++ b/node_cli/configs/__init__.py
@@ -34,7 +34,8 @@
SKALE_STATE_DIR = '/var/lib/skale'
FILESTORAGE_MAPPING = os.path.join(SKALE_STATE_DIR, 'filestorage')
SNAPSHOTS_SHARED_VOLUME = 'shared-space'
-SCHAINS_MNT_DIR = '/mnt'
+SCHAINS_MNT_DIR_REGULAR = '/mnt'
+SCHAINS_MNT_DIR_SYNC = '/var/lib/skale/schains'
VOLUME_GROUP = 'schains'
SKALE_DIR = os.path.join(G_CONF_HOME, '.skale')
@@ -54,10 +55,12 @@
SGX_CERTIFICATES_DIR_NAME = 'sgx_certs'
COMPOSE_PATH = os.path.join(CONTAINER_CONFIG_PATH, 'docker-compose.yml')
-STATIC_PARAMS_FILEPATH = os.path.join(
- CONTAINER_CONFIG_PATH, 'static_params.yaml')
+SYNC_COMPOSE_PATH = os.path.join(CONTAINER_CONFIG_PATH, 'docker-compose-sync.yml')
+STATIC_PARAMS_FILEPATH = os.path.join(CONTAINER_CONFIG_PATH, 'static_params.yaml')
+
NGINX_TEMPLATE_FILEPATH = os.path.join(CONTAINER_CONFIG_PATH, 'nginx.conf.j2')
NGINX_CONFIG_FILEPATH = os.path.join(NODE_DATA_PATH, 'nginx.conf')
+NGINX_CONTAINER_NAME = 'skale_nginx'
LOG_PATH = os.path.join(NODE_DATA_PATH, 'log')
REMOVED_CONTAINERS_FOLDER_NAME = '.removed_containers'
@@ -120,10 +123,7 @@ def _get_env():
TEXT_FILE = os.path.join(PROJECT_DIR, 'text.yml')
DATAFILES_FOLDER = os.path.join(PARDIR, 'datafiles')
-SKALED_SSL_TEST_SCRIPT = os.path.join(DATAFILES_FOLDER, 'skaled-ssl-test')
-
-ALLOCATION_FILEPATH = os.path.join(CONTAINER_CONFIG_PATH,
- 'schain_allocation.yml')
+ALLOCATION_FILEPATH = os.path.join(CONTAINER_CONFIG_PATH, 'schain_allocation.yml')
REDIS_DATA_PATH = os.path.join(NODE_DATA_PATH, 'redis-data')
@@ -137,11 +137,6 @@ def _get_env():
BACKUP_ARCHIVE_NAME = 'skale-node-backup'
-SSL_FOLDER_PATH = os.path.join(NODE_DATA_PATH, 'ssl')
-SSL_CERT_FILEPATH = os.path.join(SSL_FOLDER_PATH, 'ssl_cert')
-SSL_KEY_FILEPATH = os.path.join(SSL_FOLDER_PATH, 'ssl_key')
-
-
TM_INIT_TIMEOUT = 20
RESTORE_SLEEP_TIMEOUT = 20
@@ -150,8 +145,6 @@ def _get_env():
META_FILEPATH = os.path.join(NODE_DATA_PATH, 'meta.json')
-DEFAULT_SSL_CHECK_PORT = 4536
-
SKALE_NODE_REPO_URL = 'https://github.com/skalenetwork/skale-node.git'
DOCKER_LVMPY_REPO_URL = 'https://github.com/skalenetwork/docker-lvmpy.git'
diff --git a/node_cli/configs/env.py b/node_cli/configs/env.py
index 7df08056..f534a2e2 100644
--- a/node_cli/configs/env.py
+++ b/node_cli/configs/env.py
@@ -9,9 +9,7 @@
ALLOWED_ENV_TYPES = ['mainnet', 'testnet', 'qanet', 'devnet']
-
-base_params = {
- 'IMA_ENDPOINT': '',
+REQUIRED_PARAMS = {
'CONTAINER_CONFIGS_STREAM': '',
'ENDPOINT': '',
'MANAGER_CONTRACTS_ABI_URL': '',
@@ -19,13 +17,22 @@
'FILEBEAT_HOST': '',
'DISK_MOUNTPOINT': '',
'SGX_SERVER_URL': '',
- 'CONTAINER_CONFIGS_DIR': '',
'DOCKER_LVMPY_STREAM': '',
'ENV_TYPE': '',
}
+REQUIRED_PARAMS_SYNC = {
+ 'SCHAIN_NAME': '',
+ 'CONTAINER_CONFIGS_STREAM': '',
+ 'ENDPOINT': '',
+ 'MANAGER_CONTRACTS_ABI_URL': '',
+ 'IMA_CONTRACTS_ABI_URL': '',
+ 'DISK_MOUNTPOINT': '',
+ 'DOCKER_LVMPY_STREAM': '',
+ 'ENV_TYPE': ''
+}
-optional_params = {
+OPTIONAL_PARAMS = {
'MONITORING_CONTAINERS': '',
'TG_API_KEY': '',
'TG_CHAT_ID': '',
@@ -33,23 +40,23 @@
'DISABLE_DRY_RUN': '',
'DEFAULT_GAS_LIMIT': '',
'DEFAULT_GAS_PRICE_WEI': '',
- 'DISABLE_IMA': '',
'SKIP_DOCKER_CONFIG': '',
+ 'ENFORCE_BTRFS': '',
'SKIP_DOCKER_CLEANUP': ''
}
def absent_params(params):
return list(filter(
- lambda key: key not in optional_params and not params[key],
+ lambda key: key not in OPTIONAL_PARAMS and not params[key],
params)
)
-def get_env_config(env_filepath: str = SKALE_DIR_ENV_FILEPATH):
+def get_env_config(env_filepath: str = SKALE_DIR_ENV_FILEPATH, sync_node: bool = False):
load_dotenv(dotenv_path=env_filepath)
- params = base_params.copy()
- params.update(optional_params)
+ params = REQUIRED_PARAMS_SYNC.copy() if sync_node else REQUIRED_PARAMS.copy()
+ params.update(OPTIONAL_PARAMS)
for option_name in params:
env_param = os.getenv(option_name)
if env_param is not None:
diff --git a/node_cli/configs/resource_allocation.py b/node_cli/configs/resource_allocation.py
index 003d64e3..8a51729d 100644
--- a/node_cli/configs/resource_allocation.py
+++ b/node_cli/configs/resource_allocation.py
@@ -20,6 +20,7 @@
import os
from node_cli.configs import NODE_DATA_PATH
+
LARGE_DIVIDER = 1
MEDIUM_DIVIDER = 8
TEST_DIVIDER = 8
diff --git a/node_cli/configs/routes.py b/node_cli/configs/routes.py
index 19e7847e..87fac8f5 100644
--- a/node_cli/configs/routes.py
+++ b/node_cli/configs/routes.py
@@ -1,6 +1,6 @@
# -*- coding: utf-8 -*-
#
-# This file is part of SKALE Admin
+# This file is part of node-cli
#
# Copyright (C) 2020 SKALE Labs
#
diff --git a/node_cli/configs/ssl.py b/node_cli/configs/ssl.py
new file mode 100644
index 00000000..670c3746
--- /dev/null
+++ b/node_cli/configs/ssl.py
@@ -0,0 +1,33 @@
+# -*- coding: utf-8 -*-
+#
+# This file is part of node-cli
+#
+# Copyright (C) 2022-Present SKALE Labs
+#
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU Affero General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU Affero General Public License for more details.
+#
+# You should have received a copy of the GNU Affero General Public License
+# along with this program. If not, see <https://www.gnu.org/licenses/>.
+
+import os
+
+from node_cli.configs import NODE_DATA_PATH, DATAFILES_FOLDER
+
+
+DEFAULT_SSL_CHECK_PORT = 4536
+SKALED_SSL_TEST_SCRIPT = os.path.join(DATAFILES_FOLDER, 'skaled-ssl-test')
+
+SSL_FOLDER_PATH = os.path.join(NODE_DATA_PATH, 'ssl')
+SSL_CERT_FILEPATH = os.path.join(SSL_FOLDER_PATH, 'ssl_cert')
+SSL_KEY_FILEPATH = os.path.join(SSL_FOLDER_PATH, 'ssl_key')
+
+CERTS_UPLOADED_ERR_MSG = 'SSL Certificates are already uploaded'
+CERTS_INVALID_FORMAT = 'Certificates have invalid format'
diff --git a/node_cli/core/host.py b/node_cli/core/host.py
index 2dbdcc14..e9285b40 100644
--- a/node_cli/core/host.py
+++ b/node_cli/core/host.py
@@ -34,11 +34,10 @@
SCHAINS_DATA_PATH, LOG_PATH,
REMOVED_CONTAINERS_FOLDER_PATH,
IMA_CONTRACTS_FILEPATH, MANAGER_CONTRACTS_FILEPATH,
- SKALE_RUN_DIR, SKALE_TMP_DIR
+ SKALE_RUN_DIR, SKALE_STATE_DIR, SKALE_TMP_DIR
)
from node_cli.configs.resource_allocation import (
- RESOURCE_ALLOCATION_FILEPATH,
- SGX_SERVER_URL_FILEPATH
+ RESOURCE_ALLOCATION_FILEPATH
)
from node_cli.configs.cli_logger import LOG_DATA_PATH
from node_cli.configs.env import SKALE_DIR_ENV_FILEPATH, CONFIGS_ENV_FILEPATH
@@ -70,14 +69,17 @@ def get_flask_secret_key():
return key_file.read().strip()
-def prepare_host(env_filepath, disk_mountpoint, sgx_server_url, env_type,
- allocation=False):
- logger.info(f'Preparing host started, disk_mountpoint: {disk_mountpoint}')
+def prepare_host(
+ env_filepath: str,
+ env_type: str,
+ allocation: bool = False
+):
+ logger.info('Preparing host started')
make_dirs()
save_env_params(env_filepath)
- save_sgx_server_url(sgx_server_url)
+
if allocation:
- update_resource_allocation(disk_mountpoint, env_type)
+ update_resource_allocation(env_type)
def is_node_inited():
@@ -91,17 +93,11 @@ def make_dirs():
REMOVED_CONTAINERS_FOLDER_PATH,
SGX_CERTS_PATH, SCHAINS_DATA_PATH, LOG_PATH,
REPORTS_PATH, REDIS_DATA_PATH,
- SKALE_RUN_DIR, SKALE_TMP_DIR
+ SKALE_RUN_DIR, SKALE_STATE_DIR, SKALE_TMP_DIR
):
safe_mkdir(dir_path)
-def save_sgx_server_url(sgx_server_url):
- logger.info(f'Saving sgx_server_url option to {SGX_SERVER_URL_FILEPATH}')
- with open(SGX_SERVER_URL_FILEPATH, 'w') as f:
- f.write(sgx_server_url)
-
-
def save_env_params(env_filepath):
copyfile(env_filepath, SKALE_DIR_ENV_FILEPATH)
diff --git a/node_cli/core/nginx.py b/node_cli/core/nginx.py
index d3d9b60f..e87b17db 100644
--- a/node_cli/core/nginx.py
+++ b/node_cli/core/nginx.py
@@ -1,17 +1,38 @@
+# -*- coding: utf-8 -*-
+#
+# This file is part of node-cli
+#
+# Copyright (C) 2022-Present SKALE Labs
+#
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU Affero General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU Affero General Public License for more details.
+#
+# You should have received a copy of the GNU Affero General Public License
+# along with this program. If not, see <https://www.gnu.org/licenses/>.
+
import logging
import os.path
+from node_cli.utils.docker_utils import restart_nginx_container, docker_client
from node_cli.configs import NODE_CERTS_PATH, NGINX_TEMPLATE_FILEPATH, NGINX_CONFIG_FILEPATH
from node_cli.utils.helper import process_template
logger = logging.getLogger(__name__)
+
SSL_KEY_NAME = 'ssl_key'
SSL_CRT_NAME = 'ssl_cert'
-def generate_nginx_config():
+def generate_nginx_config() -> None:
ssl_on = check_ssl_certs()
template_data = {
'ssl': ssl_on,
@@ -24,3 +45,9 @@ def check_ssl_certs():
crt_path = os.path.join(NODE_CERTS_PATH, SSL_CRT_NAME)
key_path = os.path.join(NODE_CERTS_PATH, SSL_KEY_NAME)
return os.path.exists(crt_path) and os.path.exists(key_path)
+
+
+def reload_nginx() -> None:
+ dutils = docker_client()
+ generate_nginx_config()
+ restart_nginx_container(dutils=dutils)
diff --git a/node_cli/core/node.py b/node_cli/core/node.py
index ce369bc8..9d5c83f7 100644
--- a/node_cli/core/node.py
+++ b/node_cli/core/node.py
@@ -30,15 +30,17 @@
import docker
from node_cli.configs import (
+ BACKUP_ARCHIVE_NAME,
CONTAINER_CONFIG_PATH,
FILESTORAGE_MAPPING,
- SKALE_DIR,
INIT_ENV_FILEPATH,
- BACKUP_ARCHIVE_NAME,
+ LOG_PATH,
RESTORE_SLEEP_TIMEOUT,
- SCHAINS_MNT_DIR,
- TM_INIT_TIMEOUT,
- LOG_PATH
+ SCHAINS_MNT_DIR_REGULAR,
+ SCHAINS_MNT_DIR_SYNC,
+ SKALE_DIR,
+ SKALE_STATE_DIR,
+ TM_INIT_TIMEOUT
)
from node_cli.configs.env import get_env_config
from node_cli.configs.cli_logger import LOG_DATA_PATH as CLI_LOG_DATA_PATH
@@ -51,7 +53,12 @@
from node_cli.core.resources import update_resource_allocation
from node_cli.operations import (
update_op,
- init_op, turn_off_op, turn_on_op, restore_op
+ init_op,
+ turn_off_op,
+ turn_on_op,
+ restore_op,
+ init_sync_op,
+ update_sync_op
)
from node_cli.utils.print_formatters import (
print_failed_requirements_checks, print_node_cmd_error, print_node_info
@@ -70,6 +77,7 @@
logger = logging.getLogger(__name__)
TEXTS = Texts()
+SYNC_BASE_CONTAINERS_AMOUNT = 2
BASE_CONTAINERS_AMOUNT = 5
BLUEPRINT_NAME = 'node'
@@ -128,7 +136,6 @@ def init(env_filepath):
'Init operation failed',
exit_code=CLIExitCodes.OPERATION_EXECUTION_ERROR
)
- return
logger.info('Waiting for containers initialization')
time.sleep(TM_INIT_TIMEOUT)
if not is_base_containers_alive():
@@ -136,9 +143,8 @@ def init(env_filepath):
'Containers are not running',
exit_code=CLIExitCodes.OPERATION_EXECUTION_ERROR
)
- return
logger.info('Generating resource allocation file ...')
- update_resource_allocation(env['DISK_MOUNTPOINT'], env['ENV_TYPE'])
+ update_resource_allocation(env['ENV_TYPE'])
logger.info('Init procedure finished')
@@ -160,36 +166,91 @@ def restore(backup_path, env_filepath, no_snapshot=False, config_only=False):
'Restore operation failed',
exit_code=CLIExitCodes.OPERATION_EXECUTION_ERROR
)
- return
time.sleep(RESTORE_SLEEP_TIMEOUT)
logger.info('Generating resource allocation file ...')
- update_resource_allocation(env['DISK_MOUNTPOINT'], env['ENV_TYPE'])
+ update_resource_allocation(env['ENV_TYPE'])
print('Node is restored from backup')
+def init_sync(
+ env_filepath: str,
+ archive: bool,
+ catchup: bool,
+ historic_state: bool
+) -> None:
+ configure_firewall_rules()
+ env = get_node_env(env_filepath, sync_node=True)
+ if env is None:
+ return
+ inited_ok = init_sync_op(
+ env_filepath,
+ env,
+ archive,
+ catchup,
+ historic_state
+ )
+ if not inited_ok:
+ error_exit(
+ 'Init operation failed',
+ exit_code=CLIExitCodes.OPERATION_EXECUTION_ERROR
+ )
+ logger.info('Waiting for containers initialization')
+ time.sleep(TM_INIT_TIMEOUT)
+ if not is_base_containers_alive(sync_node=True):
+ error_exit(
+ 'Containers are not running',
+ exit_code=CLIExitCodes.OPERATION_EXECUTION_ERROR
+ )
+ logger.info('Sync node initialized successfully')
+
+
+@check_inited
+@check_user
+def update_sync(env_filepath):
+ logger.info('Node update started')
+ configure_firewall_rules()
+ env = get_node_env(env_filepath, sync_node=True)
+ update_ok = update_sync_op(env_filepath, env)
+ if update_ok:
+ logger.info('Waiting for containers initialization')
+ time.sleep(TM_INIT_TIMEOUT)
+ alive = is_base_containers_alive(sync_node=True)
+ if not update_ok or not alive:
+ print_node_cmd_error()
+ return
+ else:
+ logger.info('Node update finished')
+
+
def get_node_env(
env_filepath,
inited_node=False,
sync_schains=None,
- pull_config_for_schain=None
+ pull_config_for_schain=None,
+ sync_node=False
):
if env_filepath is not None:
- env_params = extract_env_params(env_filepath)
- if env_params is None:
- return
+ env_params = extract_env_params(
+ env_filepath,
+ sync_node=sync_node,
+ raise_for_status=True
+ )
save_env_params(env_filepath)
else:
- env_params = extract_env_params(INIT_ENV_FILEPATH)
+ env_params = extract_env_params(INIT_ENV_FILEPATH, sync_node=sync_node)
+
+ mnt_dir = SCHAINS_MNT_DIR_SYNC if sync_node else SCHAINS_MNT_DIR_REGULAR
env = {
'SKALE_DIR': SKALE_DIR,
- 'SCHAINS_MNT_DIR': SCHAINS_MNT_DIR,
+ 'SCHAINS_MNT_DIR': mnt_dir,
'FILESTORAGE_MAPPING': FILESTORAGE_MAPPING,
+ 'SKALE_LIB_PATH': SKALE_STATE_DIR,
**env_params
}
- if inited_node:
+ if inited_node and not sync_node:
flask_secret_key = get_flask_secret_key()
env['FLASK_SECRET_KEY'] = flask_secret_key
- if sync_schains:
+ if sync_schains and not sync_node:
env['BACKUP_RUN'] = 'True'
if pull_config_for_schain:
env['PULL_CONFIG_FOR_SCHAIN'] = pull_config_for_schain
@@ -348,13 +409,14 @@ def turn_on(maintenance_off, sync_schains, env_file):
set_maintenance_mode_off()
-def is_base_containers_alive():
+def is_base_containers_alive(sync_node: bool = False):
dclient = docker.from_env()
containers = dclient.containers.list()
skale_containers = list(filter(
lambda c: c.name.startswith('skale_'), containers
))
- return len(skale_containers) >= BASE_CONTAINERS_AMOUNT
+ containers_amount = SYNC_BASE_CONTAINERS_AMOUNT if sync_node else BASE_CONTAINERS_AMOUNT
+ return len(skale_containers) >= containers_amount
def get_node_info_plain():
diff --git a/node_cli/core/node_options.py b/node_cli/core/node_options.py
index 0bb74ed0..70573a65 100644
--- a/node_cli/core/node_options.py
+++ b/node_cli/core/node_options.py
@@ -42,5 +42,29 @@ def _set(self, field_name: str, field_value) -> None:
config[field_name] = field_value
write_json(self.filepath, config)
+ @property
+ def archive(self) -> bool:
+ return self._get('archive')
+
+ @archive.setter
+ def archive(self, archive: bool) -> None:
+ return self._set('archive', archive)
+
+ @property
+ def catchup(self) -> bool:
+ return self._get('catchup')
+
+ @catchup.setter
+ def catchup(self, catchup: bool) -> None:
+ return self._set('catchup', catchup)
+
+ @property
+ def historic_state(self) -> bool:
+ return self._get('historic_state')
+
+ @historic_state.setter
+ def historic_state(self, historic_state: bool) -> None:
+ return self._set('historic_state', historic_state)
+
def all(self) -> dict:
return read_json(self.filepath)
diff --git a/node_cli/core/resources.py b/node_cli/core/resources.py
index f0e51f06..f47ef792 100644
--- a/node_cli/core/resources.py
+++ b/node_cli/core/resources.py
@@ -68,7 +68,6 @@ def get_resource_allocation_info():
def compose_resource_allocation_config(
- disk_device: str,
env_type: str,
params_by_env_type: Dict = None
) -> Dict:
@@ -105,7 +104,6 @@ def generate_resource_allocation_config(env_file, force=False) -> None:
logger.info('Generating resource allocation file ...')
try:
update_resource_allocation(
- env_params['DISK_MOUNTPOINT'],
env_params['ENV_TYPE']
)
except Exception as e:
@@ -118,10 +116,8 @@ def generate_resource_allocation_config(env_file, force=False) -> None:
)
-def update_resource_allocation(disk_device: str, env_type: str) -> None:
- resource_allocation_config = compose_resource_allocation_config(
- disk_device, env_type
- )
+def update_resource_allocation(env_type: str) -> None:
+ resource_allocation_config = compose_resource_allocation_config(env_type)
write_json(RESOURCE_ALLOCATION_FILEPATH, resource_allocation_config)
diff --git a/node_cli/core/ssl/__init__.py b/node_cli/core/ssl/__init__.py
new file mode 100644
index 00000000..75ec1911
--- /dev/null
+++ b/node_cli/core/ssl/__init__.py
@@ -0,0 +1,22 @@
+# -*- coding: utf-8 -*-
+#
+# This file is part of node-cli
+#
+# Copyright (C) 2022-Present SKALE Labs
+#
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU Affero General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU Affero General Public License
+# along with this program. If not, see .
+
+from .upload import upload_cert # noqa
+from .check import check_cert # noqa
+from .status import cert_status # noqa
diff --git a/node_cli/core/ssl.py b/node_cli/core/ssl/check.py
similarity index 66%
rename from node_cli/core/ssl.py
rename to node_cli/core/ssl/check.py
index 5cd511f2..8f498d15 100644
--- a/node_cli/core/ssl.py
+++ b/node_cli/core/ssl/check.py
@@ -1,98 +1,130 @@
-import json
-import os
-import socket
-import subprocess
-import time
-from contextlib import contextmanager
+# -*- coding: utf-8 -*-
+#
+# This file is part of node-cli
+#
+# Copyright (C) 2022-Present SKALE Labs
+#
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU Affero General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU Affero General Public License
+# along with this program. If not, see .
+import time
+import socket
import logging
+from contextlib import contextmanager
-from node_cli.configs import (
+from node_cli.core.ssl.utils import detached_subprocess
+from node_cli.utils.helper import err_result
+from node_cli.configs.ssl import (
DEFAULT_SSL_CHECK_PORT,
SKALED_SSL_TEST_SCRIPT,
SSL_CERT_FILEPATH,
SSL_KEY_FILEPATH
)
-from node_cli.utils.helper import post_request
-from node_cli.utils.helper import run_cmd
-
-logger = logging.getLogger(__name__)
-
-COMMUNICATION_TIMEOUT = 3
-
-
-def read_file_bytes(path, mode='rb'):
- with open(path, mode) as f:
- return f
-def load_ssl_files(key_path, cert_path):
- return {
- 'ssl_key': (os.path.basename(key_path),
- read_file_bytes(key_path), 'application/octet-stream'),
- 'ssl_cert': (os.path.basename(cert_path),
- read_file_bytes(cert_path), 'application/octet-stream')
- }
+logger = logging.getLogger(__name__)
-def run_simple_openssl_server(certfile, keyfile, port=DEFAULT_SSL_CHECK_PORT):
- cmd = [
- 'openssl', 's_server',
- '-cert', certfile,
- '-key', keyfile,
- '-WWW',
- '-port', str(port),
- '-verify_return_error', '-Verify', '1'
- ]
- run_cmd(cmd)
+def check_cert(
+ cert_path=SSL_CERT_FILEPATH,
+ key_path=SSL_KEY_FILEPATH,
+ port=DEFAULT_SSL_CHECK_PORT,
+ check_type='all',
+ no_client=False,
+ no_wss=False
+):
+ if check_type in ('all', 'openssl'):
+ try:
+ check_cert_openssl(
+ cert_path, key_path,
+ host='127.0.0.1', port=port, no_client=no_client
+ )
+ except Exception as err:
+ logger.exception('Certificate/key pair is incorrect')
+ return err_result(f'Certificate check failed. {err}')
+ if check_type in ('all', 'skaled'):
+ try:
+ check_cert_skaled(
+ cert_path, key_path,
+ host='127.0.0.1', port=port, no_wss=no_wss
+ )
+ except Exception as err:
+ logger.exception('Certificate/key pair is incorrect for skaled')
+ return err_result(f'Skaled ssl check failed. {err}')
-@contextmanager
-def detached_subprocess(cmd, expose_output=False):
- logger.debug(f'Starting detached subprocess: {cmd}')
- p = subprocess.Popen(
- cmd,
- stdout=subprocess.PIPE, stderr=subprocess.STDOUT,
- encoding='utf-8'
- )
- try:
- yield p
- finally:
- p.terminate()
- output = p.stdout.read()
- if expose_output:
- print(output)
- logger.debug(f'Detached process {cmd} output:\n{output}')
+ return 'ok', None
-class SSLHealthcheckError(Exception):
- pass
+def check_cert_openssl(
+ cert_path,
+ key_path,
+ host='127.0.0.1',
+ port=DEFAULT_SSL_CHECK_PORT,
+ no_client=False,
+ silent=False
+):
+ with openssl_server(
+ host, port, cert_path,
+ key_path, silent=silent
+ ) as serv:
+ time.sleep(1)
+ code = serv.poll()
+ if code is not None:
+ logger.error('Healthcheck server failed to start')
+ raise SSLHealthcheckError('OpenSSL server failed to start')
+ logger.info('Server successfully started')
-def check_endpoint(host, port):
- with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as sock:
- result = sock.connect_ex((host, port))
- logger.info('Checking healthcheck endpoint ...')
- if result != 0:
- logger.error('Port is closed')
- return False
- return True
+ # Connect to ssl server
+ if not no_client:
+ if not check_endpoint(host, port):
+ raise SSLHealthcheckError(
+ f'Healthcheck port is closed on {host}:{port}'
+ )
+ check_ssl_connection(host, port, silent=silent)
+ logger.info('Healthcheck connection passed')
-def check_ssl_connection(host, port, silent=False):
- logger.info(f'Connecting to public ssl endpoint {host}:{port} ...')
- ssl_check_cmd = [
- 'openssl', 's_client',
- '-connect', f'{host}:{port}',
- '-verify_return_error', '-verify', '2'
+@contextmanager
+def openssl_server(host, port, cert_path, key_path, silent=False):
+ ssl_server_cmd = [
+ 'openssl', 's_server',
+ '-cert', cert_path,
+ '-cert_chain', cert_path,
+ '-key', key_path,
+ '-WWW',
+ '-accept', f'{host}:{port}',
+ '-verify_return_error', '-verify', '1'
]
+ logger.info(f'Starting healthcheck server on port {port} ...')
expose_output = not silent
- with detached_subprocess(ssl_check_cmd, expose_output=expose_output) as dp:
- time.sleep(1)
- code = dp.poll()
- if code != 0 and code is not None:
- logger.error('Healthcheck connection failed with code %d', code)
- raise SSLHealthcheckError('OpenSSL connection verification failed')
+ with detached_subprocess(
+ ssl_server_cmd, expose_output=expose_output
+ ) as dp:
+ yield dp
+
+
+def check_cert_skaled(
+ cert_path,
+ key_path,
+ host='127.0.0.1',
+ port=DEFAULT_SSL_CHECK_PORT,
+ no_wss=False
+):
+ run_skaled_https_healthcheck(cert_path, key_path, host, port)
+ if not no_wss:
+ run_skaled_wss_healthcheck(cert_path, key_path, host, port)
def run_skaled_https_healthcheck(
@@ -146,122 +178,31 @@ def run_skaled_wss_healthcheck(
logger.info('Skaled wss check server successfully started')
-def check_cert_skaled(
- cert_path,
- key_path,
- host='127.0.0.1',
- port=DEFAULT_SSL_CHECK_PORT,
- no_wss=False
-):
- run_skaled_https_healthcheck(cert_path, key_path, host, port)
- if not no_wss:
- run_skaled_wss_healthcheck(cert_path, key_path, host, port)
+class SSLHealthcheckError(Exception):
+ pass
-@contextmanager
-def openssl_server(host, port, cert_path, key_path, silent=False):
- ssl_server_cmd = [
- 'openssl', 's_server',
- '-cert', cert_path,
- '-cert_chain', cert_path,
- '-key', key_path,
- '-WWW',
- '-accept', f'{host}:{port}',
- '-verify_return_error', '-verify', '1'
- ]
- logger.info(f'Staring healthcheck server on port {port} ...')
- expose_output = not silent
- with detached_subprocess(
- ssl_server_cmd, expose_output=expose_output
- ) as dp:
- yield dp
+def check_endpoint(host, port):
+ with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as sock:
+ result = sock.connect_ex((host, port))
+ logger.info('Checking healthcheck endpoint ...')
+ if result != 0:
+ logger.error('Port is closed')
+ return False
+ return True
-def check_cert_openssl(
- cert_path,
- key_path,
- host='127.0.0.1',
- port=DEFAULT_SSL_CHECK_PORT,
- no_client=False,
- silent=False
-):
- with openssl_server(
- host, port, cert_path,
- key_path, silent=silent
- ) as serv:
+def check_ssl_connection(host, port, silent=False):
+ logger.info(f'Connecting to public ssl endpoint {host}:{port} ...')
+ ssl_check_cmd = [
+ 'openssl', 's_client',
+ '-connect', f'{host}:{port}',
+ '-verify_return_error', '-verify', '2'
+ ]
+ expose_output = not silent
+ with detached_subprocess(ssl_check_cmd, expose_output=expose_output) as dp:
time.sleep(1)
- code = serv.poll()
+ code = dp.poll()
if code is not None:
- logger.error('Healthcheck server failed to start')
- raise SSLHealthcheckError('OpenSSL server was failed to start')
-
- logger.info('Server successfully started')
-
- # Connect to ssl server
- if not no_client:
- if not check_endpoint(host, port):
- raise SSLHealthcheckError(
- f'Healthcheck port is closed on {host}:{port}'
- )
- check_ssl_connection(host, port, silent=silent)
- logger.info('Healthcheck connection passed')
-
-
-def send_saving_cert_request(key_path, cert_path, force):
- with open(key_path, 'rb') as key_file, open(cert_path, 'rb') as cert_file:
- files_data = {
- 'ssl_key': (os.path.basename(key_path), key_file,
- 'application/octet-stream'),
- 'ssl_cert': (os.path.basename(cert_path), cert_file,
- 'application/octet-stream')
- }
- files_data['json'] = (
- None, json.dumps({'force': force}),
- 'application/json'
- )
- return post_request(
- blueprint='ssl',
- method='upload',
- files=files_data
- )
-
-
-def upload_cert(cert_path, key_path, force, no_client=False):
- try:
- check_cert_openssl(
- cert_path, key_path, silent=True, no_client=no_client)
- except Exception as err:
- logger.exception('Certificate/key pair is incorrect')
- return 'error', f'Certificate check failed. {err}'
- return send_saving_cert_request(key_path, cert_path, force)
-
-
-def check_cert(
- cert_path=SSL_CERT_FILEPATH,
- key_path=SSL_KEY_FILEPATH,
- port=DEFAULT_SSL_CHECK_PORT,
- check_type='all',
- no_client=False,
- no_wss=False
-):
- if check_type in ('all', 'openssl'):
- try:
- check_cert_openssl(
- cert_path, key_path,
- host='127.0.0.1', port=port, no_client=no_client
- )
- except Exception as err:
- logger.exception('Cerificate/key pair is incorrect')
- return 'error', f'Certificate check failed. {err}'
-
- if check_type in ('all', 'skaled'):
- try:
- check_cert_skaled(
- cert_path, key_path,
- host='127.0.0.1', port=port, no_wss=no_wss
- )
- except Exception as err:
- logger.exception('Certificate/key pair is incorrect for skaled')
- return 'error', f'Skaled ssl check failed. {err}'
-
- return 'ok', None
+ logger.error('Healthcheck connection failed with code %s', code)
+ raise SSLHealthcheckError('OpenSSL connection verification failed')
diff --git a/node_cli/core/ssl/status.py b/node_cli/core/ssl/status.py
new file mode 100644
index 00000000..31ab4b9a
--- /dev/null
+++ b/node_cli/core/ssl/status.py
@@ -0,0 +1,63 @@
+# -*- coding: utf-8 -*-
+#
+# This file is part of node-cli
+#
+# Copyright (C) 2022-Present SKALE Labs
+#
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU Affero General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU Affero General Public License
+# along with this program. If not, see .
+
+import logging
+from dateutil import parser
+
+from OpenSSL import crypto
+
+from node_cli.core.ssl.utils import is_ssl_folder_empty, cert_from_file
+from node_cli.configs.ssl import SSL_CERT_FILEPATH, CERTS_INVALID_FORMAT
+from node_cli.utils.helper import ok_result, err_result
+
+logger = logging.getLogger(__name__)
+
+
+def cert_status():
+ if is_ssl_folder_empty():
+ return ok_result({'is_empty': True})
+
+ cert = cert_from_file(SSL_CERT_FILEPATH)
+ status, info = get_cert_info(cert)
+ if status == 'error':
+ return err_result(CERTS_INVALID_FORMAT)
+ else:
+ return ok_result(payload={
+ 'issued_to': info['issued_to'],
+ 'expiration_date': info['expiration_date']
+ })
+
+
+def get_cert_info(cert):
+ try:
+ crypto_cert = crypto.load_certificate(crypto.FILETYPE_PEM, cert)
+ subject = crypto_cert.get_subject()
+ issued_to = subject.CN
+ expiration_date_raw = crypto_cert.get_notAfter()
+ expiration_date = parser.parse(
+ expiration_date_raw
+ ).strftime('%Y-%m-%dT%H:%M:%S')
+ except Exception as err:
+ logger.exception('Error during parsing certs')
+ return err_result(str(err))
+ return ok_result({
+ 'subject': subject,
+ 'issued_to': issued_to,
+ 'expiration_date': expiration_date
+ })
diff --git a/node_cli/core/ssl/upload.py b/node_cli/core/ssl/upload.py
new file mode 100644
index 00000000..5da9b20d
--- /dev/null
+++ b/node_cli/core/ssl/upload.py
@@ -0,0 +1,42 @@
+# -*- coding: utf-8 -*-
+#
+# This file is part of node-cli
+#
+# Copyright (C) 2022-Present SKALE Labs
+#
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU Affero General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU Affero General Public License
+# along with this program. If not, see .
+
+import logging
+from node_cli.configs.ssl import CERTS_UPLOADED_ERR_MSG
+
+from node_cli.core.ssl.check import check_cert_openssl
+from node_cli.core.ssl.utils import is_ssl_folder_empty, copy_cert_key_pair
+from node_cli.utils.helper import ok_result, err_result
+from node_cli.core.nginx import reload_nginx
+
+
+logger = logging.getLogger(__name__)
+
+
+def upload_cert(cert_path, key_path, force, no_client=False):
+ try:
+ check_cert_openssl(cert_path, key_path, silent=True, no_client=no_client)
+ except Exception as err:
+ logger.exception('Certificate/key pair is incorrect')
+ return err_result(f'Certificate check failed. {err}')
+ if not is_ssl_folder_empty() and not force:
+ return err_result(CERTS_UPLOADED_ERR_MSG)
+ copy_cert_key_pair(cert_path, key_path)
+ reload_nginx()
+ return ok_result()
diff --git a/node_cli/core/ssl/utils.py b/node_cli/core/ssl/utils.py
new file mode 100644
index 00000000..a2e71e1f
--- /dev/null
+++ b/node_cli/core/ssl/utils.py
@@ -0,0 +1,63 @@
+# -*- coding: utf-8 -*-
+#
+# This file is part of node-cli
+#
+# Copyright (C) 2022-Present SKALE Labs
+#
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU Affero General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU Affero General Public License
+# along with this program. If not, see .
+
+import os
+import shutil
+import logging
+import subprocess
+from contextlib import contextmanager
+from node_cli.configs.ssl import SSL_CERT_FILEPATH, SSL_KEY_FILEPATH, SSL_FOLDER_PATH
+
+
+logger = logging.getLogger(__name__)
+
+
+def copy_cert_key_pair(cert, key):
+ shutil.copyfile(cert, SSL_CERT_FILEPATH)
+ shutil.copyfile(key, SSL_KEY_FILEPATH)
+
+
+def cert_from_file(cert_filepath):
+ if not os.path.isfile(cert_filepath):
+ logger.warning(f'Trying to read cert that does not exist: {cert_filepath}')
+ return None
+ with open(cert_filepath) as cert_file:
+ return cert_file.read()
+
+
+def is_ssl_folder_empty(ssl_path=SSL_FOLDER_PATH):
+ return len(os.listdir(ssl_path)) == 0
+
+
+@contextmanager
+def detached_subprocess(cmd, expose_output=False):
+ logger.debug(f'Starting detached subprocess: {cmd}')
+ p = subprocess.Popen(
+ cmd,
+ stdout=subprocess.PIPE, stderr=subprocess.STDOUT,
+ encoding='utf-8'
+ )
+ try:
+ yield p
+ finally:
+ p.terminate()
+ output = p.stdout.read()
+ if expose_output:
+ print(output)
+ logger.debug(f'Detached process {cmd} output:\n{output}')
diff --git a/node_cli/main.py b/node_cli/main.py
index 10009022..6fefa7b9 100644
--- a/node_cli/main.py
+++ b/node_cli/main.py
@@ -22,12 +22,13 @@
import logging
import inspect
import traceback
+from typing import List
import click
from node_cli.cli import __version__
from node_cli.cli.health import health_cli
-from node_cli.cli.info import BUILD_DATETIME, COMMIT, BRANCH, OS, VERSION
+from node_cli.cli.info import BUILD_DATETIME, COMMIT, BRANCH, OS, VERSION, TYPE
from node_cli.cli.logs import logs_cli
from node_cli.cli.lvmpy import lvmpy_cli
from node_cli.cli.node import node_cli
@@ -37,6 +38,8 @@
from node_cli.cli.exit import exit_cli
from node_cli.cli.validate import validate_cli
from node_cli.cli.resources_allocation import resources_allocation_cli
+from node_cli.cli.sync_node import sync_node_cli
+
from node_cli.utils.helper import safe_load_texts, init_default_logger
from node_cli.configs import LONG_LINE
from node_cli.core.host import init_logs_dir
@@ -75,6 +78,26 @@ def info():
'''))
+def get_sources_list() -> List[click.MultiCommand]:
+ if TYPE == 'sync':
+ return [cli, sync_node_cli, ssl_cli]
+ else:
+ return [
+ cli,
+ health_cli,
+ schains_cli,
+ logs_cli,
+ resources_allocation_cli,
+ node_cli,
+ sync_node_cli,
+ wallet_cli,
+ ssl_cli,
+ exit_cli,
+ validate_cli,
+ lvmpy_cli
+ ]
+
+
def handle_exception(exc_type, exc_value, exc_traceback):
if issubclass(exc_type, KeyboardInterrupt):
sys.__excepthook__(exc_type, exc_value, exc_traceback)
@@ -92,21 +115,9 @@ def handle_exception(exc_type, exc_value, exc_traceback):
args = sys.argv
# todo: hide secret variables (passwords, private keys)
logger.debug(f'cmd: {" ".join(str(x) for x in args)}, v.{__version__}')
+ sources = get_sources_list()
+ cmd_collection = click.CommandCollection(sources=sources)
- cmd_collection = click.CommandCollection(
- sources=[
- cli,
- health_cli,
- schains_cli,
- logs_cli,
- lvmpy_cli,
- resources_allocation_cli,
- node_cli,
- wallet_cli,
- ssl_cli,
- exit_cli,
- validate_cli
- ])
try:
cmd_collection()
except Exception as err:
diff --git a/node_cli/operations/__init__.py b/node_cli/operations/__init__.py
index e511af44..11d3dd4d 100644
--- a/node_cli/operations/__init__.py
+++ b/node_cli/operations/__init__.py
@@ -20,6 +20,8 @@
from node_cli.operations.base import ( # noqa
update as update_op,
init as init_op,
+ init_sync as init_sync_op,
+ update_sync as update_sync_op,
turn_off as turn_off_op,
turn_on as turn_on_op,
restore as restore_op
diff --git a/node_cli/operations/base.py b/node_cli/operations/base.py
index a5d68547..3715d793 100644
--- a/node_cli/operations/base.py
+++ b/node_cli/operations/base.py
@@ -2,7 +2,7 @@
#
# This file is part of node-cli
#
-# Copyright (C) 2021 SKALE Labs
+# Copyright (C) 2021-Present SKALE Labs
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
@@ -20,7 +20,7 @@
import distro
import functools
import logging
-from typing import Dict, Optional
+from typing import Dict
from node_cli.cli.info import VERSION
from node_cli.configs import CONTAINER_CONFIG_PATH, CONTAINER_CONFIG_TMP_PATH
@@ -35,7 +35,13 @@
backup_old_contracts,
download_contracts,
configure_filebeat,
- configure_flask, unpack_backup_archive
+ configure_flask,
+ unpack_backup_archive
+)
+from node_cli.operations.volume import (
+ cleanup_volume_artifacts,
+ ensure_filestorage_mapping,
+ prepare_block_device
)
from node_cli.operations.docker_lvmpy import lvmpy_install # noqa
from node_cli.operations.skale_node import download_skale_node, sync_skale_node, update_images
@@ -109,8 +115,6 @@ def update(env_filepath: str, env: Dict) -> None:
prepare_host(
env_filepath,
- env['DISK_MOUNTPOINT'],
- env['SGX_SERVER_URL'],
env['ENV_TYPE'],
allocation=True
)
@@ -139,7 +143,7 @@ def update(env_filepath: str, env: Dict) -> None:
@checked_host
-def init(env_filepath: str, env: Dict, snapshot_from: Optional[str] = None) -> bool:
+def init(env_filepath: str, env: dict) -> bool:
sync_skale_node()
ensure_btrfs_kernel_module_autoloaded()
@@ -148,9 +152,7 @@ def init(env_filepath: str, env: Dict, snapshot_from: Optional[str] = None) -> b
prepare_host(
env_filepath,
- env['DISK_MOUNTPOINT'],
- env['SGX_SERVER_URL'],
- env_type=env['ENV_TYPE'],
+ env_type=env['ENV_TYPE']
)
link_env_file()
download_contracts(env)
@@ -163,8 +165,55 @@ def init(env_filepath: str, env: Dict, snapshot_from: Optional[str] = None) -> b
lvmpy_install(env)
init_shared_space_volume(env['ENV_TYPE'])
+ update_meta(
+ VERSION,
+ env['CONTAINER_CONFIGS_STREAM'],
+ env['DOCKER_LVMPY_STREAM'],
+ distro.id(),
+ distro.version()
+ )
+ update_resource_allocation(env_type=env['ENV_TYPE'])
+ update_images(env.get('CONTAINER_CONFIGS_DIR') != '')
+ compose_up(env)
+ return True
+
+
+def init_sync(
+ env_filepath: str,
+ env: dict,
+ archive: bool,
+ catchup: bool,
+ historic_state: bool
+) -> bool:
+ cleanup_volume_artifacts(env['DISK_MOUNTPOINT'])
+ download_skale_node(
+ env.get('CONTAINER_CONFIGS_STREAM'),
+ env.get('CONTAINER_CONFIGS_DIR')
+ )
+ sync_skale_node()
+
+ if env.get('SKIP_DOCKER_CONFIG') != 'True':
+ configure_docker()
+
+ prepare_host(
+ env_filepath,
+ env_type=env['ENV_TYPE'],
+ )
+
node_options = NodeOptions()
- node_options.snapshot_from = snapshot_from
+ node_options.archive = archive
+ node_options.catchup = catchup
+ node_options.historic_state = historic_state
+
+ ensure_filestorage_mapping()
+ link_env_file()
+ download_contracts(env)
+
+ generate_nginx_config()
+ prepare_block_device(
+ env['DISK_MOUNTPOINT'],
+ force=env['ENFORCE_BTRFS'] == 'True'
+ )
update_meta(
VERSION,
@@ -173,12 +222,50 @@ def init(env_filepath: str, env: Dict, snapshot_from: Optional[str] = None) -> b
distro.id(),
distro.version()
)
- update_resource_allocation(
- disk_device=env['DISK_MOUNTPOINT'],
- env_type=env['ENV_TYPE']
+ update_resource_allocation(env_type=env['ENV_TYPE'])
+ update_images(env.get('CONTAINER_CONFIGS_DIR') != '', sync_node=True)
+ compose_up(env, sync_node=True)
+ return True
+
+
+def update_sync(env_filepath: str, env: Dict) -> bool:
+ compose_rm(env, sync_node=True)
+ remove_dynamic_containers()
+ cleanup_volume_artifacts(env['DISK_MOUNTPOINT'])
+ download_skale_node(
+ env['CONTAINER_CONFIGS_STREAM'],
+ env.get('CONTAINER_CONFIGS_DIR')
)
- update_images(env.get('CONTAINER_CONFIGS_DIR') != '')
- compose_up(env)
+ sync_skale_node()
+
+ if env.get('SKIP_DOCKER_CONFIG') != 'True':
+ configure_docker()
+
+ ensure_filestorage_mapping()
+ backup_old_contracts()
+ download_contracts(env)
+
+ prepare_block_device(
+ env['DISK_MOUNTPOINT'],
+ force=env['ENFORCE_BTRFS'] == 'True'
+ )
+ generate_nginx_config()
+
+ prepare_host(
+ env_filepath,
+ env['ENV_TYPE'],
+ allocation=True
+ )
+
+ update_meta(
+ VERSION,
+ env['CONTAINER_CONFIGS_STREAM'],
+ env['DOCKER_LVMPY_STREAM'],
+ distro.id(),
+ distro.version()
+ )
+ update_images(env.get('CONTAINER_CONFIGS_DIR') != '', sync_node=True)
+ compose_up(env, sync_node=True)
return True
@@ -222,10 +309,7 @@ def restore(env, backup_path, config_only=False):
distro.id(),
distro.version()
)
- update_resource_allocation(
- disk_device=env['DISK_MOUNTPOINT'],
- env_type=env['ENV_TYPE']
- )
+ update_resource_allocation(env_type=env['ENV_TYPE'])
if not config_only:
compose_up(env)
diff --git a/node_cli/operations/docker_lvmpy.py b/node_cli/operations/docker_lvmpy.py
index 31036c95..1810516b 100644
--- a/node_cli/operations/docker_lvmpy.py
+++ b/node_cli/operations/docker_lvmpy.py
@@ -32,7 +32,7 @@
LVMPY_HEAL_CMD,
LVMPY_CRON_LOG_PATH,
LVMPY_CRON_SCHEDULE_MINUTES,
- SCHAINS_MNT_DIR,
+ SCHAINS_MNT_DIR_REGULAR,
VOLUME_GROUP
)
from lvmpy.src.install import setup as setup_lvmpy
@@ -44,7 +44,7 @@ def update_docker_lvmpy_env(env):
env['PHYSICAL_VOLUME'] = env['DISK_MOUNTPOINT']
env['VOLUME_GROUP'] = 'schains'
env['FILESTORAGE_MAPPING'] = FILESTORAGE_MAPPING
- env['MNT_DIR'] = SCHAINS_MNT_DIR
+ env['MNT_DIR'] = SCHAINS_MNT_DIR_REGULAR
env['PATH'] = os.environ.get('PATH', None)
return env
diff --git a/node_cli/operations/skale_node.py b/node_cli/operations/skale_node.py
index 044f7a5d..b3745070 100644
--- a/node_cli/operations/skale_node.py
+++ b/node_cli/operations/skale_node.py
@@ -35,11 +35,11 @@
logger = logging.getLogger(__name__)
-def update_images(local: bool = False) -> None:
+def update_images(local: bool = False, sync_node: bool = False) -> None:
if local:
- compose_build()
+ compose_build(sync_node=sync_node)
else:
- compose_pull()
+ compose_pull(sync_node=sync_node)
def download_skale_node(stream: Optional[str], src: Optional[str]) -> None:
diff --git a/node_cli/operations/volume.py b/node_cli/operations/volume.py
new file mode 100644
index 00000000..b6de918a
--- /dev/null
+++ b/node_cli/operations/volume.py
@@ -0,0 +1,164 @@
+# -*- coding: utf-8 -*-
+#
+# This file is part of node-cli
+#
+# Copyright (C) 2021 SKALE Labs
+#
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU Affero General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU Affero General Public License for more details.
+#
+# You should have received a copy of the GNU Affero General Public License
+# along with this program. If not, see .
+
+import glob
+import logging
+import os
+import shutil
+import tempfile
+
+from node_cli.utils.helper import run_cmd
+from node_cli.utils.git_utils import sync_repo
+from node_cli.configs import (
+ DOCKER_LVMPY_PATH,
+ DOCKER_LVMPY_REPO_URL,
+ FILESTORAGE_MAPPING,
+ SCHAINS_MNT_DIR_REGULAR,
+ SCHAINS_MNT_DIR_SYNC,
+ SKALE_STATE_DIR
+)
+
+logger = logging.getLogger(__name__)
+
+
+class FilesystemExistsError(Exception):
+ pass
+
+
+def update_docker_lvmpy_env(env):
+ env['PHYSICAL_VOLUME'] = env['DISK_MOUNTPOINT']
+ env['VOLUME_GROUP'] = 'schains'
+ env['FILESTORAGE_MAPPING'] = FILESTORAGE_MAPPING
+ env['SCHAINS_MNT_DIR'] = SCHAINS_MNT_DIR_REGULAR
+ env['PATH'] = os.environ.get('PATH', None)
+ return env
+
+
+def ensure_filestorage_mapping(mapping_dir=FILESTORAGE_MAPPING):
+ if not os.path.isdir(mapping_dir):
+ os.makedirs(mapping_dir)
+
+
+def sync_docker_lvmpy_repo(env):
+ if os.path.isdir(DOCKER_LVMPY_PATH):
+ shutil.rmtree(DOCKER_LVMPY_PATH)
+ sync_repo(
+ DOCKER_LVMPY_REPO_URL,
+ DOCKER_LVMPY_PATH,
+ env["DOCKER_LVMPY_STREAM"]
+ )
+
+
+def docker_lvmpy_update(env):
+ sync_docker_lvmpy_repo(env)
+ ensure_filestorage_mapping()
+ logger.info('Running docker-lvmpy update script')
+ update_docker_lvmpy_env(env)
+ run_cmd(
+ cmd=f'sudo -H -E {DOCKER_LVMPY_PATH}/scripts/update.sh'.split(),
+ env=env
+ )
+ logger.info('docker-lvmpy update done')
+
+
+def docker_lvmpy_install(env):
+ sync_docker_lvmpy_repo(env)
+ ensure_filestorage_mapping()
+ update_docker_lvmpy_env(env)
+ run_cmd(
+ cmd=f'sudo -H -E {DOCKER_LVMPY_PATH}/scripts/install.sh'.split(),
+ env=env
+ )
+ logger.info('docker-lvmpy installed')
+
+
+def block_device_mounted(block_device):
+ with open('/proc/mounts') as mounts:
+ return any(block_device in mount for mount in mounts.readlines())
+
+
+def ensure_not_mounted(block_device):
+ logger.info('Making sure %s is not mounted', block_device)
+ if block_device_mounted(block_device):
+ run_cmd(['umount', block_device])
+
+
+def cleanup_static_path(filestorage_mapping=FILESTORAGE_MAPPING):
+ logger.info('Removing all links from filestorage mapping')
+ for dir_link in glob.glob(os.path.join(filestorage_mapping, '*')):
+ logger.debug('Unlinking %s', dir_link)
+ if os.path.islink(dir_link):
+ os.unlink(dir_link)
+
+
+def cleanup_volume_artifacts(block_device):
+ ensure_not_mounted(block_device)
+ cleanup_static_path()
+
+
+def get_block_device_filesystem(block_device):
+ r = run_cmd(['blkid', '-o', 'udev', block_device])
+ output = r.stdout.decode('utf-8')
+ logger.debug('blkid output %s', output)
+ fs_line = next(filter(lambda s: s.startswith('ID_FS_TYPE'), output.split('\n')))
+ return fs_line.split('=')[1]
+
+
+def format_as_btrfs(block_device):
+ logger.info('Formating %s as btrfs', block_device)
+ run_cmd(['mkfs.btrfs', block_device, '-f'])
+
+
+def mount_device(block_device, mountpoint):
+ os.makedirs(mountpoint, exist_ok=True)
+ logger.info('Mounting %s device', block_device)
+ run_cmd(['mount', block_device, mountpoint])
+
+
+def prepare_block_device(block_device, force=False):
+ filesystem = None
+ try:
+ filesystem = get_block_device_filesystem(block_device)
+ except Exception as e:
+ logger.info('Cannot get filesystem type %s', e)
+ logger.debug('Cannot get filesystem type', exc_info=True)
+ if not force:
+ raise
+
+ if filesystem == 'btrfs':
+ logger.info('%s already formatted as btrfs', block_device)
+ ensure_btrfs_for_all_space(block_device)
+ else:
+ logger.info('%s contains %s filesystem', block_device, filesystem)
+ format_as_btrfs(block_device)
+ mount_device(block_device, SCHAINS_MNT_DIR_SYNC)
+
+
+def max_resize_btrfs(path):
+ run_cmd(['btrfs', 'filesystem', 'resize', 'max', path])
+
+
+def ensure_btrfs_for_all_space(block_device):
+ with tempfile.TemporaryDirectory(dir=SKALE_STATE_DIR) as mountpoint:
+ try:
+ mount_device(block_device, mountpoint)
+ logger.info('Resizing btrfs filesystem for %s', block_device)
+ max_resize_btrfs(mountpoint)
+ finally:
+ ensure_not_mounted(block_device)
diff --git a/node_cli/utils/docker_utils.py b/node_cli/utils/docker_utils.py
index 1a3f11fb..784ef20d 100644
--- a/node_cli/utils/docker_utils.py
+++ b/node_cli/utils/docker_utils.py
@@ -29,9 +29,11 @@
from node_cli.utils.helper import run_cmd, str_to_bool
from node_cli.configs import (
COMPOSE_PATH,
+ SYNC_COMPOSE_PATH,
REMOVED_CONTAINERS_FOLDER_PATH,
SGX_CERTIFICATES_DIR_NAME,
- SKALE_DIR
+ SKALE_DIR,
+ NGINX_CONTAINER_NAME
)
@@ -73,7 +75,6 @@ def get_sanitized_container_name(container_info: dict) -> str:
def get_containers(container_name_filter=None, _all=True) -> list:
return docker_client().containers.list(all=_all)
- return docker_client().containers.list(all=_all, filters={'name': container_name_filter})
def get_all_schain_containers(_all=True) -> list:
@@ -182,12 +183,13 @@ def is_volume_exists(name: str, dutils=None):
return True
-def compose_rm(env={}):
+def compose_rm(env={}, sync_node: bool = False):
logger.info('Removing compose containers')
+ compose_path = get_compose_path(sync_node)
run_cmd(
cmd=(
'docker-compose',
- '-f', COMPOSE_PATH,
+ '-f', compose_path,
'down',
'-t', str(COMPOSE_SHUTDOWN_TIMEOUT),
),
@@ -196,20 +198,22 @@ def compose_rm(env={}):
logger.info('Compose containers removed')
-def compose_pull():
+def compose_pull(sync_node: bool = False):
logger.info('Pulling compose containers')
+ compose_path = get_compose_path(sync_node)
run_cmd(
- cmd=('docker-compose', '-f', COMPOSE_PATH, 'pull'),
+ cmd=('docker-compose', '-f', compose_path, 'pull'),
env={
'SKALE_DIR': SKALE_DIR
}
)
-def compose_build():
+def compose_build(sync_node: bool = False):
logger.info('Building compose containers')
+ compose_path = get_compose_path(sync_node)
run_cmd(
- cmd=('docker-compose', '-f', COMPOSE_PATH, 'build'),
+ cmd=('docker-compose', '-f', compose_path, 'build'),
env={
'SKALE_DIR': SKALE_DIR
}
@@ -220,7 +224,20 @@ def get_up_compose_cmd(services):
return ('docker-compose', '-f', COMPOSE_PATH, 'up', '-d', *services)
-def compose_up(env):
+def get_up_compose_sync_cmd():
+ return ('docker-compose', '-f', SYNC_COMPOSE_PATH, 'up', '-d')
+
+
+def get_compose_path(sync_node: bool) -> str:
+ return SYNC_COMPOSE_PATH if sync_node else COMPOSE_PATH
+
+
+def compose_up(env, sync_node=False):
+ if sync_node:
+ logger.info('Running containers for sync node')
+ run_cmd(cmd=get_up_compose_sync_cmd(), env=env)
+ return
+
logger.info('Running base set of containers')
if 'SGX_CERTIFICATES_DIR_NAME' not in env:
@@ -235,6 +252,12 @@ def compose_up(env):
run_cmd(cmd=get_up_compose_cmd(NOTIFICATION_COMPOSE_SERVICES), env=env)
+def restart_nginx_container(dutils=None):
+ dutils = dutils or docker_client()
+ nginx_container = dutils.containers.get(NGINX_CONTAINER_NAME)
+ nginx_container.restart()
+
+
def remove_images(images, dclient=None):
dc = dclient or docker_client()
for image in images:
diff --git a/node_cli/utils/helper.py b/node_cli/utils/helper.py
index bcfa61b9..4d76164b 100644
--- a/node_cli/utils/helper.py
+++ b/node_cli/utils/helper.py
@@ -73,6 +73,10 @@
}
+class InvalidEnvFileError(Exception):
+ pass
+
+
def read_json(path):
with open(path, encoding='utf-8') as data_file:
return json.loads(data_file.read())
@@ -148,9 +152,8 @@ def get_username():
return os.environ.get('USERNAME') or os.environ.get('USER')
-def extract_env_params(env_filepath):
- env_params = get_env_config(env_filepath)
-
+def extract_env_params(env_filepath, sync_node=False, raise_for_status=True):
+ env_params = get_env_config(env_filepath, sync_node=sync_node)
absent_params = ', '.join(absent_env_params(env_params))
if absent_params:
click.echo(f"Your env file({env_filepath}) have some absent params: "
@@ -158,6 +161,8 @@ def extract_env_params(env_filepath):
f"You should specify them to make sure that "
f"all services are working",
err=True)
+ if raise_for_status:
+ raise InvalidEnvFileError(f'Missing params: {absent_params}')
return None
return env_params
@@ -359,6 +364,14 @@ def rsync_dirs(src: str, dest: str) -> None:
run_cmd(['rsync', '-r', f'{src}/.git', dest])
+def ok_result(payload: dict = None):
+ return 'ok', payload
+
+
+def err_result(msg: str = None):
+ return 'error', msg
+
+
class UrlType(click.ParamType):
name = 'url'
diff --git a/scripts/build.sh b/scripts/build.sh
index cdde93c0..3f334169 100755
--- a/scripts/build.sh
+++ b/scripts/build.sh
@@ -4,8 +4,9 @@ set -e
VERSION=$1
BRANCH=$2
+TYPE=$3
-USAGE_MSG='Usage: build.sh [VERSION] [BRANCH]'
+USAGE_MSG='Usage: build.sh [VERSION] [BRANCH] [TYPE]'
if [ -z "$1" ]
then
@@ -21,6 +22,13 @@ then
exit 1
fi
+if [ -z "$3" ]
+then
+ (>&2 echo 'You should provide type: normal or sync')
+ echo $USAGE_MSG
+ exit 1
+fi
+
DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null 2>&1 && pwd )"
PARENT_DIR="$(dirname "$DIR")"
@@ -37,8 +45,13 @@ echo "COMMIT = '$LATEST_COMMIT'" >> $DIST_INFO_FILEPATH
echo "BRANCH = '$BRANCH'" >> $DIST_INFO_FILEPATH
echo "OS = '$OS'" >> $DIST_INFO_FILEPATH
echo "VERSION = '$VERSION'" >> $DIST_INFO_FILEPATH
+echo "TYPE = '$TYPE'" >> $DIST_INFO_FILEPATH
-EXECUTABLE_NAME=skale-$VERSION-$OS
+if [ "$TYPE" = "sync" ]; then
+ EXECUTABLE_NAME=skale-$VERSION-$OS-sync
+else
+ EXECUTABLE_NAME=skale-$VERSION-$OS
+fi
pyinstaller main.spec
diff --git a/setup.py b/setup.py
index ff60099a..e6c260e3 100644
--- a/setup.py
+++ b/setup.py
@@ -66,9 +66,12 @@ def find_version(*file_paths):
"python-debian==0.1.49",
"python-iptables==1.0.1",
"PyYAML==6.0",
+ "pyOpenSSL==22.0.0",
"MarkupSafe==2.1.1",
'Flask==2.3.3',
'itsdangerous==2.1.2',
+ "cryptography==37.0.2",
+ "filelock==3.0.12",
'sh==1.14.2',
'python-crontab==2.6.0'
],
diff --git a/tests/.skale/config/nginx.conf.j2 b/tests/.skale/config/nginx.conf.j2
new file mode 100644
index 00000000..dc264362
--- /dev/null
+++ b/tests/.skale/config/nginx.conf.j2
@@ -0,0 +1,47 @@
+limit_req_zone $binary_remote_addr zone=one:10m rate=7r/s;
+
+server {
+ listen 3009;
+
+ {% if ssl %}
+ listen 311 ssl;
+ ssl_certificate /ssl/ssl_cert;
+ ssl_certificate_key /ssl/ssl_key;
+ {% endif %}
+
+ proxy_read_timeout 500s;
+ proxy_connect_timeout 500s;
+ proxy_send_timeout 500s;
+
+ error_log /var/log/nginx/error.log warn;
+ client_max_body_size 20m;
+
+ server_name localhost;
+ limit_req zone=one burst=10;
+
+ location / {
+ include uwsgi_params;
+ uwsgi_read_timeout 500s;
+ uwsgi_socket_keepalive on;
+ uwsgi_pass 127.0.0.1:3010;
+ }
+}
+
+server {
+ listen 80;
+
+ {% if ssl %}
+ listen 443 ssl;
+ ssl_certificate /ssl/ssl_cert;
+ ssl_certificate_key /ssl/ssl_key;
+ {% endif %}
+
+ error_log /var/log/nginx/error.log warn;
+ client_max_body_size 20m;
+ server_name localhost;
+ limit_req zone=one burst=50;
+
+ location / {
+ root /filestorage;
+ }
+}
\ No newline at end of file
diff --git a/tests/.skale/config/schain_allocation.yml b/tests/.skale/config/schain_allocation.yml
index 0aebc880..14947b77 100644
--- a/tests/.skale/config/schain_allocation.yml
+++ b/tests/.skale/config/schain_allocation.yml
@@ -11,25 +11,19 @@ devnet:
leveldb_limits:
large:
contract_storage: 12787195576
- db_storage: 8524797050
+ db_storage: 4262398525
medium:
contract_storage: 1598399446
- db_storage: 1065599631
+ db_storage: 532799815
small:
contract_storage: 99899965
- db_storage: 66599976
+ db_storage: 33299988
test:
contract_storage: 1598399446
- db_storage: 1065599631
+ db_storage: 532799815
test4:
contract_storage: 1598399446
- db_storage: 1065599631
- rotate_after_block:
- large: 1310721
- medium: 163840
- small: 10240
- test: 163840
- test4: 163840
+ db_storage: 532799815
shared_space: 8959950848
volume_limits:
large:
@@ -67,25 +61,19 @@ mainnet:
leveldb_limits:
large:
contract_storage: 303695989309
- db_storage: 202463992872
+ db_storage: 101231996436
medium:
contract_storage: 37961998663
- db_storage: 25307999108
+ db_storage: 12653999554
small:
contract_storage: 2372624916
- db_storage: 1581749944
+ db_storage: 790874972
test:
contract_storage: 37961998663
- db_storage: 25307999108
+ db_storage: 12653999554
test4:
contract_storage: 37961998663
- db_storage: 25307999108
- rotate_after_block:
- large: 31129628
- medium: 3891203
- small: 243200
- test: 3891203
- test4: 3891203
+ db_storage: 12653999554
shared_space: 212799979520
volume_limits:
large:
@@ -123,25 +111,19 @@ qanet:
leveldb_limits:
large:
contract_storage: 31967988940
- db_storage: 21311992627
+ db_storage: 10655996313
medium:
contract_storage: 3995998617
- db_storage: 2663999078
+ db_storage: 1331999539
small:
contract_storage: 249749913
- db_storage: 166499942
+ db_storage: 83249971
test:
contract_storage: 3995998617
- db_storage: 2663999078
+ db_storage: 1331999539
test4:
contract_storage: 3995998617
- db_storage: 2663999078
- rotate_after_block:
- large: 3276803
- medium: 409600
- small: 25600
- test: 409600
- test4: 409600
+ db_storage: 1331999539
shared_space: 22399942656
volume_limits:
large:
@@ -179,25 +161,19 @@ testnet:
leveldb_limits:
large:
contract_storage: 31967988940
- db_storage: 21311992627
+ db_storage: 10655996313
medium:
contract_storage: 3995998617
- db_storage: 2663999078
+ db_storage: 1331999539
small:
contract_storage: 249749913
- db_storage: 166499942
+ db_storage: 83249971
test:
contract_storage: 3995998617
- db_storage: 2663999078
+ db_storage: 1331999539
test4:
contract_storage: 3995998617
- db_storage: 2663999078
- rotate_after_block:
- large: 3276803
- medium: 409600
- small: 25600
- test: 409600
- test4: 409600
+ db_storage: 1331999539
shared_space: 22399942656
volume_limits:
large:
diff --git a/tests/cli/sync_node_test.py b/tests/cli/sync_node_test.py
new file mode 100644
index 00000000..3966d3c8
--- /dev/null
+++ b/tests/cli/sync_node_test.py
@@ -0,0 +1,116 @@
+# -*- coding: utf-8 -*-
+#
+# This file is part of node-cli
+#
+# Copyright (C) 2019 SKALE Labs
+#
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU Affero General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU Affero General Public License for more details.
+#
+# You should have received a copy of the GNU Affero General Public License
+# along with this program.  If not, see <https://www.gnu.org/licenses/>.
+
+import pathlib
+
+import mock
+import logging
+
+from node_cli.configs import SKALE_DIR, NODE_DATA_PATH
+from node_cli.cli.sync_node import _init_sync, _update_sync
+from node_cli.utils.helper import init_default_logger
+from node_cli.core.node_options import NodeOptions
+
+from tests.helper import (
+ run_command, subprocess_run_mock
+)
+from tests.resources_test import BIG_DISK_SIZE
+
+logger = logging.getLogger(__name__)
+init_default_logger()
+
+
+def test_init_sync(mocked_g_config):
+ pathlib.Path(SKALE_DIR).mkdir(parents=True, exist_ok=True)
+ with mock.patch('subprocess.run', new=subprocess_run_mock), \
+ mock.patch('node_cli.core.node.init_sync_op'), \
+ mock.patch('node_cli.core.node.is_base_containers_alive', return_value=True), \
+ mock.patch('node_cli.core.resources.get_disk_size', return_value=BIG_DISK_SIZE), \
+ mock.patch('node_cli.core.node.configure_firewall_rules'), \
+ mock.patch('node_cli.utils.decorators.is_node_inited', return_value=False):
+ result = run_command(
+ _init_sync,
+ ['./tests/test-env']
+ )
+ assert result.exit_code == 0
+
+
+def test_init_sync_archive_catchup(mocked_g_config, clean_node_options):
+ pathlib.Path(NODE_DATA_PATH).mkdir(parents=True, exist_ok=True)
+# with mock.patch('subprocess.run', new=subprocess_run_mock), \
+ with mock.patch('node_cli.core.node.is_base_containers_alive', return_value=True), \
+ mock.patch('node_cli.operations.base.cleanup_volume_artifacts'), \
+ mock.patch('node_cli.operations.base.download_skale_node'), \
+ mock.patch('node_cli.operations.base.sync_skale_node'), \
+ mock.patch('node_cli.operations.base.configure_docker'), \
+ mock.patch('node_cli.operations.base.prepare_host'), \
+ mock.patch('node_cli.operations.base.ensure_filestorage_mapping'), \
+ mock.patch('node_cli.operations.base.link_env_file'), \
+ mock.patch('node_cli.operations.base.download_contracts'), \
+ mock.patch('node_cli.operations.base.generate_nginx_config'), \
+ mock.patch('node_cli.operations.base.prepare_block_device'), \
+ mock.patch('node_cli.operations.base.update_meta'), \
+ mock.patch('node_cli.operations.base.update_resource_allocation'), \
+ mock.patch('node_cli.operations.base.update_images'), \
+ mock.patch('node_cli.operations.base.compose_up'), \
+ mock.patch('node_cli.core.resources.get_disk_size', return_value=BIG_DISK_SIZE), \
+ mock.patch('node_cli.core.node.configure_firewall_rules'), \
+ mock.patch('node_cli.utils.decorators.is_node_inited', return_value=False):
+ result = run_command(
+ _init_sync,
+ ['./tests/test-env', '--archive', '--catchup', '--historic-state']
+ )
+ node_options = NodeOptions()
+
+ assert node_options.archive
+ assert node_options.catchup
+ assert node_options.historic_state
+
+ assert result.exit_code == 0
+
+
+def test_init_sync_historic_state_fail(mocked_g_config, clean_node_options):
+ pathlib.Path(SKALE_DIR).mkdir(parents=True, exist_ok=True)
+ with mock.patch('subprocess.run', new=subprocess_run_mock), \
+ mock.patch('node_cli.core.node.init_sync_op'), \
+ mock.patch('node_cli.core.node.is_base_containers_alive', return_value=True), \
+ mock.patch('node_cli.core.resources.get_disk_size', return_value=BIG_DISK_SIZE), \
+ mock.patch('node_cli.core.node.configure_firewall_rules'), \
+ mock.patch('node_cli.utils.decorators.is_node_inited', return_value=False):
+ result = run_command(
+ _init_sync,
+ ['./tests/test-env', '--historic-state']
+ )
+ assert result.exit_code == 1
+ assert '--historic-state can be used only' in result.output
+
+
+def test_update_sync(mocked_g_config):
+ pathlib.Path(SKALE_DIR).mkdir(parents=True, exist_ok=True)
+ with mock.patch('subprocess.run', new=subprocess_run_mock), \
+ mock.patch('node_cli.core.node.update_sync_op'), \
+ mock.patch('node_cli.core.node.is_base_containers_alive', return_value=True), \
+ mock.patch('node_cli.core.resources.get_disk_size', return_value=BIG_DISK_SIZE), \
+ mock.patch('node_cli.core.node.configure_firewall_rules'), \
+ mock.patch('node_cli.utils.decorators.is_node_inited', return_value=True):
+ result = run_command(
+ _update_sync,
+ ['./tests/test-env', '--yes']
+ )
+ assert result.exit_code == 0
diff --git a/tests/conftest.py b/tests/conftest.py
index 13b8ca65..9504523f 100644
--- a/tests/conftest.py
+++ b/tests/conftest.py
@@ -32,11 +32,15 @@
CONTAINER_CONFIG_TMP_PATH,
GLOBAL_SKALE_CONF_FILEPATH,
GLOBAL_SKALE_DIR,
+ META_FILEPATH,
+ NGINX_CONTAINER_NAME,
REMOVED_CONTAINERS_FOLDER_PATH,
STATIC_PARAMS_FILEPATH
)
-from node_cli.configs import META_FILEPATH
+from node_cli.configs.node_options import NODE_OPTIONS_FILEPATH
+from node_cli.configs.ssl import SSL_FOLDER_PATH
from node_cli.configs.resource_allocation import RESOURCE_ALLOCATION_FILEPATH
+from node_cli.utils.docker_utils import docker_client
from node_cli.utils.global_config import generate_g_config_file
from tests.helper import TEST_META_V1, TEST_META_V2, TEST_META_V3
@@ -193,6 +197,15 @@ def mocked_g_config():
generate_g_config_file(GLOBAL_SKALE_DIR, GLOBAL_SKALE_CONF_FILEPATH)
+@pytest.fixture()
+def clean_node_options():
+ pathlib.Path(NODE_OPTIONS_FILEPATH).unlink(missing_ok=True)
+ try:
+ yield
+ finally:
+ pathlib.Path(NODE_OPTIONS_FILEPATH).unlink(missing_ok=True)
+
+
@pytest.fixture
def resource_alloc():
with open(RESOURCE_ALLOCATION_FILEPATH, 'w') as alloc_file:
@@ -201,6 +214,48 @@ def resource_alloc():
os.remove(RESOURCE_ALLOCATION_FILEPATH)
+@pytest.fixture
+def ssl_folder():
+ if os.path.isdir(SSL_FOLDER_PATH):
+ shutil.rmtree(SSL_FOLDER_PATH)
+ path = pathlib.Path(SSL_FOLDER_PATH)
+ path.mkdir(parents=True, exist_ok=True)
+ try:
+ yield
+ finally:
+ shutil.rmtree(SSL_FOLDER_PATH)
+
+
+@pytest.fixture
+def dutils():
+ return docker_client()
+
+
+@pytest.fixture
+def nginx_container(dutils, ssl_folder):
+ c = None
+ try:
+ c = dutils.containers.run(
+ 'nginx:1.20.2',
+ name=NGINX_CONTAINER_NAME,
+ detach=True,
+ volumes={
+ ssl_folder: {
+ 'bind': '/ssl',
+ 'mode': 'ro',
+ 'propagation': 'slave'
+ }
+ }
+ )
+ yield c
+ finally:
+ if c is not None:
+ try:
+ c.remove(force=True)
+ except Exception:
+ pass
+
+
@pytest.fixture
def meta_file_v1():
with open(META_FILEPATH, 'w') as f:
diff --git a/tests/core_ssl_test.py b/tests/core_ssl_test.py
index 318109b2..a7b3eaff 100644
--- a/tests/core_ssl_test.py
+++ b/tests/core_ssl_test.py
@@ -1,11 +1,14 @@
import os
import pathlib
+from docker import APIClient
-import mock
import pytest
-from node_cli.core.ssl import check_cert_openssl, SSLHealthcheckError, upload_cert
+from node_cli.core.ssl import upload_cert
+from node_cli.core.ssl.check import check_cert_openssl, SSLHealthcheckError
from node_cli.utils.helper import run_cmd
+from node_cli.configs.ssl import SSL_CERT_FILEPATH, SSL_KEY_FILEPATH
+from node_cli.configs import NGINX_CONTAINER_NAME
HOST = '127.0.0.1'
@@ -72,23 +75,21 @@ def test_verify_cert_bad_key(bad_key):
check_cert_openssl(cert, key, host=HOST, no_client=True)
-@mock.patch('node_cli.core.ssl.post_request')
-def test_upload_cert(pr_mock, cert_key_pair):
+def test_upload_cert(cert_key_pair, nginx_container, dutils):
cert, key = cert_key_pair
+
+ docker_api = APIClient()
+ nginx_container = dutils.containers.get(NGINX_CONTAINER_NAME)
+ stats = docker_api.inspect_container(nginx_container.id)
+ started_at = stats['State']['StartedAt']
+
+ assert not os.path.isfile(SSL_KEY_FILEPATH)
+ assert not os.path.isfile(SSL_CERT_FILEPATH)
+
upload_cert(cert, key, force=False, no_client=True)
- # args = pr_mock.call_args.args
- # assert args[0] == 'ssl_upload'
- kwargs = pr_mock.call_args.kwargs
- assert kwargs['files']['ssl_cert'][1].name == cert
- assert kwargs['files']['ssl_key'][1].name == key
- assert kwargs['files']['json'][1] == '{"force": false}'
-
- upload_cert(cert, key, force=True, no_client=True)
- # args = pr_mock.call_args.args
- # assert args[0] == 'ssl_upload'
- kwargs = pr_mock.call_args.kwargs
- assert kwargs['files']['ssl_cert'][1].name == cert
- assert kwargs['files']['ssl_key'][1].name == key
- assert kwargs['files']['json'][1] == '{"force": true}'
-
- assert pr_mock.call_count == 2
+
+ assert os.path.isfile(SSL_KEY_FILEPATH)
+ assert os.path.isfile(SSL_CERT_FILEPATH)
+
+ stats = docker_api.inspect_container(nginx_container.id)
+ assert started_at != stats['State']['StartedAt']
diff --git a/tests/helper.py b/tests/helper.py
index b065b397..832ac577 100644
--- a/tests/helper.py
+++ b/tests/helper.py
@@ -18,9 +18,12 @@
# along with this program.  If not, see <https://www.gnu.org/licenses/>.
import mock
+import os
+
from click.testing import CliRunner
from mock import Mock, MagicMock
+BLOCK_DEVICE = os.getenv('BLOCK_DEVICE')
TEST_META_V1 = {
'version': '0.1.1',
@@ -43,8 +46,12 @@
}
-def response_mock(status_code=0, json_data=None,
- headers=None, raw=None):
+def response_mock(
+ status_code=0,
+ json_data=None,
+ headers=None,
+ raw=None
+):
result = MagicMock()
result.status_code = status_code
@@ -71,8 +78,7 @@ def run_command_mock(mock_call_path, response_mock,
return run_command(command, params, input=input)
-def subprocess_run_mock(cmd=None, shell=None, stdout=None,
- stderr=None, env=None, returncode=0):
+def subprocess_run_mock(*args, returncode=0, **kwargs):
result = MagicMock()
result.returncode = returncode
result.stdout = MagicMock()
diff --git a/tests/operations/common_test.py b/tests/operations/common_test.py
index 8a019ffd..cee64ebc 100644
--- a/tests/operations/common_test.py
+++ b/tests/operations/common_test.py
@@ -16,10 +16,6 @@ def test_docker_lvmpy_update():
assert False
-def test_download_filestorage_artifacts():
- assert False
-
-
def test_update_skale_node_repo():
assert False
diff --git a/tests/resources_test.py b/tests/resources_test.py
index b5613dec..19755e03 100644
--- a/tests/resources_test.py
+++ b/tests/resources_test.py
@@ -45,11 +45,8 @@ def resource_alloc_config():
def test_generate_resource_allocation_config():
- disk_device = '/dev/test'
with mock.patch('node_cli.core.resources.get_disk_size', return_value=NORMAL_DISK_SIZE):
- resource_allocation_config = compose_resource_allocation_config(
- disk_device, DEFAULT_ENV_TYPE
- )
+ resource_allocation_config = compose_resource_allocation_config(DEFAULT_ENV_TYPE)
assert resource_allocation_config['schain']['cpu_shares']['test4'] == 102
assert resource_allocation_config['schain']['cpu_shares']['test'] == 102
@@ -69,16 +66,16 @@ def test_generate_resource_allocation_config():
assert resource_allocation_config['schain']['disk']['medium'] == 8879996928
assert resource_allocation_config['schain']['disk']['large'] == 71039975424
- assert resource_allocation_config['ima']['cpu_shares'] == {'large': 204, 'medium': 25, 'small': 1, 'test': 25, 'test4': 25} # noqa
+ assert resource_allocation_config['ima']['cpu_shares'] == {
+ 'large': 204, 'medium': 25, 'small': 1, 'test': 25, 'test4': 25}
assert isinstance(resource_allocation_config['ima']['mem'], dict)
assert resource_allocation_config['schain']['volume_limits'] == SCHAIN_VOLUME_PARTS
def test_update_allocation_config(resource_alloc_config):
- block_device = '/dev/test'
with mock.patch('node_cli.core.resources.get_disk_size', return_value=BIG_DISK_SIZE):
- update_resource_allocation(block_device, DEFAULT_ENV_TYPE)
+ update_resource_allocation(DEFAULT_ENV_TYPE)
with open(RESOURCE_ALLOCATION_FILEPATH) as jfile:
assert json.load(jfile) != INITIAL_CONFIG
@@ -160,17 +157,12 @@ def test_get_memory_alloc(params_by_env_type):
def test_leveldb_limits():
- disk_device = '/dev/test'
with mock.patch('node_cli.core.resources.get_disk_size', return_value=NORMAL_DISK_SIZE):
- resource_allocation_config = compose_resource_allocation_config(
- disk_device,
- DEFAULT_ENV_TYPE
- )
-
+ resource_allocation_config = compose_resource_allocation_config(DEFAULT_ENV_TYPE)
assert resource_allocation_config['schain']['leveldb_limits'] == {
- 'large': {'contract_storage': 12787195576, 'db_storage': 8524797050},
- 'medium': {'contract_storage': 1598399446, 'db_storage': 1065599631},
- 'small': {'contract_storage': 99899965, 'db_storage': 66599976},
- 'test': {'contract_storage': 1598399446, 'db_storage': 1065599631},
- 'test4': {'contract_storage': 1598399446, 'db_storage': 1065599631}
+ 'large': {'contract_storage': 12787195576, 'db_storage': 4262398525},
+ 'medium': {'contract_storage': 1598399446, 'db_storage': 532799815},
+ 'small': {'contract_storage': 99899965, 'db_storage': 33299988},
+ 'test': {'contract_storage': 1598399446, 'db_storage': 532799815},
+ 'test4': {'contract_storage': 1598399446, 'db_storage': 532799815}
}
diff --git a/tests/test-env b/tests/test-env
index 0c293ba3..eb598381 100644
--- a/tests/test-env
+++ b/tests/test-env
@@ -11,4 +11,6 @@ MANAGER_CONTRACTS_ABI_URL=http://127.0.0.1
SGX_SERVER_URL=http://127.0.0.1
DISK_MOUNTPOINT=/dev/sss
DOCKER_LVMPY_STREAM='master'
-ENV_TYPE='devnet'
\ No newline at end of file
+ENV_TYPE='devnet'
+SCHAIN_NAME='test'
+ENFORCE_BTRFS=False
\ No newline at end of file
diff --git a/text.yml b/text.yml
index 2c074a6f..08bf65e8 100644
--- a/text.yml
+++ b/text.yml
@@ -29,8 +29,8 @@ node:
You should run < skale node init >
already_inited: Node was already inited before.
cmd_failed: |-
- Command failed. Please, check out < skale logs cli >
- and logs from docker containers for more information
+ Command failed. Please, check out < skale logs cli >
+ and logs from docker containers for more information
domain_name_changed: Domain name successfully changed
wallet:
successful_transfer: "Funds were successfully transferred"
@@ -60,6 +60,13 @@ exit:
wait_for_rotations: "Node is waiting to finish rotations"
completed: "Node exiting is completed"
+sync_node:
+ init:
+ help: Initialize sync SKALE node
+ archive: Run sync node in an archive node (disable block rotation)
+ historic_state: Enable historic state (works only in pair with --archive flag)
+ catchup: Add a flag to start sync node in catchup mode
+
lvmpy:
help: Lvmpy commands
run: