diff --git a/Docker/Dockerfile b/Docker/Dockerfile new file mode 100644 index 0000000..1708066 --- /dev/null +++ b/Docker/Dockerfile @@ -0,0 +1,54 @@ +FROM registry.openanolis.cn/openanolis/anolisos:8.9 +ENV PATH="/opt/miniconda/bin:$PATH" +ENV TZ=UTC-8 +ENV CRYPTOGRAPHY_OPENSSL_NO_LEGACY=1 + +ADD Docker/ob_build /usr/bin/ +ADD Docker/python-env-activate.sh /usr/bin/py-env-activate + +RUN yum install -y wget \ && wget -O /etc/yum.repos.d/epel.repo http://mirrors.aliyun.com/repo/epel-7.repo \ && yum -y install libffi-devel bzip2-devel readline readline-devel jq which bison ncurses-devel libaio autoconf automake libtool perl-CPAN gettext-devel perl-devel openssl-devel zlib-devel curl-devel xz-devel \ expat-devel asciidoc xmlto rpm-build cmake make gcc gcc-c++ python2-devel python3-devel sudo git mysql-devel \ && yum clean all + +RUN wget https://repo.anaconda.com/miniconda/Miniconda2-py27_4.8.3-Linux-x86_64.sh --no-check-certificate\ && bash Miniconda2-py27_4.8.3-Linux-x86_64.sh -p /opt/miniconda -b \ && rm -rf Miniconda2-py27_4.8.3-Linux-x86_64.sh \ && conda clean --all -y + +COPY requirements3.txt /workspaces/obd/requirements3.txt +COPY service/service-requirements.txt /workspaces/obd/service-requirements.txt + +RUN conda create -n py27 -y python=2.7 \ && source /opt/miniconda/bin/activate base \ && pip config set global.index-url https://mirrors.aliyun.com/pypi/simple \ && pip config set global.trusted-host mirrors.aliyun.com \ && pip install pyinstaller==3.6 \ && rm -rf ~/.cache/pip \ && conda clean -a -y + +RUN conda create -n py38 -y python=3.8 \ && source /opt/miniconda/bin/activate py38 \ && pip config set global.index-url https://mirrors.aliyun.com/pypi/simple \ && pip config set global.trusted-host mirrors.aliyun.com \ && pip install -r /workspaces/obd/requirements3.txt \ && pip install -r /workspaces/obd/service-requirements.txt \ && pip install pyinstaller==3.6 \ && pip cache purge \ && echo "source 
/opt/miniconda/bin/activate py38" >> ~/.bashrc \ + && /opt/miniconda/bin/conda clean -a -y \ + && rm -rf /workspaces/obd + +# install nodejs and yarn +RUN wget https://rpm.nodesource.com/setup_16.x \ && touch /etc/redhat-release \ && bash setup_16.x \ && rm -f /etc/redhat-release \ && yum install -y nodejs \ && yum clean all + +RUN npm install -g yarn \ && yarn config delete registry --global \ && yarn config set registry https://registry.npmmirror.com/ --global \ && npm cache clean --force \ No newline at end of file diff --git a/Docker/ob_build b/Docker/ob_build new file mode 100644 index 0000000..bf35405 --- /dev/null +++ b/Docker/ob_build @@ -0,0 +1,76 @@ +#!/bin/bash +set -x +set -e + +if [ $# -lt 4 ]; then + echo "not enough parameters. Please provide project_dir, project_name, version and release." + exit 1 +fi + +# Get system release +os_release=$(grep -Po '(?<=^ID=")[^"]*' /etc/os-release || true) + +if [ -z "$os_release" ]; then + os_release=$(grep -Po '^ID=\K[^ ]+' /etc/os-release) +fi + +set +e +source /etc/profile +set -e + +project_dir=$1 +project_name=$2 +version=$3 +release=$4 +rpm_work_dir=${5:-rpm} +ob_build_script=${project_name}-build.sh +ob_build_spec=${project_name}.spec +ob_build_deps=${project_name}.deps +cur_dir=`pwd` +echo "cur dir: $cur_dir" +# check rpm work dir +if [ ! -d "${cur_dir}/${rpm_work_dir}" ]; then + echo "rpm work dir (${rpm_work_dir}) missing! Please create ${rpm_work_dir} in source code dir and place the packaging related files in the ${rpm_work_dir} dir." 
+ exit 1 +fi + +# trigger building +echo "trigger building, current dir: " +pwd +cd $rpm_work_dir +ABS_PATH=`pwd` + +if [[ x"$os_release" == x"alios" && x"$AONE_COMPATIBLE_AUTO_DEP_CREATE" == x"on" ]]; then + if [ -e "$ob_build_deps" ]; then + echo "execute dep_create for alios" + dep_create $ob_build_deps + echo "execute sw for alios" + sw + fi +fi + +if [ -e "$ob_build_script" ]; then + bash $ob_build_script $project_dir $project_name $version $release +elif [ -e "$ob_build_spec" ]; then + if [[ x"$os_release" == x"alios" ]]; then + rpm_create $ob_build_spec -v $version -r $release + else + TOP_DIR=".rpm_create" + RPM_MACROS=$HOME/.rpmmacros + if [ -e $RPM_MACROS ]; then + mv -f $RPM_MACROS $RPM_MACROS.bak + fi + + # prepare rpm build dirs + mkdir -p $TOP_DIR/BUILD + mkdir -p $TOP_DIR/RPMS + mkdir -p $TOP_DIR/SRPMS + + echo "%_topdir $ABS_PATH/$TOP_DIR" > $RPM_MACROS + rpmbuild -bb $ob_build_spec + find $TOP_DIR/ -name "*.rpm" -exec mv {} . 2>/dev/null \; + fi +else + echo "packaging files missing! 
Please provide $ob_build_script or $ob_build_spec" + exit 1 +fi \ No newline at end of file diff --git a/Docker/python-env-activate.sh b/Docker/python-env-activate.sh new file mode 100755 index 0000000..406e1b0 --- /dev/null +++ b/Docker/python-env-activate.sh @@ -0,0 +1,6 @@ +eval "$(conda shell.bash hook)" +if [[ -z $1 ]]; then + echo "Please input proper python env" + exit 1 +fi +conda activate $1 \ No newline at end of file diff --git a/_workflow.py b/_workflow.py index ec1b91a..73f27da 100644 --- a/_workflow.py +++ b/_workflow.py @@ -22,13 +22,14 @@ import os import sys +from copy import deepcopy from _manager import Manager from _plugin import ComponentPluginLoader, pyScriptPluginExec, PyScriptPluginLoader, PyScriptPlugin from tool import OrderedDict -class WorkflowsIter: +class WorkflowsIter(object): def __init__(self, workflows): self.workflows = workflows @@ -65,6 +66,9 @@ def __getitem__(self, component_name): self.workflows[component_name] = ComponentWorkflow(self.name, component_name) return self.workflows[component_name] + def __len__(self): + return len(self.workflows) + def __setitem__(self, component_name, component_workflow): if not isinstance(component_workflow, ComponentWorkflow): raise TypeError("%s must be a instance of ComponentWorkflow" % component_workflow.__class__.__name__) @@ -72,11 +76,39 @@ def __setitem__(self, component_name, component_workflow): raise ValueError("%s is not a %s workflow" % (component_workflow, self.name)) self.workflows[component_name] = component_workflow - def __call__(self, dpeloy_config): + def __call__(self, sorted_components): workflows = [ - self[component] for component in dpeloy_config.sorted_components + self[component] for component in sorted_components ] return WorkflowsIter(workflows) + + +class SubWorkflows(object): + + def __init__(self) -> None: + self.workflows = OrderedDict() + + def add(self, workflow): + if not isinstance(workflow, ComponentWorkflow): + raise TypeError("%s must be a instance of 
ComponentWorkflow" % workflow.__class__.__name__) + if workflow.name not in self.workflows: + self.workflows[workflow.name] = Workflows(workflow.name) + self.workflows[workflow.name][workflow.component_name] = workflow + + def __getitem__(self, workflow_name): + return self.workflows[workflow_name] + + def __iter__(self): + return iter(self.workflows.values()) + + +class SubWorkflowTemplate(object): + + def __init__(self, name, component_name, version=None, kwargs=None): + self.name = name + self.component_name = component_name + self.version = version + self.kwargs = kwargs or {} class PluginTemplate(object): @@ -94,6 +126,11 @@ def __init__(self, name, component_name): self.name = name self.component_name = component_name self.stage = {} + self.sub_workflow = {} + self.global_kwargs = {} + + def set_global_kwargs(self, **kwargs): + self.global_kwargs = kwargs def add(self, stage, *plugins): return self.add_with_kwargs(stage, None, *plugins) @@ -113,14 +150,45 @@ def add_with_component_version_kwargs(self, stage, component_name, version, kwar if stage not in self.stage: self.stage[stage] = plugins else: + if stage in self.sub_workflow: + raise Exception("stage %s already has a workflow" % stage) self.stage[stage] += plugins + def add_workflow(self, stage, workflow): + return self.add_workflow_with_kwargs(stage, None, workflow) + + def add_workflow_with_component(self, stage, component_name, workflow): + return self.add_workflow_with_component_version(stage, component_name, None, workflow) + + def add_workflow_with_component_version(self, stage, component_name, version, workflow): + return self.add_workflow_with_component_version_kwargs(stage, component_name, version, None, workflow) + + def add_workflow_with_kwargs(self, stage, kwargs, workflow): + return self.add_workflow_with_component_version_kwargs(stage, self.component_name, None, kwargs, workflow) + + def add_workflow_with_component_version_kwargs(self, stage, component_name, version, kwargs, workflow): + 
stage = int(stage) + workflow = SubWorkflowTemplate(workflow, component_name, version, kwargs) + if stage not in self.stage: + self.stage[stage] = [workflow] + self.sub_workflow[stage] = workflow + else: + raise Exception("stage %s already has a workflow" % stage) + @property def stages(self): return sorted(self.stage.keys()) def __getitem__(self, stage): - return self.stage.get(stage, []) + if self.global_kwargs: + stages = [] + for template in self.stage.get(stage, []): + template = deepcopy(template) + template.kwargs.update(self.global_kwargs) + stages.append(template) + return stages + else: + return self.stage.get(stage, []) class ComponentWorkflowLoader(ComponentPluginLoader): diff --git a/core.py b/core.py index 680019e..b90aaa7 100644 --- a/core.py +++ b/core.py @@ -38,7 +38,7 @@ from _mirror import MirrorRepositoryManager, PackageInfo, RemotePackageInfo from _plugin import PluginManager, PluginType, InstallPlugin, PluginContextNamespace from _deploy import DeployManager, DeployStatus, DeployConfig, DeployConfigStatus, Deploy, ClusterStatus -from _workflow import WorkflowManager, Workflows +from _workflow import WorkflowManager, Workflows, SubWorkflowTemplate, SubWorkflows from _tool import Tool, ToolManager from _repository import RepositoryManager, LocalPackage, Repository, RepositoryVO import _errno as err @@ -183,10 +183,12 @@ def get_workflows(self, workflow_name, repositories=None, no_found_act='exit'): repositories = self.repositories workflows = Workflows(workflow_name) for repository in repositories: - workflows[repository.name] = self.get_workflow(repository, workflow_name, repository.name, repository.version, no_found_act=no_found_act) + template = self.get_workflow(repository, workflow_name, repository.name, repository.version, no_found_act=no_found_act) + if template: + workflows[repository.name] = template return workflows - def get_workflow(self, repository, workflow_name, component_name, version=0.1, no_found_act='exit'): + def 
get_workflow(self, repository, workflow_name, component_name, version=0.1, no_found_act='exit', **component_kwargs): if no_found_act == 'exit': no_found_exit = True else: @@ -195,41 +197,78 @@ def get_workflow(self, repository, workflow_name, component_name, version=0.1, n self._call_stdio('verbose', 'Searching %s template for components ...', workflow_name) template = self.workflow_manager.get_workflow_template(workflow_name, component_name, version) if template: - ret = self.call_workflow_template(template, repository) + ret = self.call_workflow_template(template, repository, **component_kwargs) if ret: self._call_stdio('verbose', 'Found for %s for %s-%s' % (template, template.component_name, template.version)) return ret if no_found_exit: - self._call_stdio('critical', 'No such %s template for %s-%s' % (template, template.component_name, template.version)) + self._call_stdio('critical', 'No such %s template for %s-%s' % (workflow_name, component_name, version)) exit(1) else: - self._call_stdio(msg_lv, 'No such %s template for %s-%s' % (template, template.component_name, template.version)) + self._call_stdio(msg_lv, 'No such %s template for %s-%s' % (workflow_name, component_name, version)) - def run_workflow(self, workflows, deploy_config=None, repositories=None, no_found_act='exit'): - if not deploy_config: - deploy_config = self.deploy.deploy_config + def run_workflow(self, workflows, sorted_components=[], repositories=None, no_found_act='exit', error_exit=True, **kwargs): + if not sorted_components and self.deploy: + sorted_components = self.deploy.deploy_config.sorted_components if not repositories: - repositories = self.repositories - repositories = {repository.name: repository for repository in repositories} - for stages in workflows(deploy_config): + repositories = self.repositories if self.repositories else [] + if not sorted_components: + sorted_components = [repository.name for repository in repositories] + + repositories_map = {repository.name: 
repository for repository in repositories} + for stages in workflows(sorted_components): + if not self.handle_sub_workflows(stages, sorted_components, repositories, no_found_act=no_found_act, **kwargs): + return False for component_name in stages: - for plugin_template in stages[component_name]: - if 'repository' in plugin_template.kwargs: - repository = plugin_template.kwargs['repository'] - del plugin_template.kwargs['repository'] - else: - if plugin_template.component_name in repositories: - repository = repositories[plugin_template.component_name] - else: - repository = repositories[component_name] - if not plugin_template.version: - if plugin_template.component_name in repositories: - plugin_template.version = repositories[component_name].version - else: - plugin_template.version = repository.version - plugin = self.search_py_script_plugin_by_template(plugin_template, no_found_act=no_found_act) - if plugin and not self.call_plugin(plugin, repository, **plugin_template.kwargs): - return False + for template in stages[component_name]: + if isinstance(template, SubWorkflowTemplate): + continue + if component_name in kwargs: + template.kwargs.update(kwargs[component_name]) + if not self.run_plugin_template(template, component_name, repositories_map, no_found_act=no_found_act) and error_exit: + return False + return True + + def handle_sub_workflows(self, stages, sorted_components, repositories, no_found_act='exit', **kwargs): + sub_workflows = SubWorkflows() + for repository in repositories: + component_name = repository.name + if component_name not in stages: + continue + for template in stages[component_name]: + if not isinstance(template, SubWorkflowTemplate): + continue + if component_name in kwargs: + template.kwargs.update(kwargs[component_name]) + version = template.version if template.version else repository.version + workflow = self.get_workflow(repository, template.name, template.component_name, version, no_found_act=no_found_act, **template.kwargs) + if 
workflow: + workflow.set_global_kwargs(**template.kwargs) + sub_workflows.add(workflow) + + for workflows in sub_workflows: + if not self.run_workflow(workflows, sorted_components, repositories, no_found_act=no_found_act, **kwargs): + return False + return True + + def run_plugin_template(self, plugin_template, component_name, repositories=None, no_found_act='exit', **kwargs): + if 'repository' in plugin_template.kwargs: + repository = plugin_template.kwargs['repository'] + del plugin_template.kwargs['repository'] + else: + if plugin_template.component_name in repositories: + repository = repositories[plugin_template.component_name] + else: + repository = repositories[component_name] + if not plugin_template.version: + if plugin_template.component_name in repositories: + plugin_template.version = repositories[component_name].version + else: + plugin_template.version = repository.version + plugin = self.search_py_script_plugin_by_template(plugin_template, no_found_act=no_found_act) + plugin_template.kwargs.update(kwargs) + if plugin and not self.call_plugin(plugin, repository, **plugin_template.kwargs): + return False return True def _init_call_args(self, repository, spacename=None, target_servers=None, **kwargs): @@ -245,7 +284,14 @@ def _init_call_args(self, repository, spacename=None, target_servers=None, **kwa 'cmd': self.cmds, 'options': self.options, 'stdio': self.stdio, - 'target_servers': target_servers + 'target_servers': target_servers, + 'mirror_manager': self.mirror_manager, + 'repository_manager': self.repository_manager, + 'plugin_manager': self.plugin_manager, + 'deploy_manager': self.deploy_manager, + 'lock_manager': self.lock_manager, + 'optimize_manager': self.optimize_manager, + 'tool_manager': self.tool_manager } if self.deploy: args['deploy_name'] = self.deploy.name @@ -258,9 +304,9 @@ def _init_call_args(self, repository, spacename=None, target_servers=None, **kwa args.update(kwargs) return args - def call_workflow_template(self, 
workflow_template, repository): + def call_workflow_template(self, workflow_template, repository, spacename=None, target_servers=None, **kwargs): self._call_stdio('verbose', 'Call workflow %s for %s' % (workflow_template, repository)) - args = self._init_call_args(repository, None, None, clients=None) + args = self._init_call_args(repository, spacename, target_servers, clients=None, **kwargs) return workflow_template(**args) def call_plugin(self, plugin, repository, spacename=None, target_servers=None, **kwargs): diff --git a/rpm/ob-deploy.spec b/rpm/ob-deploy.spec index ec77ab0..9b76bef 100644 --- a/rpm/ob-deploy.spec +++ b/rpm/ob-deploy.spec @@ -70,12 +70,13 @@ mkdir -p ${RPM_BUILD_ROOT}/usr/obd pip install -r plugins-requirements3.txt --target=$BUILD_DIR/SOURCES/site-packages -i http://mirrors.aliyun.com/pypi/simple/ --trusted-host mirrors.aliyun.com pip install -r service/service-requirements.txt --target=$BUILD_DIR/SOURCES/site-packages -i http://mirrors.aliyun.com/pypi/simple/ --trusted-host mirrors.aliyun.com # pyinstaller -y --clean -n obd-web -p $BUILD_DIR/SOURCES/site-packages -F service/app.py -pyinstaller --hidden-import=decimal -p $BUILD_DIR/SOURCES/site-packages --hidden-import service/app.py --hidden-import=configparser --hidden-import=Crypto.Hash.SHA --hidden-import=Crypto.PublicKey.RSA --hidden-import=Crypto.Signature.PKCS1_v1_5 --hidden-import=Crypto.Cipher.PKCS1_OAEP -F obd.py +pyinstaller --hidden-import=decimal -p $BUILD_DIR/SOURCES/site-packages --hidden-import service.app --hidden-import=configparser --hidden-import=Crypto.Hash.SHA --hidden-import=Crypto.PublicKey.RSA --hidden-import=Crypto.Signature.PKCS1_v1_5 --hidden-import=Crypto.Cipher.PKCS1_OAEP -F obd.py rm -f obd.py obd.spec \mkdir -p $BUILD_DIR/SOURCES/web \cp -rf $SRC_DIR/dist/obd ${RPM_BUILD_ROOT}/usr/bin/obd \cp -rf $SRC_DIR/web/dist $BUILD_DIR/SOURCES/web \cp -rf $SRC_DIR/plugins $BUILD_DIR/SOURCES/plugins +\cp -rf $SRC_DIR/workflows $BUILD_DIR/SOURCES/workflows \cp -rf 
$SRC_DIR/optimize $BUILD_DIR/SOURCES/optimize \cp -rf $SRC_DIR/example $BUILD_DIR/SOURCES/example \cp -rf $SRC_DIR/config_parser $BUILD_DIR/SOURCES/config_parser @@ -85,6 +86,7 @@ rm -f obd.py obd.spec \cp -rf $SRC_DIR/mirror/ $BUILD_DIR/SOURCES/ \cp -rf $BUILD_DIR/SOURCES/web ${RPM_BUILD_ROOT}/usr/obd/ \cp -rf $BUILD_DIR/SOURCES/plugins ${RPM_BUILD_ROOT}/usr/obd/ +\cp -rf $BUILD_DIR/SOURCES/workflows ${RPM_BUILD_ROOT}/usr/obd/ \cp -rf $BUILD_DIR/SOURCES/optimize ${RPM_BUILD_ROOT}/usr/obd/ \cp -rf $BUILD_DIR/SOURCES/config_parser ${RPM_BUILD_ROOT}/usr/obd/ \cp -rf $BUILD_DIR/SOURCES/mirror ${RPM_BUILD_ROOT}/usr/obd/ @@ -95,8 +97,8 @@ mkdir -p ${RPM_BUILD_ROOT}/usr/obd/lib/ mkdir -p ${RPM_BUILD_ROOT}/usr/obd/lib/executer \cp -rf ${RPM_DIR}/executer27 ${RPM_BUILD_ROOT}/usr/obd/lib/executer/ \cp -rf $BUILD_DIR/SOURCES/example ${RPM_BUILD_ROOT}/usr/obd/ -cd ${RPM_BUILD_ROOT}/usr/obd/plugins && ln -s oceanbase oceanbase-ce && \cp -rf obproxy/* obproxy-ce/ && \cp -rf $SRC_DIR/plugins/obproxy-ce/* obproxy-ce/ -cd ${RPM_BUILD_ROOT}/usr/obd/plugins && ln -sf ocp-server ocp-server-ce +cd ${RPM_BUILD_ROOT}/usr/obd/plugins && ln -s oceanbase oceanbase-ce && ln -sf ocp-server ocp-server-ce && \cp -rf obproxy/* obproxy-ce/ && \cp -rf $SRC_DIR/plugins/obproxy-ce/* obproxy-ce/ +cd ${RPM_BUILD_ROOT}/usr/obd/workflows && ln -s oceanbase oceanbase-ce && ln -sf ocp-server ocp-server-ce && \cp -rf obproxy/ obproxy-ce/ mv obproxy/3.1.0 obproxy/3.2.1 cd ${RPM_BUILD_ROOT}/usr/obd/config_parser && ln -s oceanbase oceanbase-ce cd ${RPM_BUILD_ROOT}/usr/obd/optimize && ln -s obproxy obproxy-ce