Add a Dockerfile to configure the build environment and optimize the workflow. #210

Open · wants to merge 6 commits into base: workflow
54 changes: 54 additions & 0 deletions Docker/Dockerfile
@@ -0,0 +1,54 @@
FROM registry.openanolis.cn/openanolis/anolisos:8.9
ENV PATH="/opt/miniconda/bin:$PATH"
ENV TZ=UTC-8
ENV CRYPTOGRAPHY_OPENSSL_NO_LEGACY=1

ADD Docker/ob_build /usr/bin/
ADD Docker/python-env-activate.sh /usr/bin/py-env-activate

RUN yum install -y wget \
&& wget -O /etc/yum.repos.d/epel.repo http://mirrors.aliyun.com/repo/epel-7.repo \
&& yum -y install libffi-devel bzip2-devel readline readline-devel jq which bison ncurses-devel libaio autoconf automake libtool perl-CPAN gettext-devel perl-devel openssl-devel zlib-devel curl-devel xz-devel \
expat-devel asciidoc xmlto rpm-build cmake make gcc gcc-c++ python2-devel python3-devel sudo git mysql-devel \
&& yum clean all

RUN wget https://repo.anaconda.com/miniconda/Miniconda2-py27_4.8.3-Linux-x86_64.sh --no-check-certificate \
&& bash Miniconda2-py27_4.8.3-Linux-x86_64.sh -p /opt/miniconda -b \
&& rm -rf Miniconda2-py27_4.8.3-Linux-x86_64.sh \
&& conda clean --all -y

COPY requirements3.txt /workspaces/obd/requirements3.txt
COPY service/service-requirements.txt /workspaces/obd/service-requirements.txt

RUN conda create -n py27 -y python=2.7 \
&& source /opt/miniconda/bin/activate base \
&& pip config set global.index-url https://mirrors.aliyun.com/pypi/simple \
&& pip config set global.trusted-host mirrors.aliyun.com \
&& pip install pyinstaller==3.6 \
&& rm -rf ~/.cache/pip \
&& conda clean -a -y

RUN conda create -n py38 -y python=3.8 \
&& source /opt/miniconda/bin/activate py38 \
&& pip config set global.index-url https://mirrors.aliyun.com/pypi/simple \
&& pip config set global.trusted-host mirrors.aliyun.com \
&& pip install -r /workspaces/obd/requirements3.txt \
&& pip install -r /workspaces/obd/service-requirements.txt \
&& pip install pyinstaller==3.6 \
&& pip cache purge \
&& echo "source /opt/miniconda/bin/activate py38" >> ~/.bashrc \
&& /opt/miniconda/bin/conda clean -a -y \
&& rm -rf /workspaces/obd

# install nodejs and yarn
RUN wget https://rpm.nodesource.com/setup_16.x \
&& touch /etc/redhat-release \
&& bash setup_16.x \
&& rm -f /etc/redhat-release \
&& yum install -y nodejs \
&& yum clean all

RUN npm install -g yarn \
&& yarn config delete registry --global \
&& yarn config set registry https://registry.npmmirror.com/ --global \
&& npm cache clean --force
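
A minimal usage sketch (not part of this PR; the image tag and mount path below are placeholders): the build context must be the repository root so that Docker/ob_build, Docker/python-env-activate.sh, requirements3.txt and service/service-requirements.txt all resolve.

    docker build -t obd-build-env -f Docker/Dockerfile .
    docker run --rm -v "$(pwd)":/workspaces/obd -w /workspaces/obd obd-build-env bash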
76 changes: 76 additions & 0 deletions Docker/ob_build
@@ -0,0 +1,76 @@
#!/bin/bash
set -x
set -e

if [ $# -lt 4 ]; then
    echo "Not enough parameters. Please provide project_dir, project_name, version and release."
    exit 1
fi

# Get system release
os_release=$(grep -Po '(?<=^ID=")[^"]*' /etc/os-release || true)

if [ -z "$os_release" ]; then
    os_release=$(grep -Po '^ID=\K[^ ]+' /etc/os-release)
fi

set +e
source /etc/profile
set -e

project_dir=$1
project_name=$2
version=$3
release=$4
rpm_work_dir=${5:-rpm}
ob_build_script=${project_name}-build.sh
ob_build_spec=${project_name}.spec
ob_build_deps=${project_name}.deps
cur_dir=`pwd`
echo "cur dir: $cur_dir"
# check rpm work dir
if [ ! -d "${cur_dir}/${rpm_work_dir}" ]; then
    echo "rpm work dir (${rpm_work_dir}) missing! Please create ${rpm_work_dir} in the source code dir and place the packaging-related files in it."
    exit 1
fi

# trigger building
echo "trigger building, current dir: "
pwd
cd $rpm_work_dir
ABS_PATH=`pwd`

if [[ x"$os_release" == x"alios" && x"$AONE_COMPATIBLE_AUTO_DEP_CREATE" == x"on" ]]; then
if [ -e "$ob_build_deps" ]; then
echo "execute dep_create for alios"
dep_create $ob_build_deps
echo "execute sw for alios"
sw
fi
fi

if [ -e "$ob_build_script" ]; then
bash $ob_build_script $project_dir $project_name $version $release
elif [ -e "$ob_build_spec" ]; then
if [[ x"$os_release" == x"alios" ]]; then
rpm_create $ob_build_spec -v $version -r $release
else
TOP_DIR=".rpm_create"
RPM_MACROS=$HOME/.rpmmacros
if [ -e $RPM_MACROS ]; then
mv -f $RPM_MACROS $RPM_MACROS.bak
fi

# prepare rpm build dirs
mkdir -p $TOP_DIR/BUILD
mkdir -p $TOP_DIR/RPMS
mkdir -p $TOP_DIR/SRPMS

echo "%_topdir $ABS_PATH/$TOP_DIR" > $RPM_MACROS
rpmbuild -bb $ob_build_spec
find $TOP_DIR/ -name "*.rpm" -exec mv {} . 2>/dev/null \;
fi
else
echo "packaging files missing! Please provide $ob_build_script or $ob_build_spec"
exit 1
fi
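
A hedged invocation example (all values below are placeholders, not taken from this PR): ob_build is run from the source directory that contains the packaging dir, with four required positional arguments and an optional fifth naming the packaging dir.

    cd /workspaces/obd                                  # source dir containing rpm/<project>.spec or rpm/<project>-build.sh
    ob_build /workspaces/obd ob-deploy 2.0.0 1
    ob_build /workspaces/obd ob-deploy 2.0.0 1 rpm      # same, with the default rpm work dir spelled out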
6 changes: 6 additions & 0 deletions Docker/python-env-activate.sh
@@ -0,0 +1,6 @@
eval "$(conda shell.bash hook)"
if [[ -z $1 ]]; then
    echo "Please specify a python env to activate"
    exit 1
fi
conda activate $1
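
A hedged usage sketch (the call sites are an assumption, not shown in this diff): the helper is meant to be sourced so the activated conda env persists in the calling shell, for example before a build step that needs a specific interpreter.

    source /usr/bin/py-env-activate py38
    source /usr/bin/py-env-activate py27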
76 changes: 72 additions & 4 deletions _workflow.py
@@ -22,13 +22,14 @@

import os
import sys
from copy import deepcopy

from _manager import Manager
from _plugin import ComponentPluginLoader, pyScriptPluginExec, PyScriptPluginLoader, PyScriptPlugin
from tool import OrderedDict


-class WorkflowsIter:
+class WorkflowsIter(object):

    def __init__(self, workflows):
        self.workflows = workflows
@@ -65,18 +66,49 @@ def __getitem__(self, component_name):
        self.workflows[component_name] = ComponentWorkflow(self.name, component_name)
        return self.workflows[component_name]

    def __len__(self):
        return len(self.workflows)

    def __setitem__(self, component_name, component_workflow):
        if not isinstance(component_workflow, ComponentWorkflow):
            raise TypeError("%s must be an instance of ComponentWorkflow" % component_workflow.__class__.__name__)
        if component_workflow.name != self.name:
            raise ValueError("%s is not a %s workflow" % (component_workflow, self.name))
        self.workflows[component_name] = component_workflow

-    def __call__(self, dpeloy_config):
+    def __call__(self, sorted_components):
        workflows = [
-            self[component] for component in dpeloy_config.sorted_components
+            self[component] for component in sorted_components
        ]
        return WorkflowsIter(workflows)


class SubWorkflows(object):

    def __init__(self) -> None:
        self.workflows = OrderedDict()

    def add(self, workflow):
        if not isinstance(workflow, ComponentWorkflow):
            raise TypeError("%s must be an instance of ComponentWorkflow" % workflow.__class__.__name__)
        if workflow.name not in self.workflows:
            self.workflows[workflow.name] = Workflows(workflow.name)
        self.workflows[workflow.name][workflow.component_name] = workflow

    def __getitem__(self, workflow_name):
        return self.workflows[workflow_name]

    def __iter__(self):
        return iter(self.workflows.values())


class SubWorkflowTemplate(object):

    def __init__(self, name, component_name, version=None, kwargs=None):
        self.name = name
        self.component_name = component_name
        self.version = version
        self.kwargs = kwargs or {}


class PluginTemplate(object):
@@ -94,6 +126,11 @@ def __init__(self, name, component_name):
        self.name = name
        self.component_name = component_name
        self.stage = {}
        self.sub_workflow = {}
        self.global_kwargs = {}

    def set_global_kwargs(self, **kwargs):
        self.global_kwargs = kwargs

    def add(self, stage, *plugins):
        return self.add_with_kwargs(stage, None, *plugins)
@@ -113,14 +150,45 @@ def add_with_component_version_kwargs(self, stage, component_name, version, kwar
        if stage not in self.stage:
            self.stage[stage] = plugins
        else:
            if stage in self.sub_workflow:
                raise Exception("stage %s already has a workflow" % stage)
            self.stage[stage] += plugins

    def add_workflow(self, stage, workflow):
        return self.add_workflow_with_kwargs(stage, None, workflow)

    def add_workflow_with_component(self, stage, component_name, workflow):
        return self.add_workflow_with_component_version(stage, component_name, None, workflow)

    def add_workflow_with_component_version(self, stage, component_name, version, workflow):
        return self.add_workflow_with_component_version_kwargs(stage, component_name, version, None, workflow)

    def add_workflow_with_kwargs(self, stage, kwargs, workflow):
        return self.add_workflow_with_component_version_kwargs(stage, self.component_name, None, kwargs, workflow)

    def add_workflow_with_component_version_kwargs(self, stage, component_name, version, kwargs, workflow):
        stage = int(stage)
        workflow = SubWorkflowTemplate(workflow, component_name, version, kwargs)
        if stage not in self.stage:
            self.stage[stage] = [workflow]
            self.sub_workflow[stage] = workflow
        else:
            raise Exception("stage %s already has a workflow" % stage)

    @property
    def stages(self):
        return sorted(self.stage.keys())

    def __getitem__(self, stage):
-        return self.stage.get(stage, [])
+        if self.global_kwargs:
+            stages = []
+            for template in self.stage.get(stage, []):
+                template = deepcopy(template)
+                template.kwargs.update(self.global_kwargs)
+                stages.append(template)
+            return stages
+        else:
+            return self.stage.get(stage, [])


class ComponentWorkflowLoader(ComponentPluginLoader):
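
A hypothetical sketch (not part of this diff) of how the new stage and sub-workflow template API might be used, assuming the stage-based class extended above is ComponentWorkflow as constructed in Workflows.__getitem__; the workflow, component, plugin and kwargs names below are placeholders only.

    from _workflow import ComponentWorkflow   # assumption: ComponentWorkflow is defined/importable in _workflow

    wf = ComponentWorkflow('init', 'oceanbase-ce')                     # placeholder workflow and component names
    wf.add_workflow(10, 'bootstrap')                                   # stage 10 delegates to a sub workflow template
    wf.add_workflow_with_kwargs(20, {'need_confirm': True}, 'start')   # stage 20 with per-template kwargs
    wf.set_global_kwargs(source_option='deploy')                       # merged into a deep copy of each template's kwargs
    for stage in wf.stages:                                            # stages come back sorted: [10, 20]
        for template in wf[stage]:
            print(stage, template.name, template.kwargs)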