From 7f26383db97f41a7716f3932145bc5094ccb09f2 Mon Sep 17 00:00:00 2001
From: "Eduardo Ramos Testillano (eramedu)"
Date: Fri, 23 Dec 2022 20:45:45 +0100
Subject: [PATCH] Discourage static linking

Both build.sh and build-native.sh scripts now work with dynamic linking
by default, although the STATIC_LINKING variable may be used to force
static linking (by assigning the value 'TRUE'). It defaults to FALSE
because static linking is discouraged (glibc should be linked dynamically
to keep important features like dynamic loading and DNS resolution working).
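
For example (the same commands documented in README.md below):

    STATIC_LINKING=TRUE ./build.sh --auto
    STATIC_LINKING=TRUE ./build-native.sh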

Also,

- build-native.sh script is improved with a DEBUG troubleshooting option.
- Project Dockerfile includes every binary except the unit-test (UT) one.
- README.md has been adapted to explain both build methods (native, container) and static linking.
- Tool binaries are added to the training image.

Implements [0].

[0] https://github.com/testillano/h2agent/issues/69

Change-Id: I0bcef0634ad10fd51751f2f59c3c3fd2bfe86fbe
---
 .gitignore          |   1 +
 Dockerfile          |   7 ++-
 Dockerfile.training |   3 +
 README.md           |  87 +++++++++++++++++++++--------
 build-native.sh     | 132 +++++++++++++++++++++++++-------------------
 build.sh            |   5 +-
 6 files changed, 153 insertions(+), 82 deletions(-)

diff --git a/.gitignore b/.gitignore
index 6fc2cac..ca0263d 100644
--- a/.gitignore
+++ b/.gitignore
@@ -9,6 +9,7 @@ CMakeCache.txt
 **/cmake_install.cmake
 **/Makefile
 build
+build-native
 install_manifest.txt
 CMakeDoxygenDefaults.cmake
 CMakeDoxyfile.in
diff --git a/Dockerfile b/Dockerfile
index 7482689..cd67061 100644
--- a/Dockerfile
+++ b/Dockerfile
@@ -13,11 +13,14 @@ ARG make_procs=4
 ARG build_type=Release
 
 # We could duplicate from local build directory, but prefer to build from scratch:
-RUN cmake -DCMAKE_BUILD_TYPE=${build_type} -DSTATIC_LINKING=TRUE . && make -j${make_procs}
+RUN cmake -DCMAKE_BUILD_TYPE=${build_type} -DSTATIC_LINKING=${STATIC_LINKING} . && make -j${make_procs}
 
 FROM ${scratch_img}:${scratch_img_tag}
 
 ARG build_type=Release
-COPY --from=builder /code/build/${build_type}/bin/h2agent /opt/h2agent
+COPY --from=builder /code/build/${build_type}/bin/h2agent /opt/
+COPY --from=builder /code/build/${build_type}/bin/h2client /opt/
+COPY --from=builder /code/build/${build_type}/bin/matching-helper /opt/
+COPY --from=builder /code/build/${build_type}/bin/arashpartow-helper /opt/
 
 # We add curl & jq for helpers.src
 # Ubuntu has bash already installed, but vim is missing
diff --git a/Dockerfile.training b/Dockerfile.training
index 4a02df3..8fcfcf7 100644
--- a/Dockerfile.training
+++ b/Dockerfile.training
@@ -13,6 +13,9 @@ ARG base_os=ubuntu
 RUN if [ "${base_os}" = "alpine" ] ; then apk update && apk add dos2unix && rm -rf /var/cache/apk/* ; elif [ "${base_os}" = "ubuntu" ] ; then apt-get update && apt-get install -y dos2unix && apt-get clean ; fi
 
 RUN ln -s /opt/h2agent
+RUN ln -s /opt/h2client
+RUN ln -s /opt/matching-helper
+RUN ln -s /opt/arashpartow-helper
 
 ENTRYPOINT ["sleep", "infinity"]
 CMD []
diff --git a/README.md b/README.md
index 28bd4b3..eca3a34 100644
--- a/README.md
+++ b/README.md
@@ -40,7 +40,7 @@ Check the [releases](https://github.com/testillano/h2agent/releases) to get late
 
 ## How can you use it ?
 
-`H2agent` process may be used natively, as a `docker` container, or as part of `kubernetes` deployment.
+`H2agent` process (as well as other project binaries) may be used natively, as a `docker` container, or as part of `kubernetes` deployment.
 
 The easiest way to build the project is using [containers](https://en.wikipedia.org/wiki/LXC) technology (this project uses `docker`): **to generate all the artifacts**, just type the following:
@@ -48,27 +48,18 @@ The easiest way to build the project is using [containers](https://en.wikipedia
 
 ```bash
 $> ./build.sh --auto
 ```
 
-The option `--auto` builds the builder image (`--builder-image`) , then the project image (`--project-image`) and finally the project executable (`--project`). Then you will have everything available to run the process with three different modes:
+The option `--auto` builds the builder image (`--builder-image`), then the project image (`--project-image`) and finally the project executables (`--project`). Then you will have everything available to run the binaries in different modes:
 
-* Run project executable natively (standalone):
-
-  ```bash
-  $> ./build/Release/bin/h2agent & # default server at 0.0.0.0 with traffic/admin/prometheus ports: 8000/8074/8080
-  ```
-
-  You may play with native helpers functions and examples:
+* Run project image with docker:
 
   ```bash
-  $> source tools/helpers.src # type help in any moment after sourcing
-  $> server_example # follow instructions or just source it: source <(server_example)
+  $> docker run --rm -it -p 8000:8000 ghcr.io/testillano/h2agent:latest # default entrypoint is h2agent process
   ```
 
-  You could also provide `-h` or `--help` to get **process help**: more information [here](#Execution-of-main-agent).
-
-* Run project image with docker:
+  You may override the default entrypoint (`/opt/h2agent`) to run another packaged binary (check the project `Dockerfile`), for example:
 
   ```bash
-  $> docker run --network=host --rm -it ghcr.io/testillano/h2agent:latest & # you may play native helpers again, on host
+  $> docker run --rm -it --network=host --entrypoint "/opt/h2client" ghcr.io/testillano/h2agent:latest --uri http://localhost:8000/unprovisioned # run in another shell to get response from h2agent server launched above
   ```
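+
+  If you also want to reach the `h2agent` admin interface from the host (port 8074 by default, as shown in the native execution example below), you could simply publish that port too, for instance:
+
+  ```bash
+  $> docker run --rm -it -p 8000:8000 -p 8074:8074 ghcr.io/testillano/h2agent:latest
+  ```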
 
 * Run within `kubernetes` deployment: corresponding `helm charts` are normally packaged into releases. This is described in ["how it is delivered"](#How-it-is-delivered) section, but in summary, you could do the following:
@@ -77,18 +68,56 @@ The option `--auto` builds the builder image (`--builder-image`) , then t
   $> # helm dependency update helm/h2agent # no dependencies at the moment
   $> helm install h2agent-example helm/h2agent --wait
   $> pod=$(kubectl get pod -l app.kubernetes.io/name=h2agent --no-headers -o name)
-  $> kubectl exec ${pod} -c h2agent -- /opt/h2agent -h
+  $> kubectl exec ${pod} -c h2agent -- /opt/h2agent -h # run, for example, h2agent help
   ```
 
-  You may enter the pod and play with helpers functions and examples which are also deployed with the chart under `/opt/utils` and automatically sourced on `bash` shell:
+  You may enter the pod and play with the helpers functions and examples (deployed with the chart under `/opt/utils`), which are, in any case, automatically sourced in the `bash` shell:
 
   ```bash
   $> kubectl exec -it ${pod} -- bash
   ```
 
+It is also possible to build the project natively (that is, without containers) by installing all the dependencies on the local host:
+
+```bash
+$> ./build-native.sh # you may prepend a non-empty DEBUG variable value in order to troubleshoot the build procedure
+```
+
+So, you could run `h2agent` (or any other binary available under `./build/<build-type>/bin`) directly:
+
+* Run project executable natively (standalone):
+
+  ```bash
+  $> ./build/Release/bin/h2agent & # default server at 0.0.0.0 with traffic/admin/prometheus ports: 8000/8074/8080
+  ```
+
+  Provide `-h` or `--help` to get **process help** (more information [here](#Execution-of-main-agent)); the same applies to any other project executable.
+  You may also play with the project helpers functions and examples:
+
+  ```bash
+  $> source tools/helpers.src # type help in any moment after sourcing
+  $> server_example # follow instructions or just source it: source <(server_example)
+  ```
+
+## Static linking
+
+Both build helpers (the `build.sh` and `build-native.sh` scripts) allow forcing static linking of the project, although this is [not recommended](https://stackoverflow.com/questions/57476533/why-is-statically-linking-glibc-discouraged):
+
+```bash
+$> STATIC_LINKING=TRUE ./build.sh --auto
+- or -
+$> STATIC_LINKING=TRUE ./build-native.sh
+```
+
+That way, you could run the binaries regardless of whether the needed libraries are available on the target host (including `glibc`, with all its drawbacks).
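+
+To double-check which kind of executable was produced, standard tools like `file` or `ldd` can be used (illustrative commands; the exact output depends on your system):
+
+```bash
+$> file ./build/Release/bin/h2agent # reports 'statically linked' for a static build
+$> ldd ./build/Release/bin/h2agent  # lists the shared objects of a dynamic build ('not a dynamic executable' otherwise)
+```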
 
-Next sections will describe in detail, how to build [project image](#Project-image) and project executable ([using docker](#Build-project-with-docker) or [natively](#Build-project-natively)).
+Next sections describe in detail how to build the [project image](#Project-image) and the project executables ([using docker](#Build-project-with-docker) or [natively](#Build-project-natively)).
 
 ## Project image
@@ -130,7 +159,7 @@ Both `ubuntu` and `alpine` base images are supported, but the official image upl
 
 ### Usage
 
-Builder image is used to build the executable. To run compilation over this image, again, just run with `docker`:
+Builder image is used to build the project. To run compilation over this image, again, just run with `docker`:
 
 ```bash
 $> envs="-e MAKE_PROCS=$(grep processor /proc/cpuinfo -c) -e BUILD_TYPE=Release"
@@ -160,7 +189,11 @@ It may be hard to collect every dependency, so there is a native build **automat
 $> ./build-native.sh
 ```
 
-Note: this script is tested on `ubuntu bionic`, then some requirements could be not fulfilled in other distributions.
+
+Note 1: this script is tested on `ubuntu bionic`, so some requirements might not be fulfilled in other distributions.
+
+Note 2: once the dependencies have been installed, you may just type `cmake . && make` to get incremental native builds (see the example below).
+
+Note 3: unless stated otherwise, this document assumes that the binaries used in the examples are natively built.
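+
+For instance (illustrative), once `./build-native.sh` has completed at least once, an incremental `Debug` build could be done directly from the repository root:
+
+```bash
+$> cmake -DCMAKE_BUILD_TYPE=Debug . && make -j$(grep processor /proc/cpuinfo -c)
+```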
 
 
@@ -259,7 +292,15 @@ $> cat install_manifest.txt | sudo xargs rm
 
 ### Unit test
 
 Check the badge above to know the current coverage level.
-You can execute it after project building, for example for `Release` target: `./build/Release/bin/unit-test`.
+You can execute it after project building, for example for `Release` target:
+
+```bash
+$> ./build/Release/bin/unit-test # native executable
+- or -
+$> docker run -it --rm -v ${PWD}/build/Release/bin/unit-test:/ut --entrypoint "/ut" ghcr.io/testillano/h2agent:latest # docker
+```
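+
+Since the unit tests rely on the `googletest` framework (installed by `build-native.sh`), the usual `gtest` command line options should be available, for example (hypothetical filter pattern shown):
+
+```bash
+$> ./build/Release/bin/unit-test --gtest_list_tests
+$> ./build/Release/bin/unit-test --gtest_filter='*Matching*'
+```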
+
 
 #### Coverage
@@ -749,7 +790,7 @@ Options:
 [-h|--help]
   This help.
 
-Examples: 
+Examples:
 
 h2client --timeout 1 --uri http://localhost:8000/book/8472098362
 h2client --method POST --header "content-type:application/json" --body '{"foo":"bar"}' --uri http://localhost:8000/data
 ```
@@ -892,7 +933,7 @@ A kata is available at `./kata` directory. It is designed to guide through a set
 
 Sometimes, `github` access restrictions to build the project from scratch could be a handicap. Other times, you could simple prefer to run training stuff isolated.
 
-So you could find useful to run the corresponding docker container using the script `./tools/training.sh`. This script builds and runs an image based in `./Dockerfile.training` which adds the needed resources to run both `demo` and `kata`. The image working directory is `/home/h2agent` making the experience like working natively over the git checkout.
+So, you could find it useful to run the corresponding docker container using the script `./tools/training.sh`. This script builds and runs an image based on `./Dockerfile.training`, which adds the needed resources to run both `demo` and `kata`. The image working directory is `/home/h2agent`, making the experience like working natively over the git checkout, with the main project executables provided by means of symlinks.
 
 The training image is already available at `github container registry` and `docker hub` for every repository `tag`, and also for master as `latest`:
diff --git a/build-native.sh b/build-native.sh
index 6182652..72e207e 100755
--- a/build-native.sh
+++ b/build-native.sh
@@ -1,4 +1,5 @@
 #!/bin/bash
+# [troubleshooting] define a non-empty value for the 'DEBUG' variable in order to keep native build artifacts
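+#
+# Usage examples (illustrative):
+#   ./build-native.sh                       # default build (dynamic linking)
+#   STATIC_LINKING=TRUE ./build-native.sh   # force static linking (discouraged)
+#   DEBUG=yes ./build-native.sh             # confirm every step and keep temporary artifacts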
 #############
 # VARIABLES #
 #############
@@ -7,10 +8,12 @@
 REPO_DIR="$(git rev-parse --show-toplevel 2>/dev/null)"
 [ -z "$REPO_DIR" ] && { echo "You must execute under a valid git repository !" ; exit 1 ; }
 
+STATIC_LINKING=${STATIC_LINKING:-FALSE} # static linking is discouraged: https://stackoverflow.com/questions/57476533/why-is-statically-linking-glibc-discouraged
+
 # Dependencies
 nghttp2_ver=1.48.0
 boost_ver=1.76.0 # safer to have this version (https://github.com/nghttp2/nghttp2/issues/1721).
-ert_nghttp2_ver=v1.2.2 # to download nghttp2 patches (this must be aligned with previous: nghttp2 & boost)
+ert_nghttp2_ver=v1.2.3 # to download nghttp2 patches (this must be aligned with previous: nghttp2 & boost)
 ert_logger_ver=v1.0.10
 jupp0r_prometheuscpp_ver=v0.13.0
 civetweb_civetweb_ver=v1.14
@@ -32,6 +35,7 @@ SUDO=${SUDO:-sudo}
 # FUNCTIONS #
 #############
 failed() {
+  [ -n "${DEBUG}" ] && return 0
   echo
   echo "Last step has failed (rc=$1). Some package installers"
   echo " could return non-zero code if already installed."
@@ -41,13 +45,45 @@ failed() {
   cd ${TMP_DIR}
 }
 
+# $1: optional prefix of the artifacts to clean up
+clean_all() {
+  [ -n "${DEBUG}" ] && return 0
+  rm -rf ${1}*
+}
+
+# $1: step description to confirm
+ask() {
+  [ -z "${DEBUG}" ] && return 0
+  echo "Continue with '$1' ? (y/n) [y]:"
+  read opt
+  [ -z "${opt}" ] && opt=y
+
+  [ "${opt}" = "y" ]
+}
+
+# $1: project URL; $2: tar gz version
+download_and_unpack_github_archive() {
+  local project=$1
+  local version=$2
+
+  local target=${project##*/}.tar.gz # URL basename
+  wget ${project}/archive/${version}.tar.gz -O ${target} && tar xvf ${target}
+}
+
 #############
 # EXECUTION #
 #############
 
-TMP_DIR=${REPO_DIR}/tmp.${RANDOM}
-mkdir ${TMP_DIR} && cd ${TMP_DIR}
-trap "cd ${REPO_DIR} && rm -rf ${TMP_DIR}" EXIT
+TMP_DIR=${REPO_DIR}/$(basename $0 .sh)
+if [ -d "${TMP_DIR}" ]
+then
+  echo "Temporary directory already exists. Keep it ? (y/n) [y]:"
+  read opt
+  [ -z "${opt}" ] && opt=y
+  [ "${opt}" != "y" ] && rm -rf ${TMP_DIR}
+fi
+
+mkdir -p ${TMP_DIR} && cd ${TMP_DIR}
 echo
 echo "Working on temporary directory '${TMP_DIR}' ..."
 echo
@@ -63,60 +99,50 @@ echo "Required: cmake version 3.14"
 echo "Current: $(cmake --version 2>/dev/null | grep version)"
 echo "Install: cmake version ${cmake_ver}"
 echo
-echo "(c)ontinue or [s]kip [s]:"
-read opt
-[ -z "${opt}" ] && opt=s
-if [ "${opt}" = "c" ]
-then
-  set -x && \
+ask cmake && set -x && \
 wget https://github.com/Kitware/CMake/releases/download/v${cmake_ver}/cmake-${cmake_ver}.tar.gz && tar xvf cmake* && cd cmake*/ && \
 ./bootstrap && make -j${make_procs} && ${SUDO} make install && \
-  cd .. && rm -rf * && \
+  cd .. && clean_all && \
   set +x
-fi
 ) || failed $? && \
-(
-# boost
+ask boost && (
 set -x && \
 wget https://boostorg.jfrog.io/artifactory/main/release/${boost_ver}/source/boost_$(echo ${boost_ver} | tr '.' '_').tar.gz && tar xvf boost* && cd boost*/ && \
 ./bootstrap.sh && ${SUDO} ./b2 -j${make_procs} install && \
-cd .. && rm -rf * && \
+cd .. && clean_all && \
 set +x
 ) || failed $? && \
-(
+ask libssl-dev && (
 ${SUDO} apt-get install -y libssl-dev
 ) || failed $? && \
-(
-# nghttp2
+ask nghttp2 && (
 set -x && \
-wget https://github.com/testillano/nghttp2/archive/${ert_nghttp2_ver}.tar.gz && tar xvf ${ert_nghttp2_ver}.tar.gz && \
-cp nghttp2*/deps/patches/nghttp2/${nghttp2_ver}/*.patch . && rm -rf nghttp2* && \
-wget https://github.com/nghttp2/nghttp2/releases/download/v${nghttp2_ver}/nghttp2-${nghttp2_ver}.tar.bz2 && tar xvf nghttp2* && \
-cd nghttp2*/ && for patch in ../*.patch; do patch -p1 < ${patch}; done && \
+download_and_unpack_github_archive https://github.com/testillano/nghttp2 ${ert_nghttp2_ver} && \
+cp nghttp2*/deps/patches/nghttp2/${nghttp2_ver}/*.patch . && clean_all nghttp2 && \
+wget https://github.com/nghttp2/nghttp2/releases/download/v${nghttp2_ver}/nghttp2-${nghttp2_ver}.tar.bz2 && tar xvf nghttp2-${nghttp2_ver}.tar.bz2 && \
+cd nghttp2-${nghttp2_ver}/ && for patch in ../*.patch; do patch -p1 < ${patch}; done && \
 ./configure --enable-asio-lib --disable-shared --enable-python-bindings=no && ${SUDO} make -j${make_procs} install && \
-cd .. && rm -rf * && \
+cd .. && clean_all && \
 set +x
 ) || failed $? && \
-(
-# ert_logger
+ask ert_logger && (
 set -x && \
-wget https://github.com/testillano/logger/archive/${ert_logger_ver}.tar.gz && tar xvf ${ert_logger_ver}.tar.gz && cd logger-*/ && \
+download_and_unpack_github_archive https://github.com/testillano/logger ${ert_logger_ver} && cd logger-*/ && \
 cmake -DERT_LOGGER_BuildExamples=OFF -DCMAKE_BUILD_TYPE=${build_type} . && ${SUDO} make -j${make_procs} && ${SUDO} make install && \
-cd .. && rm -rf * && \
+cd .. && clean_all && \
 set +x
 ) || failed $? && \
-(
+ask "libcurl4-openssl-dev and zlib1g-dev" && (
 ${SUDO} apt-get install -y libcurl4-openssl-dev && \
 ${SUDO} apt-get install -y zlib1g-dev
 ) || failed $? && \
-(
-# jupp0r_prometheuscpp
+ask jupp0r_prometheuscpp && (
 set -x && \
 wget https://github.com/jupp0r/prometheus-cpp/archive/refs/tags/${jupp0r_prometheuscpp_ver}.tar.gz && \
 tar xvf ${jupp0r_prometheuscpp_ver}.tar.gz && cd prometheus-cpp*/3rdparty && \
@@ -124,76 +150,70 @@ wget https://github.com/civetweb/civetweb/archive/refs/tags/${civetweb_civetweb_
 tar xvf ${civetweb_civetweb_ver}.tar.gz && mv civetweb-*/* civetweb && cd .. && \
 mkdir build && cd build && cmake -DCMAKE_BUILD_TYPE=${build_type} -DENABLE_TESTING=OFF .. && \
 make -j${make_procs} && ${SUDO} make install && \
-cd ../.. && rm -rf * && \
+cd ../.. && clean_all && \
 set +x
 ) || failed $? && \
-(
-# ert_metrics
+ask ert_metrics && (
 set -x && \
-wget https://github.com/testillano/metrics/archive/${ert_metrics_ver}.tar.gz && tar xvf ${ert_metrics_ver}.tar.gz && cd metrics-*/ && \
+download_and_unpack_github_archive https://github.com/testillano/metrics ${ert_metrics_ver} && cd metrics-*/ && \
 cmake -DERT_METRICS_BuildExamples=OFF -DCMAKE_BUILD_TYPE=${build_type} . && make -j${make_procs} && ${SUDO} make install && \
-cd .. && rm -rf * && \
+cd .. && clean_all && \
 set +x
 ) || failed $? && \
-(
-# ert_multipart
+ask ert_multipart && (
 set -x && \
-wget https://github.com/testillano/multipart/archive/${ert_multipart_ver}.tar.gz && tar xvf ${ert_multipart_ver}.tar.gz && cd multipart-*/ && \
+download_and_unpack_github_archive https://github.com/testillano/multipart ${ert_multipart_ver} && cd multipart-*/ && \
 cmake -DERT_MULTIPART_BuildExamples=OFF -DCMAKE_BUILD_TYPE=${build_type} . && make -j${make_procs} && ${SUDO} make install && \
-cd .. && rm -rf * && \
+cd .. && clean_all && \
 set +x
 ) || failed $? && \
-(
-# ert_http2comm
+ask ert_http2comm && (
 set -x && \
-wget https://github.com/testillano/http2comm/archive/${ert_http2comm_ver}.tar.gz && tar xvf ${ert_http2comm_ver}.tar.gz && cd http2comm-*/ && \
+download_and_unpack_github_archive https://github.com/testillano/http2comm ${ert_http2comm_ver} && cd http2comm-*/ && \
 cmake -DCMAKE_BUILD_TYPE=${build_type} . && make -j${make_procs} && ${SUDO} make install && \
-cd .. && rm -rf * && \
+cd .. && clean_all && \
 set +x
 ) || failed $? && \
-(
-# nlohmann json
+ask "nlohmann json" && (
 set -x && \
 wget https://github.com/nlohmann/json/releases/download/${nlohmann_json_ver}/json.hpp && \
 ${SUDO} mkdir /usr/local/include/nlohmann && ${SUDO} mv json.hpp /usr/local/include/nlohmann && \
 set +x
 ) || failed $? && \
-(
-# pboettch json-schema-validator
+ask "pboettch json-schema-validator" && (
 set -x && \
-wget https://github.com/pboettch/json-schema-validator/archive/${pboettch_jsonschemavalidator_ver}.tar.gz && \
-tar xvf ${pboettch_jsonschemavalidator_ver}.tar.gz && cd json-schema-validator*/ && mkdir build && cd build && \
+download_and_unpack_github_archive https://github.com/pboettch/json-schema-validator ${pboettch_jsonschemavalidator_ver} && cd json-schema-validator*/ && \
+mkdir build && cd build && \
 cmake .. && make -j${make_procs} && ${SUDO} make install && \
-cd ../.. && rm -rf * && \
+cd ../.. && clean_all && \
 set +x
 ) || failed $? && \
-(
-# google test framework
+ask "google test framework" && (
 set -x && \
 wget https://github.com/google/googletest/archive/refs/tags/release-${google_test_ver:1}.tar.gz && \
 tar xvf release-${google_test_ver:1}.tar.gz && cd googletest-release*/ && cmake . && ${SUDO} make -j${make_procs} install && \
-cd .. && rm -rf * && \
+cd .. && clean_all && \
 set +x
 ) || failed $? && \
-(
-# ArashPartow exprtk
+ask "ArashPartow exprtk" && (
 set -x && \
 wget https://github.com/ArashPartow/exprtk/raw/${arashpartow_exprtk_ver}/exprtk.hpp && \
 ${SUDO} mkdir /usr/local/include/arashpartow && ${SUDO} mv exprtk.hpp /usr/local/include/arashpartow && \
 set +x
 ) || failed $? && \
 
-#(
+#ask "doxygen and graphviz" && (
 #${SUDO} apt-get install -y doxygen graphviz
 #) || failed $? && \
 
 # h2agent project root:
-cd ${REPO_DIR} && cmake -DCMAKE_BUILD_TYPE=${build_type} -DSTATIC_LINKING=TRUE . && make -j${make_procs}
+ask "MAIN PROJECT" && cd ${REPO_DIR} && rm -rf build CMakeCache.txt CMakeFiles && cmake -DCMAKE_BUILD_TYPE=${build_type} -DSTATIC_LINKING=${STATIC_LINKING} . && make -j${make_procs}
diff --git a/build.sh b/build.sh
index 8a8761f..c71de82 100755
--- a/build.sh
+++ b/build.sh
@@ -3,6 +3,9 @@
 #############
 # VARIABLES #
 #############
+
+STATIC_LINKING=${STATIC_LINKING:-FALSE} # static linking is discouraged: https://stackoverflow.com/questions/57476533/why-is-statically-linking-glibc-discouraged
+
 image_tag__dflt=latest
 base_os__dflt=ubuntu
 base_tag__dflt=latest
@@ -120,7 +123,7 @@ build_project() {
   _read make_procs
   _read build_type
 
-  envs="-e MAKE_PROCS=${make_procs} -e BUILD_TYPE=${build_type} -e STATIC_LINKING=TRUE"
+  envs="-e MAKE_PROCS=${make_procs} -e BUILD_TYPE=${build_type} -e STATIC_LINKING=${STATIC_LINKING}"
 
   set -x
   rm -f CMakeCache.txt