Merge branch 'DataLinkDC:dev' into dev
Jam804 authored Jan 6, 2025
2 parents bd2ec6a + 14049f2 commit cf817c0
Showing 22 changed files with 445 additions and 143 deletions.
70 changes: 59 additions & 11 deletions .github/workflows/backend.yaml
@@ -155,7 +155,7 @@ jobs:
strategy:
fail-fast: true
matrix:
flink: [ '1.14', '1.15', '1.16', '1.17', '1.18', '1.19', '1.20' ]
flink: [ '1.15', '1.16', '1.17', '1.18', '1.19', '1.20' ]
runs-on: ubuntu-latest
services:
registry:
@@ -165,6 +165,9 @@
steps:
- name: Checkout
uses: actions/checkout@v4
- name: Init Docker Network
run: |
docker network create -d bridge --subnet 172.28.0.0/16 --gateway 172.28.0.1 dinky_net
- name: Download artifact
uses: actions/download-artifact@v4
with:
@@ -189,14 +192,22 @@
FLINK_VERSION=${{ matrix.flink }}
tags: |
localhost:5000/dinky/dinky-test:flink
- name: Build Flink Image
uses: docker/build-push-action@v5
with:
context: .
file: ./e2e_test/docker-compose-env/FlinkDockerfile
# whether to push the image (docker push)
push: true
build-args: |
FLINK_VERSION=${{ matrix.flink }}
tags: |
localhost:5000/dinky/flink:flink
- name: Init Env Jar
run: |
wget -O e2e_test/docker-compose-env/dinky/mysql-connector-java-8.0.30.jar https://repo1.maven.org/maven2/mysql/mysql-connector-java/8.0.30/mysql-connector-java-8.0.30.jar &&
wget -O e2e_test/docker-compose-env/flink/flink-shaded-hadoop-2-uber-2.8.3-10.0.jar https://repo1.maven.org/maven2/org/apache/flink/flink-shaded-hadoop-2-uber/2.8.3-10.0/flink-shaded-hadoop-2-uber-2.8.3-10.0.jar &&
wget -O e2e_test/docker-compose-env/dinky/javax.ws.rs-api-2.1.1.jar https://repo1.maven.org/maven2/javax/ws/rs/javax.ws.rs-api/2.1.1/javax.ws.rs-api-2.1.1.jar
- name: Init Docker Network
run: |
docker network create -d bridge dinky_net
- name: Init Run Docker MySQL
uses: hoverkraft-tech/[email protected]
with:
@@ -209,16 +220,53 @@
uses: hoverkraft-tech/[email protected]
with:
compose-file: ./e2e_test/docker-compose-env/hadoop/docker-compose.yml
- name: Replace Flink docker-compose yml
run: |
export FLINK_VERSION=${{ matrix.flink }} && envsubst < ./e2e_test/docker-compose-env/flink/docker-compose.yml > ./e2e_test/docker-compose-env/flink/docker-compose-${{ matrix.flink }}.yml
- name: Init Run Docker Flink
uses: hoverkraft-tech/[email protected]
with:
compose-file: ./e2e_test/docker-compose-env/flink/docker-compose-${{ matrix.flink }}.yml

compose-file: ./e2e_test/docker-compose-env/flink/docker-compose.yml
# k8s env
- name: Init k3s
uses: nolar/setup-k3d-k3s@v1
with:
version: v1.25.16+k3s4
k3d-args: -s 1 --network dinky_net --api-port 172.28.0.1:6550
- name: Get k3s kube config
run: k3d kubeconfig get --all && mkdir ./kube && k3d kubeconfig get --all > ./kube/k3s.yaml && sed -i 's/0.0.0.0/172.28.0.1/g' ./kube/k3s.yaml
- name: Init k8s RBAC and namespace
run: |
kubectl create namespace dinky
kubectl create serviceaccount dinky -n dinky
kubectl create clusterrolebinding flink-role-binding-dinky --clusterrole=edit --serviceaccount=dinky:dinky
- name: Init k3s main images
run: |
docker exec k3d-k3s-default-server-0 crictl pull library/busybox:latest
docker exec k3d-k3s-default-server-0 crictl pull flink:${{ matrix.flink }}-scala_2.12-java8
docker pull localhost:5000/dinky/flink:flink
docker tag localhost:5000/dinky/flink:flink dinky/flink:flink
docker save -o flink.tar dinky/flink:flink
k3d images import ./flink.tar
rm -rf ./flink.tar
- name: Test k3s host
run: |
curl -k https://172.28.0.1:6550
- name: Cp Flink Jar Deps
run: docker cp dinky:/opt/dinky/ ./dinky-release
run: |
docker cp dinky:/opt/dinky/ ./dinky-release
mv ./dinky-release/jar/dinky-app*.jar e2e_test/docker-compose-env/dinky/dinky-app.jar
- name: Run python http server
run: |
mkdir -p logs
ls e2e_test/docker-compose-env/dinky/
nohup python -m http.server -d e2e_test/docker-compose-env/dinky/ 9001 > ./logs/python_http.log &
- name: Run Docker Python Script
run: |
docker run -v ./dinky-release/extends/flink${{ matrix.flink }}:/flink/lib -v ./e2e_test/docker-compose-env/dinky/mysql-connector-java-8.0.30.jar:/flink/lib/mysql-connector-java-8.0.30.jar -v./e2e_test/docker-compose-env/flink/conf:/flink/conf -v ./dinky-release/jar:/dinky/jar -v./e2e_test/tools:/app -w /app --net dinky_net --rm --entrypoint /bin/bash python:3.9 -c 'pip install -r requirements.txt && python main.py dinky:8888'
docker run -v ./e2e_test/tools:/app -w /app -v ./kube:/kube -v ./e2e_test/docker-compose-env/dinky:/dinky/jar -v ./dinky-release/extends/flink${{ matrix.flink }}:/opt/flink/lib -v ./e2e_test/docker-compose-env/dinky/mysql-connector-java-8.0.30.jar:/opt/flink/lib/mysql-connector-java-8.0.30.jar --net dinky_net --rm --entrypoint /bin/bash python:3.9 -c 'pip install -r requirements.txt && python main.py dinky:8888 ${{ matrix.flink }}'
- name: Get k8s pods info and logs
if: ${{ always() }}
run: |
chmod -R 755 ./e2e_test/view_k8s_all_pod_logs.sh
./e2e_test/view_k8s_all_pod_logs.sh dinky
- name: Get Python HttpServer log
if: ${{ always() }}
run: |
cat ./logs/python_http.log
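
The k8s-related steps added above bootstrap a k3d cluster on the dinky_net Docker network so the e2e suite can exercise Flink on Kubernetes. A rough local equivalent is sketched below; the subnet, API endpoint, namespace, and RBAC values are taken from the workflow, while the k3d cluster-create command is an assumption (the workflow itself uses the nolar/setup-k3d-k3s action with the same arguments).

# Bridge network shared by the compose services and the k3s API server.
docker network create -d bridge --subnet 172.28.0.0/16 --gateway 172.28.0.1 dinky_net

# Single-server k3d cluster on that network, API exposed on the network gateway.
k3d cluster create -s 1 --network dinky_net --api-port 172.28.0.1:6550

# Export the kubeconfig and point it at the gateway so containers on dinky_net can reach the API server.
mkdir -p ./kube
k3d kubeconfig get --all > ./kube/k3s.yaml
sed -i 's/0.0.0.0/172.28.0.1/g' ./kube/k3s.yaml

# Namespace and RBAC used by the Flink-on-Kubernetes tests.
kubectl create namespace dinky
kubectl create serviceaccount dinky -n dinky
kubectl create clusterrolebinding flink-role-binding-dinky --clusterrole=edit --serviceaccount=dinky:dinky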
120 changes: 114 additions & 6 deletions .github/workflows/docker_build.yaml
@@ -23,14 +23,98 @@ on:
dinky_version:
description: 'dinky version'
required: true
docker_space:
description: 'docker space(eg: dinky)'
required: true

jobs:
build_releases:
name: build releases
build_front:
name: Build_NPM
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3
- uses: dorny/paths-filter@v2
id: filter
with:
filters: |
frontend:
- 'dinky-web/**'
- uses: actions/setup-node@v3
with:
node-version: 16
- name: Get npm cache directory
id: npm-cache-dir
run: |
echo "::set-output name=dir::$(npm config get cache)"
- uses: actions/cache@v3
id: npm-cache # use this to check for `cache-hit` ==> if: steps.npm-cache.outputs.cache-hit != 'true'
with:
path: |
${{ steps.npm-cache-dir.outputs.dir }}
dinky-web/dist
key: ${{ runner.os }}-node-${{ hashFiles('dinky-web/**/package.json') }}
restore-keys: |
${{ runner.os }}-node-
- name: Install Dependencies
run: cd dinky-web && npm install --no-audit --progress=false --legacy-peer-deps
- name: Npm Web Build
run: cd dinky-web && npm run build
- name: Upload artifact
uses: actions/upload-artifact@v4
with:
name: dinky-web
path: ./dinky-web/dist


build_release:
name: Build Release
runs-on: ubuntu-latest
strategy:
fail-fast: true
matrix:
flink: [ '1.14', '1.15', '1.16', '1.17', '1.18', '1.19', '1.20' ]
env:
MAVEN_OPTS: -Xmx2G -Xms2G
steps:
- uses: actions/checkout@v3
# Maven build
- name: Set up JDK
uses: actions/setup-java@v2
with:
java-version: 8
distribution: 'adopt'
- name: Cache local Maven repository
uses: actions/cache@v3
with:
path: |
~/.m2/repository/*/*/*
!~/.m2/repository/org/apache/flink
key: ${{ runner.os }}-maven-${{ hashFiles('pom.xml') }}
restore-keys: |
${{ runner.os }}-maven-
- name: Cache local Flink repository
uses: actions/cache@v3
with:
path: ~/.m2/repository/org/apache/flink
key: ${{ runner.os }}-${{ matrix.flink }}-maven-${{ hashFiles('pom.xml') }}
restore-keys: |
${{ runner.os }}-${{ matrix.flink }}-maven-
- name: Build and Package
run: |
./mvnw -B clean install \
-Dmaven.test.skip=true \
-Dspotless.check.skip=true \
-Denforcer.skip=false \
-Dmaven.javadoc.skip=true \
-P prod,flink-single-version,flink-${{ matrix.flink }},maven-central \
--no-snapshot-updates
- name: Upload artifact
uses: actions/upload-artifact@v4
with:
name: dinky-realease-${{ matrix.flink }}
path: ./build/dinky-release*.tar.gz

build_image:
name: build image
runs-on: ubuntu-latest
needs: [build_front,build_release]
strategy:
fail-fast: true
matrix:
@@ -39,6 +123,18 @@ jobs:
# git checkout the code
- name: Checkout
uses: actions/checkout@v4
- name: Download backed artifact
uses: actions/download-artifact@v4
with:
name: dinky-realease-${{ matrix.flink }}
path: ./build
- name: Download front artifact
uses: actions/download-artifact@v4
with:
name: dinky-web
path: ./build/dist
- run: |
tree ./build
# Set up QEMU; docker buildx below depends on it.
- name: Set up QEMU
uses: docker/setup-qemu-action@v3
@@ -54,16 +150,28 @@
# DOCKERHUB_TOKEN: created in Docker Hub => Account Settings => Security.
username: ${{ secrets.DOCKER_IO_USER }}
password: ${{ secrets.DOCKER_IO_PASS }}
# Log in to the Aliyun Docker registry
- name: Login to Aliyun Docker
uses: docker/login-action@v3
with:
registry: registry.cn-hangzhou.aliyuncs.com
username: ${{ secrets.DOCKER_ALIYUN_USER }}
password: ${{ secrets.DOCKER_ALIYUN_PASS }}
# Build the Docker image and push it to Docker Hub
- name: Build and push
id: docker_build
uses: docker/build-push-action@v5
with:
platforms: linux/amd64,linux/arm64
file: ./deploy/docker/Dockerfile
# whether to push the image (docker push)
push: true
context: .
build-args: |
FLINK_VERSION=${{ matrix.flink }}
DINKY_VERSION=${{ inputs.dinky_version }}
tags: |
${{inputs.docker_space}}/dinky-standalone-server:${{ inputs.dinky_version }}-flink${{ matrix.flink }}
dinkydocker/dinky-standalone-server:${{ inputs.dinky_version }}-flink${{ matrix.flink }}
registry.cn-hangzhou.aliyuncs.com/dinky/dinky-standalone-server:${{ inputs.dinky_version }}-flink${{ matrix.flink }}
cache-from: type=gha
cache-to: type=gha,mode=max
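
The workflow is dispatched manually and now requires both a dinky_version and a docker_space input. One way to trigger it, sketched with the GitHub CLI (the input names and workflow file come from this diff; the concrete version and space values are placeholders):

# Trigger the release image build for every Flink version in the matrix.
gh workflow run docker_build.yaml -f dinky_version=1.2.0 -f docker_space=dinky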
44 changes: 7 additions & 37 deletions deploy/docker/Dockerfile
@@ -1,52 +1,22 @@
ARG FLINK_VERSION
ARG DINKY_VERSION

FROM flink:${FLINK_VERSION}-scala_2.12-java8 as flink-base

FROM node:18.15.0-alpine3.17 AS ui-build
WORKDIR /build/

ENV NODE_OPTIONS=--openssl-legacy-provider
ENV UMI_ENV=production

# package.json is added separately so that installing dependencies can make maximum use of the Docker layer cache
ADD ./dinky-web/package.json /build/package.json
RUN npm install --legacy-peer-deps
ADD ./dinky-web .
RUN npm run build

FROM maven:3.9-eclipse-temurin-8-alpine AS build
WORKDIR /build/
ARG FLINK_VERSION
ARG DINKY_VERSION
ENV FLINK_VERSION=${FLINK_VERSION}
ENV DINKY_VERSION=${DINKY_VERSION}

ADD . .
COPY --from=ui-build /build/dist/ /build/dinky-web/dist/

RUN mvn package -Dmaven.test.skip=true -P prod,flink-single-version,flink-${FLINK_VERSION},fast
RUN mkdir release && \
tar -C release -xvf build/dinky-release-${FLINK_VERSION}-*.tar.gz && \
mv release/dinky-release-* release/dinky


FROM eclipse-temurin:8-jre-jammy

FROM flink:${FLINK_VERSION}-scala_2.12-java8
RUN rm -f /opt/flink/lib/flink-table-planner-loader*.jar && cp /opt/flink/opt/flink-python*.jar /opt/flink/lib/ && cp /opt/flink/opt/flink-table-planner*.jar /opt/flink/lib/ 2>/dev/null || :
ARG FLINK_VERSION
ENV FLINK_VERSION=${FLINK_VERSION}
ENV DINKY_HOME=/opt/dinky/
ENV H2_DB=./tmp/db/h2

WORKDIR /opt/dinky/

USER root

COPY --from=build /build/release/dinky /opt/dinky/
COPY --from=flink-base /opt/flink/lib/*.jar /opt/dinky/extends/flink${FLINK_VERSION}/flink/
RUN rm -f /opt/dinky/extends/flink${FLINK_VERSION}/flink/flink-table-planner-loader*.jar
ADD build/dinky-release*.tar.gz /opt
RUN mv /opt/dinky-release* /opt/dinky && ln -s /opt/flink/lib/* /opt/dinky/extends/flink${FLINK_VERSION}/ && mkdir /opt/dinky/config/static
ADD build/dist/ /opt/dinky/config/static

WORKDIR /opt/dinky/

COPY --from=flink-base /opt/flink/opt/flink-table-planner*.jar /opt/dinky/extends/flink${FLINK_VERSION}/flink/

RUN mkdir /opt/dinky/customJar && chmod -R 777 /opt/dinky/ && sed -i 's/-Xms512M -Xmx2048M -XX:PermSize=512M/-XX:+UseContainerSupport -XX:InitialRAMPercentage=70.0 -XX:MaxRAMPercentage=70.0/g' ./bin/auto.sh
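
The reworked Dockerfile no longer compiles the frontend and backend inside the image; it unpacks a pre-built release tarball from build/ and copies the web dist from build/dist, which the build_image job downloads as artifacts. A minimal local sketch, assuming those artifacts are already in place (the version numbers and the image tag below are placeholders):

# The release tarball and web dist must already exist, as produced by the build_release and build_front jobs.
ls build/dinky-release-*.tar.gz build/dist >/dev/null

docker build \
  -f ./deploy/docker/Dockerfile \
  --build-arg FLINK_VERSION=1.20 \
  --build-arg DINKY_VERSION=1.2.0 \
  -t dinky/dinky-standalone-server:1.2.0-flink1.20 \
  .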

(changes to another file; file name not captured in this view)
@@ -44,7 +44,12 @@ public Object intercept(final Invocation invocation) throws Throwable {
BoundSql boundSql = statementHandler.getBoundSql();
Field field = boundSql.getClass().getDeclaredField("sql");
field.setAccessible(true);
field.set(boundSql, boundSql.getSql().replace("`", "\"").toLowerCase());
field.set(
boundSql,
boundSql.getSql()
.replace("`", "\"")
.replace("concat('%', ?, '%')", "concat('%', ?::text, '%')")
.toLowerCase());
return invocation.proceed();
}

(changes to another file; file name not captured in this view)
@@ -41,7 +41,6 @@
import org.dinky.data.model.mapping.ClusterInstanceMapping;
import org.dinky.data.result.ProTableResult;
import org.dinky.data.vo.task.JobInstanceVo;
import org.dinky.executor.ExecutorConfig;
import org.dinky.explainer.lineage.LineageBuilder;
import org.dinky.explainer.lineage.LineageResult;
import org.dinky.job.FlinkJobTask;
@@ -295,7 +294,7 @@ public void refreshJobByTaskIds(Integer... taskIds) {
@Override
public LineageResult getLineage(Integer id) {
History history = getJobInfoDetail(id).getHistory();
return LineageBuilder.getColumnLineageByLogicalPlan(history.getStatement(), ExecutorConfig.DEFAULT);
return LineageBuilder.getColumnLineageByLogicalPlan(history.getStatement(), history.getConfigJson());
}

@Override
(changes to another file; file name not captured in this view)
@@ -1101,10 +1101,10 @@ public List<TaskDTO> getUserTasks(Integer userId) {
private Boolean hasTaskOperatePermission(Integer firstLevelOwner, List<Integer> secondLevelOwners) {
boolean isFirstLevelOwner = firstLevelOwner != null && firstLevelOwner == StpUtil.getLoginIdAsInt();
if (TaskOwnerLockStrategyEnum.OWNER.equals(
SystemConfiguration.getInstances().getTaskOwnerLockStrategy())) {
SystemConfiguration.getInstances().getTaskOwnerLockStrategy().getValue())) {
return isFirstLevelOwner;
} else if (TaskOwnerLockStrategyEnum.OWNER_AND_MAINTAINER.equals(
SystemConfiguration.getInstances().getTaskOwnerLockStrategy())) {
SystemConfiguration.getInstances().getTaskOwnerLockStrategy().getValue())) {
return isFirstLevelOwner
|| (secondLevelOwners != null && secondLevelOwners.contains(StpUtil.getLoginIdAsInt()));
}
2 changes: 1 addition & 1 deletion dinky-core/src/main/java/org/dinky/api/FlinkAPI.java
@@ -112,7 +112,7 @@ private String getResult(String route) {
}

private JsonNode post(String route, String body) {
String url = NetConstant.SLASH + route;
String url = address + NetConstant.SLASH + route;
if (!address.startsWith(NetConstant.HTTP) && !address.startsWith(NetConstant.HTTPS)) {
url = NetConstant.HTTP + url;
}
(remaining changed files not loaded in this view)
