chore(ci): fix deploy ci for 2.3
Signed-off-by: Swilder-M <[email protected]>
Swilder-M committed Sep 2, 2024
1 parent e0031ad commit b31cf8a
Showing 53 changed files with 178 additions and 158 deletions.
27 changes: 16 additions & 11 deletions .github/scripts/directory_check.py
@@ -2,6 +2,7 @@
import sys
import json
import re
from urllib.parse import urlparse

directory_file = sys.argv[1]
docs_path = sys.argv[2]
@@ -16,13 +17,21 @@ def check_md_content(md_file):
success = False
return

md_content = open(md_file, 'r').read()
md_content = re.sub(r'<!--([\s\S]*?)-->', '', open(md_file, 'r').read())

if 'ee' in directory_file:
md_content = re.sub(r'{% emqxce %}([\s\S]*?){% endemqxce %}', '', md_content)
else:
md_content = re.sub(r'{% emqxee %}([\s\S]*?){% endemqxee %}', '', md_content)

image_list = re.findall('(.*?)!\[(.*?)\]\((.*?)\)', md_content)
url_list = re.findall('(.*?)\[(.*?)\]\((.*?)\)', md_content)
for url in url_list:
if url[0].endswith('!'):
continue
if url[2].startswith(('http://', 'https://', '<', '#')):
if url[2].startswith(('http://', 'https://', '<', '#', 'mailto:', 'tel:')):
continue
if urlparse(url[2]).path.endswith('.html'):
continue
url_path = url[2].split('.md')[0]
ref_md_path = os.path.join(f'{"/".join(md_file.split("/")[:-1])}/', f'{url_path}.md')
@@ -51,20 +60,18 @@ def get_md_files(dir_config, path):
for i in dir_config:
md_name = i.get('path')
md_children = i.get('children')
if md_name and md_children:
print(f'{i.get("title")} has path and children')
success = False

if md_children:
md_list += get_md_files(md_children, path)
else:
if md_name:
if md_name.startswith(('http://', 'https://')):
continue
elif md_name == './':
md_list.append(f'{docs_path}/{path}/README.md')
md_list.append(f'{docs_path}/{path}/index.md')
else:
md_list.append(f'{docs_path}/{path}/{md_name}.md')

if md_children:
md_list += get_md_files(md_children, path)

return list(set(md_list))


@@ -85,8 +92,6 @@ def get_md_files(dir_config, path):

for file in md_file_list:
check_md_content(file)
else:
sys.exit(f'No {directory_file} file!')

if not success:
sys.exit('No pass!')
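
Taken together, the changes to `check_md_content` amount to the link filter sketched below. This is a simplified, standalone reading of the diff, not the script itself: the function name and the `is_enterprise_dir` flag (standing in for the original `'ee' in directory_file` check) are illustrative, and the surrounding path resolution and `success` bookkeeping are omitted.

```python
import re
from urllib.parse import urlparse

def internal_md_links(md_content, is_enterprise_dir):
    """Return the relative .md link targets that still need to be verified."""
    # Ignore links inside HTML comments.
    md_content = re.sub(r'<!--([\s\S]*?)-->', '', md_content)
    # Keep only the blocks that apply to the edition being checked.
    if is_enterprise_dir:
        md_content = re.sub(r'{% emqxce %}([\s\S]*?){% endemqxce %}', '', md_content)
    else:
        md_content = re.sub(r'{% emqxee %}([\s\S]*?){% endemqxee %}', '', md_content)

    targets = []
    for prefix, _text, target in re.findall(r'(.*?)\[(.*?)\]\((.*?)\)', md_content):
        if prefix.endswith('!'):
            continue  # ![...](...) is an image, not a link
        if target.startswith(('http://', 'https://', '<', '#', 'mailto:', 'tel:')):
            continue  # external, in-page, or non-file references are skipped
        if urlparse(target).path.endswith('.html'):
            continue  # links to rendered .html pages are not resolved to .md files
        targets.append(target)
    return targets
```

In short, comments, the other edition's blocks, external and in-page links, and links that already point at rendered `.html` pages are all excluded before the remaining relative `.md` targets are checked against the docs tree.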
8 changes: 4 additions & 4 deletions .github/scripts/remove_unused.py
@@ -8,13 +8,13 @@
def get_markdown_file(dir_config, base_path):
current_files = []
for row in dir_config:
if row.get('children'):
current_files += get_markdown_file(row['children'], base_path)
else:
if row.get('path'):
current_files.append(
f'{base_path}/README.md' if row['path'] == './'
f'{base_path}/index.md' if row['path'] == './'
else f'{base_path}/{row["path"]}.md'
)
if row.get('children'):
current_files += get_markdown_file(row['children'], base_path)
return current_files
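
The companion change in `get_markdown_file` has the same intent: a directory entry whose `path` is `./` now maps to `index.md` rather than `README.md`. A hedged usage sketch follows; the function body is copied from the diff above, while the sample entries are hypothetical and not taken from the real directory.json.

```python
# get_markdown_file as it reads after this change (copied from the diff above).
def get_markdown_file(dir_config, base_path):
    current_files = []
    for row in dir_config:
        if row.get('path'):
            current_files.append(
                f'{base_path}/index.md' if row['path'] == './'
                else f'{base_path}/{row["path"]}.md'
            )
        if row.get('children'):
            current_files += get_markdown_file(row['children'], base_path)
    return current_files

# Hypothetical directory.json entries, shaped like the ones both scripts walk.
dir_config = [
    {'title': 'Introduction', 'path': './'},
    {'title': 'Quick Start', 'children': [
        {'title': 'Installation', 'path': 'quick-start/installation'},
    ]},
]

# './' now resolves to index.md rather than README.md:
# ['en_US/index.md', 'en_US/quick-start/installation.md']
print(get_markdown_file(dir_config, 'en_US'))
```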


20 changes: 10 additions & 10 deletions .github/workflows/check_markdown.yaml
@@ -1,25 +1,25 @@
name: Check markdown
name: Check Docs

on: [push, pull_request]

jobs:
Markdown_Checker:
markdown_check:
runs-on: ubuntu-latest
steps:
- name: Check out code
- name: check out code
uses: actions/checkout@main
- name: Install markdownlint

- name: install markdownlint
run: sudo npm install -g markdownlint-cli

- name: Check markdown
- name: check markdown
run: markdownlint -c .github/workflows/markdown_config.json ./

Directory_Checker:
directory_check:
runs-on: ubuntu-latest
steps:
- name: Check out code
- name: check out code
uses: actions/checkout@main
- name: Check directory config

- name: check directory config
run: python3 .github/scripts/directory_check.py directory.json $(pwd)
87 changes: 51 additions & 36 deletions .github/workflows/deploy_docs.yaml
@@ -1,7 +1,7 @@
name: Deploy Docs

concurrency:
group: ${{ github.ref }}
group: ${{ github.ref_name }}
cancel-in-progress: true

on:
@@ -17,28 +17,28 @@ jobs:
if: github.repository_owner == 'emqx'
steps:
- name: clone docs
uses: actions/checkout@v2
uses: actions/checkout@main
with:
fetch-depth: 0
path: docs-files

- name: clone frontend
uses: actions/checkout@v2
uses: actions/checkout@main
with:
repository: 'emqx/emqx-io-docs-frontend'
repository: 'emqx/docs-emqx-com-frontend'
ref: next
token: ${{ secrets.CI_GIT_TOKEN }}
path: frontend

- name: use node.js
uses: actions/setup-node@v1
uses: actions/setup-node@v3
with:
node-version: 14.15
node-version-file: 'frontend/.nvmrc'

- name: use python
uses: actions/setup-python@v2
- name: use pnpm
uses: pnpm/action-setup@v4
with:
python-version: '3.8'
architecture: 'x64'
version: 8

- name: set env
run: |
@@ -59,51 +59,66 @@ jobs:
- name: move files
run: |
rm frontend/docs/en/README.md || true
rm frontend/docs/zh/README.md || true
rm frontend/docs/en/index.md || true
rm frontend/docs/zh/index.md || true
rm frontend/docs/*.md || true
rm frontend/README.md
mkdir -p frontend/docs/en/${VERSION}/
mkdir -p frontend/docs/zh/${VERSION}/
mkdir -p frontend/docs/.vuepress/public/api/
cp -r docs-files/en_US/* frontend/docs/en/${VERSION}/
cp -r docs-files/zh_CN/* frontend/docs/zh/${VERSION}/
cp docs-files/directory.json frontend/docs/.vuepress/config/directory.json
rm frontend/index.md || true
mkdir -p frontend/docs/en/${DOCS_TYPE}/${VERSION}/
mkdir -p frontend/docs/zh/${DOCS_TYPE}/${VERSION}/
mkdir -p frontend/docs/public/api/
cp -r docs-files/en_US/* frontend/docs/en/${DOCS_TYPE}/${VERSION}/
cp -r docs-files/zh_CN/* frontend/docs/zh/${DOCS_TYPE}/${VERSION}/
cp docs-files/directory.json frontend/docs/.vitepress/config/directory.json
- name: generate version config
run: |
cd docs-files
python3 .github/scripts/generate_version.py $(git tag | egrep "v(.*)$" | xargs echo -n) > ../frontend/docs/.vuepress/public/api/${DOCS_TYPE}_versions.json
cat ../frontend/docs/.vuepress/public/api/${DOCS_TYPE}_versions.json
python3 .github/scripts/generate_version.py $(git tag | egrep "v(.*)$" | xargs echo -n) > ../frontend/docs/public/api/${DOCS_TYPE}_versions.json
cat ../frontend/docs/public/api/${DOCS_TYPE}_versions.json
- name: build docs
run: |
cd frontend
yarn && yarn build
pnpm install
pnpm build
- name: upload dist
- name: set aws credentials
uses: aws-actions/configure-aws-credentials@v4
with:
aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID }}
aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
aws-region: ${{ secrets.AWS_DEFAULT_REGION }}

- name: upload dist to s3
run: |
aws s3 rm --recursive s3://docs-emqx-com/zh/${DOCS_TYPE}/${VERSION} || true
aws s3 rm --recursive s3://docs-emqx-com/en/${DOCS_TYPE}/${VERSION} || true
aws s3 cp --recursive frontend/docs/.vitepress/dist/ s3://docs-emqx-com/
aws cloudfront create-invalidation --distribution-id ${{ secrets.AWS_DOCS_CLOUDFRONT_ID }} --paths "/zh/${DOCS_TYPE}/${VERSION}/*" "/en/${DOCS_TYPE}/${VERSION}/*" "/api/${DOCS_TYPE}_versions.json" "/sitemap_${DOCS_TYPE}_${VERSION}.xml"
- name: upload dist to cos
run: |
pip3 install coscmd
coscmd config -a ${{ secrets.TENCENT_COS_ID }} -s ${{ secrets.TENCENT_COS_KEY }} -b neugates-io-1302406139 -r ap-hongkong
coscmd delete -r -f docs/en/${VERSION} || true
coscmd delete -r -f docs/zh/${VERSION} || true
coscmd config -a ${{ secrets.TENCENT_COS_ID }} -s ${{ secrets.TENCENT_COS_KEY }} -b neugates-io-1302406139 -e cos.accelerate.myqcloud.com
cd frontend/docs/.vuepress/
coscmd upload -r dist/ /docs/
- name: refresh cdn cache
coscmd config -a ${{ secrets.TENCENT_COS_ID }} -s ${{ secrets.TENCENT_COS_KEY }} -b docs-1302406139 -r ap-shanghai
coscmd delete -r -f en/${DOCS_TYPE}/${VERSION} || true
coscmd delete -r -f zh/${DOCS_TYPE}/${VERSION} || true
coscmd config -a ${{ secrets.TENCENT_COS_ID }} -s ${{ secrets.TENCENT_COS_KEY }} -b docs-1302406139 -e cos.accelerate.myqcloud.com
coscmd upload -r frontend/docs/.vitepress/dist/ /
- name: flush cdn
run: |
pip3 install tccli
tccli configure set secretId ${{ secrets.TENCENT_COS_ID }}
tccli configure set secretKey ${{ secrets.TENCENT_COS_KEY }}
tccli configure set region ap-hongkong
tccli cdn PurgePathCache --Paths '["https://neugates.io/docs/", "https://neuron-docs.emqx.net"]' --FlushType delete
tccli configure set region ap-shanghai
tccli cdn PurgePathCache --cli-unfold-argument --Paths https://docs.emqx.com/zh/${DOCS_TYPE}/${VERSION}/ https://docs.emqx.com/en/${DOCS_TYPE}/${VERSION}/ --FlushType delete
tccli cdn PurgeUrlsCache --cli-unfold-argument --Urls https://docs.emqx.com/api/${DOCS_TYPE}_versions.json https://docs.emqx.com/sitemap_${DOCS_TYPE}_${VERSION}.xml
- name: update search index
uses: Swilder-M/docsearch-scraper-simple@v4
uses: Swilder-M/docsearch-scraper-simple@next
env:
APPLICATION_ID: ${{ secrets.ALGOLIA_APPLICATION_ID }}
API_KEY: ${{ secrets.ALGOLIA_API_KEY }}
APPLICATION_ID: ${{ secrets.ALGOLIA_APPLICATION_ID_NEXT }}
API_KEY: ${{ secrets.ALGOLIA_API_KEY_NEXT }}
with:
docs_type: ${{ env.DOCS_TYPE }}
docs_version: ${{ env.VERSION }}
24 changes: 12 additions & 12 deletions .github/workflows/markdown_config.json
@@ -1,13 +1,13 @@
{
"default": false,
"MD001": true,
"MD003": {"style": "atx"},
"MD011": true,
"MD018": true,
"MD019": true,
"MD023": true,
"MD025": {"level": 1, "front_matter_title": ""},
"MD042": true,
"MD046": {"style": "fenced"},
"MD048": {"style": "backtick"}
}
"default": false,
"MD001": true,
"MD003": {"style": "atx"},
"MD011": true,
"MD018": true,
"MD019": true,
"MD023": true,
"MD025": {"level": 1, "front_matter_title": ""},
"MD042": true,
"MD046": {"style": "fenced"},
"MD048": {"style": "backtick"}
}
6 changes: 3 additions & 3 deletions en_US/README.md → en_US/index.md
@@ -14,9 +14,9 @@ NeuronEX is a version of the Neuron integrated data stream processing engine eKu

### Diversified Connectivity

Neuron provides diversified driver protocol support for various industries, including building automation, CNC machines, Robotics, Electricity, various PLCs, and even intelligent sensors, such as Modbus, OPCUA, Ethernet/IP, IEC104, BACnet, Siemens, Mitsubishi, and more.</br>
Neuron supports applications that connect to various cloud or IIoT platforms, such as MQTT, WebSocket, SparkPlug B, and other custom applications.</br>
With MQTT, IIoT platforms, big data, and AI/ML analysis software can be better integrated into private clouds, EMQX Cloud, AWS, Google Cloud, Azure, or local servers.</br>
Neuron provides diversified driver protocol support for various industries, including building automation, CNC machines, Robotics, Electricity, various PLCs, and even intelligent sensors, such as Modbus, OPCUA, Ethernet/IP, IEC104, BACnet, Siemens, Mitsubishi, and more.<br />
Neuron supports applications that connect to various cloud or IIoT platforms, such as MQTT, WebSocket, SparkPlug B, and other custom applications.<br />
With MQTT, IIoT platforms, big data, and AI/ML analysis software can be better integrated into private clouds, EMQX Cloud, AWS, Google Cloud, Azure, or local servers.<br />
Through SparkPlug B, unified data operations will be provided for industrial applications, eliminating the complexity of ERP, MES, SCADA, and historian systems accessing device data.

### Lightweight
6 changes: 3 additions & 3 deletions en_US/project/compile.md
@@ -18,7 +18,7 @@ $ cmake .. && make
:::tip
There are three optional parameters in CMakeLists:
* CMAKE_BUILD_TYPE "Debug", which compiles the debug version by default.
* DISABLE_WERROR, which disables treating warnings as errors.</br>Usage example: ```cmake -DDISABLE_WERROR=1 ..```
* DISABLE_WERROR, which disables treating warnings as errors.<br />Usage example: ```cmake -DDISABLE_WERROR=1 ..```
* DISABLE_ASAN, which selects whether to enable libasan memory detection.
:::

@@ -40,7 +40,7 @@ $ ./neuron
3. Combined with libasan runtime memory analysis, most memory issues can be resolved.

:::tip
Libasan refers to the Address Sanitizer (ASan) library, a memory error detection tool that helps detect memory errors during program execution, such as buffer overflows, use of freed memory, and use of uninitialized memory.</br>
ASan detects memory errors by injecting additional code while the program is running. It uses a red-black tree data structure to track the allocation of memory blocks, and uses shadow memory to detect read and write access to unallocated memory. When a memory error is detected, ASan prints relevant information, such as the location and type of the error.</br>
Libasan refers to the Address Sanitizer (ASan) library, a memory error detection tool that helps detect memory errors during program execution, such as buffer overflows, use of freed memory, and use of uninitialized memory.<br />
ASan detects memory errors by injecting additional code while the program is running. It uses a red-black tree data structure to track the allocation of memory blocks, and uses shadow memory to detect read and write access to unallocated memory. When a memory error is detected, ASan prints relevant information, such as the location and type of the error.<br />
Libasan is a runtime library for ASan that can be used with compilers, such as Clang, GCC, and so on. It provides the necessary functions and data structures to detect and report memory errors during program execution.
:::
4 changes: 2 additions & 2 deletions en_US/quick-start/installation.md
@@ -21,8 +21,8 @@ Users can choose according to their own needs.

| Linux distribution | Required packages |
| ------------------------------------------------------------ | ------------------ |
| **Debian package system**</br>Ubuntu 20.04</br>Ubuntu 18.04</br>Ubuntu 16.04</br>Debian 11</br>Debian 10</br>Debian 9</br>Debian 8 | deb |
| **Redhat package system**</br>CentOS Stream 9</br>CentOS Stream 8</br>CentOS 7 | rpm |
| **Debian package system**<br />Ubuntu 20.04<br />Ubuntu 18.04<br />Ubuntu 16.04<br />Debian 11<br />Debian 10<br />Debian 9<br />Debian 8 | deb |
| **Redhat package system**<br />CentOS Stream 9<br />CentOS Stream 8<br />CentOS 7 | rpm |

:::tip
The rpm/deb packages use systemd to manage the neuron process, so installing from the rpm/deb package is recommended.
2 changes: 1 addition & 1 deletion en_US/south-devices/ads/ads.md
@@ -78,7 +78,7 @@ In the context of the ADS plugin, a tag address consists of two components,
`INDEX_GROUP` and `INDEX_OFFSET`, which represent the index group and the
index offset, respectively.

> INDEX_GROUP,INDEX_OFFSET</span>
> INDEX_GROUP,INDEX_OFFSET
Both `INDEX_GROUP` and `INDEX_OFFSET` can be given in decimal or hexadecimal format.

2 changes: 1 addition & 1 deletion en_US/south-devices/bacnet-ip/bacnet-ip.md
@@ -16,7 +16,7 @@

### Address Format

> AREA[ADDRESS]</span>
> AREA[ADDRESS]
| AREA | ADDRESS RANGE | ATTRIBUTE | DATA TYPE | REMARK |
| ---- | ------------- | ---------- | ------------- | ------------------ |