Update CI setup
- Caching of build artifacts between steps works now
- Integration Tests are their own job now
- All GitHub Actions dependencies updated to v3
- Bytecode patching performed as part of the build step via CI
- Removed mkdocs deployment temporarily, will be added back again in a
  separate change
- Fixed a bug in the bytecode patcher, formatted with black
jbaiter committed May 31, 2022
1 parent fa0e7cf commit 7cb6d1b
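
In essence, the build job now uploads the packaged JARs as a workflow artifact and the new integration_tests job downloads them before running the test script. A minimal sketch of that hand-off (the step names, artifact name, and upload path are taken from the diff below; the checkout step and the download path are assumptions, since they are not visible here):

jobs:
  build:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v3
      - name: Build with Maven and run unit tests
        run: mvn -B package
      - name: Upload build artifacts
        uses: actions/upload-artifact@v3
        with:
          name: build-artifacts
          path: target/*.jar
  integration_tests:
    runs-on: ubuntu-latest
    needs: build
    steps:
      - uses: actions/checkout@v3          # assumed; run.sh lives in the repository
      - uses: actions/download-artifact@v3
        with:
          name: build-artifacts
          path: target                     # assumed restore location; run.sh expects the JARs in ../target
      - name: Run integration tests
        run: ./integration-tests/run.sh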
Showing 3 changed files with 85 additions and 96 deletions.
66 changes: 17 additions & 49 deletions .github/workflows/ci.yml
@@ -8,62 +8,30 @@ jobs:
build:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
- uses: actions/checkout@v3
- name: Set up JDK
uses: actions/setup-java@v1
uses: actions/setup-java@v3
with:
distribution: corretto
java-version: 11
- uses: actions/cache@v2
env:
cache-name: cache-maven-artifacts
with:
path: ~/.m2/repository
key: ${{ runner.os }}-maven-${{ hashFiles('**/pom.xml') }}
cache: maven
- name: Build with Maven and run unit tests
run: mvn -B package
- name: Run integration tests
run: ./integration-tests/run.sh
publish:
if: github.event_name == 'push' && contains(github.ref, 'main')
runs-on: ubuntu-latest
needs: build
steps:
- uses: actions/checkout@v2
- name: Set up JDK 11
uses: actions/setup-java@v1
- name: Create Solr 7/8 JAR
run: ./util/patch_solr78_bytecode.py
- name: Upload build artifacts
uses: actions/upload-artifact@v3
with:
java-version: 11
server-id: ossrh-snapshots
server-username: MAVEN_USERNAME
server-password: MAVEN_PASSWORD
- uses: actions/cache@v2
env:
cache-name: cache-maven-artifacts
with:
path: ~/.m2/repository
key: ${{ runner.os }}-maven-${{ hashFiles('**/pom.xml') }}
- name: Install XML utils
run: sudo apt update && sudo apt install libxml2-utils
- name: Set project version
run: echo "PROJECT_VERSION=$(xmllint --xpath '/*[local-name()="project"]/*[local-name()="version"]/text()' pom.xml)" >> $GITHUB_ENV
- name: Publish to the Maven Central Repository
run: if [[ "$PROJECT_VERSION" =~ .*SNAPSHOT ]]; then mvn -B deploy; fi
env:
MAVEN_USERNAME: ${{ secrets.OSSRH_USERNAME }}
MAVEN_PASSWORD: ${{ secrets.OSSRH_PASSWORD }}
publish_mkdocs:
name: build-artifacts
path: target/*.jar

integration_tests:
if: github.event_name == 'push' && contains(github.ref, 'main')
runs-on: ubuntu-latest
needs: [build, publish]
needs: build
steps:
- uses: actions/checkout@v2
- name: Set up Python 3.8
uses: actions/setup-python@v2
- uses: actions/download-artifact@v3
with:
python-version: 3.8
- name: Install dependencies
run: |
python -m pip install --upgrade pip
pip install mkdocs singledispatch
- name: Mkdocs deploy
run: mkdocs gh-deploy --force
name: build-artifacts
- name: Run integration tests
run: ./integration-tests/run.sh
73 changes: 38 additions & 35 deletions integration-tests/run.sh
@@ -21,13 +21,6 @@ wait_for_solr() {
set -e
}

create_solr78_jar() {
solr9_jar="$(ls ../target/*.jar |egrep -v '(javadoc|sources|original)' |head -n 1)"
solr78_jar=$SOLR78_PLUGIN_PATH/$(basename $solr9_jar)
mkdir -p $SOLR78_PLUGIN_PATH
python3 ../util/patch_solr78_bytecode.py $solr9_jar $solr78_jar
}

# Make sure we're in the test directory
cd $SCRIPT_DIR

@@ -36,21 +29,28 @@ if [ ! -d "../target" ]; then
exit 1
fi

create_solr78_jar
solr9_jar="$(ls ../target/*.jar |egrep -v '(javadoc|original|source|solr78)')"
solr78_jar="$(ls ../target/*.jar |egrep 'solr78.jar')"
if [ -z "$solr78_jar" ]; then
echo "No solr78 jar found in ../target, please run 'util/patch_solr78_bytecode.py' in the parent directory first!"
exit 1
fi

plugin_dir="$(mktemp -d)"
cp $solr9_jar "$plugin_dir"
for version in $SOLR9_VERSIONS; do
printf "Testing $version: "
container_name="ocrhltest-$version"
docker run \
--name "$container_name" \
-e SOLR_LOG_LEVEL=ERROR \
-v "$(pwd)/solr/install-plugin.sh:/docker-entrypoint-initdb.d/install-plugin.sh" \
-v "$(pwd)/solr/core/v9:/opt/core-config" \
-v "$(pwd)/data:/ocr-data" \
-v "$(realpath ..)/target:/build" \
-p "31337:8983" \
solr:$version \
solr-precreate ocr /opt/core-config > /dev/null 2>&1 & \
--name "$container_name" \
-e SOLR_LOG_LEVEL=ERROR \
-v "$(pwd)/solr/install-plugin.sh:/docker-entrypoint-initdb.d/install-plugin.sh" \
-v "$(pwd)/solr/core/v9:/opt/core-config" \
-v "$(pwd)/data:/ocr-data" \
-v "$plugin_dir:/build" \
-p "31337:8983" \
solr:$version \
solr-precreate ocr /opt/core-config > /dev/null 2>&1 & \
wait_for_solr "$container_name"
if ! python3 test.py; then
printf " !!!FAIL!!!\n"
@@ -62,20 +62,22 @@ for version in $SOLR9_VERSIONS; do
docker rm "$container_name" > /dev/null
done

rm -rf "$plugin_dir"/*.jar
cp $solr78_jar "$plugin_dir"
# Solr 8 versions, use a different plugin JAR
for version in $SOLR8_VERSIONS; do
printf "Testing $version: "
container_name="ocrhltest-$version"
docker run \
--name "$container_name" \
-e SOLR_LOG_LEVEL=ERROR \
-v "$(pwd)/solr/install-plugin.sh:/docker-entrypoint-initdb.d/install-plugin.sh" \
-v "$(pwd)/solr/core/v8:/opt/core-config" \
-v "$(pwd)/data:/ocr-data" \
-v "$SOLR78_PLUGIN_PATH:/build" \
-p "31337:8983" \
solr:$version \
solr-precreate ocr /opt/core-config > /dev/null 2>&1 & \
--name "$container_name" \
-e SOLR_LOG_LEVEL=ERROR \
-v "$(pwd)/solr/install-plugin.sh:/docker-entrypoint-initdb.d/install-plugin.sh" \
-v "$(pwd)/solr/core/v8:/opt/core-config" \
-v "$(pwd)/data:/ocr-data" \
-v "$plugin_dir:/build" \
-p "31337:8983" \
solr:$version \
solr-precreate ocr /opt/core-config > /dev/null 2>&1 & \
wait_for_solr "$container_name"
if ! python3 test.py; then
printf " !!!FAIL!!!\n"
@@ -91,15 +93,15 @@ done
for version in $SOLR7_VERSIONS; do
printf "Testing $version: "
docker run \
--name "ocrhltest-$version" \
-e SOLR_LOG_LEVEL=ERROR \
-v "$(pwd)/solr/install-plugin-v7.sh:/docker-entrypoint-initdb.d/install-plugin-v7.sh" \
-v "$(pwd)/solr/core/v7:/opt/core-config" \
-v "$(pwd)/data:/ocr-data" \
-v "$SOLR78_PLUGIN_PATH:/build" \
-p "31337:8983" \
solr:$version \
solr-precreate ocr /opt/core-config > /dev/null 2>&1 & \
--name "ocrhltest-$version" \
-e SOLR_LOG_LEVEL=ERROR \
-v "$(pwd)/solr/install-plugin-v7.sh:/docker-entrypoint-initdb.d/install-plugin-v7.sh" \
-v "$(pwd)/solr/core/v7:/opt/core-config" \
-v "$(pwd)/data:/ocr-data" \
-v "$plugin_dir:/build" \
-p "31337:8983" \
solr:$version \
solr-precreate ocr /opt/core-config > /dev/null 2>&1 & \
wait_for_solr "$container_name"
if ! python3 test.py; then
printf " !!!FAIL!!!\n"
@@ -114,3 +116,4 @@ done
rm -rf /tmp/solrocr-solr78

echo "INTEGRATION TESTS SUCCEEDED"
rm -rf "$plugin_dir"
42 changes: 30 additions & 12 deletions util/patch_solr78_bytecode.py
Expand Up @@ -5,11 +5,12 @@
Building two JARs from one pom without a ton of ceremony seems to be
out of scope for it, so this manual approach was chosen instead.
"""
import os
import struct
import sys
import zipfile
from pathlib import Path
from typing import BinaryIO
from typing import BinaryIO, Callable, Dict, Union


def _get_utf8_size(entry: bytes) -> int:
@@ -20,7 +21,7 @@ def _get_utf8_size(entry: bytes) -> int:

#: Mapping from tags used in the constant pool to the size their
#: associated information takes up in the pool
CONSTANT_POOL_SIZES = {
CONSTANT_POOL_SIZES: Dict[int, Union[int, Callable[[bytes], int]]] = {
7: 3, # classinfo(u1 tag, u2 name_idx),
9: 5, # fieldref(u1 tag, u2 class_idx, u2 name_and_type_idx)
10: 5, # methodref(u1 tag, u2 class_idx, u2 name_and_type_idx)
@@ -95,16 +96,17 @@ def patch_close_hook(bytecode: bytes) -> bytes:
if buf[target_idx] == 7: # reference to java.lang.Object
buf[target_idx] = 8 # change to CloseHook
# Update the constant pool size
if isinstance(CONSTANT_POOL_SIZES[tag], int):
constant_pool_size += CONSTANT_POOL_SIZES[tag]
pool_size = CONSTANT_POOL_SIZES[tag]
if isinstance(pool_size, int):
constant_pool_size += pool_size
else:
constant_pool_size += CONSTANT_POOL_SIZES[tag](
constant_pool_size += pool_size(
buf[constant_pool_start + constant_pool_size :]
)
after_pool_idx = constant_pool_start + constant_pool_size
iface_count = struct.unpack_from(">H", buf, after_pool_idx + 6)[0]
if iface_count != 1:
return bytes
return bytes(buf)

# Start of interface pointers
ifaces_idx = after_pool_idx + 8
@@ -133,18 +135,34 @@ def patch_jar(source_path: Path, target: BinaryIO) -> None:


if __name__ == "__main__":
if len(sys.argv) < 2:
if len(sys.argv) == 1:
build_path = Path(__file__).parent / "../target"
source_path = next(
p
for p in build_path.iterdir()
if p.name.endswith(".jar")
and p.name.startswith("solr-ocrhighlighting")
and not p.stem.split("-")[-1] in ("javadoc", "sources", "solr78")
).resolve()
elif sys.argv[1] in ("-h", "--help"):
print(__doc__)
print("Usage:")
print("$ patch_solr78_bytecode.py <source_jar> [<target_jar>]", file=sys.stderr)
print(
"$ patch_solr78_bytecode.py [<source_jar>] [<target_jar>]", file=sys.stderr
)
sys.exit(1)
source_path = Path(sys.argv[1])
else:
source_path = Path(sys.argv[1])
if not source_path.exists():
print(f"File at {source_path} does not exist!")
sys.exit(1)
if len(sys.argv) == 3:
target_path = Path(sys.argv[2])
with target_path.open("wb") as fp:
patch_jar(source_path, fp)
else:
patch_jar(source_path, sys.stdout.buffer)
target_path = (source_path.parent / f"{source_path.stem}-solr78.jar").resolve()

print(
f"Patching '{source_path.relative_to(os.getcwd())}', writing output to '{target_path.relative_to(os.getcwd())}'"
)
with target_path.open("wb") as fp:
patch_jar(source_path, fp)
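
For readers unfamiliar with the class file layout the patcher walks: a Java class file starts with a u4 magic number and two u2 version fields, followed by a u2 constant-pool count; each pool entry begins with a one-byte tag, and its payload size is fixed per tag except for UTF-8 entries, which carry a u2 length. Below is a minimal, hypothetical sketch of that walk (the helper name and size table are illustrative, not part of this commit), showing how a size table like CONSTANT_POOL_SIZES lets the code skip over the pool:

import struct

# Entry sizes in bytes (tag byte included) for the fixed-width constant pool tags;
# tag 1 (CONSTANT_Utf8) is variable-length: u1 tag + u2 length + <length> bytes.
FIXED_SIZES = {3: 5, 4: 5, 5: 9, 6: 9, 7: 3, 8: 3, 9: 5, 10: 5, 11: 5, 12: 5, 15: 4, 16: 3, 18: 5}


def skip_constant_pool(class_bytes: bytes) -> int:
    """Return the offset of the first byte after the constant pool."""
    # Header: u4 magic, u2 minor_version, u2 major_version, u2 constant_pool_count
    (count,) = struct.unpack_from(">H", class_bytes, 8)
    offset = 10  # entries start right after the count
    index = 1    # valid pool indices run from 1 to count - 1
    while index < count:
        tag = class_bytes[offset]
        if tag == 1:  # CONSTANT_Utf8
            (length,) = struct.unpack_from(">H", class_bytes, offset + 1)
            offset += 3 + length
        else:
            offset += FIXED_SIZES[tag]
        # Long (5) and Double (6) constants occupy two pool slots per the JVM spec
        index += 2 if tag in (5, 6) else 1
    return offset

In the commit itself, the same bookkeeping is what lets patch_close_hook locate the interface table (after_pool_idx, ifaces_idx) once the pool has been traversed.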
