diff --git a/.docker-compose/docker-compose.h2.dev.yaml b/.docker-compose/docker-compose.h2.dev.yaml
new file mode 100644
index 0000000..7051730
--- /dev/null
+++ b/.docker-compose/docker-compose.h2.dev.yaml
@@ -0,0 +1,61 @@
+version: '3'
+
+services:
+
+ fits:
+ image: artourkin/fits-web:main
+ container_name: fits
+ env_file: ../.env
+ networks:
+ - web
+ restart: unless-stopped
+ ports:
+ - 8081:8080
+
+
+ rest:
+ container_name: rest
+ build:
+ context: ..
+ dockerfile: ../Dockerfile
+ env_file: ../.env
+ networks:
+ - web
+ restart: unless-stopped
+ deploy:
+ replicas: 1
+ ports:
+ - 8082:8080
+ depends_on:
+ - fits
+ - db-docker
+
+
+ web:
+ build:
+ context: ..
+ dockerfile: ../web/Dockerfile
+ container_name: web
+ env_file: ../.env
+ networks:
+ - web
+ restart: unless-stopped
+ ports:
+ - 8080:3000
+
+
+ db-docker:
+ image: oscarfonts/h2
+ container_name: db-docker
+ env_file: ../.env
+ environment:
+ - H2_OPTIONS=-ifNotExists
+ networks:
+ - web
+ restart: unless-stopped
+ ports:
+ - 1521:1521
+ - 81:81
+
+networks:
+ web:
\ No newline at end of file
diff --git a/.docker-compose/docker-compose.mysql.cluster.yaml b/.docker-compose/docker-compose.mysql.cluster.yaml
new file mode 100644
index 0000000..6aae85a
--- /dev/null
+++ b/.docker-compose/docker-compose.mysql.cluster.yaml
@@ -0,0 +1,179 @@
+version: '3'
+
+services:
+
+ fits:
+ build:
+ context: ..
+ dockerfile: ../fits/Dockerfile
+ container_name: fits
+ env_file: ../.env
+ networks:
+ - web
+ restart: unless-stopped
+ ports:
+ - 8081:8080
+
+ rest:
+ build:
+ context: ..
+ dockerfile: ../Dockerfile
+ env_file: ../.env
+ environment:
+ - SPRING_DATASOURCE_URL=jdbc:mysql://mysql-router:6446/fitsinn
+ - DB_SELECTOR=mysql
+ networks:
+ - web
+ restart: unless-stopped
+ deploy:
+ replicas: 3
+ depends_on:
+ - fits
+ - mysql-router
+
+ web:
+ build:
+ context: ..
+ dockerfile: ../web/Dockerfile.dev
+ container_name: web
+ env_file: ../.env
+ networks:
+ - web
+ restart: unless-stopped
+ depends_on:
+ - rest
+ ports:
+ - 8080:3000
+
+
+ adminer:
+ image: adminer
+ container_name: adminer
+ env_file: ../.env
+ restart: unless-stopped
+ networks:
+ - web
+ ports:
+ - 8090:8080
+
+ mysql-server-1:
+ container_name: mysql-server-1
+ env_file:
+ - ../config/mysql-cluster/mysql-server.env
+ image: mysql/mysql-server:8.0.12
+ networks:
+ - web
+ command:
+ [
+ "mysqld",
+ "--server_id=1",
+ "--binlog_checksum=NONE",
+ "--gtid_mode=ON",
+ "--enforce_gtid_consistency=ON",
+ "--log_bin",
+ "--log_slave_updates=ON",
+ "--master_info_repository=TABLE",
+ "--relay_log_info_repository=TABLE",
+ "--transaction_write_set_extraction=XXHASH64",
+ "--user=mysql",
+ "--skip-host-cache",
+ "--skip-name-resolve",
+ "--default_authentication_plugin=mysql_native_password",
+ ]
+
+ mysql-server-2:
+ container_name: mysql-server-2
+ env_file:
+ - ../config/mysql-cluster/mysql-server.env
+ image: mysql/mysql-server:8.0.12
+ networks:
+ - web
+ command:
+ [
+ "mysqld",
+ "--server_id=2",
+ "--binlog_checksum=NONE",
+ "--gtid_mode=ON",
+ "--enforce_gtid_consistency=ON",
+ "--log_bin",
+ "--log_slave_updates=ON",
+ "--master_info_repository=TABLE",
+ "--relay_log_info_repository=TABLE",
+ "--transaction_write_set_extraction=XXHASH64",
+ "--user=mysql",
+ "--skip-host-cache",
+ "--skip-name-resolve",
+ "--default_authentication_plugin=mysql_native_password",
+ ]
+
+
+ mysql-server-3:
+ container_name: mysql-server-3
+ env_file:
+ - ../config/mysql-cluster/mysql-server.env
+ image: mysql/mysql-server:8.0.12
+ networks:
+ - web
+ command:
+ [
+ "mysqld",
+ "--server_id=3",
+ "--binlog_checksum=NONE",
+ "--gtid_mode=ON",
+ "--enforce_gtid_consistency=ON",
+ "--log_bin",
+ "--log_slave_updates=ON",
+ "--master_info_repository=TABLE",
+ "--relay_log_info_repository=TABLE",
+ "--transaction_write_set_extraction=XXHASH64",
+ "--user=mysql",
+ "--skip-host-cache",
+ "--skip-name-resolve",
+ "--default_authentication_plugin=mysql_native_password",
+ ]
+
+ mysql-shell:
+ container_name: mysql-shell
+ env_file:
+ - ../config/mysql-cluster/mysql-shell.env
+ image: neumayer/mysql-shell-batch
+ networks:
+ - web
+ volumes:
+ - ./mysql-cluster/scripts/:/scripts/
+ depends_on:
+ - mysql-server-1
+ - mysql-server-2
+ - mysql-server-3
+
+ mysql-router:
+ container_name: mysql-router
+ env_file:
+ - ../config/mysql-cluster/mysql-router.env
+ image: mysql/mysql-router:8.0
+ networks:
+ - web
+ ports:
+ - 3306:6446
+ depends_on:
+ - mysql-server-1
+ - mysql-server-2
+ - mysql-server-3
+ - mysql-shell
+ restart: on-failure
+
+ nginx:
+ image: nginx
+ container_name: nginx
+ env_file: ../.env
+ volumes:
+ - ./config/nginx/nginx.conf:/etc/nginx/conf.d/default.conf
+ ports:
+ - 8082:80
+ networks:
+ - web
+ depends_on:
+ - rest
+
+networks:
+ web:
\ No newline at end of file
diff --git a/.docker-compose/docker-compose.mysql.dev.yaml b/.docker-compose/docker-compose.mysql.dev.yaml
new file mode 100644
index 0000000..dda1c5a
--- /dev/null
+++ b/.docker-compose/docker-compose.mysql.dev.yaml
@@ -0,0 +1,89 @@
+version: '3'
+
+services:
+
+ fits:
+ build:
+ context: ..
+ dockerfile: ../fits/Dockerfile
+ container_name: fits
+ env_file: ../.env
+ networks:
+ - web
+ restart: unless-stopped
+ ports:
+ - 8081:8080
+
+ rest:
+ build:
+ context: ..
+ dockerfile: ../Dockerfile
+ env_file: ../.env
+ networks:
+ - web
+ restart: unless-stopped
+ environment:
+ - LOGGING_LEVEL_ORG_HIBERNATE_SQL=DEBUG
+ - SPRING_JPA_SHOW_SQL=true
+ - DB_SELECTOR=mysql
+ deploy:
+ replicas: 1
+ ports:
+ - 8092:8080
+ depends_on:
+ - fits
+ - db-docker
+
+ web:
+ build:
+ context: ..
+ dockerfile: ../web/Dockerfile
+ container_name: web
+ env_file: ../.env
+ networks:
+ - web
+ restart: unless-stopped
+ ports:
+ - 8080:3000
+
+ db-docker:
+ image: mysql:8.0
+ container_name: db-docker
+ env_file: ../.env
+ environment:
+ MYSQL_DATABASE: fitsinn
+ MYSQL_USER: user
+ MYSQL_PASSWORD: pass
+ MYSQL_ROOT_PASSWORD: pass
+ networks:
+ - web
+ restart: unless-stopped
+ ports:
+ - 3306:3306
+
+
+ adminer:
+ image: adminer
+ container_name: adminer
+ env_file: ../.env
+ restart: unless-stopped
+ networks:
+ - web
+ ports:
+ - 8090:8080
+
+ nginx:
+ image: nginx
+ container_name: nginx
+ env_file: ../.env
+ volumes:
+ - ./config/nginx/nginx.conf:/etc/nginx/conf.d/default.conf
+ ports:
+ - 8082:80
+ networks:
+ - web
+ depends_on:
+ - rest
+
+networks:
+ web:
\ No newline at end of file
diff --git a/.github/workflows/docker-image.yml b/.github/workflows/docker-image.yml
index 374a790..7366d98 100644
--- a/.github/workflows/docker-image.yml
+++ b/.github/workflows/docker-image.yml
@@ -3,6 +3,8 @@ name: Docker Image CI
on:
push:
branches: [main, dev, release/*]
+ tags:
+ - 'v*'
pull_request:
branches: [main]
@@ -29,13 +31,33 @@ jobs:
key: ${{ runner.os }}-maven-${{ hashFiles('**/pom.xml') }}
restore-keys: |
${{ runner.os }}-maven-
+
+ - name: Extract Git metadata
+ id: vars
+ run: |
+ echo "GIT_SHA_SHORT=$(git rev-parse --short HEAD)" >> "$GITHUB_ENV"
+ echo "BRANCH_NAME=${GITHUB_HEAD_REF:-${GITHUB_REF#refs/heads/}}" >> "$GITHUB_ENV"
+ echo "GIT_TAG=${GITHUB_REF#refs/tags/}" >> "$GITHUB_ENV"
+
+ - name: Print GitHub ref and other variables
+ run: |
+ echo "GitHub ref: $GITHUB_REF"
+ echo "Git tag: $GIT_TAG"
+ echo "Branch name: $BRANCH_NAME"
+ echo "Commit SHA: $GIT_SHA_SHORT"
+
- name: Build and push FITSInn REST
- uses: docker/build-push-action@v5
- with:
- context: .
- platforms: linux/amd64,linux/arm64
- push: true
- tags: artourkin/fitsinn-rest:${{ github.ref_name }}
+ run: |
+ IMAGE_NAME=artourkin/fitsinn-rest
+ CONTEXT=.
+ if [ "${{ github.ref }}" == "refs/heads/main" ]; then
+ docker buildx build --push --tag $IMAGE_NAME:latest $CONTEXT
+ elif [[ "${{ github.ref }}" == refs/tags/* ]]; then
+ docker buildx build --push --tag $IMAGE_NAME:$GIT_TAG $CONTEXT
+ else
+ docker buildx build --push --tag $IMAGE_NAME:$BRANCH_NAME-$GIT_SHA_SHORT $CONTEXT
+ fi
+
- name: Cache node modules
id: cache-npm
uses: actions/cache@v3
@@ -53,17 +75,29 @@ jobs:
name: List the state of node modules
continue-on-error: true
run: npm list
+
- name: Build and push FITSInn WEB
- uses: docker/build-push-action@v5
- with:
- file: ./web/Dockerfile
- platforms: linux/amd64,linux/arm64
- push: true
- tags: artourkin/fitsinn-web:${{ github.ref_name }}
+ run: |
+ IMAGE_NAME=artourkin/fitsinn-web
+ CONTEXT=./web
+ if [ "${{ github.ref }}" == "refs/heads/main" ]; then
+ docker buildx build --push --tag $IMAGE_NAME:latest $CONTEXT
+ elif [[ "${{ github.ref }}" == refs/tags/* ]]; then
+ docker buildx build --push --tag $IMAGE_NAME:$GIT_TAG $CONTEXT
+ else
+ docker buildx build --push --tag $IMAGE_NAME:$BRANCH_NAME-$GIT_SHA_SHORT $CONTEXT
+ fi
+
- name: Build and push FITS WEB
- uses: docker/build-push-action@v5
- with:
- file: ./fits/Dockerfile
- platforms: linux/amd64,linux/arm64/v8
- push: true
- tags: artourkin/fits-web:${{ github.ref_name }}
+ run: |
+ IMAGE_NAME=artourkin/fits-web
+ CONTEXT=./fits
+ if [ "${{ github.ref }}" == "refs/heads/main" ]; then
+ docker buildx build --push --tag $IMAGE_NAME:latest $CONTEXT
+ elif [[ "${{ github.ref }}" == refs/tags/* ]]; then
+ docker buildx build --push --tag $IMAGE_NAME:$GIT_TAG $CONTEXT
+ else
+ docker buildx build --push --tag $IMAGE_NAME:$BRANCH_NAME-$GIT_SHA_SHORT $CONTEXT
+ fi
+
+
diff --git a/.mvn/wrapper/maven-wrapper.jar b/.mvn/wrapper/maven-wrapper.jar
new file mode 100644
index 0000000..cb28b0e
Binary files /dev/null and b/.mvn/wrapper/maven-wrapper.jar differ
diff --git a/.mvn/wrapper/maven-wrapper.properties b/.mvn/wrapper/maven-wrapper.properties
new file mode 100644
index 0000000..7d02699
--- /dev/null
+++ b/.mvn/wrapper/maven-wrapper.properties
@@ -0,0 +1,18 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+distributionUrl=https://repo.maven.apache.org/maven2/org/apache/maven/apache-maven/3.8.7/apache-maven-3.8.7-bin.zip
+wrapperUrl=https://repo.maven.apache.org/maven2/org/apache/maven/wrapper/maven-wrapper/3.2.0/maven-wrapper-3.2.0.jar
diff --git a/Dockerfile b/Dockerfile
index 4bde26b..7270632 100644
--- a/Dockerfile
+++ b/Dockerfile
@@ -1,7 +1,7 @@
FROM maven:3.9.0 as builder
COPY . /app
WORKDIR /app
-RUN mvn -pl !web clean install -Pprod
+RUN --mount=type=cache,target=/root/.m2 mvn -pl -web clean install
FROM openjdk:21-jdk-slim
WORKDIR /app
diff --git a/Dockerfile.dev b/Dockerfile.dev
new file mode 100644
index 0000000..d8d13ab
--- /dev/null
+++ b/Dockerfile.dev
@@ -0,0 +1,16 @@
+FROM maven:3.9.0 as builder
+COPY . /app
+WORKDIR /app
+RUN --mount=type=cache,target=/root/.m2 mvn -pl -web clean install -DskipTests
+
+FROM openjdk:21-jdk-slim
+WORKDIR /app
+RUN printenv
+COPY --from=builder /app/main/target/fitsinn-main-*.jar ./app.jar
+
+RUN chown 1001 ./app.jar \
+ && chmod "g+rwX" ./app.jar
+
+USER 1001
+EXPOSE 8080
+ENTRYPOINT ["java", "-jar", "app.jar"]
diff --git a/README.md b/README.md
index b653042..6eb7969 100644
--- a/README.md
+++ b/README.md
@@ -34,15 +34,20 @@ Installation of FITSInn to Docker Swarm or K8S is possible, but is not currently
### Local build
-Building the Docker images from scratch and starting FITSInn is executed via:
-
+Building the Docker images from scratch and starting FITSInn is executed via:
```
docker-compose -f docker-compose.dev.yaml up --build
```
File uploading using bash:
+```
+bash ./utils/fileupload.sh http://localhost:8082 ~/rnd/data/govdocs_fits/govdocs1/000/
+```
-bash fileupload.sh http://localhost:8082 ~/rnd/data/govdocs_fits/govdocs1/000/
+File uploading using python (the pip package requests is necessary):
+```
+python ./utils/fileupload.py http://localhost:8082/multipleupload ~/rnd/data/govdocs_fits/govdocs1/000/ 100 3
+```
## Issues
diff --git a/config/clickhouse/config.xml b/config/clickhouse/config.xml
new file mode 100644
index 0000000..6c4a403
--- /dev/null
+++ b/config/clickhouse/config.xml
@@ -0,0 +1,1038 @@
+
+
+
+
+
+ trace
+ /var/log/clickhouse-server/clickhouse-server.log
+ /var/log/clickhouse-server/clickhouse-server.err.log
+
+ 1000M
+ 10
+
+
+
+
+
+
+
+
+
+
+
+
+
+ 8123
+
+
+ 9000
+
+
+ 9004
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ 9009
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ 4096
+
+
+ 3
+
+
+
+
+ false
+
+
+ /path/to/ssl_cert_file
+ /path/to/ssl_key_file
+
+
+ false
+
+
+ /path/to/ssl_ca_cert_file
+
+
+ deflate
+
+
+ medium
+
+
+ -1
+ -1
+
+
+ false
+
+
+
+
+
+
+ /etc/clickhouse-server/server.crt
+ /etc/clickhouse-server/server.key
+
+ /etc/clickhouse-server/dhparam.pem
+ none
+ true
+ true
+ sslv2,sslv3
+ true
+
+
+
+ true
+ true
+ sslv2,sslv3
+ true
+
+
+
+ RejectCertificateHandler
+
+
+
+
+
+
+
+
+ 100
+
+
+ 0
+
+
+
+ 10000
+
+
+ 0.9
+
+
+ 4194304
+
+
+ 0
+
+
+
+
+
+ 8589934592
+
+
+ 5368709120
+
+
+
+ /var/lib/clickhouse/
+
+
+ /var/lib/clickhouse/tmp/
+
+
+
+
+
+ /var/lib/clickhouse/user_files/
+
+
+
+
+
+
+
+
+
+
+ users.xml
+
+
+
+ /var/lib/clickhouse/access/
+
+
+
+
+
+
+ default
+
+
+
+
+
+
+
+
+
+
+
+ default
+
+
+
+
+
+
+
+
+ true
+
+
+ false
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ localhost
+ 9000
+
+
+
+
+
+
+
+
+ localhost
+ 9000
+
+
+
+
+ localhost
+ 9000
+
+
+
+
+
+
+ 127.0.0.1
+ 9000
+
+
+
+
+ 127.0.0.2
+ 9000
+
+
+
+
+
+ true
+
+ 127.0.0.1
+ 9000
+
+
+
+ true
+
+ 127.0.0.2
+ 9000
+
+
+
+
+
+
+ localhost
+ 9440
+ 1
+
+
+
+
+
+
+ localhost
+ 9000
+
+
+
+
+ localhost
+ 1
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ 3600
+
+
+
+ 3600
+
+
+ 60
+
+
+
+
+
+
+
+
+
+
+
+
+ system
+
+
+ toYYYYMM(event_date)
+
+
+
+
+
+ 7500
+
+
+
+
+ system
+
+
+ toYYYYMM(event_date)
+ 7500
+
+
+
+
+ system
+
+ toYYYYMM(event_date)
+ 7500
+
+
+
+
+
+
+
+
+ system
+
+ 7500
+ 1000
+
+
+
+
+ system
+
+
+ 60000
+
+
+
+
+
+
+ engine MergeTree
+ partition by toYYYYMM(finish_date)
+ order by (finish_date, finish_time_us, trace_id)
+
+ system
+
+ 7500
+
+
+
+
+
+ system
+
+
+
+ 1000
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ *_dictionary.xml
+
+
+
+
+
+
+
+ /clickhouse/task_queue/ddl
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ click_cost
+ any
+
+ 0
+ 3600
+
+
+ 86400
+ 60
+
+
+
+ max
+
+ 0
+ 60
+
+
+ 3600
+ 300
+
+
+ 86400
+ 3600
+
+
+
+
+
+ /var/lib/clickhouse/format_schemas/
+
+
+
+
+ hide encrypt/decrypt arguments
+ ((?:aes_)?(?:encrypt|decrypt)(?:_mysql)?)\s*\(\s*(?:'(?:\\'|.)+'|.*?)\s*\)
+
+ \1(???)
+
+
+
+
+
+
+
+
+
+ false
+
+ false
+
+
+ https://6f33034cfe684dd7a3ab9875e57b1c8d@o388870.ingest.sentry.io/5226277
+
+
+
+
+
\ No newline at end of file
diff --git a/config/clickhouse/initdb.sql b/config/clickhouse/initdb.sql
new file mode 100644
index 0000000..d97dce4
--- /dev/null
+++ b/config/clickhouse/initdb.sql
@@ -0,0 +1,10 @@
+CREATE TABLE characterisationresult
+(
+ file_path String,
+ property String,
+ source String,
+ property_value String,
+ value_type String
+) ENGINE = ReplacingMergeTree
+ PRIMARY KEY (source, property, file_path)
+ ORDER BY (source, property, file_path);
diff --git a/config/clickhouse/users.xml b/config/clickhouse/users.xml
new file mode 100644
index 0000000..2c5b9f7
--- /dev/null
+++ b/config/clickhouse/users.xml
@@ -0,0 +1,58 @@
+
+
+
+
+
+
+
+ 10000000000
+
+
+ random
+
+
+
+
+ 1
+
+
+
+
+
+
+ 4acfe3202a5ff5cf467898fc58aab1d615029441
+
+ ::/0
+
+ default
+ default
+ 1
+
+
+
+
+
+
+
+
+
+
+ 3600
+
+
+ 0
+ 0
+ 0
+ 0
+ 0
+
+
+
+
\ No newline at end of file
diff --git a/config/mysql-cluster/mysql-router.env b/config/mysql-cluster/mysql-router.env
new file mode 100644
index 0000000..7118d81
--- /dev/null
+++ b/config/mysql-cluster/mysql-router.env
@@ -0,0 +1,7 @@
+MYSQL_HOST=mysql-server-1
+MYSQL_PORT=3306
+
+MYSQL_USER=root
+MYSQL_PASSWORD=mysql
+
+MYSQL_INNODB_NUM_MEMBERS=3
diff --git a/config/mysql-cluster/mysql-server.env b/config/mysql-cluster/mysql-server.env
new file mode 100644
index 0000000..a3691cd
--- /dev/null
+++ b/config/mysql-cluster/mysql-server.env
@@ -0,0 +1,2 @@
+MYSQL_ROOT_PASSWORD=mysql
+MYSQL_ROOT_HOST=%
\ No newline at end of file
diff --git a/config/mysql-cluster/mysql-shell.env b/config/mysql-cluster/mysql-shell.env
new file mode 100644
index 0000000..992ec3c
--- /dev/null
+++ b/config/mysql-cluster/mysql-shell.env
@@ -0,0 +1,6 @@
+MYSQL_USER=root
+MYSQL_HOST=mysql-server-1
+MYSQL_PORT=3306
+MYSQL_PASSWORD=mysql
+MYSQLSH_SCRIPT=/scripts/setupCluster.js
+MYSQL_SCRIPT=/scripts/db.sql
diff --git a/config/mysql-cluster/scripts/db.sql b/config/mysql-cluster/scripts/db.sql
new file mode 100644
index 0000000..4ae6754
--- /dev/null
+++ b/config/mysql-cluster/scripts/db.sql
@@ -0,0 +1,3 @@
+CREATE DATABASE IF NOT EXISTS fitsinn;
+CREATE USER IF NOT EXISTS 'user'@'%' IDENTIFIED BY 'pass';
+GRANT ALL PRIVILEGES ON fitsinn.* TO 'user'@'%';
diff --git a/config/mysql-cluster/scripts/setupCluster.js b/config/mysql-cluster/scripts/setupCluster.js
new file mode 100644
index 0000000..23f5559
--- /dev/null
+++ b/config/mysql-cluster/scripts/setupCluster.js
@@ -0,0 +1,16 @@
+var dbPass = "mysql"
+var clusterName = "devCluster"
+
+try {
+ print('Setting up InnoDB cluster...\n');
+ shell.connect('root@mysql-server-1:3306', dbPass)
+ var cluster = dba.createCluster(clusterName);
+ print('Adding instances to the cluster.');
+ cluster.addInstance({user: "root", host: "mysql-server-2", password: dbPass})
+ print('.');
+ cluster.addInstance({user: "root", host: "mysql-server-3", password: dbPass})
+ print('.\nInstances successfully added to the cluster.');
+ print('\nInnoDB cluster deployed successfully.\n');
+} catch(e) {
+ print('\nThe InnoDB cluster could not be created.\n\nError: ' + e.message + '\n');
+}
diff --git a/config/nginx/nginx.conf b/config/nginx/nginx.conf
new file mode 100644
index 0000000..9222f1c
--- /dev/null
+++ b/config/nginx/nginx.conf
@@ -0,0 +1,14 @@
+upstream backend {
+ server rest:8080;
+}
+
+server {
+ listen 80;
+
+ location / {
+ proxy_pass http://backend/;
+ proxy_request_buffering off;
+ proxy_http_version 1.1;
+ client_max_body_size 0;
+ }
+}
\ No newline at end of file
diff --git a/core/src/main/java/rocks/artur/api/AnalyzePersistFile.java b/core/src/main/java/rocks/artur/api/AnalyzePersistFile.java
index 86f9988..2bd9ea4 100644
--- a/core/src/main/java/rocks/artur/api/AnalyzePersistFile.java
+++ b/core/src/main/java/rocks/artur/api/AnalyzePersistFile.java
@@ -1,6 +1,8 @@
package rocks.artur.api;
-import java.io.File;
+import rocks.artur.api_impl.utils.ByteFile;
+
+import java.util.List;
/**
* This interface enables the following actions:
@@ -8,6 +10,9 @@
* - to persist a characterisation result in a db.
*/
public interface AnalyzePersistFile {
- Long uploadCharacterisationResults(File file);
- Long uploadCharacterisationResults(byte[] file, String filename);
+
+ Long uploadCharacterisationResults(ByteFile file, String datasetName);
+
+ Long uploadCharacterisationResults(List files, String datasetName);
+
}
diff --git a/core/src/main/java/rocks/artur/api/CharacterisationResultProducer.java b/core/src/main/java/rocks/artur/api/CharacterisationResultProducer.java
index 4a0353b..d998ba4 100644
--- a/core/src/main/java/rocks/artur/api/CharacterisationResultProducer.java
+++ b/core/src/main/java/rocks/artur/api/CharacterisationResultProducer.java
@@ -1,5 +1,6 @@
package rocks.artur.api;
+import rocks.artur.api_impl.utils.ByteFile;
import rocks.artur.domain.CharacterisationResult;
import java.io.File;
@@ -15,7 +16,7 @@ public interface CharacterisationResultProducer {
* @return A version of the tool
* @throws IOException
*/
- String getVersion() throws IOException;
+ String getVersion();
/***
*
@@ -25,7 +26,7 @@ public interface CharacterisationResultProducer {
* @return A list of @CharacterisationResult
* @throws IOException
*/
- List processFile(File file) throws IOException;
+ List processFile(File file);
/***
@@ -33,9 +34,8 @@ public interface CharacterisationResultProducer {
* This method extracts metadata properties from a given digital object passed as a byte array.
*
* @param file Input File
- * @param filename
* @return A list of @CharacterisationResult
* @throws IOException
*/
- List processFile(byte[] file, String filename) throws IOException;
+ List processFile(ByteFile file);
}
\ No newline at end of file
diff --git a/core/src/main/java/rocks/artur/api/GetCollectionStatistics.java b/core/src/main/java/rocks/artur/api/GetCollectionStatistics.java
index 4ee5ae3..67719cd 100644
--- a/core/src/main/java/rocks/artur/api/GetCollectionStatistics.java
+++ b/core/src/main/java/rocks/artur/api/GetCollectionStatistics.java
@@ -5,6 +5,6 @@
import java.util.Map;
public interface GetCollectionStatistics {
- Map getStatistics(FilterCriteria filterCriteria);
+ Map getStatistics(FilterCriteria filterCriteria, String datasetName);
}
diff --git a/core/src/main/java/rocks/artur/api/GetDatasetInfo.java b/core/src/main/java/rocks/artur/api/GetDatasetInfo.java
new file mode 100644
index 0000000..be38089
--- /dev/null
+++ b/core/src/main/java/rocks/artur/api/GetDatasetInfo.java
@@ -0,0 +1,7 @@
+package rocks.artur.api;
+
+import java.util.List;
+
+public interface GetDatasetInfo {
+ List listDatasets();
+}
diff --git a/core/src/main/java/rocks/artur/api/GetObjects.java b/core/src/main/java/rocks/artur/api/GetObjects.java
index a6732f8..0adef3b 100644
--- a/core/src/main/java/rocks/artur/api/GetObjects.java
+++ b/core/src/main/java/rocks/artur/api/GetObjects.java
@@ -10,8 +10,8 @@
* This interface enables getting characterisation results.
*/
public interface GetObjects {
- List getObjects(FilterCriteria filterCriteria);
- Iterable getObject(String filePath);
+ List getObjects(FilterCriteria filterCriteria, String datasetName);
+ Iterable getObject(String filePath, String datasetName);
- List getConflictsFromObject(String filePath);
+ List getConflictsFromObject(String filePath, String datasetName);
}
diff --git a/core/src/main/java/rocks/artur/api/GetProperties.java b/core/src/main/java/rocks/artur/api/GetProperties.java
index b371c5e..2c6239c 100644
--- a/core/src/main/java/rocks/artur/api/GetProperties.java
+++ b/core/src/main/java/rocks/artur/api/GetProperties.java
@@ -9,6 +9,6 @@
* This interface enables getting a property distribution.
*/
public interface GetProperties {
- List getProperties();
- List getProperties(FilterCriteria filter);
+ List getProperties(String datasetName);
+ List getProperties(FilterCriteria filter, String datasetName);
}
diff --git a/core/src/main/java/rocks/artur/api/GetPropertyValueDistribution.java b/core/src/main/java/rocks/artur/api/GetPropertyValueDistribution.java
index 54733bb..01566ba 100644
--- a/core/src/main/java/rocks/artur/api/GetPropertyValueDistribution.java
+++ b/core/src/main/java/rocks/artur/api/GetPropertyValueDistribution.java
@@ -11,5 +11,5 @@
* This interface enables getting a property value distribution given a property name.
*/
public interface GetPropertyValueDistribution {
- List getPropertyValueDistribution(Property propertyName, FilterCriteria filterCriteria);
+ List getPropertyValueDistribution(Property propertyName, FilterCriteria filterCriteria, String datasetName);
}
diff --git a/core/src/main/java/rocks/artur/api/GetPropertyValueDistributionWithFilter.java b/core/src/main/java/rocks/artur/api/GetPropertyValueDistributionWithFilter.java
index 1b1ebac..17b5173 100644
--- a/core/src/main/java/rocks/artur/api/GetPropertyValueDistributionWithFilter.java
+++ b/core/src/main/java/rocks/artur/api/GetPropertyValueDistributionWithFilter.java
@@ -8,5 +8,5 @@
* This interface enables getting a property value distribution given a filter.
*/
public interface GetPropertyValueDistributionWithFilter {
- List getPropertyValueDistributionWithFilter(String propertyName, String filter);
+ List getPropertyValueDistributionWithFilter(String propertyName, String filter, String datasetName);
}
diff --git a/core/src/main/java/rocks/artur/api/GetSamples.java b/core/src/main/java/rocks/artur/api/GetSamples.java
index 4cc9506..96a80c2 100644
--- a/core/src/main/java/rocks/artur/api/GetSamples.java
+++ b/core/src/main/java/rocks/artur/api/GetSamples.java
@@ -15,7 +15,7 @@ public interface GetSamples {
void setProperties(List properties);
- Iterable getObjects(FilterCriteria filterCriteria);
+ Iterable getObjects(FilterCriteria filterCriteria, String datasetName);
- List getSamplingInfo(FilterCriteria filterCriteria);
+ List getSamplingInfo(FilterCriteria filterCriteria, String datasetName);
}
diff --git a/core/src/main/java/rocks/artur/api/GetSources.java b/core/src/main/java/rocks/artur/api/GetSources.java
index b4b18c6..bd1e5c5 100644
--- a/core/src/main/java/rocks/artur/api/GetSources.java
+++ b/core/src/main/java/rocks/artur/api/GetSources.java
@@ -6,5 +6,5 @@
* This interface enables getting a property distribution.
*/
public interface GetSources {
- List getSources();
+ List getSources(String datasetName);
}
diff --git a/core/src/main/java/rocks/artur/api/ResolveConflicts.java b/core/src/main/java/rocks/artur/api/ResolveConflicts.java
index a08ea5c..11126b7 100644
--- a/core/src/main/java/rocks/artur/api/ResolveConflicts.java
+++ b/core/src/main/java/rocks/artur/api/ResolveConflicts.java
@@ -1,5 +1,5 @@
package rocks.artur.api;
public interface ResolveConflicts {
- void run();
+ void run(String datasetName);
}
diff --git a/core/src/main/java/rocks/artur/api_impl/AnalyzePersistFileImpl.java b/core/src/main/java/rocks/artur/api_impl/AnalyzePersistFileImpl.java
index 2399727..36f471a 100644
--- a/core/src/main/java/rocks/artur/api_impl/AnalyzePersistFileImpl.java
+++ b/core/src/main/java/rocks/artur/api_impl/AnalyzePersistFileImpl.java
@@ -2,11 +2,12 @@
import rocks.artur.api.AnalyzePersistFile;
import rocks.artur.api.CharacterisationResultProducer;
+import rocks.artur.api_impl.utils.ByteFile;
import rocks.artur.domain.CharacterisationResult;
import rocks.artur.domain.CharacterisationResultGateway;
-import java.io.File;
-import java.io.IOException;
+
+import java.util.ArrayList;
import java.util.List;
public class AnalyzePersistFileImpl implements AnalyzePersistFile {
@@ -20,24 +21,20 @@ public AnalyzePersistFileImpl(CharacterisationResultProducer characterisationRes
}
@Override
- public Long uploadCharacterisationResults(File file) {
- try {
- List characterisationResults = characterisationResultProducer.processFile(file);
- characterisationResults.forEach(item -> characterisationResultGateway.addCharacterisationResult(item));
- return Long.valueOf(characterisationResults.size());
- } catch (IOException e) {
- throw new RuntimeException(e);
- }
+ public Long uploadCharacterisationResults(ByteFile file, String datasetName) {
+ List characterisationResults = characterisationResultProducer.processFile(file);
+ characterisationResultGateway.addCharacterisationResults(characterisationResults, datasetName);
+ return Long.valueOf(characterisationResults.size());
}
@Override
- public Long uploadCharacterisationResults(byte[] file, String filename) {
- try {
- List characterisationResults = characterisationResultProducer.processFile(file, filename);
- characterisationResultGateway.addCharacterisationResults(characterisationResults);
- return Long.valueOf(characterisationResults.size());
- } catch (IOException e) {
- throw new RuntimeException(e);
- }
+ public Long uploadCharacterisationResults(List files, String datasetName) {
+ List characterisationResults = new ArrayList<>();
+ files.stream().forEach(file -> {
+ List tmp = characterisationResultProducer.processFile(file);
+ characterisationResults.addAll(tmp);
+ });
+ characterisationResultGateway.addCharacterisationResults(characterisationResults, datasetName);
+ return Long.valueOf(characterisationResults.size());
}
}
diff --git a/core/src/main/java/rocks/artur/api_impl/CRH_ResolveConflictsImpl.java b/core/src/main/java/rocks/artur/api_impl/CRH_ResolveConflictsImpl.java
new file mode 100644
index 0000000..ec76608
--- /dev/null
+++ b/core/src/main/java/rocks/artur/api_impl/CRH_ResolveConflictsImpl.java
@@ -0,0 +1,153 @@
+package rocks.artur.api_impl;
+
+import rocks.artur.api.ResolveConflicts;
+import rocks.artur.domain.CharacterisationResult;
+import rocks.artur.domain.CharacterisationResultGateway;
+import rocks.artur.domain.Entry;
+
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.Optional;
+import java.util.function.Function;
+import java.util.stream.Collectors;
+
+public class CRH_ResolveConflictsImpl {//implements ResolveConflicts {
+
+
+ private CharacterisationResultGateway characterisationResultGateway;
+
+ public CRH_ResolveConflictsImpl(CharacterisationResultGateway characterisationResultGateway) {
+ this.characterisationResultGateway = characterisationResultGateway;
+ }
+
+
+ public void run(String datasetName) {
+ init(datasetName);
+ System.out.println(sourceWeights);
+ //System.out.println("sum of weights: " + sourceWeights.values().stream().reduce(0d, Double::sum));
+ updateTruth(datasetName);
+ System.out.println("sum of weights: " + sourceWeights.values().stream().reduce(0d, Double::sum));
+ //System.out.println(truth);
+ for (int i = 0; i < 3; i++) {
+ updateWeights(datasetName);
+ System.out.println(sourceWeights);
+ System.out.println("sum of weights: " + sourceWeights.values().stream().reduce(0d, Double::sum));
+ updateTruth(datasetName);
+ //System.out.println(truth);
+ }
+
+ resolveConflicts(datasetName);
+ }
+
+ private void resolveConflicts(String datasetName) {
+ truth.entrySet().stream().forEach( entry -> {
+ Entry key = entry.getKey();
+ String value = entry.getValue();
+
+ List characterisationResultsByEntry = characterisationResultGateway.getCharacterisationResultsByEntry(key, datasetName);
+ for (CharacterisationResult characterisationResult : characterisationResultsByEntry) {
+ if (!characterisationResult.getValue().equals(value)) {
+ characterisationResultGateway.delete(characterisationResult, datasetName);
+ }
+ }
+
+
+ });
+ }
+
+ private void updateWeights(String datasetName) {
+ Map score = sources.stream().collect(Collectors.toMap(
+ Function.identity(),
+ s -> 0.0));
+
+ Map count = sources.stream().collect(Collectors.toMap(
+ Function.identity(),
+ s -> 0.0));
+
+
+ List entries = characterisationResultGateway.getEntries(datasetName);
+
+ for (Entry entry : entries) {
+ List characterisationResults = characterisationResultGateway.getCharacterisationResultsByEntry(entry, datasetName);
+
+ for (CharacterisationResult characterisationResult : characterisationResults) {
+
+ String trueValue = truth.get(entry);
+
+ String value = characterisationResult.getValue();
+ String source = characterisationResult.getSource();
+ if (value.equals(trueValue)) {
+ score.put(source, score.getOrDefault(source, 0.0) + 0);
+ } else {
+ score.put(source, score.getOrDefault(source, 0.0) + 1);
+ }
+ count.put(source, count.getOrDefault(source, 0.0) + 1);
+ }
+ }
+ for (String source : score.keySet()) {
+ Double countSource = count.getOrDefault(source, 1.0);
+ if (countSource == 0 ) {
+ score.put(source, 0d);
+ } else {
+ score.put(source, score.get(source) / countSource);
+ }
+ }
+ Double sum = score.values().stream().reduce(0.0, (a, b) -> a + b);
+
+ score.replaceAll((s, v) -> score.get(s) / sum);
+
+ Optional> max = score.entrySet().stream().max(Map.Entry.comparingByValue());
+ if (max.isPresent()) {
+ Double norm_score = max.get().getValue();
+ for (String source : score.keySet()) {
+ double w = score.get(source) / norm_score;
+ Double weig = score.get(source);
+ if (w == 0d) {
+ sourceWeights.put(source,0.00001);
+ } else {
+ sourceWeights.put(source, -Math.log(w));
+ }
+ }
+ }
+ }
+
+ private void updateTruth(String datasetName) {
+ List entries = characterisationResultGateway.getEntries(datasetName);
+ for (Entry entry : entries) {
+ List characterisationResults = characterisationResultGateway.getCharacterisationResultsByEntry(entry, datasetName);
+
+ if (characterisationResults.size() > 0) {
+ CharacterisationResult firstResult = characterisationResults.get(0);
+ Map votingScores = new HashMap<>();
+ for (CharacterisationResult characterisationResult : characterisationResults) {
+ String source = characterisationResult.getSource();
+ Double sourceWeight = sourceWeights.get(source);
+ String value = characterisationResult.getValue();
+
+ votingScores.put(value, votingScores.getOrDefault(value, 0.0) + sourceWeight);
+ }
+ Optional> first = votingScores.entrySet().stream().max(Map.Entry.comparingByValue());
+ if (first.isPresent()) {
+ String trueValue = first.get().getKey();
+ truth.put(entry, trueValue);
+ }
+ }
+ }
+ }
+
+ List sources;
+ Map sourceWeights;
+ Map truth;
+
+ void init(String datasetName) {
+
+ sources = characterisationResultGateway.getSources(datasetName);
+ sourceWeights = sources.stream().collect(Collectors.toMap(
+ Function.identity(),
+ s -> 1.0 / sources.size()));
+ truth = new HashMap<>();
+
+
+ }
+}
diff --git a/core/src/main/java/rocks/artur/api_impl/GetCollectionStatisticsImpl.java b/core/src/main/java/rocks/artur/api_impl/GetCollectionStatisticsImpl.java
index ea97869..10bcd1c 100644
--- a/core/src/main/java/rocks/artur/api_impl/GetCollectionStatisticsImpl.java
+++ b/core/src/main/java/rocks/artur/api_impl/GetCollectionStatisticsImpl.java
@@ -15,8 +15,8 @@ public GetCollectionStatisticsImpl(CharacterisationResultGateway characterisatio
}
@Override
- public Map getStatistics(FilterCriteria filterCriteria) {
- Map sizeStatistics = characterisationResultGateway.getCollectionStatistics(filterCriteria);
+ public Map getStatistics(FilterCriteria filterCriteria, String datasetName) {
+ Map sizeStatistics = characterisationResultGateway.getCollectionStatistics(filterCriteria, datasetName);
return sizeStatistics;
}
diff --git a/core/src/main/java/rocks/artur/api_impl/GetDatasetInfoImpl.java b/core/src/main/java/rocks/artur/api_impl/GetDatasetInfoImpl.java
new file mode 100644
index 0000000..a45f302
--- /dev/null
+++ b/core/src/main/java/rocks/artur/api_impl/GetDatasetInfoImpl.java
@@ -0,0 +1,19 @@
+package rocks.artur.api_impl;
+
+import rocks.artur.api.GetDatasetInfo;
+import rocks.artur.domain.CharacterisationResultGateway;
+
+import java.util.List;
+
+public class GetDatasetInfoImpl implements GetDatasetInfo {
+ private CharacterisationResultGateway characterisationResultGateway;
+
+ public GetDatasetInfoImpl(CharacterisationResultGateway characterisationResultGateway) {
+ this.characterisationResultGateway = characterisationResultGateway;
+ }
+
+ @Override
+ public List<String> listDatasets() {
+ return this.characterisationResultGateway.listDatasets();
+ }
+}
diff --git a/core/src/main/java/rocks/artur/api_impl/GetObjectsImpl.java b/core/src/main/java/rocks/artur/api_impl/GetObjectsImpl.java
index 61d2585..bffbbb2 100644
--- a/core/src/main/java/rocks/artur/api_impl/GetObjectsImpl.java
+++ b/core/src/main/java/rocks/artur/api_impl/GetObjectsImpl.java
@@ -16,20 +16,20 @@ public GetObjectsImpl(CharacterisationResultGateway characterisationResultGatewa
}
@Override
- public List getObjects(FilterCriteria filterCriteria) {
- List objects = characterisationResultGateway.getObjects(filterCriteria);
+ public List getObjects(FilterCriteria filterCriteria, String datasetName) {
+ List objects = characterisationResultGateway.getObjects(filterCriteria, datasetName);
return objects;
}
@Override
- public Iterable getObject(String filePath) {
- Iterable characterisationResultsByFilepath = characterisationResultGateway.getCharacterisationResultsByFilepath(filePath);
+ public Iterable getObject(String filePath, String datasetName) {
+ Iterable characterisationResultsByFilepath = characterisationResultGateway.getCharacterisationResultsByFilepath(filePath, datasetName);
return characterisationResultsByFilepath;
}
@Override
- public List getConflictsFromObject(String filePath) {
- List characterisationResultsByFilepath = characterisationResultGateway.getConflictsByFilepath(filePath);
+ public List getConflictsFromObject(String filePath, String datasetName) {
+ List characterisationResultsByFilepath = characterisationResultGateway.getConflictsByFilepath(filePath, datasetName);
return characterisationResultsByFilepath;
}
diff --git a/core/src/main/java/rocks/artur/api_impl/GetPropertiesImpl.java b/core/src/main/java/rocks/artur/api_impl/GetPropertiesImpl.java
index f1839d9..4c6d0a3 100644
--- a/core/src/main/java/rocks/artur/api_impl/GetPropertiesImpl.java
+++ b/core/src/main/java/rocks/artur/api_impl/GetPropertiesImpl.java
@@ -15,14 +15,14 @@ public GetPropertiesImpl(CharacterisationResultGateway characterisationResultGat
}
@Override
- public List getProperties() {
- List propertyDistribution = characterisationResultGateway.getPropertyDistribution(null);
+ public List getProperties(String datasetName) {
+ List propertyDistribution = characterisationResultGateway.getPropertyDistribution(null, datasetName);
return propertyDistribution;
}
@Override
- public List getProperties(FilterCriteria filter) {
- List propertyDistribution = characterisationResultGateway.getPropertyDistribution(filter);
+ public List getProperties(FilterCriteria filter, String datasetName) {
+ List propertyDistribution = characterisationResultGateway.getPropertyDistribution(filter, datasetName);
return propertyDistribution;
}
}
diff --git a/core/src/main/java/rocks/artur/api_impl/GetPropertyValueDistributionImpl.java b/core/src/main/java/rocks/artur/api_impl/GetPropertyValueDistributionImpl.java
index 792c79e..86f7ce9 100644
--- a/core/src/main/java/rocks/artur/api_impl/GetPropertyValueDistributionImpl.java
+++ b/core/src/main/java/rocks/artur/api_impl/GetPropertyValueDistributionImpl.java
@@ -18,8 +18,8 @@ public GetPropertyValueDistributionImpl(CharacterisationResultGateway characteri
@Override
- public List getPropertyValueDistribution(Property property, FilterCriteria filterCriteria) {
- List valueDistributionByProperty = characterisationResultGateway.getPropertyValueDistribution(property, filterCriteria);
+ public List getPropertyValueDistribution(Property property, FilterCriteria filterCriteria, String datasetName) {
+ List valueDistributionByProperty = characterisationResultGateway.getPropertyValueDistribution(property, filterCriteria, datasetName);
return valueDistributionByProperty;
}
}
diff --git a/core/src/main/java/rocks/artur/api_impl/GetSamplesImpl.java b/core/src/main/java/rocks/artur/api_impl/GetSamplesImpl.java
index 9df1966..0e3b98e 100644
--- a/core/src/main/java/rocks/artur/api_impl/GetSamplesImpl.java
+++ b/core/src/main/java/rocks/artur/api_impl/GetSamplesImpl.java
@@ -34,8 +34,8 @@ public void setProperties(List properties) {
}
@Override
- public List getObjects(FilterCriteria filterCriteria) {
- List samplingResults = characterisationResultGateway.getSamples(filterCriteria, algorithm, properties);
+ public List getObjects(FilterCriteria filterCriteria, String datasetName) {
+ List samplingResults = characterisationResultGateway.getSamples(filterCriteria, algorithm, properties, datasetName);
List results = new ArrayList<>();
switch (algorithm) {
@@ -50,8 +50,8 @@ public List getObjects(FilterCriteria filterCriteria) {
}
@Override
- public List getSamplingInfo(FilterCriteria filterCriteria) {
- List samplingResults = characterisationResultGateway.getSamples(filterCriteria, algorithm, properties);
+ public List getSamplingInfo(FilterCriteria filterCriteria, String datasetName) {
+ List samplingResults = characterisationResultGateway.getSamples(filterCriteria, algorithm, properties, datasetName);
return samplingResults;
}
diff --git a/core/src/main/java/rocks/artur/api_impl/GetSourcesImpl.java b/core/src/main/java/rocks/artur/api_impl/GetSourcesImpl.java
index 977ccee..c1631b6 100644
--- a/core/src/main/java/rocks/artur/api_impl/GetSourcesImpl.java
+++ b/core/src/main/java/rocks/artur/api_impl/GetSourcesImpl.java
@@ -13,8 +13,8 @@ public GetSourcesImpl(CharacterisationResultGateway characterisationResultGatewa
}
@Override
- public List getSources() {
- List sources = characterisationResultGateway.getSources();
+ public List getSources(String datasetName) {
+ List sources = characterisationResultGateway.getSources(datasetName);
return sources;
}
}
diff --git a/core/src/main/java/rocks/artur/api_impl/Native_ResolveConflictsImpl.java b/core/src/main/java/rocks/artur/api_impl/Native_ResolveConflictsImpl.java
new file mode 100644
index 0000000..2a9581c
--- /dev/null
+++ b/core/src/main/java/rocks/artur/api_impl/Native_ResolveConflictsImpl.java
@@ -0,0 +1,16 @@
+package rocks.artur.api_impl;
+
+import rocks.artur.api.ResolveConflicts;
+import rocks.artur.domain.CharacterisationResultGateway;
+
+public class Native_ResolveConflictsImpl implements ResolveConflicts {
+ private CharacterisationResultGateway characterisationResultGateway;
+
+ public Native_ResolveConflictsImpl(CharacterisationResultGateway characterisationResultGateway) {
+ this.characterisationResultGateway = characterisationResultGateway;
+ }
+ @Override
+ public void run(String datasetName) {
+ characterisationResultGateway.resolveConflictsNative(datasetName);
+ }
+}
diff --git a/core/src/main/java/rocks/artur/api_impl/filter/FilterOperation.java b/core/src/main/java/rocks/artur/api_impl/filter/FilterOperation.java
index d510842..058bd2d 100644
--- a/core/src/main/java/rocks/artur/api_impl/filter/FilterOperation.java
+++ b/core/src/main/java/rocks/artur/api_impl/filter/FilterOperation.java
@@ -1,6 +1,5 @@
package rocks.artur.api_impl.filter;
-import rocks.artur.domain.ValueType;
public enum FilterOperation {
LESS("<"), LESS_OR_EQUAL ("<="),
diff --git a/core/src/main/java/rocks/artur/api_impl/utils/ByteFile.java b/core/src/main/java/rocks/artur/api_impl/utils/ByteFile.java
new file mode 100644
index 0000000..748a148
--- /dev/null
+++ b/core/src/main/java/rocks/artur/api_impl/utils/ByteFile.java
@@ -0,0 +1,27 @@
+package rocks.artur.api_impl.utils;
+
+public class ByteFile {
+ byte[] file;
+ String filename;
+
+ public ByteFile(byte[] file, String filename) {
+ this.file = file;
+ this.filename = filename;
+ }
+
+ public byte[] getFile() {
+ return file;
+ }
+
+ public String getFilename() {
+ return filename;
+ }
+
+ public void setFile(byte[] file) {
+ this.file = file;
+ }
+
+ public void setFilename(String filename) {
+ this.filename = filename;
+ }
+}
diff --git a/core/src/main/java/rocks/artur/domain/CharacterisationResultGateway.java b/core/src/main/java/rocks/artur/domain/CharacterisationResultGateway.java
index f9a86bc..ba2c7fc 100644
--- a/core/src/main/java/rocks/artur/domain/CharacterisationResultGateway.java
+++ b/core/src/main/java/rocks/artur/domain/CharacterisationResultGateway.java
@@ -18,14 +18,14 @@ public interface CharacterisationResultGateway {
*
* @param characterisationResult
*/
- void addCharacterisationResult(CharacterisationResult characterisationResult);
+ void addCharacterisationResult(CharacterisationResult characterisationResult, String datasetName);
/**
* gets all characterisation results
*
* @return an iterable of all results stored in the DB.
*/
- List getCharacterisationResults(FilterCriteria filter);
+ List getCharacterisationResults(FilterCriteria filter, String datasetName);
/**
* gets a distribution of all properties that match the given filter criteria.
@@ -33,56 +33,60 @@ public interface CharacterisationResultGateway {
* @param filter a filter criteria
* @return a list of property statistics
*/
- List getPropertyDistribution(FilterCriteria filter);
+ List getPropertyDistribution(FilterCriteria filter, String datasetName);
/**
* gets characterisation results describing a digital object identified by the given file path.
*
* @return an iterable of characterisation results.
*/
- List getCharacterisationResultsByFilepath(String filePath);
+ List getCharacterisationResultsByFilepath(String filePath, String datasetName);
- List getCharacterisationResultsByEntry(Entry entry);
+ List getCharacterisationResultsByEntry(Entry entry, String datasetName);
- List getConflictEntries();
+ List getConflictEntries(String datasetName);
- List getEntries();
+ List getEntries(String datasetName);
/**
* gets a list of characterisation results with conflicts for a given digital object.
*
* @return an iterable of characterisation results.
*/
- List getConflictsByFilepath(String filepath);
+ List getConflictsByFilepath(String filepath, String datasetName);
- Map getCollectionStatistics(FilterCriteria filterCriteria);
+ Map getCollectionStatistics(FilterCriteria filterCriteria, String datasetName);
- List getPropertyValueDistribution(Property property, FilterCriteria filter);
+ List getPropertyValueDistribution(Property property, FilterCriteria filter, String datasetName);
/**
* gets a list of sources of characterisation results.
*
* @return an iterable of characterisation result sources.
*/
- List getSources();
+ List getSources(String datasetName);
/**
* gets a list of objects.
*
* @return an iterable of PropertiesPerObjectStatistic.
*/
- List getObjects(FilterCriteria filterCriteria);
+ List getObjects(FilterCriteria filterCriteria, String datasetName);
/**
* gets a list of samples.
*
* @return an iterable of PropertiesPerObjectStatistic.
*/
- List getSamples(FilterCriteria filterCriteria, SamplingAlgorithms algorithm, List properties);
+ List getSamples(FilterCriteria filterCriteria, SamplingAlgorithms algorithm, List properties, String datasetName);
- void addCharacterisationResults(List characterisationResults);
+ void addCharacterisationResults(List characterisationResults, String datasetName);
- double getConflictRate();
+ double getConflictRate(String datasetName);
- void delete(CharacterisationResult characterisationResult);
+ void delete(CharacterisationResult characterisationResult, String datasetName);
+
+ void resolveConflictsNative(String datasetName);
+
+ List<String> listDatasets();
}
diff --git a/core/src/main/java/rocks/artur/domain/Property.java b/core/src/main/java/rocks/artur/domain/Property.java
index e607ce1..bb3b730 100644
--- a/core/src/main/java/rocks/artur/domain/Property.java
+++ b/core/src/main/java/rocks/artur/domain/Property.java
@@ -7,13 +7,9 @@ public enum Property {
FORMAT(ValueType.STRING),
FORMAT_VERSION(ValueType.STRING),
MIMETYPE(ValueType.STRING),
- FILENAME(ValueType.STRING),
- AUTHOR(ValueType.STRING),
EXTERNALIDENTIFIER(ValueType.STRING),
SIZE(ValueType.INTEGER),
- MD5CHECKSUM(ValueType.STRING),
FSLASTMODIFIED(ValueType.TIMESTAMP),
- FILEPATH(ValueType.STRING),
CREATED(ValueType.TIMESTAMP),
LASTMODIFIED(ValueType.TIMESTAMP),
CREATINGAPPLICATIONVERSION(ValueType.STRING),
@@ -26,15 +22,12 @@ public enum Property {
WELLFORMED(ValueType.STRING),
- MESSAGE(ValueType.STRING),
-
LINEBREAK(ValueType.STRING),
CHARSET(ValueType.STRING),
PAGECOUNT(ValueType.INTEGER),
WORDCOUNT(ValueType.INTEGER),
CHARACTERCOUNT(ValueType.INTEGER),
HASANNOTATIONS(ValueType.STRING),
- TITLE(ValueType.STRING),
ISTAGGED(ValueType.STRING),
HASFORMS(ValueType.STRING),
HASOUTLINE(ValueType.STRING),
diff --git a/docker-compose.clickhouse.dev.yaml b/docker-compose.clickhouse.dev.yaml
new file mode 100644
index 0000000..835e06e
--- /dev/null
+++ b/docker-compose.clickhouse.dev.yaml
@@ -0,0 +1,105 @@
+version: '3'
+
+services:
+
+ fits:
+ build:
+ context: ./fits
+ dockerfile: ./Dockerfile
+ container_name: fits
+ env_file: .env
+ networks:
+ - web
+ restart: unless-stopped
+ ports:
+ - 8081:8080
+
+ rest:
+ build:
+ context: .
+ dockerfile: ./Dockerfile
+ env_file: .env
+ networks:
+ - web
+ restart: unless-stopped
+ environment:
+ - DB_SELECTOR=clickhouse
+ deploy:
+ replicas: 1
+ ports:
+ - 8092:8080
+ depends_on:
+ - fits
+ - db-docker
+
+ web:
+ build:
+ context: ./web
+ dockerfile: ./Dockerfile
+ container_name: web
+ env_file: .env
+ networks:
+ - web
+ restart: unless-stopped
+ ports:
+ - 8080:3000
+
+ db-docker:
+ image: yandex/clickhouse-server
+ container_name: db-docker
+ networks:
+ - web
+ ports:
+ - 8123:8123
+ - 9000:9000
+ - 9004:9004
+
+
+ db-docker-init:
+ image: yandex/clickhouse-server
+ container_name: db-docker-init
+ volumes:
+ - ./config/clickhouse:/var/clickhouse
+ depends_on:
+ - db-docker
+ networks:
+ - web
+ entrypoint: [ '/bin/sh', '-c' ]
+ command: |
+ "
+ while ! clickhouse-client --host db-docker -q \"SHOW databases;\"; do
+ echo waiting for clickhouse up
+ sleep 1
+ done
+
+ clickhouse-client --host db-docker --queries-file /var/clickhouse/initdb.sql
+
+ tail -f /dev/null
+ "
+
+
+ adminer:
+ image: adminer
+ container_name: adminer
+ env_file: .env
+ restart: unless-stopped
+ networks:
+ - web
+ ports:
+ - 8090:8080
+
+ nginx:
+ image: nginx
+ container_name: nginx
+ env_file: .env
+ volumes:
+ - ./config/nginx/nginx.conf:/etc/nginx/conf.d/default.conf
+ ports:
+ - 8082:80
+ networks:
+ - web
+ depends_on:
+ - rest
+
+networks:
+ web:
\ No newline at end of file
diff --git a/docker-compose.yaml b/docker-compose.yaml
index 09c02ce..c464759 100644
--- a/docker-compose.yaml
+++ b/docker-compose.yaml
@@ -3,7 +3,7 @@ version: '3'
services:
fits:
- image: artourkin/fits-web:release-0.8
+ image: artourkin/fits-web:main
container_name: fits
env_file: .env
networks:
@@ -13,7 +13,7 @@ services:
- 8081:8080
rest:
- image: artourkin/fitsinn-rest:release-0.8
+ image: artourkin/fitsinn-rest:main
container_name: rest
env_file: .env
networks:
@@ -26,7 +26,7 @@ services:
- db-docker
web:
- image: artourkin/fitsinn-web:release-0.8
+ image: artourkin/fitsinn-web:main
container_name: web
env_file: .env
networks:
diff --git a/fits-client/src/main/java/rocks/artur/FITSClient/FITSClient.java b/fits-client/src/main/java/rocks/artur/FITSClient/FITSClient.java
index e0be439..fa0a57f 100644
--- a/fits-client/src/main/java/rocks/artur/FITSClient/FITSClient.java
+++ b/fits-client/src/main/java/rocks/artur/FITSClient/FITSClient.java
@@ -16,11 +16,15 @@
import org.xml.sax.SAXException;
import rocks.artur.FITSObjects.FITSPropertyJsonPath;
import rocks.artur.api.CharacterisationResultProducer;
+import rocks.artur.api_impl.utils.ByteFile;
import rocks.artur.domain.CharacterisationResult;
import rocks.artur.domain.Property;
+import rocks.artur.domain.ValueType;
import rocks.artur.utils.JSONToolkit;
+import rocks.artur.utils.STAXToolkit;
import javax.xml.XMLConstants;
+import javax.xml.stream.XMLStreamException;
import javax.xml.transform.Source;
import javax.xml.transform.stream.StreamSource;
import javax.xml.validation.Schema;
@@ -29,12 +33,13 @@
import java.io.ByteArrayInputStream;
import java.io.File;
import java.io.IOException;
-import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.List;
-import java.util.Set;
+import java.time.Instant;
+import java.time.LocalDateTime;
+import java.time.format.DateTimeFormatter;
+import java.time.format.DateTimeFormatterBuilder;
+import java.time.format.DateTimeParseException;
+import java.util.*;
import java.util.stream.Collectors;
//@ApplicationScoped
@@ -43,12 +48,25 @@ public class FITSClient implements CharacterisationResultProducer {
List knownProperties = Arrays.stream(FITSPropertyJsonPath.values()).map(Enum::name).collect(Collectors.toList());
private String FITS_URL = "http://localhost:8888";
+
+ static DateTimeFormatter outputFormatter = DateTimeFormatter.ofPattern("yyyy-MM-dd HH:mm:ss");
+
+ static DateTimeFormatter inputFormatter = new DateTimeFormatterBuilder()
+ .appendPattern("[yyyy:MM:dd HH:mm:ssXXX][yyyy:MM:dd HH:mm:ss][yyyy:MM:dd HH:mmXXX][yyyy-MM-dd HH:mm:ss][yyyy/MM/dd HH:mm:ss]")
+ .toFormatter();
+
@Override
- public String getVersion() throws IOException {
+ public String getVersion(){
CloseableHttpClient httpclient = HttpClients.createDefault();
HttpGet httpGet = new HttpGet(getFITS_URL() + "/version");
- return getString(httpclient.execute(httpGet));
+ try {
+ return getString(httpclient.execute(httpGet));
+ } catch (IOException e) {
+ LOG.error("Exception occurred when querying the FITS version");
+ e.printStackTrace();
+ }
+ return "";
}
@@ -74,46 +92,88 @@ public boolean isValid(byte[] file) {
}
}
- public List processFile(byte[] file, String filename) throws IOException {
-
- if (isValid(file)) {
- try {
- String fitsSTRING = new String(file, StandardCharsets.UTF_8);
- return extractCharacterisationResults(fitsSTRING);
- } catch (JSONException e) {
- throw new RuntimeException(e);
- }
- } else {
+ public boolean isValid(String content) {
+ return content.contains("xmlns=\"http://hul.harvard.edu/ois/xml/ns/fits/fits_output\"");
+ }
- CloseableHttpClient httpclient = HttpClients.createDefault();
- HttpPost httppost = new HttpPost(getFITS_URL() + "/fits/examine");
+ @Override
+ public List<CharacterisationResult> processFile(File file) {
+ String fileName = file.getName();
+ byte[] fileContent = new byte[0];
+ try {
+ fileContent = Files.readAllBytes(file.toPath());
+ } catch (IOException e) {
+ LOG.error("Exception occurred during file processing");
+ e.printStackTrace();
+ }
+ ByteFile byteFile = new ByteFile(fileContent, fileName);
+ return processFile(byteFile);
+ }
- ByteArrayBody body = new ByteArrayBody(file, filename);
+ @Override
+ public List<CharacterisationResult> processFile(ByteFile file) {
+ ArrayList<CharacterisationResult> result = new ArrayList<>();
+ if (file.getFile().length == 0) {
+ return result;
+ }
+ try {
+ String content = new String(file.getFile());
+ if (!isValid(content)) {
+ CloseableHttpClient httpclient = HttpClients.createDefault();
+ HttpPost httppost = new HttpPost(getFITS_URL() + "/fits/examine");
+ ByteArrayBody body = new ByteArrayBody(file.getFile(), file.getFilename());
+ MultipartEntityBuilder builder = MultipartEntityBuilder.create();
+ builder.addPart("datafile", body);
+ HttpEntity reqEntity = builder.build();
+ httppost.setEntity(reqEntity);
+ CloseableHttpResponse response = httpclient.execute(httppost);
+
+ content = getString(response);
+ LOG.debug(content);
+ }
+ result.addAll(extractCharacterisationResultsStax(content));
+ } catch (Exception e) {
+ LOG.error("Exception occurred during FITS file parsing");
+ e.printStackTrace();
+ }
- MultipartEntityBuilder builder = MultipartEntityBuilder.create();
- builder.addPart("datafile", body);
- HttpEntity reqEntity = builder.build();
- httppost.setEntity(reqEntity);
+ result=this.fixDateTypes(result);
+ return result;
+ }
- CloseableHttpResponse response = httpclient.execute(httppost);
- String fitsResultXML = getString(response);
- LOG.debug(fitsResultXML);
- try {
- return extractCharacterisationResults(fitsResultXML);
- } catch (JSONException e) {
- throw new RuntimeException(e);
+ private ArrayList<CharacterisationResult> fixDateTypes(ArrayList<CharacterisationResult> result) {
+ result.stream().forEach(item -> {
+ if (item.getValueType().equals(ValueType.TIMESTAMP)){
+ String value = item.getValue();
+ LOG.debug(String.format("Parsing Object: %s", item));
+ if (item.getSource().startsWith("OIS File Information")) {
+ LocalDateTime parsed =
+ LocalDateTime.ofInstant(Instant.ofEpochMilli(Long.parseLong(value)),
+ TimeZone.getDefault().toZoneId());
+ item.setValue(parsed.format(outputFormatter));
+ } else {
+ LocalDateTime parsed = tryParseLocalDateTime(value, inputFormatter);
+ if (parsed != null) {
+ item.setValue(parsed.format(outputFormatter));
+ } else {
+ item.setValue(null);
+ }
+ }
+ LOG.debug(String.format("Parsed Result: %s", item));
}
- }
+ });
+ return result;
}
- @Override
- public List processFile(File file) throws IOException {
- String fileName = file.getName();
- byte[] fileContent = Files.readAllBytes(file.toPath());
- return processFile(fileContent, fileName);
+ LocalDateTime tryParseLocalDateTime(String datetimeString, DateTimeFormatter formatter) {
+ try {
+ return LocalDateTime.parse(datetimeString, formatter);
+ } catch (DateTimeParseException e) {
+ return null;
+ }
}
- private List extractCharacterisationResults(String fitsResultXML) throws JSONException {
+ List<CharacterisationResult> extractCharacterisationResults(String fitsResultXML) throws JSONException {
List results = new ArrayList<>();
String fitsResultJSON = JSONToolkit.translateXML(fitsResultXML);
Set availableFitsProperties = JSONToolkit.getAvailableFitsProperties(fitsResultJSON);
@@ -133,14 +193,20 @@ private List extractCharacterisationResults(String fitsR
JSONToolkit.getCharacterisationResults(FITSPropertyJsonPath.IDENTIFICATION, fitsResultJSON);
results.addAll(characterisationResults);
- String filepath = results.stream().filter(result ->
- result.getProperty().equals(Property.FILEPATH)).findFirst().get().getValue().toString();
- addFilepathLabel(results, filepath);
+ //String filepath = results.stream().filter(result ->
+ // result.getProperty().equals(Property.FILEPATH)).findFirst().get().getValue().toString();
+ //addFilepathLabel(results, filepath);
return results;
}
+ List<CharacterisationResult> extractCharacterisationResultsStax(String fitsResultXML) throws XMLStreamException {
+ STAXToolkit staxToolkit = new STAXToolkit();
+ return staxToolkit.getCharacterisationResults(fitsResultXML);
+
+ }
+
private void addFilepathLabel(List characterisationResults, String filepath) {
characterisationResults.stream().forEach(result -> result.setFilePath(filepath));
diff --git a/fits-client/src/main/java/rocks/artur/utils/JSONToolkit.java b/fits-client/src/main/java/rocks/artur/utils/JSONToolkit.java
index f4de29d..7f84d32 100644
--- a/fits-client/src/main/java/rocks/artur/utils/JSONToolkit.java
+++ b/fits-client/src/main/java/rocks/artur/utils/JSONToolkit.java
@@ -15,12 +15,11 @@
import rocks.artur.domain.CharacterisationResult;
import rocks.artur.domain.Property;
-import java.text.ParseException;
-import java.text.SimpleDateFormat;
import java.time.Instant;
import java.time.LocalDateTime;
-import java.time.ZoneId;
import java.time.format.DateTimeFormatter;
+import java.time.format.DateTimeFormatterBuilder;
+import java.time.format.DateTimeParseException;
import java.util.*;
import java.util.stream.Collectors;
@@ -28,7 +27,12 @@ public class JSONToolkit {
private static final Logger LOG = LoggerFactory.getLogger(JSONToolkit.class);
public static int PRETTY_PRINT_INDENT_FACTOR = 4;
- static DateTimeFormatter outputFormat = DateTimeFormatter.ofPattern("dd-MM-yyyy HH:mm:ss");
+ static DateTimeFormatter outputFormatter = DateTimeFormatter.ofPattern("yyyy-MM-dd HH:mm:ss");
+
+ static DateTimeFormatter inputFormatter = new DateTimeFormatterBuilder()
+ .appendPattern("[yyyy:MM:dd HH:mm:ssXXX][yyyy:MM:dd HH:mm:ss][yyyy:MM:dd HH:mmXXX][yyyy-MM-dd HH:mm:ss][yyyy/MM/dd HH:mm:ss]")
+ .toFormatter();
+
public static String translateXML(String xmlString) throws JSONException {
JSONObject xmlJSONObj = XML.toJSONObject(xmlString);
@@ -120,19 +124,25 @@ private static Collection extends CharacterisationResult> parseGenericProperty
}
CharacterisationResult tmpResult = new CharacterisationResult();
- setValues(tmpResult, Property.valueOf(jsonPath.name().toUpperCase()), gp.getContent());
- tmpResult.setSource(gp.getToolname() + ":" + gp.getToolversion());
- tmpResult = convertDataTypes(tmpResult);
- result.add(tmpResult);
+ boolean isPresent = Arrays.stream(Property.values()).anyMatch(item -> jsonPath.name().equalsIgnoreCase(item.name()));
+ if (isPresent) {
+ setValues(tmpResult, Property.valueOf(jsonPath.name().toUpperCase()), gp.getContent());
+ tmpResult.setSource(gp.getToolname() + ":" + gp.getToolversion());
+ tmpResult = convertDataTypes(tmpResult);
+ result.add(tmpResult);
+ }
}
} else {
GenericProperty read = document.read(jsonPath.getFitsProperty(), GenericProperty.class);
CharacterisationResult tmpResult = new CharacterisationResult();
- setValues(tmpResult, Property.valueOf(jsonPath.name().toUpperCase()), read.getContent());
- tmpResult.setSource(read.getToolname() + ":" + read.getToolversion());
- tmpResult = convertDataTypes(tmpResult);
- result.add(tmpResult);
+ boolean isPresent = Arrays.stream(Property.values()).anyMatch(item -> jsonPath.name().equalsIgnoreCase(item.name()));
+ if (isPresent) {
+ setValues(tmpResult, Property.valueOf(jsonPath.name().toUpperCase()), read.getContent());
+ tmpResult.setSource(read.getToolname() + ":" + read.getToolversion());
+ tmpResult = convertDataTypes(tmpResult);
+ result.add(tmpResult);
+ }
}
} catch (Exception e) {
e.printStackTrace();
@@ -140,57 +150,27 @@ private static Collection extends CharacterisationResult> parseGenericProperty
return result;
}
- private static CharacterisationResult convertDataTypes(CharacterisationResult tmpResult) {
-
+ private static CharacterisationResult convertDataTypes(CharacterisationResult tmpResult) throws DateTimeParseException {
switch (tmpResult.getProperty()) {
case CREATED:
case FSLASTMODIFIED:
case LASTMODIFIED:
LOG.debug(String.format("Parsing Object: %s", tmpResult));
- if (tmpResult.getSource().startsWith("Exiftool")) {
- try {
- SimpleDateFormat sdf = new SimpleDateFormat("yyyy:MM:dd HH:mm:ssXXX");
- LocalDateTime parse = sdf.parse(tmpResult.getValue()).toInstant().atZone(ZoneId.systemDefault()).toLocalDateTime();
- tmpResult.setValue(parse.format(outputFormat));
- } catch (ParseException e) {
- try {
- SimpleDateFormat sdf = new SimpleDateFormat("yyyy:MM:dd HH:mm:ss");
- LocalDateTime parse = sdf.parse(tmpResult.getValue()).toInstant().atZone(ZoneId.systemDefault()).toLocalDateTime();
- tmpResult.setValue(parse.format(outputFormat));
- } catch (ParseException ex) {
- try {
- SimpleDateFormat sdf = new SimpleDateFormat("yyyy:MM:dd HH:mmXXX");
- LocalDateTime parse = sdf.parse(tmpResult.getValue()).toInstant().atZone(ZoneId.systemDefault()).toLocalDateTime();
- tmpResult.setValue(parse.format(outputFormat));
- } catch (ParseException ex2) {
- throw new RuntimeException(ex2);
- }
- }
- }
- } else if (tmpResult.getSource().startsWith("NLNZ Metadata Extractor")) {
- try {
- SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss");
- LocalDateTime parse = sdf.parse(tmpResult.getValue()).toInstant().atZone(ZoneId.systemDefault()).toLocalDateTime();
- tmpResult.setValue(parse.format(outputFormat));
- } catch (ParseException ex) {
- throw new RuntimeException(ex);
- }
- } else if (tmpResult.getSource().startsWith("OIS File Information")) {
- LocalDateTime triggerTime =
+
+ if (tmpResult.getSource().startsWith("OIS File Information")) {
+ LocalDateTime parsed =
LocalDateTime.ofInstant(Instant.ofEpochMilli(Long.parseLong(tmpResult.getValue())),
TimeZone.getDefault().toZoneId());
- tmpResult.setValue(triggerTime.format(outputFormat));
- } else if (tmpResult.getSource().startsWith("Tika")) {
- DateTimeFormatter tikaFormatter = DateTimeFormatter.ISO_INSTANT;
- Instant dateInstant = Instant.from(tikaFormatter.parse(tmpResult.getValue()));
- LocalDateTime date = LocalDateTime.ofInstant(dateInstant, ZoneId.systemDefault());
- tmpResult.setValue(date.format(outputFormat));
+ tmpResult.setValue(parsed.format(outputFormatter));
+ } else {
+ LocalDateTime parsed = LocalDateTime.parse(tmpResult.getValue(), inputFormatter);
+ tmpResult.setValue(parsed.format(outputFormatter));
}
+
LOG.debug(String.format("Parsed Result: %s", tmpResult));
break;
}
-
return tmpResult;
}
diff --git a/fits-client/src/main/java/rocks/artur/utils/STAXToolkit.java b/fits-client/src/main/java/rocks/artur/utils/STAXToolkit.java
new file mode 100644
index 0000000..a064f39
--- /dev/null
+++ b/fits-client/src/main/java/rocks/artur/utils/STAXToolkit.java
@@ -0,0 +1,209 @@
+package rocks.artur.utils;
+
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+import rocks.artur.domain.CharacterisationResult;
+import rocks.artur.domain.Property;
+
+import javax.xml.namespace.QName;
+import javax.xml.stream.XMLInputFactory;
+import javax.xml.stream.XMLStreamConstants;
+import javax.xml.stream.XMLStreamException;
+import javax.xml.stream.XMLStreamReader;
+import java.io.StringReader;
+import java.security.KeyPairGenerator;
+import java.util.*;
+
+public class STAXToolkit {
+
+    private static final Logger LOG = LoggerFactory.getLogger(STAXToolkit.class);
+    Map<String, String> identities = new HashMap<>(); // [attribute name -> value], e.g. format/mimetype of one <identity>
+    List<String> sources = new ArrayList<>(); // ["toolname:toolversion", ...] for the current <identity>
+ String filepath;
+ String propertyValue;
+    List<CharacterisationResult> results = new ArrayList<>();
+
+    public List<CharacterisationResult> getCharacterisationResults(String fitsResultXML) throws XMLStreamException {
+
+ XMLInputFactory factory = XMLInputFactory.newInstance();
+ XMLStreamReader reader = factory.createXMLStreamReader(new StringReader(fitsResultXML));
+
+ while (reader.hasNext()) {
+ int event = reader.next();
+ switch (event) {
+ case XMLStreamConstants.START_ELEMENT:
+ QName elementName = reader.getName();
+ handleStartElement(elementName, reader);
+ break;
+ case XMLStreamConstants.END_ELEMENT:
+ QName endElementName = reader.getName();
+ handleEndElement(endElementName, reader);
+ break;
+ case XMLStreamConstants.CHARACTERS:
+ String text = reader.getText().trim();
+ handleText(text);
+ break;
+ }
+ }
+ results.forEach(item -> item.setFilePath(filepath));
+
+ return results;
+ }
+
+ private void handleStartElement(QName elementName, XMLStreamReader reader) throws XMLStreamException {
+ // Add your logic to handle specific elements
+ String elementNameLocalPart = elementName.getLocalPart();
+ LOG.debug("Start Element: " + elementNameLocalPart);
+
+ switch (elementNameLocalPart) {
+ case "identity":
+ for (int i = 0; i < reader.getAttributeCount(); i++) {
+ String attributeName = reader.getAttributeName(i).getLocalPart();
+ String attributeValue = reader.getAttributeValue(i);
+ LOG.debug(" - Attribute: " + attributeName + "=" + attributeValue);
+ if (attributeName.equals("format") || attributeName.equals("mimetype")) {
+ identities.put(attributeName, attributeValue);
+ }
+ }
+ break;
+ case "tool":
+ for (int i = 0; i < reader.getAttributeCount(); i++) {
+ String attributeName = reader.getAttributeName(i).getLocalPart();
+ String attributeValue = reader.getAttributeValue(i);
+ LOG.debug(" - Attribute: " + attributeName + "=" + attributeValue);
+
+ if ("toolname".equals(attributeName)) {
+ sources.add(attributeValue);
+ }
+ if ("toolversion".equals(attributeName)) {
+ String toolname = sources.get(sources.size() - 1);
+ toolname += ":" + attributeValue;
+ sources.set(sources.size() - 1, toolname);
+ }
+ }
+ break;
+ case "version":
+ String version_source = "";
+ for (int i = 0; i < reader.getAttributeCount(); i++) {
+ String attributeName = reader.getAttributeName(i).getLocalPart();
+ String attributeValue = reader.getAttributeValue(i);
+ LOG.debug(" - Attribute: " + attributeName + "=" + attributeValue);
+ if ("toolname".equals(attributeName)) {
+ version_source = attributeValue;
+ }
+ if ("toolversion".equals(attributeName)) {
+ version_source += ":" + attributeValue;
+ }
+ }
+ CharacterisationResult resVersion = new CharacterisationResult();
+ resVersion.setProperty(Property.FORMAT_VERSION);
+ resVersion.setSource(version_source);
+ resVersion.setValueType(Property.FORMAT_VERSION.getValueType());
+ results.add(resVersion);
+ break;
+
+ case "well-formed":
+ String sourceWellformed = "";
+ for (int i = 0; i < reader.getAttributeCount(); i++) {
+ String attributeName = reader.getAttributeName(i).getLocalPart();
+ String attributeValue = reader.getAttributeValue(i);
+ LOG.debug(" - Attribute: " + attributeName + "=" + attributeValue);
+ if ("toolname".equals(attributeName)) {
+ sourceWellformed = attributeValue;
+ }
+ if ("toolversion".equals(attributeName)) {
+ sourceWellformed += ":" + attributeValue;
+ }
+ }
+ CharacterisationResult resWellformed = new CharacterisationResult();
+ resWellformed.setProperty(Property.WELLFORMED);
+ resWellformed.setSource(sourceWellformed);
+ resWellformed.setValueType(Property.WELLFORMED.getValueType());
+ results.add(resWellformed);
+ break;
+ default:
+ String property = elementNameLocalPart;
+ boolean isPresent = Arrays.stream(Property.values()).anyMatch(item -> property.equalsIgnoreCase(item.name()));
+ if (isPresent) {
+ String source = "";
+ for (int i = 0; i < reader.getAttributeCount(); i++) {
+ String attributeName = reader.getAttributeName(i).getLocalPart();
+ String attributeValue = reader.getAttributeValue(i);
+ LOG.debug(" - Attribute: " + attributeName + "=" + attributeValue);
+ if ("toolname".equals(attributeName)) {
+ source = attributeValue;
+ }
+ if ("toolversion".equals(attributeName)) {
+ source += ":" + attributeValue;
+ }
+ }
+ CharacterisationResult new_res = new CharacterisationResult();
+ Property propertyEnum = Property.valueOf(property.toUpperCase());
+ new_res.setProperty(propertyEnum);
+ new_res.setSource(source);
+ new_res.setValueType(propertyEnum.getValueType());
+ results.add(new_res);
+ }
+
+
+ }
+
+ }
+
+ private void handleEndElement(QName endElementName, XMLStreamReader reader) {
+ // Add your logic to handle specific end elements
+ String elementNameLocalPart = endElementName.getLocalPart();
+ LOG.debug("End Element: " + elementNameLocalPart);
+
+
+ switch (elementNameLocalPart) {
+
+ case "identity":
+            for (Map.Entry<String, String> identity : identities.entrySet()) {
+ String property = identity.getKey();
+ String value = identity.getValue();
+
+ for (String source : sources) {
+ CharacterisationResult new_res = new CharacterisationResult();
+ Property propertyEnum = Property.valueOf(property.toUpperCase());
+ new_res.setProperty(propertyEnum);
+ new_res.setValue(value);
+ new_res.setSource(source);
+ new_res.setValueType(propertyEnum.getValueType());
+ results.add(new_res);
+ }
+ }
+ identities.clear();
+ sources.clear();
+ break;
+ case "version":
+ CharacterisationResult characterisationResultVersion = results.get(results.size() - 1);
+ String format = identities.get("format");
+ characterisationResultVersion.setValue(String.format("%s %s",format,propertyValue));
+ break;
+ case "fits":
+ results.forEach(res -> res.setFilePath(filepath));
+ break;
+ case "well-formed":
+ CharacterisationResult characterisationResultWellformed = results.get(results.size() - 1);
+ characterisationResultWellformed.setValue(propertyValue);
+ break;
+ case "filepath":
+                filepath = propertyValue; // NOTE(review): no break — falls through to default so a FILEPATH property result (if present) also gets this value; confirm intentional or add break
+ default:
+ String property = elementNameLocalPart;
+ boolean isPresent = Arrays.stream(Property.values()).anyMatch(item -> property.equalsIgnoreCase(item.name()));
+ if (isPresent) {
+ CharacterisationResult characterisationResult = results.get(results.size() - 1);
+ characterisationResult.setValue(propertyValue);
+ }
+
+ }
+
+ }
+
+ private void handleText(String text) {
+ LOG.debug("Text: " + text);
+ propertyValue = text;
+ }
+}
diff --git a/fits-client/src/test/java/rocks/artur/FITSClient/FITSClientTest.java b/fits-client/src/test/java/rocks/artur/FITSClient/FITSClientTest.java
new file mode 100644
index 0000000..86b942e
--- /dev/null
+++ b/fits-client/src/test/java/rocks/artur/FITSClient/FITSClientTest.java
@@ -0,0 +1,425 @@
+package rocks.artur.FITSClient;
+
+
+import org.junit.Assert;
+import org.junit.jupiter.api.AfterEach;
+import org.junit.jupiter.api.BeforeEach;
+import org.junit.jupiter.api.Disabled;
+import org.junit.jupiter.api.Test;
+import org.mockserver.integration.ClientAndServer;
+import org.mockserver.model.HttpRequest;
+import org.mockserver.model.HttpResponse;
+import rocks.artur.api_impl.utils.ByteFile;
+import rocks.artur.domain.CharacterisationResult;
+
+import javax.xml.stream.XMLStreamException;
+import java.io.File;
+import java.io.IOException;
+import java.net.URL;
+import java.nio.file.Files;
+import java.nio.file.Paths;
+import java.util.List;
+
+import static org.mockserver.model.HttpRequest.request;
+import static org.mockserver.model.HttpResponse.response;
+
+public class FITSClientTest {
+ private ClientAndServer mockServer;
+
+    private static final int MOCK_SERVER_PORT = 8888;
+
+ public static String VALID_FITS_RESULT = "\r\n" +
+ "\n" +
+ " \n" +
+ " \n" +
+ " \n" +
+ " fmt/1149\n" +
+ " \n" +
+ " \n" +
+ " \n" +
+ " \n" +
+ " \n" +
+ " \n" +
+ " \n" +
+ " 903\n" +
+ " /usr/local/tomcat/webapps/fits/upload/1582118786085/README.md\n" +
+ " README.md\n" +
+ " 133c6cf05a139fa2e472ce6fa11bb5d2\n" +
+ " 1582118786000\n" +
+ " \n" +
+ " \n" +
+ " \n" +
+ " \n" +
+ " \n" +
+ " \n" +
+ " \n" +
+ " \n" +
+ " \n" +
+ " \n" +
+ " \n" +
+ " \n" +
+ " \n" +
+ " \n" +
+ " \n" +
+ " \n" +
+ " \n" +
+ " \n" +
+ "\n" +
+ "\n" +
+ "\n";
+
+
+ public static String VALID_FITS_RESULT2 = "\r\n" +
+ "\n" +
+ " \n" +
+ " \n" +
+ " \n" +
+ " \n" +
+ " \n" +
+ " \n" +
+ " 1.0\n" +
+ " fmt/11\n" +
+ " \n" +
+ " \n" +
+ " \n" +
+ " /usr/local/tomcat/webapps/fits/upload/1596052237783/main\n" +
+ " main\n" +
+ " 1875256\n" +
+ " 926a7c8c079e4ccb837410746b2919e2\n" +
+ " 1596052237000\n" +
+ " \n" +
+ " \n" +
+ " \n" +
+ " \n" +
+ " Deflate/Inflate\n" +
+ " Deflate\n" +
+ " 2400\n" +
+ " 1531\n" +
+ " normal*\n" +
+ " \n" +
+ " \n" +
+ " \n" +
+ " \n" +
+ " Deflate/Inflate\n" +
+ " \n" +
+ " \n" +
+ " \n" +
+ " \n" +
+ " 2400\n" +
+ " 1531\n" +
+ " \n" +
+ " \n" +
+ " \n" +
+ " \n" +
+ " \n" +
+ " normal*\n" +
+ " \n" +
+ " \n" +
+ " \n" +
+ " \n" +
+ " \n" +
+ " \n" +
+ " \n" +
+ " \n" +
+ " \n" +
+ " \n" +
+ " \n" +
+ " \n" +
+ " \n" +
+ " \n" +
+ " \n" +
+ " \n" +
+ " \n" +
+ " \n" +
+ " \n" +
+ " \n" +
+ " \n" +
+ " \n" +
+ " \n" +
+ " \n" +
+ "\n";
+
+
+ public static String VALID_FITS_RESULT3 = "\r\n" +
+ "\n" +
+ " \n" +
+ " \n" +
+ " \n" +
+ " \n" +
+ " \n" +
+ " \n" +
+ " \n" +
+ " \n" +
+ " 1.4\n" +
+ " fmt/18\n" +
+ " \n" +
+ " \n" +
+ " \n" +
+ " 39586\n" +
+ " /XPP\n" +
+ " 2011:12:27 10:44:28+01:00\n" +
+ " 2002:04:25 13:02:24Z\n" +
+ " /home/petrov/taverna/tmp/000/000009.pdf\n" +
+ " /home/petrov/taverna/tmp/000/000009.pdf\n" +
+ " 92ddc75b3b59872e6656b54b8f236764\n" +
+ " 1324979068000\n" +
+ " \n" +
+ " \n" +
+ " true\n" +
+ " true\n" +
+ " \n" +
+ " \n" +
+ " \n" +
+ " Table DP-1. Profile of General Demographic Characteristics: 2000\n" +
+ " Census 2000 Profiles\n" +
+ " US Census Bureau\n" +
+ " 4\n" +
+ " no\n" +
+ " yes\n" +
+ " no\n" +
+ " no\n" +
+ " no\n" +
+ " no\n" +
+ " \n" +
+ " \n" +
+ "";
+
+ @BeforeEach
+ void setUp() {
+        mockServer = ClientAndServer.startClientAndServer(MOCK_SERVER_PORT);
+ }
+
+ @AfterEach
+ public void stopServer() {
+ mockServer.stop();
+ }
+
+ @Test
+ void getVersionTest() throws IOException {
+ mockServer.when(
+ HttpRequest.request()
+ .withMethod("GET")
+ .withPath("/version")
+ .withHeader("\"Content-type\", \"application/json\""))
+ .respond(
+ HttpResponse.response()
+ .withStatusCode(200)
+ .withBody("1.5.0")
+ );
+
+
+ FITSClient fitsClient = new FITSClient();
+ fitsClient.setFITS_URL(String.format("http://localhost:%d/", MOCK_SERVER_PORT));
+
+ String s = fitsClient.getVersion();
+ Assert.assertEquals("1.5.0", s);
+ }
+
+ @Test
+ void processFileAsByteArrayTest() throws IOException {
+ mockServer.when(
+ HttpRequest.request()
+ .withMethod("POST")
+ .withPath("/fits/examine")
+ .withHeader("\"Content-type\", \"application/json\""))
+ .respond(
+ HttpResponse.response()
+ .withStatusCode(200)
+ .withBody(VALID_FITS_RESULT)
+ );
+
+
+ URL resource = getClass().getClassLoader().getResource("README.md");
+ byte[] array = Files.readAllBytes(Paths.get(resource.getPath()));
+
+ FITSClient fitsClient = new FITSClient();
+ fitsClient.setFITS_URL(String.format("http://localhost:%d", MOCK_SERVER_PORT));
+ ByteFile byteFile = new ByteFile(array, "testFileName" );
+        List<CharacterisationResult> output = fitsClient.processFile(byteFile);
+
+ Assert.assertEquals(9, output.size());
+ }
+
+
+ @Test
+ void processFileTest() throws IOException {
+
+ mockServer.when(
+ HttpRequest.request()
+ .withMethod("POST")
+ .withPath("/fits/examine")
+ .withHeader("\"Content-type\", \"application/json\""))
+ .respond(
+ HttpResponse.response()
+ .withStatusCode(200)
+ .withBody(VALID_FITS_RESULT)
+ );
+
+ FITSClient fitsClient = new FITSClient();
+ fitsClient.setFITS_URL(String.format("http://localhost:%d", MOCK_SERVER_PORT));
+
+ URL resource = getClass().getClassLoader().getResource("README.md");
+        List<CharacterisationResult> output = fitsClient.processFile(new File(resource.getPath()));
+
+ Assert.assertEquals(9, output.size());
+ }
+
+ @Test
+ void processFITSFileTest() throws IOException {
+
+ mockServer.when(
+ HttpRequest.request()
+ .withMethod("POST")
+ .withPath("/fits/examine")
+ .withHeader("\"Content-type\", \"application/json\""))
+ .respond(
+ HttpResponse.response()
+ .withStatusCode(200)
+ .withBody(VALID_FITS_RESULT)
+ );
+
+ FITSClient fitsClient = new FITSClient();
+ fitsClient.setFITS_URL(String.format("http://localhost:%d", MOCK_SERVER_PORT));
+
+ URL resource = getClass().getClassLoader().getResource("998003.csv.fits.xml");
+        List<CharacterisationResult> output = fitsClient.processFile(new File(resource.getPath()));
+
+ Assert.assertEquals(14, output.size());
+ }
+
+
+ //The test can be run against running FITS service, i.e. fits-docker
+ @Disabled
+ @Test
+ void processFileTestWithoutMock() throws IOException {
+ FITSClient fitsClient = new FITSClient();
+ fitsClient.setFITS_URL(String.format("http://localhost:%d", 8081));
+
+ URL resource = getClass().getClassLoader().getResource("README.md");
+        List<CharacterisationResult> output = fitsClient.processFile(new File(resource.getPath()));
+
+ Assert.assertEquals(12, output.size());
+ }
+
+
+ //The test can be run against running FITS service, i.e. fits-docker
+ @Disabled
+ @Test
+ void processByteArrayTestWithoutMock() throws IOException {
+ FITSClient fitsClient = new FITSClient();
+ fitsClient.setFITS_URL(String.format("http://localhost:%d", 8081));
+
+ URL resource = getClass().getClassLoader().getResource("README.md");
+ File file = new File(resource.getPath());
+        List<CharacterisationResult> output = fitsClient.processFile(file);
+
+ Assert.assertEquals(9, output.size());
+ }
+
+
+ @Test
+ void processFITSFileCSVTest() throws IOException {
+
+ FITSClient fitsClient = new FITSClient();
+
+ URL resource = getClass().getClassLoader().getResource("998003.csv.fits.xml");
+        List<CharacterisationResult> output = fitsClient.processFile(new File(resource.getPath()));
+
+ Assert.assertEquals(14, output.size());
+ }
+
+ @Test
+ void processFITSFileHTMLTest() throws IOException {
+
+ FITSClient fitsClient = new FITSClient();
+
+ URL resource = getClass().getClassLoader().getResource("002526.html.fits.xml");
+        List<CharacterisationResult> output = fitsClient.processFile(new File(resource.getPath()));
+
+ Assert.assertEquals(21, output.size());
+ }
+
+ @Test
+ void processFITSFilePDFTest() throws IOException {
+
+ FITSClient fitsClient = new FITSClient();
+
+ URL resource = getClass().getClassLoader().getResource("000009.pdf.fits.xml");
+        List<CharacterisationResult> output = fitsClient.processFile(new File(resource.getPath()));
+
+ Assert.assertEquals(28, output.size());
+ }
+
+ @Test
+ void processFITSFileDocTest() throws IOException {
+
+ FITSClient fitsClient = new FITSClient();
+
+ URL resource = getClass().getClassLoader().getResource("002392.doc.fits.xml");
+        List<CharacterisationResult> output = fitsClient.processFile(new File(resource.getPath()));
+
+ Assert.assertEquals(22, output.size());
+ }
+
+ @Test
+ void processFITSFileGZTest() throws IOException {
+
+ FITSClient fitsClient = new FITSClient();
+
+ URL resource = getClass().getClassLoader().getResource("002451.gz.fits.xml");
+        List<CharacterisationResult> output = fitsClient.processFile(new File(resource.getPath()));
+
+ Assert.assertEquals(10, output.size());
+ }
+
+
+ @Test
+ void processFITSFilPDF2Test() throws IOException {
+
+ FITSClient fitsClient = new FITSClient();
+
+ URL resource = getClass().getClassLoader().getResource("002838.pdf.fits.xml");
+        List<CharacterisationResult> output = fitsClient.processFile(new File(resource.getPath()));
+
+ Assert.assertEquals(30, output.size());
+ }
+
+ @Test
+ void processFITSFileTEXTest() throws IOException {
+
+ FITSClient fitsClient = new FITSClient();
+
+ URL resource = getClass().getClassLoader().getResource("002283.tex.fits.xml");
+        List<CharacterisationResult> output = fitsClient.processFile(new File(resource.getPath()));
+
+ Assert.assertEquals(8, output.size());
+ }
+
+
+ @Test
+ void processFITSFilPDF3Test() throws IOException {
+
+ FITSClient fitsClient = new FITSClient();
+
+ URL resource = getClass().getClassLoader().getResource("002729.pdf.fits.xml");
+        List<CharacterisationResult> output = fitsClient.processFile(new File(resource.getPath()));
+
+ Assert.assertEquals(11, output.size());
+ }
+
+ @Test
+ void extractCharResultsStax() throws XMLStreamException {
+ FITSClient fitsClient = new FITSClient();
+        List<CharacterisationResult> characterisationResults = fitsClient.extractCharacterisationResultsStax(VALID_FITS_RESULT2);
+ System.out.println(characterisationResults);
+ }
+
+ @Test
+ void compareExtractionStaxVsJson() throws XMLStreamException {
+ FITSClient fitsClient = new FITSClient();
+        List<CharacterisationResult> characterisationResultsStax = fitsClient.extractCharacterisationResultsStax(VALID_FITS_RESULT3);
+        List<CharacterisationResult> characterisationResultsJSON = fitsClient.extractCharacterisationResults(VALID_FITS_RESULT3);
+ Assert.assertEquals(characterisationResultsJSON.size(), characterisationResultsStax.size());
+ }
+
+}
\ No newline at end of file
diff --git a/fits-client/src/test/java/rocks/artur/FITSClient/JSONToolkitTest.java b/fits-client/src/test/java/rocks/artur/FITSClient/JSONToolkitTest.java
new file mode 100644
index 0000000..1e8bd6d
--- /dev/null
+++ b/fits-client/src/test/java/rocks/artur/FITSClient/JSONToolkitTest.java
@@ -0,0 +1,75 @@
+package rocks.artur.FITSClient;
+
+import org.json.JSONException;
+import org.junit.Assert;
+import org.junit.jupiter.api.Test;
+import org.springframework.test.context.ActiveProfiles;
+import rocks.artur.FITSObjects.FITSPropertyJsonPath;
+import rocks.artur.domain.CharacterisationResult;
+import rocks.artur.utils.JSONToolkit;
+
+import java.time.Instant;
+import java.time.LocalDateTime;
+import java.time.ZoneId;
+import java.time.ZoneOffset;
+import java.time.format.DateTimeFormatter;
+import java.util.Arrays;
+import java.util.List;
+import java.util.Set;
+
+class JSONToolkitTest {
+
+ @Test
+ void translateXMLTest() throws JSONException {
+ String s = JSONToolkit.translateXML(FITSClientTest.VALID_FITS_RESULT);
+ System.out.println(s);
+ }
+
+ @Test
+ void getCharacterisationResult2Test() throws JSONException {
+ String jsonString = JSONToolkit.translateXML(FITSClientTest.VALID_FITS_RESULT2);
+        List<CharacterisationResult> results = JSONToolkit.getCharacterisationResults(FITSPropertyJsonPath.IDENTIFICATION,
+ jsonString);
+ System.out.println(results);
+ }
+
+ @Test
+ void getCharacterisationResultTest() throws JSONException {
+ String jsonString = JSONToolkit.translateXML(FITSClientTest.VALID_FITS_RESULT);
+        List<CharacterisationResult> results = JSONToolkit.getCharacterisationResults(FITSPropertyJsonPath.FILENAME,
+ jsonString);
+
+ }
+
+
+ @Test
+ void getCharacterisationResultIdentificationTest() throws JSONException {
+ String jsonString = JSONToolkit.translateXML(FITSClientTest.VALID_FITS_RESULT);
+        List<CharacterisationResult> results = JSONToolkit.getCharacterisationResults(FITSPropertyJsonPath.IDENTIFICATION,
+ jsonString);
+
+ System.out.println(results);
+ Assert.assertEquals(7, results.size());
+ }
+
+ @Test
+ void getAvailableFitsPropertiesTest() throws JSONException {
+ String jsonString = JSONToolkit.translateXML(FITSClientTest.VALID_FITS_RESULT3);
+        Set<String> availableFitsProperties = JSONToolkit.getAvailableFitsProperties(jsonString); // presumably property names — confirm against JSONToolkit signature
+ List