diff --git a/.github/dependabot.yml b/.github/dependabot.yml new file mode 100644 index 00000000000..6325029dac1 --- /dev/null +++ b/.github/dependabot.yml @@ -0,0 +1,11 @@ +# Set update schedule for GitHub Actions +# https://docs.github.com/en/code-security/supply-chain-security/keeping-your-dependencies-updated-automatically/keeping-your-actions-up-to-date-with-dependabot + +version: 2 +updates: + + - package-ecosystem: "github-actions" + directory: "/" + schedule: + # Check for updates to GitHub Actions daily + interval: "daily" diff --git a/.github/workflows/container_app_pr.yml b/.github/workflows/container_app_pr.yml index c86d284e74b..c3f9e7bdc0d 100644 --- a/.github/workflows/container_app_pr.yml +++ b/.github/workflows/container_app_pr.yml @@ -20,14 +20,14 @@ jobs: if: ${{ github.repository_owner == 'IQSS' }} steps: # Checkout the pull request code as when merged - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 with: ref: 'refs/pull/${{ github.event.client_payload.pull_request.number }}/merge' - - uses: actions/setup-java@v3 + - uses: actions/setup-java@v4 with: java-version: "17" distribution: 'adopt' - - uses: actions/cache@v3 + - uses: actions/cache@v4 with: path: ~/.m2 key: ${{ runner.os }}-m2-${{ hashFiles('**/pom.xml') }} @@ -87,7 +87,7 @@ jobs: :ship: [See on GHCR](https://github.com/orgs/gdcc/packages/container). Use by referencing with full name as printed above, mind the registry name. 
# Leave a note when things have gone sideways - - uses: peter-evans/create-or-update-comment@v3 + - uses: peter-evans/create-or-update-comment@v4 if: ${{ failure() }} with: issue-number: ${{ github.event.client_payload.pull_request.number }} diff --git a/.github/workflows/container_app_push.yml b/.github/workflows/container_app_push.yml index 3b7ce066d73..afb4f6f874b 100644 --- a/.github/workflows/container_app_push.yml +++ b/.github/workflows/container_app_push.yml @@ -68,15 +68,15 @@ jobs: if: ${{ github.event_name != 'pull_request' && github.ref_name == 'develop' && github.repository_owner == 'IQSS' }} runs-on: ubuntu-latest steps: - - uses: actions/checkout@v3 - - uses: peter-evans/dockerhub-description@v3 + - uses: actions/checkout@v4 + - uses: peter-evans/dockerhub-description@v4 with: username: ${{ secrets.DOCKERHUB_USERNAME }} password: ${{ secrets.DOCKERHUB_TOKEN }} repository: gdcc/dataverse short-description: "Dataverse Application Container Image providing the executable" readme-filepath: ./src/main/docker/README.md - - uses: peter-evans/dockerhub-description@v3 + - uses: peter-evans/dockerhub-description@v4 with: username: ${{ secrets.DOCKERHUB_USERNAME }} password: ${{ secrets.DOCKERHUB_TOKEN }} diff --git a/.github/workflows/deploy_beta_testing.yml b/.github/workflows/deploy_beta_testing.yml index 028f0140cc9..eca8416732a 100644 --- a/.github/workflows/deploy_beta_testing.yml +++ b/.github/workflows/deploy_beta_testing.yml @@ -5,14 +5,18 @@ on: branches: - develop +concurrency: + group: deploy-beta-testing + cancel-in-progress: false + jobs: build: runs-on: ubuntu-latest steps: - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 - - uses: actions/setup-java@v3 + - uses: actions/setup-java@v4 with: distribution: 'zulu' java-version: '17' @@ -32,7 +36,7 @@ jobs: run: echo "war_file=$(ls *.war | head -1)">> $GITHUB_ENV - name: Upload war artifact - uses: actions/upload-artifact@v3 + uses: actions/upload-artifact@v4 with: name: built-app path: 
./target/${{ env.war_file }} @@ -42,10 +46,10 @@ jobs: runs-on: ubuntu-latest steps: - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 - name: Download war artifact - uses: actions/download-artifact@v3 + uses: actions/download-artifact@v4 with: name: built-app path: ./ diff --git a/.github/workflows/guides_build_sphinx.yml b/.github/workflows/guides_build_sphinx.yml index 86b59b11d35..fa3a876c418 100644 --- a/.github/workflows/guides_build_sphinx.yml +++ b/.github/workflows/guides_build_sphinx.yml @@ -10,7 +10,7 @@ jobs: docs: runs-on: ubuntu-latest steps: - - uses: actions/checkout@v2 + - uses: actions/checkout@v4 - uses: uncch-rdmc/sphinx-action@master with: docs-folder: "doc/sphinx-guides/" diff --git a/.github/workflows/maven_unit_test.yml b/.github/workflows/maven_unit_test.yml index a94b17a67ba..45180ea7aec 100644 --- a/.github/workflows/maven_unit_test.yml +++ b/.github/workflows/maven_unit_test.yml @@ -32,9 +32,9 @@ jobs: steps: # TODO: As part of #10618 change to setup-maven custom action # Basic setup chores - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 - name: Set up JDK ${{ matrix.jdk }} - uses: actions/setup-java@v3 + uses: actions/setup-java@v4 with: java-version: ${{ matrix.jdk }} distribution: temurin @@ -57,7 +57,7 @@ jobs: # Upload the built war file. For download, it will be wrapped in a ZIP by GitHub. 
# See also https://github.com/actions/upload-artifact#zipped-artifact-downloads - - uses: actions/upload-artifact@v3 + - uses: actions/upload-artifact@v4 with: name: dataverse-java${{ matrix.jdk }}.war path: target/dataverse*.war @@ -67,7 +67,7 @@ jobs: - run: | tar -cvf java-builddir.tar target tar -cvf java-m2-selection.tar ~/.m2/repository/io/gdcc/dataverse-* - - uses: actions/upload-artifact@v3 + - uses: actions/upload-artifact@v4 with: name: java-artifacts path: | @@ -98,16 +98,16 @@ jobs: steps: # TODO: As part of #10618 change to setup-maven custom action # Basic setup chores - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 - name: Set up JDK ${{ matrix.jdk }} - uses: actions/setup-java@v3 + uses: actions/setup-java@v4 with: java-version: ${{ matrix.jdk }} distribution: temurin cache: maven # Get the build output from the unit test job - - uses: actions/download-artifact@v3 + - uses: actions/download-artifact@v4 with: name: java-artifacts - run: | @@ -119,7 +119,7 @@ jobs: # Wrap up and send to coverage job - run: tar -cvf java-reportdir.tar target/site - - uses: actions/upload-artifact@v3 + - uses: actions/upload-artifact@v4 with: name: java-reportdir path: java-reportdir.tar @@ -132,15 +132,15 @@ jobs: steps: # TODO: As part of #10618 change to setup-maven custom action # Basic setup chores - - uses: actions/checkout@v3 - - uses: actions/setup-java@v3 + - uses: actions/checkout@v4 + - uses: actions/setup-java@v4 with: java-version: '17' distribution: temurin cache: maven # Get the build output from the integration test job - - uses: actions/download-artifact@v3 + - uses: actions/download-artifact@v4 with: name: java-reportdir - run: tar -xvf java-reportdir.tar diff --git a/.github/workflows/pr_comment_commands.yml b/.github/workflows/pr_comment_commands.yml index 5ff75def623..06b11b1ac5b 100644 --- a/.github/workflows/pr_comment_commands.yml +++ b/.github/workflows/pr_comment_commands.yml @@ -9,7 +9,7 @@ jobs: runs-on: ubuntu-latest steps: - 
name: Dispatch - uses: peter-evans/slash-command-dispatch@v3 + uses: peter-evans/slash-command-dispatch@v4 with: # This token belongs to @dataversebot and has sufficient scope. token: ${{ secrets.GHCR_TOKEN }} diff --git a/.github/workflows/reviewdog_checkstyle.yml b/.github/workflows/reviewdog_checkstyle.yml index 90a0dd7d06b..804b04f696a 100644 --- a/.github/workflows/reviewdog_checkstyle.yml +++ b/.github/workflows/reviewdog_checkstyle.yml @@ -10,7 +10,7 @@ jobs: name: Checkstyle job steps: - name: Checkout - uses: actions/checkout@v2 + uses: actions/checkout@v4 - name: Run check style uses: nikitasavinov/checkstyle-action@master with: diff --git a/.github/workflows/shellcheck.yml b/.github/workflows/shellcheck.yml index 56f7d648dc4..fb9cf5a0a1f 100644 --- a/.github/workflows/shellcheck.yml +++ b/.github/workflows/shellcheck.yml @@ -21,7 +21,7 @@ jobs: permissions: pull-requests: write steps: - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 - name: shellcheck uses: reviewdog/action-shellcheck@v1 with: diff --git a/.github/workflows/shellspec.yml b/.github/workflows/shellspec.yml index 3320d9d08a4..cc09992edac 100644 --- a/.github/workflows/shellspec.yml +++ b/.github/workflows/shellspec.yml @@ -19,7 +19,7 @@ jobs: steps: - name: Install shellspec run: curl -fsSL https://git.io/shellspec | sh -s ${{ env.SHELLSPEC_VERSION }} --yes - - uses: actions/checkout@v2 + - uses: actions/checkout@v4 - name: Run Shellspec run: | cd tests/shell @@ -30,7 +30,7 @@ jobs: container: image: rockylinux/rockylinux:9 steps: - - uses: actions/checkout@v2 + - uses: actions/checkout@v4 - name: Install shellspec run: | curl -fsSL https://github.com/shellspec/shellspec/releases/download/${{ env.SHELLSPEC_VERSION }}/shellspec-dist.tar.gz | tar -xz -C /usr/share @@ -47,7 +47,7 @@ jobs: steps: - name: Install shellspec run: curl -fsSL https://git.io/shellspec | sh -s 0.28.1 --yes - - uses: actions/checkout@v2 + - uses: actions/checkout@v4 - name: Run Shellspec run: | cd 
tests/shell diff --git a/.github/workflows/spi_release.yml b/.github/workflows/spi_release.yml index 8ad74b3e4bb..6398edca412 100644 --- a/.github/workflows/spi_release.yml +++ b/.github/workflows/spi_release.yml @@ -37,15 +37,15 @@ jobs: runs-on: ubuntu-latest if: github.event_name == 'pull_request' && needs.check-secrets.outputs.available == 'true' steps: - - uses: actions/checkout@v3 - - uses: actions/setup-java@v3 + - uses: actions/checkout@v4 + - uses: actions/setup-java@v4 with: java-version: '17' distribution: 'adopt' server-id: ossrh server-username: MAVEN_USERNAME server-password: MAVEN_PASSWORD - - uses: actions/cache@v2 + - uses: actions/cache@v4 with: path: ~/.m2 key: ${{ runner.os }}-m2-${{ hashFiles('**/pom.xml') }} @@ -63,12 +63,12 @@ jobs: runs-on: ubuntu-latest if: github.event_name == 'push' && needs.check-secrets.outputs.available == 'true' steps: - - uses: actions/checkout@v3 - - uses: actions/setup-java@v3 + - uses: actions/checkout@v4 + - uses: actions/setup-java@v4 with: java-version: '17' distribution: 'adopt' - - uses: actions/cache@v2 + - uses: actions/cache@v4 with: path: ~/.m2 key: ${{ runner.os }}-m2-${{ hashFiles('**/pom.xml') }} @@ -76,7 +76,7 @@ jobs: # Running setup-java again overwrites the settings.xml - IT'S MANDATORY TO DO THIS SECOND SETUP!!! 
- name: Set up Maven Central Repository - uses: actions/setup-java@v3 + uses: actions/setup-java@v4 with: java-version: '17' distribution: 'adopt' diff --git a/conf/keycloak/test-realm.json b/conf/keycloak/test-realm.json index efe71cc5d29..2e5ed1c4d69 100644 --- a/conf/keycloak/test-realm.json +++ b/conf/keycloak/test-realm.json @@ -45,287 +45,411 @@ "quickLoginCheckMilliSeconds" : 1000, "maxDeltaTimeSeconds" : 43200, "failureFactor" : 30, - "roles" : { - "realm" : [ { - "id" : "075daee1-5ab2-44b5-adbf-fa49a3da8305", - "name" : "uma_authorization", - "description" : "${role_uma_authorization}", - "composite" : false, - "clientRole" : false, - "containerId" : "80a7e04b-a2b5-4891-a2d1-5ad4e915f983", - "attributes" : { } - }, { - "id" : "b4ff9091-ddf9-4536-b175-8cfa3e331d71", - "name" : "default-roles-test", - "description" : "${role_default-roles}", - "composite" : true, - "composites" : { - "realm" : [ "offline_access", "uma_authorization" ], - "client" : { - "account" : [ "view-profile", "manage-account" ] - } + "roles": { + "realm": [ + { + "id": "075daee1-5ab2-44b5-adbf-fa49a3da8305", + "name": "uma_authorization", + "description": "${role_uma_authorization}", + "composite": false, + "clientRole": false, + "containerId": "80a7e04b-a2b5-4891-a2d1-5ad4e915f983", + "attributes": {} }, - "clientRole" : false, - "containerId" : "80a7e04b-a2b5-4891-a2d1-5ad4e915f983", - "attributes" : { } - }, { - "id" : "e6d31555-6be6-4dee-bc6a-40a53108e4c2", - "name" : "offline_access", - "description" : "${role_offline-access}", - "composite" : false, - "clientRole" : false, - "containerId" : "80a7e04b-a2b5-4891-a2d1-5ad4e915f983", - "attributes" : { } - } ], - "client" : { - "realm-management" : [ { - "id" : "1955bd12-5f86-4a74-b130-d68a8ef6f0ee", - "name" : "impersonation", - "description" : "${role_impersonation}", - "composite" : false, - "clientRole" : true, - "containerId" : "dada0ae8-ee9f-415a-9685-42da7c563660", - "attributes" : { } - }, { - "id" : 
"1109c350-9ab1-426c-9876-ef67d4310f35", - "name" : "view-authorization", - "description" : "${role_view-authorization}", - "composite" : false, - "clientRole" : true, - "containerId" : "dada0ae8-ee9f-415a-9685-42da7c563660", - "attributes" : { } - }, { - "id" : "980c3fd3-1ae3-4b8f-9a00-d764c939035f", - "name" : "query-users", - "description" : "${role_query-users}", - "composite" : false, - "clientRole" : true, - "containerId" : "dada0ae8-ee9f-415a-9685-42da7c563660", - "attributes" : { } - }, { - "id" : "5363e601-0f9d-4633-a8c8-28cb0f859b7b", - "name" : "query-groups", - "description" : "${role_query-groups}", - "composite" : false, - "clientRole" : true, - "containerId" : "dada0ae8-ee9f-415a-9685-42da7c563660", - "attributes" : { } - }, { - "id" : "59aa7992-ad78-48db-868a-25d6e1d7db50", - "name" : "realm-admin", - "description" : "${role_realm-admin}", - "composite" : true, - "composites" : { - "client" : { - "realm-management" : [ "impersonation", "view-authorization", "query-users", "query-groups", "manage-clients", "manage-realm", "view-identity-providers", "query-realms", "manage-authorization", "manage-identity-providers", "manage-users", "view-users", "view-realm", "create-client", "view-clients", "manage-events", "query-clients", "view-events" ] + { + "id": "b4ff9091-ddf9-4536-b175-8cfa3e331d71", + "name": "default-roles-test", + "description": "${role_default-roles}", + "composite": true, + "composites": { + "realm": [ + "offline_access", + "uma_authorization" + ], + "client": { + "account": [ + "view-profile", + "manage-account" + ] } }, - "clientRole" : true, - "containerId" : "dada0ae8-ee9f-415a-9685-42da7c563660", - "attributes" : { } - }, { - "id" : "112f53c2-897d-4c01-81db-b8dc10c5b995", - "name" : "manage-clients", - "description" : "${role_manage-clients}", - "composite" : false, - "clientRole" : true, - "containerId" : "dada0ae8-ee9f-415a-9685-42da7c563660", - "attributes" : { } - }, { - "id" : "c7f57bbd-ef32-4a64-9888-7b8abd90777a", - "name" : 
"manage-realm", - "description" : "${role_manage-realm}", - "composite" : false, - "clientRole" : true, - "containerId" : "dada0ae8-ee9f-415a-9685-42da7c563660", - "attributes" : { } - }, { - "id" : "8885dac8-0af3-45af-94ce-eff5e801bb80", - "name" : "view-identity-providers", - "description" : "${role_view-identity-providers}", - "composite" : false, - "clientRole" : true, - "containerId" : "dada0ae8-ee9f-415a-9685-42da7c563660", - "attributes" : { } - }, { - "id" : "2673346c-b0ef-4e01-8a90-be03866093af", - "name" : "manage-authorization", - "description" : "${role_manage-authorization}", - "composite" : false, - "clientRole" : true, - "containerId" : "dada0ae8-ee9f-415a-9685-42da7c563660", - "attributes" : { } - }, { - "id" : "b7182885-9e57-445f-8dae-17c16eb31b5d", - "name" : "manage-identity-providers", - "description" : "${role_manage-identity-providers}", - "composite" : false, - "clientRole" : true, - "containerId" : "dada0ae8-ee9f-415a-9685-42da7c563660", - "attributes" : { } - }, { - "id" : "ba7bfe0c-cb07-4a47-b92c-b8132b57e181", - "name" : "manage-users", - "description" : "${role_manage-users}", - "composite" : false, - "clientRole" : true, - "containerId" : "dada0ae8-ee9f-415a-9685-42da7c563660", - "attributes" : { } - }, { - "id" : "13a8f0fc-647d-4bfe-b525-73956898e550", - "name" : "query-realms", - "description" : "${role_query-realms}", - "composite" : false, - "clientRole" : true, - "containerId" : "dada0ae8-ee9f-415a-9685-42da7c563660", - "attributes" : { } - }, { - "id" : "ef4c57dc-78c2-4f9a-8d2b-0e97d46fc842", - "name" : "view-realm", - "description" : "${role_view-realm}", - "composite" : false, - "clientRole" : true, - "containerId" : "dada0ae8-ee9f-415a-9685-42da7c563660", - "attributes" : { } - }, { - "id" : "2875da34-006c-4b7f-bfc8-9ae8e46af3a2", - "name" : "view-users", - "description" : "${role_view-users}", - "composite" : true, - "composites" : { - "client" : { - "realm-management" : [ "query-users", "query-groups" ] + "clientRole": false, 
+ "containerId": "80a7e04b-a2b5-4891-a2d1-5ad4e915f983", + "attributes": {} + }, + { + "id": "131ff85b-0c25-491b-8e13-dde779ec0854", + "name": "admin", + "description": "", + "composite": true, + "composites": { + "client": { + "realm-management": [ + "impersonation", + "view-authorization", + "query-users", + "manage-realm", + "view-identity-providers", + "manage-authorization", + "view-clients", + "manage-events", + "query-clients", + "view-events", + "query-groups", + "realm-admin", + "manage-clients", + "query-realms", + "manage-identity-providers", + "manage-users", + "view-users", + "view-realm", + "create-client" + ], + "broker": [ + "read-token" + ], + "account": [ + "delete-account", + "manage-consent", + "view-consent", + "view-applications", + "view-groups", + "manage-account-links", + "view-profile", + "manage-account" + ] } }, - "clientRole" : true, - "containerId" : "dada0ae8-ee9f-415a-9685-42da7c563660", - "attributes" : { } - }, { - "id" : "c8c8f7dc-876b-4263-806f-3329f7cd5fd3", - "name" : "create-client", - "description" : "${role_create-client}", - "composite" : false, - "clientRole" : true, - "containerId" : "dada0ae8-ee9f-415a-9685-42da7c563660", - "attributes" : { } - }, { - "id" : "21b84f90-5a9a-4845-a7ba-bbd98ac0fcc4", - "name" : "view-clients", - "description" : "${role_view-clients}", - "composite" : true, - "composites" : { - "client" : { - "realm-management" : [ "query-clients" ] - } + "clientRole": false, + "containerId": "80a7e04b-a2b5-4891-a2d1-5ad4e915f983", + "attributes": {} + }, + { + "id": "e6d31555-6be6-4dee-bc6a-40a53108e4c2", + "name": "offline_access", + "description": "${role_offline-access}", + "composite": false, + "clientRole": false, + "containerId": "80a7e04b-a2b5-4891-a2d1-5ad4e915f983", + "attributes": {} + } + ], + "client": { + "realm-management": [ + { + "id": "1955bd12-5f86-4a74-b130-d68a8ef6f0ee", + "name": "impersonation", + "description": "${role_impersonation}", + "composite": false, + "clientRole": true, + 
"containerId": "dada0ae8-ee9f-415a-9685-42da7c563660", + "attributes": {} }, - "clientRole" : true, - "containerId" : "dada0ae8-ee9f-415a-9685-42da7c563660", - "attributes" : { } - }, { - "id" : "6fd64c94-d663-4501-ad77-0dcf8887d434", - "name" : "manage-events", - "description" : "${role_manage-events}", - "composite" : false, - "clientRole" : true, - "containerId" : "dada0ae8-ee9f-415a-9685-42da7c563660", - "attributes" : { } - }, { - "id" : "b321927a-023c-4d2a-99ad-24baf7ff6d83", - "name" : "query-clients", - "description" : "${role_query-clients}", - "composite" : false, - "clientRole" : true, - "containerId" : "dada0ae8-ee9f-415a-9685-42da7c563660", - "attributes" : { } - }, { - "id" : "2fc21160-78de-457b-8594-e5c76cde1d5e", - "name" : "view-events", - "description" : "${role_view-events}", - "composite" : false, - "clientRole" : true, - "containerId" : "dada0ae8-ee9f-415a-9685-42da7c563660", - "attributes" : { } - } ], - "test" : [ ], - "security-admin-console" : [ ], - "admin-cli" : [ ], - "account-console" : [ ], - "broker" : [ { - "id" : "07ee59b5-dca6-48fb-83d4-2994ef02850e", - "name" : "read-token", - "description" : "${role_read-token}", - "composite" : false, - "clientRole" : true, - "containerId" : "b57d62bb-77ff-42bd-b8ff-381c7288f327", - "attributes" : { } - } ], - "account" : [ { - "id" : "17d2f811-7bdf-4c73-83b4-1037001797b8", - "name" : "view-applications", - "description" : "${role_view-applications}", - "composite" : false, - "clientRole" : true, - "containerId" : "77f8127a-261e-4cd8-a77d-b74a389f7fd4", - "attributes" : { } - }, { - "id" : "d1ff44f9-419e-42fd-98e8-1add1169a972", - "name" : "delete-account", - "description" : "${role_delete-account}", - "composite" : false, - "clientRole" : true, - "containerId" : "77f8127a-261e-4cd8-a77d-b74a389f7fd4", - "attributes" : { } - }, { - "id" : "14c23a18-ae2d-43c9-b0c0-aaf6e0c7f5b0", - "name" : "manage-account-links", - "description" : "${role_manage-account-links}", - "composite" : false, - 
"clientRole" : true, - "containerId" : "77f8127a-261e-4cd8-a77d-b74a389f7fd4", - "attributes" : { } - }, { - "id" : "6fbe58af-d2fe-4d66-95fe-a2e8a818cb55", - "name" : "view-profile", - "description" : "${role_view-profile}", - "composite" : false, - "clientRole" : true, - "containerId" : "77f8127a-261e-4cd8-a77d-b74a389f7fd4", - "attributes" : { } - }, { - "id" : "bdfd02bc-6f6a-47d2-82bc-0ca52d78ff48", - "name" : "manage-consent", - "description" : "${role_manage-consent}", - "composite" : true, - "composites" : { - "client" : { - "account" : [ "view-consent" ] - } + { + "id": "1109c350-9ab1-426c-9876-ef67d4310f35", + "name": "view-authorization", + "description": "${role_view-authorization}", + "composite": false, + "clientRole": true, + "containerId": "dada0ae8-ee9f-415a-9685-42da7c563660", + "attributes": {} }, - "clientRole" : true, - "containerId" : "77f8127a-261e-4cd8-a77d-b74a389f7fd4", - "attributes" : { } - }, { - "id" : "782f3b0c-a17b-4a87-988b-1a711401f3b0", - "name" : "manage-account", - "description" : "${role_manage-account}", - "composite" : true, - "composites" : { - "client" : { - "account" : [ "manage-account-links" ] - } + { + "id": "980c3fd3-1ae3-4b8f-9a00-d764c939035f", + "name": "query-users", + "description": "${role_query-users}", + "composite": false, + "clientRole": true, + "containerId": "dada0ae8-ee9f-415a-9685-42da7c563660", + "attributes": {} + }, + { + "id": "5363e601-0f9d-4633-a8c8-28cb0f859b7b", + "name": "query-groups", + "description": "${role_query-groups}", + "composite": false, + "clientRole": true, + "containerId": "dada0ae8-ee9f-415a-9685-42da7c563660", + "attributes": {} + }, + { + "id": "59aa7992-ad78-48db-868a-25d6e1d7db50", + "name": "realm-admin", + "description": "${role_realm-admin}", + "composite": true, + "composites": { + "client": { + "realm-management": [ + "impersonation", + "view-authorization", + "query-users", + "query-groups", + "manage-clients", + "manage-realm", + "view-identity-providers", + 
"query-realms", + "manage-authorization", + "manage-identity-providers", + "manage-users", + "view-users", + "view-realm", + "create-client", + "view-clients", + "manage-events", + "query-clients", + "view-events" + ] + } + }, + "clientRole": true, + "containerId": "dada0ae8-ee9f-415a-9685-42da7c563660", + "attributes": {} + }, + { + "id": "112f53c2-897d-4c01-81db-b8dc10c5b995", + "name": "manage-clients", + "description": "${role_manage-clients}", + "composite": false, + "clientRole": true, + "containerId": "dada0ae8-ee9f-415a-9685-42da7c563660", + "attributes": {} + }, + { + "id": "c7f57bbd-ef32-4a64-9888-7b8abd90777a", + "name": "manage-realm", + "description": "${role_manage-realm}", + "composite": false, + "clientRole": true, + "containerId": "dada0ae8-ee9f-415a-9685-42da7c563660", + "attributes": {} + }, + { + "id": "8885dac8-0af3-45af-94ce-eff5e801bb80", + "name": "view-identity-providers", + "description": "${role_view-identity-providers}", + "composite": false, + "clientRole": true, + "containerId": "dada0ae8-ee9f-415a-9685-42da7c563660", + "attributes": {} + }, + { + "id": "2673346c-b0ef-4e01-8a90-be03866093af", + "name": "manage-authorization", + "description": "${role_manage-authorization}", + "composite": false, + "clientRole": true, + "containerId": "dada0ae8-ee9f-415a-9685-42da7c563660", + "attributes": {} + }, + { + "id": "b7182885-9e57-445f-8dae-17c16eb31b5d", + "name": "manage-identity-providers", + "description": "${role_manage-identity-providers}", + "composite": false, + "clientRole": true, + "containerId": "dada0ae8-ee9f-415a-9685-42da7c563660", + "attributes": {} + }, + { + "id": "ba7bfe0c-cb07-4a47-b92c-b8132b57e181", + "name": "manage-users", + "description": "${role_manage-users}", + "composite": false, + "clientRole": true, + "containerId": "dada0ae8-ee9f-415a-9685-42da7c563660", + "attributes": {} }, - "clientRole" : true, - "containerId" : "77f8127a-261e-4cd8-a77d-b74a389f7fd4", - "attributes" : { } - }, { - "id" : 
"8a3bfe15-66d9-4f3d-83ac-801d682d42b0", - "name" : "view-consent", - "description" : "${role_view-consent}", - "composite" : false, - "clientRole" : true, - "containerId" : "77f8127a-261e-4cd8-a77d-b74a389f7fd4", - "attributes" : { } - } ] + { + "id": "13a8f0fc-647d-4bfe-b525-73956898e550", + "name": "query-realms", + "description": "${role_query-realms}", + "composite": false, + "clientRole": true, + "containerId": "dada0ae8-ee9f-415a-9685-42da7c563660", + "attributes": {} + }, + { + "id": "ef4c57dc-78c2-4f9a-8d2b-0e97d46fc842", + "name": "view-realm", + "description": "${role_view-realm}", + "composite": false, + "clientRole": true, + "containerId": "dada0ae8-ee9f-415a-9685-42da7c563660", + "attributes": {} + }, + { + "id": "2875da34-006c-4b7f-bfc8-9ae8e46af3a2", + "name": "view-users", + "description": "${role_view-users}", + "composite": true, + "composites": { + "client": { + "realm-management": [ + "query-users", + "query-groups" + ] + } + }, + "clientRole": true, + "containerId": "dada0ae8-ee9f-415a-9685-42da7c563660", + "attributes": {} + }, + { + "id": "c8c8f7dc-876b-4263-806f-3329f7cd5fd3", + "name": "create-client", + "description": "${role_create-client}", + "composite": false, + "clientRole": true, + "containerId": "dada0ae8-ee9f-415a-9685-42da7c563660", + "attributes": {} + }, + { + "id": "21b84f90-5a9a-4845-a7ba-bbd98ac0fcc4", + "name": "view-clients", + "description": "${role_view-clients}", + "composite": true, + "composites": { + "client": { + "realm-management": [ + "query-clients" + ] + } + }, + "clientRole": true, + "containerId": "dada0ae8-ee9f-415a-9685-42da7c563660", + "attributes": {} + }, + { + "id": "6fd64c94-d663-4501-ad77-0dcf8887d434", + "name": "manage-events", + "description": "${role_manage-events}", + "composite": false, + "clientRole": true, + "containerId": "dada0ae8-ee9f-415a-9685-42da7c563660", + "attributes": {} + }, + { + "id": "b321927a-023c-4d2a-99ad-24baf7ff6d83", + "name": "query-clients", + "description": 
"${role_query-clients}", + "composite": false, + "clientRole": true, + "containerId": "dada0ae8-ee9f-415a-9685-42da7c563660", + "attributes": {} + }, + { + "id": "2fc21160-78de-457b-8594-e5c76cde1d5e", + "name": "view-events", + "description": "${role_view-events}", + "composite": false, + "clientRole": true, + "containerId": "dada0ae8-ee9f-415a-9685-42da7c563660", + "attributes": {} + } + ], + "test": [], + "security-admin-console": [], + "admin-cli": [], + "account-console": [], + "broker": [ + { + "id": "07ee59b5-dca6-48fb-83d4-2994ef02850e", + "name": "read-token", + "description": "${role_read-token}", + "composite": false, + "clientRole": true, + "containerId": "b57d62bb-77ff-42bd-b8ff-381c7288f327", + "attributes": {} + } + ], + "account": [ + { + "id": "17d2f811-7bdf-4c73-83b4-1037001797b8", + "name": "view-applications", + "description": "${role_view-applications}", + "composite": false, + "clientRole": true, + "containerId": "77f8127a-261e-4cd8-a77d-b74a389f7fd4", + "attributes": {} + }, + { + "id": "f5918d56-bd4d-4035-8fa7-8622075ed690", + "name": "view-groups", + "description": "${role_view-groups}", + "composite": false, + "clientRole": true, + "containerId": "77f8127a-261e-4cd8-a77d-b74a389f7fd4", + "attributes": {} + }, + { + "id": "d1ff44f9-419e-42fd-98e8-1add1169a972", + "name": "delete-account", + "description": "${role_delete-account}", + "composite": false, + "clientRole": true, + "containerId": "77f8127a-261e-4cd8-a77d-b74a389f7fd4", + "attributes": {} + }, + { + "id": "14c23a18-ae2d-43c9-b0c0-aaf6e0c7f5b0", + "name": "manage-account-links", + "description": "${role_manage-account-links}", + "composite": false, + "clientRole": true, + "containerId": "77f8127a-261e-4cd8-a77d-b74a389f7fd4", + "attributes": {} + }, + { + "id": "6fbe58af-d2fe-4d66-95fe-a2e8a818cb55", + "name": "view-profile", + "description": "${role_view-profile}", + "composite": false, + "clientRole": true, + "containerId": "77f8127a-261e-4cd8-a77d-b74a389f7fd4", + "attributes": 
{} + }, + { + "id": "bdfd02bc-6f6a-47d2-82bc-0ca52d78ff48", + "name": "manage-consent", + "description": "${role_manage-consent}", + "composite": true, + "composites": { + "client": { + "account": [ + "view-consent" + ] + } + }, + "clientRole": true, + "containerId": "77f8127a-261e-4cd8-a77d-b74a389f7fd4", + "attributes": {} + }, + { + "id": "782f3b0c-a17b-4a87-988b-1a711401f3b0", + "name": "manage-account", + "description": "${role_manage-account}", + "composite": true, + "composites": { + "client": { + "account": [ + "manage-account-links" + ] + } + }, + "clientRole": true, + "containerId": "77f8127a-261e-4cd8-a77d-b74a389f7fd4", + "attributes": {} + }, + { + "id": "8a3bfe15-66d9-4f3d-83ac-801d682d42b0", + "name": "view-consent", + "description": "${role_view-consent}", + "composite": false, + "clientRole": true, + "containerId": "77f8127a-261e-4cd8-a77d-b74a389f7fd4", + "attributes": {} + } + ] } }, "groups" : [ { @@ -409,7 +533,7 @@ } ], "disableableCredentialTypes" : [ ], "requiredActions" : [ ], - "realmRoles" : [ "default-roles-test" ], + "realmRoles" : [ "default-roles-test", "admin" ], "notBefore" : 0, "groups" : [ "/admins" ] }, { diff --git a/conf/solr/schema.xml b/conf/solr/schema.xml index 2aed50e9998..f4121de97c1 100644 --- a/conf/solr/schema.xml +++ b/conf/solr/schema.xml @@ -167,6 +167,8 @@ + + @@ -201,6 +203,8 @@ + + @@ -234,6 +238,7 @@ + - 6.4 + 6.5 17 UTF-8 diff --git a/pom.xml b/pom.xml index 5ecbd7059c1..cb16f16c229 100644 --- a/pom.xml +++ b/pom.xml @@ -51,6 +51,16 @@ org.apache.abdera abdera-core 1.1.3 + + + org.apache.geronimo.specs + geronimo-stax-api_1.0_spec + + + org.apache.james + apache-mime4j-core + + org.apache.abdera @@ -125,18 +135,36 @@ io.gdcc sword2-server 2.0.0 + + + xml-apis + xml-apis + + org.apache.abdera abdera-core + + + org.apache.geronimo.specs + geronimo-stax-api_1.0_spec + + org.apache.abdera abdera-i18n + + + org.apache.geronimo.specs + geronimo-stax-api_1.0_spec + + @@ -247,7 +275,7 @@ org.eclipse.parsson 
jakarta.json - provided + test @@ -473,6 +501,16 @@ com.github.ben-manes.caffeine caffeine 3.1.8 + + + javax.xml.stream + stax-api + + + stax + stax-api + + @@ -559,6 +597,12 @@ org.apache.tika tika-parsers-standard-package ${tika.version} + + + xml-apis + xml-apis + + diff --git a/src/main/java/edu/harvard/iq/dataverse/CustomQuestion.java b/src/main/java/edu/harvard/iq/dataverse/CustomQuestion.java index 2cb6f27c3e4..d880da5b4a8 100644 --- a/src/main/java/edu/harvard/iq/dataverse/CustomQuestion.java +++ b/src/main/java/edu/harvard/iq/dataverse/CustomQuestion.java @@ -2,7 +2,7 @@ import java.io.Serializable; import java.util.List; import jakarta.persistence.*; -import org.hibernate.validator.constraints.NotBlank; +import jakarta.validation.constraints.NotBlank; /** * @@ -41,7 +41,7 @@ public void setId(Long id) { private String questionType; @NotBlank(message = "{custom.questiontext}") - @Column( nullable = false ) + @Column( nullable = false, columnDefinition = "TEXT") private String questionString; private boolean required; diff --git a/src/main/java/edu/harvard/iq/dataverse/DataFileServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/DataFileServiceBean.java index 98ac8ff387f..937f5693511 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DataFileServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/DataFileServiceBean.java @@ -1407,8 +1407,7 @@ public UploadSessionQuotaLimit getUploadSessionQuotaLimit(DvObjectContainer pare } public boolean isInReleasedVersion(Long id) { - Query query = em.createQuery("SELECT fm.id FROM FileMetadata fm, DvObject dvo WHERE fm.datasetVersion.id=(SELECT dv.id FROM DatasetVersion dv WHERE dv.dataset.id=dvo.owner.id and dv.versionState=edu.harvard.iq.dataverse.DatasetVersion.VersionState.RELEASED ORDER BY dv.versionNumber DESC, dv.minorVersionNumber DESC LIMIT 1) AND dvo.id=fm.dataFile.id AND fm.dataFile.id=:fid"); - query.setParameter("fid", id); + Query query = em.createNativeQuery("SELECT fm.id FROM filemetadata fm 
WHERE fm.datasetversion_id=(SELECT dv.id FROM datasetversion dv, dvobject dvo WHERE dv.dataset_id=dvo.owner_id AND dv.versionState='RELEASED' and dvo.id=" + id + " ORDER BY dv.versionNumber DESC, dv.minorVersionNumber DESC LIMIT 1) AND fm.datafile_id=" + id); try { query.getSingleResult(); diff --git a/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java b/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java index 8522f2733c7..33a093c8044 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java +++ b/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java @@ -101,6 +101,7 @@ import jakarta.faces.view.ViewScoped; import jakarta.inject.Inject; import jakarta.inject.Named; +import jakarta.persistence.OptimisticLockException; import org.apache.commons.lang3.StringUtils; import org.primefaces.event.FileUploadEvent; @@ -1935,13 +1936,13 @@ public void updateOwnerDataverse() { if (selectedHostDataverse != null && selectedHostDataverse.getId() != null) { ownerId = selectedHostDataverse.getId(); dataset.setOwner(selectedHostDataverse); - logger.info("New host dataverse id: "+ownerId); + logger.info("New host dataverse id: " + ownerId); // discard the dataset already created //If a global ID was already assigned, as is true for direct upload, keep it (if files were already uploaded, they are at the path corresponding to the existing global id) GlobalId gid = dataset.getGlobalId(); dataset = new Dataset(); - if(gid!=null) { - dataset.setGlobalId(gid); + if (gid != null) { + dataset.setGlobalId(gid); } // initiate from scratch: (isolate the creation of a new dataset in its own method?) 
@@ -2287,8 +2288,17 @@ private String init(boolean initFull) { JsfHelper.addWarningMessage(message); } } + if(isAnonymizedAccess()){ + dataverseHeaderFragment.setBreadcrumbs(new ArrayList<>()); + } return null; } + + public void viewActionInitBreadcrumbs(){ + if(!isAnonymizedAccess()){ + dataverseHeaderFragment.initBreadcrumbs(dataset); + } + } private void displayWorkflowComments() { List comments = workingVersion.getWorkflowComments(); @@ -2888,6 +2898,9 @@ private String releaseDataset(boolean minor) { // the lock info system. JsfHelper.addErrorMessage(ex.getLocalizedMessage()); } + if(ex.getCause()!=null && ex.getCause() instanceof OptimisticLockException) { + JsfHelper.addErrorMessage(BundleUtil.getStringFromBundle("dataset.message.parallelPublishError")); + } logger.severe(ex.getMessage()); } @@ -4002,6 +4015,10 @@ public String save() { Throwable cause = ex; while (cause.getCause()!= null) { cause = cause.getCause(); + if (cause != null && cause instanceof OptimisticLockException) { + JsfHelper.addErrorMessage(BundleUtil.getStringFromBundle("dataset.message.parallelUpdateError")); + return null; + } error.append(cause).append(" "); error.append(cause.getMessage()).append(" "); } @@ -4011,6 +4028,15 @@ public String save() { } catch (CommandException ex) { //FacesContext.getCurrentInstance().addMessage(null, new FacesMessage(FacesMessage.SEVERITY_ERROR, "Dataset Save Failed", " - " + ex.toString())); logger.log(Level.SEVERE, "CommandException, when attempting to update the dataset: " + ex.getMessage(), ex); + Throwable cause = ex; + while (cause.getCause()!= null) { + cause = cause.getCause(); + logger.info("Cause is: " + cause.getClass().getName() + ", Message: " + cause.getMessage()); + if (cause != null && cause instanceof OptimisticLockException) { + JsfHelper.addErrorMessage(BundleUtil.getStringFromBundle("dataset.message.parallelUpdateError")); + return null; + } + } populateDatasetUpdateFailureMessage(); return returnToDraftVersion(); } diff --git 
a/src/main/java/edu/harvard/iq/dataverse/DatasetVersion.java b/src/main/java/edu/harvard/iq/dataverse/DatasetVersion.java index ac5923b95bf..a7bbc7c3ad4 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DatasetVersion.java +++ b/src/main/java/edu/harvard/iq/dataverse/DatasetVersion.java @@ -102,6 +102,10 @@ public int compare(DatasetVersion o1, DatasetVersion o2) { } } }; + public static final JsonObjectBuilder compareVersions(DatasetVersion originalVersion, DatasetVersion newVersion) { + DatasetVersionDifference diff = new DatasetVersionDifference(newVersion, originalVersion); + return diff.compareVersionsAsJson(); + } // TODO: Determine the UI implications of various version states //IMPORTANT: If you add a new value to this enum, you will also have to modify the diff --git a/src/main/java/edu/harvard/iq/dataverse/DatasetVersionDifference.java b/src/main/java/edu/harvard/iq/dataverse/DatasetVersionDifference.java index c32f49e985e..c5d6c31386c 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DatasetVersionDifference.java +++ b/src/main/java/edu/harvard/iq/dataverse/DatasetVersionDifference.java @@ -5,16 +5,24 @@ import edu.harvard.iq.dataverse.datavariable.VariableMetadataUtil; import edu.harvard.iq.dataverse.util.StringUtil; +import java.text.SimpleDateFormat; import java.util.ArrayList; import java.util.Collections; import java.util.List; +import java.util.Map; +import java.util.Map.Entry; import java.util.Set; import java.util.logging.Logger; +import edu.harvard.iq.dataverse.util.json.NullSafeJsonBuilder; +import jakarta.json.Json; +import jakarta.json.JsonArrayBuilder; +import jakarta.json.JsonObjectBuilder; import org.apache.commons.lang3.StringUtils; import edu.harvard.iq.dataverse.util.BundleUtil; import java.util.Arrays; import java.util.Date; +import java.util.HashMap; import java.util.HashSet; import java.util.Iterator; import java.util.LinkedList; @@ -34,14 +42,13 @@ public final class DatasetVersionDifference { private List addedFiles = new 
ArrayList<>(); private List removedFiles = new ArrayList<>(); private List changedFileMetadata = new ArrayList<>(); + private Map>> changedFileMetadataDiff = new HashMap<>(); private List changedVariableMetadata = new ArrayList<>(); private List replacedFiles = new ArrayList<>(); private List changedTermsAccess = new ArrayList<>(); private List summaryDataForNote = new ArrayList<>(); private List blockDataForNote = new ArrayList<>(); - private VariableMetadataUtil variableMetadataUtil; - private List differenceSummaryGroups = new ArrayList<>(); public List getDifferenceSummaryGroups() { @@ -106,50 +113,70 @@ public DatasetVersionDifference(DatasetVersion newVersion, DatasetVersion origin addToSummary(null, dsfn); } } - - // TODO: ? - // It looks like we are going through the filemetadatas in both versions, - // *sequentially* (i.e. at the cost of O(N*M)), to select the lists of - // changed, deleted and added files between the 2 versions... But why - // are we doing it, if we are doing virtually the same thing inside - // the initDatasetFilesDifferenceList(), below - but in a more efficient - // way (sorting both lists, then goint through them in parallel, at the - // cost of (N+M) max.? - // -- 4.6 Nov. 
2016 - + long startTime = System.currentTimeMillis(); + Map originalFileMetadataMap = new HashMap<>(); + Map previousIDtoFileMetadataMap = new HashMap<>(); for (FileMetadata fmdo : originalVersion.getFileMetadatas()) { - boolean deleted = true; - for (FileMetadata fmdn : newVersion.getFileMetadatas()) { - if (fmdo.getDataFile().equals(fmdn.getDataFile())) { - deleted = false; - if (!compareFileMetadatas(fmdo, fmdn)) { - changedFileMetadata.add(fmdo); - changedFileMetadata.add(fmdn); - } - if (!variableMetadataUtil.compareVariableMetadata(fmdo,fmdn) || !compareVarGroup(fmdo, fmdn)) { - changedVariableMetadata.add(fmdo); - changedVariableMetadata.add(fmdn); - } - break; - } - } - if (deleted) { - removedFiles.add(fmdo); - } + originalFileMetadataMap.put(fmdo.getDataFile().getId(), fmdo); } + for (FileMetadata fmdn : newVersion.getFileMetadatas()) { - boolean added = true; - for (FileMetadata fmdo : originalVersion.getFileMetadatas()) { - if (fmdo.getDataFile().equals(fmdn.getDataFile())) { - added = false; - break; + DataFile ndf = fmdn.getDataFile(); + Long id = ndf.getId(); + FileMetadata fmdo = originalFileMetadataMap.get(id); + //If this file was in the original version + if(fmdo!= null) { + //Check for differences + Map> fileMetadataDiff = compareFileMetadatas(fmdo, fmdn); + if (!fileMetadataDiff.isEmpty()) { + changedFileMetadata.add(fmdo); + changedFileMetadata.add(fmdn); + // TODO: find a better key for the map. 
needs to be something that doesn't change + changedFileMetadataDiff.put(fmdo, fileMetadataDiff); + } + if (!VariableMetadataUtil.compareVariableMetadata(fmdo,fmdn) || !compareVarGroup(fmdo, fmdn)) { + changedVariableMetadata.add(fmdo); + changedVariableMetadata.add(fmdn); + } + // And drop it from the list since it can't be a deleted file + originalFileMetadataMap.remove(id); + } else { + //It wasn't in the original version + Long prevID = ndf.getPreviousDataFileId(); + //It might be a replacement file or an added file + if(prevID != null) { + //Add it to a map so we can check later to see if it's a replacement + previousIDtoFileMetadataMap.put(prevID, fmdn); + } else { + //Otherwise make it an added file now + addedFiles.add(fmdn); } } - if (added) { - addedFiles.add(fmdn); + } + //Finally check any remaining files from the original version that weren't in the new version' + for (Long removedId : originalFileMetadataMap.keySet()) { + //See if it has been replaced + FileMetadata replacingFmd = previousIDtoFileMetadataMap.get(removedId); + FileMetadata fmdRemoved = originalFileMetadataMap.get(removedId); + if (replacingFmd != null) { + //This is a replacement + replacedFiles.add(new FileMetadata[] { fmdRemoved, replacingFmd }); + //Drop if from the map + previousIDtoFileMetadataMap.remove(removedId); + } else { + //This is a removed file + removedFiles.add(fmdRemoved); } - } - getReplacedFiles(); + } + // Any fms left are not updating existing files and aren't replacing a file, but + // they are claiming a previous file id. 
That shouldn't be possible, but this will + // make sure they get listed in the difference if they do + for (Entry entry : previousIDtoFileMetadataMap.entrySet()) { + logger.warning("Previous file id claimed for a new file: fmd id: " + entry.getValue() + ", previous file id: " + entry.getKey()); + addedFiles.add(entry.getValue()); + } + + logger.fine("Main difference loop execution time: " + (System.currentTimeMillis() - startTime) + " ms"); initDatasetFilesDifferencesList(); //Sort within blocks by datasetfieldtype display order @@ -183,294 +210,62 @@ public DatasetVersionDifference(DatasetVersion newVersion, DatasetVersion origin getTermsDifferences(); } - private void getReplacedFiles() { - if (addedFiles.isEmpty() || removedFiles.isEmpty()) { - return; - } - List addedToReplaced = new ArrayList<>(); - List removedToReplaced = new ArrayList<>(); - for (FileMetadata added : addedFiles) { - DataFile addedDF = added.getDataFile(); - Long replacedId = addedDF.getPreviousDataFileId(); - if (added.getDataFile().getPreviousDataFileId() != null){ - } - for (FileMetadata removed : removedFiles) { - DataFile test = removed.getDataFile(); - if (test.getId().equals(replacedId)) { - addedToReplaced.add(added); - removedToReplaced.add(removed); - FileMetadata[] replacedArray = new FileMetadata[2]; - replacedArray[0] = removed; - replacedArray[1] = added; - replacedFiles.add(replacedArray); - } - } - } - if(addedToReplaced.isEmpty()){ - } else{ - addedToReplaced.stream().forEach((delete) -> { - addedFiles.remove(delete); - }); - removedToReplaced.stream().forEach((delete) -> { - removedFiles.remove(delete); - }); - } - } + private void getTermsDifferences() { - changedTermsAccess = new ArrayList<>(); - if (newVersion.getTermsOfUseAndAccess() != null && originalVersion.getTermsOfUseAndAccess() != null) { - if (!StringUtil.nullToEmpty(newVersion.getTermsOfUseAndAccess().getTermsOfUse()).equals(StringUtil.nullToEmpty(originalVersion.getTermsOfUseAndAccess().getTermsOfUse()))) { - 
String diffLabel = BundleUtil.getStringFromBundle("file.dataFilesTab.terms.list.termsOfUse.header"); - changedTermsAccess = addToTermsChangedList(changedTermsAccess, diffLabel, StringUtil.nullToEmpty(originalVersion.getTermsOfUseAndAccess().getTermsOfUse()), StringUtil.nullToEmpty(newVersion.getTermsOfUseAndAccess().getTermsOfUse())); - } - if (!StringUtil.nullToEmpty(newVersion.getTermsOfUseAndAccess().getConfidentialityDeclaration()).equals(StringUtil.nullToEmpty(originalVersion.getTermsOfUseAndAccess().getConfidentialityDeclaration()))) { - String diffLabel = BundleUtil.getStringFromBundle("file.dataFilesTab.terms.list.termsOfUse.addInfo.declaration"); - changedTermsAccess = addToTermsChangedList(changedTermsAccess, diffLabel, StringUtil.nullToEmpty(originalVersion.getTermsOfUseAndAccess().getConfidentialityDeclaration()), - StringUtil.nullToEmpty(newVersion.getTermsOfUseAndAccess().getConfidentialityDeclaration())); - } - if (!StringUtil.nullToEmpty(newVersion.getTermsOfUseAndAccess().getSpecialPermissions()).equals(StringUtil.nullToEmpty(originalVersion.getTermsOfUseAndAccess().getSpecialPermissions()))) { - String diffLabel = BundleUtil.getStringFromBundle("file.dataFilesTab.terms.list.termsOfUse.addInfo.permissions"); - changedTermsAccess = addToTermsChangedList(changedTermsAccess, diffLabel, StringUtil.nullToEmpty(originalVersion.getTermsOfUseAndAccess().getSpecialPermissions()), - StringUtil.nullToEmpty(newVersion.getTermsOfUseAndAccess().getSpecialPermissions())); - } - if (!StringUtil.nullToEmpty(newVersion.getTermsOfUseAndAccess().getRestrictions()).equals(StringUtil.nullToEmpty(originalVersion.getTermsOfUseAndAccess().getRestrictions()))) { - String diffLabel = BundleUtil.getStringFromBundle("file.dataFilesTab.terms.list.termsOfUse.addInfo.restrictions"); - changedTermsAccess = addToTermsChangedList(changedTermsAccess, diffLabel, StringUtil.nullToEmpty(originalVersion.getTermsOfUseAndAccess().getRestrictions()), - 
StringUtil.nullToEmpty(newVersion.getTermsOfUseAndAccess().getRestrictions())); - - } - if (!StringUtil.nullToEmpty(newVersion.getTermsOfUseAndAccess().getCitationRequirements()).equals(StringUtil.nullToEmpty(originalVersion.getTermsOfUseAndAccess().getCitationRequirements()))) { - String diffLabel = BundleUtil.getStringFromBundle("file.dataFilesTab.terms.list.termsOfUse.addInfo.citationRequirements"); - changedTermsAccess = addToTermsChangedList(changedTermsAccess, diffLabel, StringUtil.nullToEmpty(originalVersion.getTermsOfUseAndAccess().getCitationRequirements()), - StringUtil.nullToEmpty(newVersion.getTermsOfUseAndAccess().getCitationRequirements())); - } - if (!StringUtil.nullToEmpty(newVersion.getTermsOfUseAndAccess().getDepositorRequirements()).equals(StringUtil.nullToEmpty(originalVersion.getTermsOfUseAndAccess().getDepositorRequirements()))) { - String diffLabel = BundleUtil.getStringFromBundle("file.dataFilesTab.terms.list.termsOfUse.addInfo.depositorRequirements"); - changedTermsAccess = addToTermsChangedList(changedTermsAccess, diffLabel, StringUtil.nullToEmpty(originalVersion.getTermsOfUseAndAccess().getDepositorRequirements()), - StringUtil.nullToEmpty(newVersion.getTermsOfUseAndAccess().getDepositorRequirements())); - } - if (!StringUtil.nullToEmpty(newVersion.getTermsOfUseAndAccess().getConditions()).equals(StringUtil.nullToEmpty(originalVersion.getTermsOfUseAndAccess().getConditions()))) { - String diffLabel = BundleUtil.getStringFromBundle("file.dataFilesTab.terms.list.termsOfUse.addInfo.conditions"); - changedTermsAccess = addToTermsChangedList(changedTermsAccess, diffLabel, StringUtil.nullToEmpty(originalVersion.getTermsOfUseAndAccess().getConditions()), - StringUtil.nullToEmpty(newVersion.getTermsOfUseAndAccess().getConditions())); - } - if (!StringUtil.nullToEmpty(newVersion.getTermsOfUseAndAccess().getDisclaimer()).equals(StringUtil.nullToEmpty(originalVersion.getTermsOfUseAndAccess().getDisclaimer()))) { - String diffLabel = 
BundleUtil.getStringFromBundle("file.dataFilesTab.terms.list.termsOfUse.addInfo.disclaimer"); - changedTermsAccess = addToTermsChangedList(changedTermsAccess, diffLabel, StringUtil.nullToEmpty(originalVersion.getTermsOfUseAndAccess().getDisclaimer()), StringUtil.nullToEmpty(newVersion.getTermsOfUseAndAccess().getDisclaimer())); - } - - if (!StringUtil.nullToEmpty(newVersion.getTermsOfUseAndAccess().getTermsOfAccess()).equals(StringUtil.nullToEmpty(originalVersion.getTermsOfUseAndAccess().getTermsOfAccess()))) { - String diffLabel = BundleUtil.getStringFromBundle("file.dataFilesTab.terms.list.termsOfAccess.termsOfsAccess"); - changedTermsAccess = addToTermsChangedList(changedTermsAccess, diffLabel, StringUtil.nullToEmpty(originalVersion.getTermsOfUseAndAccess().getTermsOfAccess()), - StringUtil.nullToEmpty(newVersion.getTermsOfUseAndAccess().getTermsOfAccess())); - } - if (!StringUtil.nullToEmpty(newVersion.getTermsOfUseAndAccess().getDataAccessPlace()).equals(StringUtil.nullToEmpty(originalVersion.getTermsOfUseAndAccess().getDataAccessPlace()))) { - String diffLabel = BundleUtil.getStringFromBundle("file.dataFilesTab.terms.list.termsOfAccess.addInfo.dataAccessPlace"); - changedTermsAccess = addToTermsChangedList(changedTermsAccess, diffLabel, StringUtil.nullToEmpty(originalVersion.getTermsOfUseAndAccess().getDataAccessPlace()), - StringUtil.nullToEmpty(newVersion.getTermsOfUseAndAccess().getDataAccessPlace())); - } - if (!StringUtil.nullToEmpty(newVersion.getTermsOfUseAndAccess().getOriginalArchive()).equals(StringUtil.nullToEmpty(originalVersion.getTermsOfUseAndAccess().getOriginalArchive()))) { - String diffLabel = BundleUtil.getStringFromBundle("file.dataFilesTab.terms.list.termsOfAccess.addInfo.originalArchive"); - changedTermsAccess = addToTermsChangedList(changedTermsAccess, diffLabel, StringUtil.nullToEmpty(originalVersion.getTermsOfUseAndAccess().getOriginalArchive()), - StringUtil.nullToEmpty(newVersion.getTermsOfUseAndAccess().getOriginalArchive())); - } 
- if (!StringUtil.nullToEmpty(newVersion.getTermsOfUseAndAccess().getAvailabilityStatus()).equals(StringUtil.nullToEmpty(originalVersion.getTermsOfUseAndAccess().getAvailabilityStatus()))) { - String diffLabel = BundleUtil.getStringFromBundle("file.dataFilesTab.terms.list.termsOfAccess.addInfo.availabilityStatus"); - changedTermsAccess = addToTermsChangedList(changedTermsAccess, diffLabel, StringUtil.nullToEmpty(originalVersion.getTermsOfUseAndAccess().getAvailabilityStatus()), - StringUtil.nullToEmpty(newVersion.getTermsOfUseAndAccess().getAvailabilityStatus())); - } - if (!StringUtil.nullToEmpty(newVersion.getTermsOfUseAndAccess().getContactForAccess()).equals(StringUtil.nullToEmpty(originalVersion.getTermsOfUseAndAccess().getContactForAccess()))) { - String diffLabel = BundleUtil.getStringFromBundle("file.dataFilesTab.terms.list.termsOfAccess.addInfo.contactForAccess"); - changedTermsAccess = addToTermsChangedList(changedTermsAccess, diffLabel, StringUtil.nullToEmpty(originalVersion.getTermsOfUseAndAccess().getContactForAccess()), - StringUtil.nullToEmpty(newVersion.getTermsOfUseAndAccess().getContactForAccess())); - } - if (!StringUtil.nullToEmpty(newVersion.getTermsOfUseAndAccess().getSizeOfCollection()).equals(StringUtil.nullToEmpty(originalVersion.getTermsOfUseAndAccess().getSizeOfCollection()))) { - String diffLabel = BundleUtil.getStringFromBundle("file.dataFilesTab.terms.list.termsOfAccess.addInfo.sizeOfCollection"); - changedTermsAccess = addToTermsChangedList(changedTermsAccess, diffLabel, StringUtil.nullToEmpty(originalVersion.getTermsOfUseAndAccess().getSizeOfCollection()), - StringUtil.nullToEmpty(newVersion.getTermsOfUseAndAccess().getSizeOfCollection())); - } - if (!StringUtil.nullToEmpty(newVersion.getTermsOfUseAndAccess().getStudyCompletion()).equals(StringUtil.nullToEmpty(originalVersion.getTermsOfUseAndAccess().getStudyCompletion()))) { - String diffLabel = 
BundleUtil.getStringFromBundle("file.dataFilesTab.terms.list.termsOfAccess.addInfo.studyCompletion"); - changedTermsAccess = addToTermsChangedList(changedTermsAccess, diffLabel, StringUtil.nullToEmpty(originalVersion.getTermsOfUseAndAccess().getStudyCompletion()), - StringUtil.nullToEmpty(newVersion.getTermsOfUseAndAccess().getStudyCompletion())); - } + TermsOfUseAndAccess originalTerms = originalVersion.getTermsOfUseAndAccess(); + if(originalTerms == null) { + originalTerms = new TermsOfUseAndAccess(); + } + // newTerms should never be null + TermsOfUseAndAccess newTerms = newVersion.getTermsOfUseAndAccess(); + if(newTerms == null) { + logger.warning("New version does not have TermsOfUseAndAccess"); + newTerms = new TermsOfUseAndAccess(); } - - if (newVersion.getTermsOfUseAndAccess() != null && originalVersion.getTermsOfUseAndAccess() == null) { - if (!StringUtil.nullToEmpty(newVersion.getTermsOfUseAndAccess().getTermsOfUse()).isEmpty()) { - String diffLabel = BundleUtil.getStringFromBundle("file.dataFilesTab.terms.list.termsOfUse.header"); - changedTermsAccess = addToTermsChangedList(changedTermsAccess, diffLabel, "", StringUtil.nullToEmpty(newVersion.getTermsOfUseAndAccess().getTermsOfUse())); - } - if (!StringUtil.nullToEmpty(newVersion.getTermsOfUseAndAccess().getConfidentialityDeclaration()).isEmpty()){ - String diffLabel = BundleUtil.getStringFromBundle("file.dataFilesTab.terms.list.termsOfUse.addInfo.declaration"); - changedTermsAccess = addToTermsChangedList(changedTermsAccess, diffLabel, "", - StringUtil.nullToEmpty(newVersion.getTermsOfUseAndAccess().getConfidentialityDeclaration())); - } - if (!StringUtil.nullToEmpty(newVersion.getTermsOfUseAndAccess().getSpecialPermissions()).isEmpty()){ - String diffLabel = BundleUtil.getStringFromBundle("file.dataFilesTab.terms.list.termsOfUse.addInfo.permissions"); - changedTermsAccess = addToTermsChangedList(changedTermsAccess, diffLabel, "", - 
StringUtil.nullToEmpty(newVersion.getTermsOfUseAndAccess().getSpecialPermissions())); - } - if (!StringUtil.nullToEmpty(newVersion.getTermsOfUseAndAccess().getRestrictions()).isEmpty()){ - String diffLabel = BundleUtil.getStringFromBundle("file.dataFilesTab.terms.list.termsOfUse.addInfo.restrictions"); - changedTermsAccess = addToTermsChangedList(changedTermsAccess, diffLabel, "", - StringUtil.nullToEmpty(newVersion.getTermsOfUseAndAccess().getRestrictions())); - } - if (!StringUtil.nullToEmpty(newVersion.getTermsOfUseAndAccess().getCitationRequirements()).isEmpty()){ - String diffLabel = BundleUtil.getStringFromBundle("file.dataFilesTab.terms.list.termsOfUse.addInfo.citationRequirements"); - changedTermsAccess = addToTermsChangedList(changedTermsAccess, diffLabel, "", - StringUtil.nullToEmpty(newVersion.getTermsOfUseAndAccess().getCitationRequirements())); - } - if (!StringUtil.nullToEmpty(newVersion.getTermsOfUseAndAccess().getDepositorRequirements()).isEmpty()){ - String diffLabel = BundleUtil.getStringFromBundle("file.dataFilesTab.terms.list.termsOfUse.addInfo.depositorRequirements"); - changedTermsAccess = addToTermsChangedList(changedTermsAccess, diffLabel, "", - StringUtil.nullToEmpty(newVersion.getTermsOfUseAndAccess().getDepositorRequirements())); - } - if (!StringUtil.nullToEmpty(newVersion.getTermsOfUseAndAccess().getConditions()).isEmpty()){ - String diffLabel = BundleUtil.getStringFromBundle("file.dataFilesTab.terms.list.termsOfUse.addInfo.conditions"); - changedTermsAccess = addToTermsChangedList(changedTermsAccess, diffLabel, "", - StringUtil.nullToEmpty(newVersion.getTermsOfUseAndAccess().getConditions())); - } - if (!StringUtil.nullToEmpty(newVersion.getTermsOfUseAndAccess().getDisclaimer()).isEmpty()){ - String diffLabel = BundleUtil.getStringFromBundle("file.dataFilesTab.terms.list.termsOfUse.addInfo.disclaimer"); - changedTermsAccess = addToTermsChangedList(changedTermsAccess, diffLabel, "", 
StringUtil.nullToEmpty(newVersion.getTermsOfUseAndAccess().getDisclaimer())); - } - if (!StringUtil.nullToEmpty(newVersion.getTermsOfUseAndAccess().getTermsOfAccess()).isEmpty()){ - String diffLabel = BundleUtil.getStringFromBundle("file.dataFilesTab.terms.list.termsOfAccess.termsOfsAccess"); - changedTermsAccess = addToTermsChangedList(changedTermsAccess, diffLabel, "", - StringUtil.nullToEmpty(newVersion.getTermsOfUseAndAccess().getTermsOfAccess())); - } - if (!StringUtil.nullToEmpty(newVersion.getTermsOfUseAndAccess().getDataAccessPlace()).isEmpty()){ - String diffLabel = BundleUtil.getStringFromBundle("file.dataFilesTab.terms.list.termsOfAccess.addInfo.dataAccessPlace"); - changedTermsAccess = addToTermsChangedList(changedTermsAccess, diffLabel, "", - StringUtil.nullToEmpty(newVersion.getTermsOfUseAndAccess().getDataAccessPlace())); - } - if (!StringUtil.nullToEmpty(newVersion.getTermsOfUseAndAccess().getOriginalArchive()).isEmpty()){ - String diffLabel = BundleUtil.getStringFromBundle("file.dataFilesTab.terms.list.termsOfAccess.addInfo.originalArchive"); - changedTermsAccess = addToTermsChangedList(changedTermsAccess, diffLabel, "", - StringUtil.nullToEmpty(newVersion.getTermsOfUseAndAccess().getOriginalArchive())); - } - if (!StringUtil.nullToEmpty(newVersion.getTermsOfUseAndAccess().getAvailabilityStatus()).isEmpty()){ - String diffLabel = BundleUtil.getStringFromBundle("file.dataFilesTab.terms.list.termsOfAccess.addInfo.availabilityStatus"); - changedTermsAccess = addToTermsChangedList(changedTermsAccess, diffLabel, "", - StringUtil.nullToEmpty(newVersion.getTermsOfUseAndAccess().getAvailabilityStatus())); - } - if (!StringUtil.nullToEmpty(newVersion.getTermsOfUseAndAccess().getContactForAccess()).isEmpty()){ - String diffLabel = BundleUtil.getStringFromBundle("file.dataFilesTab.terms.list.termsOfAccess.addInfo.contactForAccess"); - changedTermsAccess = addToTermsChangedList(changedTermsAccess, diffLabel, "", - 
StringUtil.nullToEmpty(newVersion.getTermsOfUseAndAccess().getContactForAccess())); - } - if (!StringUtil.nullToEmpty(newVersion.getTermsOfUseAndAccess().getSizeOfCollection()).isEmpty()){ - String diffLabel = BundleUtil.getStringFromBundle("file.dataFilesTab.terms.list.termsOfAccess.addInfo.sizeOfCollection"); - changedTermsAccess = addToTermsChangedList(changedTermsAccess, diffLabel, "", - StringUtil.nullToEmpty(newVersion.getTermsOfUseAndAccess().getSizeOfCollection())); - } - if (!StringUtil.nullToEmpty(newVersion.getTermsOfUseAndAccess().getStudyCompletion()).isEmpty()){ - String diffLabel = BundleUtil.getStringFromBundle("file.dataFilesTab.terms.list.termsOfAccess.addInfo.studyCompletion"); - changedTermsAccess = addToTermsChangedList(changedTermsAccess, diffLabel, "", - StringUtil.nullToEmpty(newVersion.getTermsOfUseAndAccess().getStudyCompletion())); - } - } - - if (newVersion.getTermsOfUseAndAccess() == null && originalVersion.getTermsOfUseAndAccess() != null) { - if (!StringUtil.nullToEmpty(originalVersion.getTermsOfUseAndAccess().getTermsOfUse()).isEmpty()) { - String diffLabel = BundleUtil.getStringFromBundle("file.dataFilesTab.terms.list.termsOfUse.header"); - changedTermsAccess = addToTermsChangedList(changedTermsAccess, diffLabel, StringUtil.nullToEmpty(originalVersion.getTermsOfUseAndAccess().getTermsOfUse()), ""); - } - if (!StringUtil.nullToEmpty(originalVersion.getTermsOfUseAndAccess().getConfidentialityDeclaration()).isEmpty()){ - String diffLabel = BundleUtil.getStringFromBundle("file.dataFilesTab.terms.list.termsOfUse.addInfo.declaration"); - changedTermsAccess = addToTermsChangedList(changedTermsAccess, diffLabel, - StringUtil.nullToEmpty(originalVersion.getTermsOfUseAndAccess().getConfidentialityDeclaration()), ""); - } - if (!StringUtil.nullToEmpty(originalVersion.getTermsOfUseAndAccess().getSpecialPermissions()).isEmpty()){ - String diffLabel = BundleUtil.getStringFromBundle("file.dataFilesTab.terms.list.termsOfUse.addInfo.permissions"); - 
changedTermsAccess = addToTermsChangedList(changedTermsAccess, diffLabel, - StringUtil.nullToEmpty(originalVersion.getTermsOfUseAndAccess().getSpecialPermissions()), ""); - } - if (!StringUtil.nullToEmpty(originalVersion.getTermsOfUseAndAccess().getRestrictions()).isEmpty()){ - String diffLabel = BundleUtil.getStringFromBundle("file.dataFilesTab.terms.list.termsOfUse.addInfo.restrictions"); - changedTermsAccess = addToTermsChangedList(changedTermsAccess, diffLabel, - StringUtil.nullToEmpty(originalVersion.getTermsOfUseAndAccess().getRestrictions()), ""); - } - if (!StringUtil.nullToEmpty(originalVersion.getTermsOfUseAndAccess().getCitationRequirements()).isEmpty()){ - String diffLabel = BundleUtil.getStringFromBundle("file.dataFilesTab.terms.list.termsOfUse.addInfo.citationRequirements"); - changedTermsAccess = addToTermsChangedList(changedTermsAccess, diffLabel, - StringUtil.nullToEmpty(originalVersion.getTermsOfUseAndAccess().getCitationRequirements()), ""); - } - if (!StringUtil.nullToEmpty(originalVersion.getTermsOfUseAndAccess().getDepositorRequirements()).isEmpty()){ - String diffLabel = BundleUtil.getStringFromBundle("file.dataFilesTab.terms.list.termsOfUse.addInfo.depositorRequirements"); - changedTermsAccess = addToTermsChangedList(changedTermsAccess, diffLabel, - StringUtil.nullToEmpty(originalVersion.getTermsOfUseAndAccess().getDepositorRequirements()), ""); - } - if (!StringUtil.nullToEmpty(originalVersion.getTermsOfUseAndAccess().getConditions()).isEmpty()){ - String diffLabel = BundleUtil.getStringFromBundle("file.dataFilesTab.terms.list.termsOfUse.addInfo.conditions"); - changedTermsAccess = addToTermsChangedList(changedTermsAccess, diffLabel, - StringUtil.nullToEmpty(originalVersion.getTermsOfUseAndAccess().getConditions()), ""); - } - if (!StringUtil.nullToEmpty(originalVersion.getTermsOfUseAndAccess().getDisclaimer()).isEmpty()){ - String diffLabel = BundleUtil.getStringFromBundle("file.dataFilesTab.terms.list.termsOfUse.addInfo.disclaimer"); - 
changedTermsAccess = addToTermsChangedList(changedTermsAccess, diffLabel, StringUtil.nullToEmpty(originalVersion.getTermsOfUseAndAccess().getDisclaimer()), ""); - } - if (!StringUtil.nullToEmpty(originalVersion.getTermsOfUseAndAccess().getTermsOfAccess()).isEmpty()){ - String diffLabel = BundleUtil.getStringFromBundle("file.dataFilesTab.terms.list.termsOfAccess.termsOfsAccess"); - changedTermsAccess = addToTermsChangedList(changedTermsAccess, diffLabel, - StringUtil.nullToEmpty(originalVersion.getTermsOfUseAndAccess().getTermsOfAccess()), ""); - } - if (!StringUtil.nullToEmpty(originalVersion.getTermsOfUseAndAccess().getDataAccessPlace()).isEmpty()){ - String diffLabel = BundleUtil.getStringFromBundle("file.dataFilesTab.terms.list.termsOfAccess.addInfo.dataAccessPlace"); - changedTermsAccess = addToTermsChangedList(changedTermsAccess, diffLabel, - StringUtil.nullToEmpty(originalVersion.getTermsOfUseAndAccess().getDataAccessPlace()), ""); - } - if (!StringUtil.nullToEmpty(originalVersion.getTermsOfUseAndAccess().getOriginalArchive()).isEmpty()){ - String diffLabel = BundleUtil.getStringFromBundle("file.dataFilesTab.terms.list.termsOfAccess.addInfo.originalArchive"); - changedTermsAccess = addToTermsChangedList(changedTermsAccess, diffLabel, - StringUtil.nullToEmpty(originalVersion.getTermsOfUseAndAccess().getOriginalArchive()), ""); - } - if (!StringUtil.nullToEmpty(originalVersion.getTermsOfUseAndAccess().getAvailabilityStatus()).isEmpty()){ - String diffLabel = BundleUtil.getStringFromBundle("file.dataFilesTab.terms.list.termsOfAccess.addInfo.availabilityStatus"); - changedTermsAccess = addToTermsChangedList(changedTermsAccess, diffLabel, - StringUtil.nullToEmpty(originalVersion.getTermsOfUseAndAccess().getAvailabilityStatus()), ""); - } - if (!StringUtil.nullToEmpty(originalVersion.getTermsOfUseAndAccess().getContactForAccess()).isEmpty()){ - String diffLabel = BundleUtil.getStringFromBundle("file.dataFilesTab.terms.list.termsOfAccess.addInfo.contactForAccess"); 
- changedTermsAccess = addToTermsChangedList(changedTermsAccess, diffLabel, - StringUtil.nullToEmpty(originalVersion.getTermsOfUseAndAccess().getContactForAccess()), ""); - } - if (!StringUtil.nullToEmpty(originalVersion.getTermsOfUseAndAccess().getSizeOfCollection()).isEmpty()){ - String diffLabel = BundleUtil.getStringFromBundle("file.dataFilesTab.terms.list.termsOfAccess.addInfo.sizeOfCollection"); - changedTermsAccess = addToTermsChangedList(changedTermsAccess, diffLabel, - StringUtil.nullToEmpty(originalVersion.getTermsOfUseAndAccess().getSizeOfCollection()), ""); - } - if (!StringUtil.nullToEmpty(originalVersion.getTermsOfUseAndAccess().getStudyCompletion()).isEmpty()){ - String diffLabel = BundleUtil.getStringFromBundle("file.dataFilesTab.terms.list.termsOfAccess.addInfo.studyCompletion"); - changedTermsAccess = addToTermsChangedList(changedTermsAccess, diffLabel, - StringUtil.nullToEmpty(originalVersion.getTermsOfUseAndAccess().getStudyCompletion()), ""); - } - } - } - - private DifferenceSummaryItem createSummaryItem(){ - return null; - } - - private List addToSummaryGroup(String displayName, DifferenceSummaryItem differenceSummaryItem){ - return null; + checkAndAddToChangeList(originalTerms.getTermsOfUse(), newTerms.getTermsOfUse(), + BundleUtil.getStringFromBundle("file.dataFilesTab.terms.list.termsOfUse.header")); + checkAndAddToChangeList(originalTerms.getConfidentialityDeclaration(), newTerms.getConfidentialityDeclaration(), + BundleUtil.getStringFromBundle("file.dataFilesTab.terms.list.termsOfUse.addInfo.declaration")); + checkAndAddToChangeList(originalTerms.getSpecialPermissions(), newTerms.getSpecialPermissions(), + BundleUtil.getStringFromBundle("file.dataFilesTab.terms.list.termsOfUse.addInfo.permissions")); + checkAndAddToChangeList(originalTerms.getRestrictions(), newTerms.getRestrictions(), + BundleUtil.getStringFromBundle("file.dataFilesTab.terms.list.termsOfUse.addInfo.restrictions")); + 
checkAndAddToChangeList(originalTerms.getCitationRequirements(), newTerms.getCitationRequirements(), + BundleUtil.getStringFromBundle("file.dataFilesTab.terms.list.termsOfUse.addInfo.citationRequirements")); + checkAndAddToChangeList(originalTerms.getDepositorRequirements(), newTerms.getDepositorRequirements(), + BundleUtil.getStringFromBundle("file.dataFilesTab.terms.list.termsOfUse.addInfo.depositorRequirements")); + checkAndAddToChangeList(originalTerms.getConditions(), newTerms.getConditions(), + BundleUtil.getStringFromBundle("file.dataFilesTab.terms.list.termsOfUse.addInfo.conditions")); + checkAndAddToChangeList(originalTerms.getDisclaimer(), newTerms.getDisclaimer(), + BundleUtil.getStringFromBundle("file.dataFilesTab.terms.list.termsOfUse.addInfo.disclaimer")); + checkAndAddToChangeList(originalTerms.getTermsOfAccess(), newTerms.getTermsOfAccess(), + BundleUtil.getStringFromBundle("file.dataFilesTab.terms.list.termsOfAccess.termsOfsAccess")); + checkAndAddToChangeList(originalTerms.getDataAccessPlace(), newTerms.getDataAccessPlace(), + BundleUtil.getStringFromBundle("file.dataFilesTab.terms.list.termsOfAccess.addInfo.dataAccessPlace")); + checkAndAddToChangeList(originalTerms.getOriginalArchive(), newTerms.getOriginalArchive(), + BundleUtil.getStringFromBundle("file.dataFilesTab.terms.list.termsOfAccess.addInfo.originalArchive")); + checkAndAddToChangeList(originalTerms.getAvailabilityStatus(), newTerms.getAvailabilityStatus(), + BundleUtil.getStringFromBundle("file.dataFilesTab.terms.list.termsOfAccess.addInfo.availabilityStatus")); + checkAndAddToChangeList(originalTerms.getContactForAccess(), newTerms.getContactForAccess(), + BundleUtil.getStringFromBundle("file.dataFilesTab.terms.list.termsOfAccess.addInfo.contactForAccess")); + checkAndAddToChangeList(originalTerms.getSizeOfCollection(), newTerms.getSizeOfCollection(), + BundleUtil.getStringFromBundle("file.dataFilesTab.terms.list.termsOfAccess.addInfo.sizeOfCollection")); + 
checkAndAddToChangeList(originalTerms.getStudyCompletion(), newTerms.getStudyCompletion(), + BundleUtil.getStringFromBundle("file.dataFilesTab.terms.list.termsOfAccess.addInfo.studyCompletion")); } - - private List addToTermsChangedList(List listIn, String label, String origVal, String newVal) { - String[] diffArray; - diffArray = new String[3]; - diffArray[0] = label; - diffArray[1] = origVal; - diffArray[2] = newVal; - listIn.add(diffArray); - return listIn; + + private void checkAndAddToChangeList(String originalTerm, String newTerm, + String termLabel) { + originalTerm = StringUtil.nullToEmpty(originalTerm); + newTerm = StringUtil.nullToEmpty(newTerm); + if(!originalTerm.equals(newTerm)) { + changedTermsAccess.add(new String[]{termLabel, originalTerm, newTerm}); + } } - private void addToList(List listIn, DatasetField dsfo, DatasetField dsfn) { DatasetField[] dsfArray; dsfArray = new DatasetField[2]; @@ -533,7 +328,7 @@ private void addToNoteSummary(DatasetField dsfo, int added, int deleted, int cha summaryDataForNote.add(noteArray); } - private boolean compareVarGroup(FileMetadata fmdo, FileMetadata fmdn) { + static boolean compareVarGroup(FileMetadata fmdo, FileMetadata fmdn) { List vglo = fmdo.getVarGroups(); List vgln = fmdn.getVarGroups(); @@ -543,7 +338,7 @@ private boolean compareVarGroup(FileMetadata fmdo, FileMetadata fmdn) { int count = 0; for (VarGroup vgo : vglo) { for (VarGroup vgn : vgln) { - if (!variableMetadataUtil.checkDiff(vgo.getLabel(), vgn.getLabel())) { + if (!VariableMetadataUtil.checkDiff(vgo.getLabel(), vgn.getLabel())) { Set dvo = vgo.getVarsInGroup(); Set dvn = vgn.getVarsInGroup(); if (dvo.equals(dvn)) { @@ -561,25 +356,34 @@ private boolean compareVarGroup(FileMetadata fmdo, FileMetadata fmdn) { } } - public static boolean compareFileMetadatas(FileMetadata fmdo, FileMetadata fmdn) { - + public static Map> compareFileMetadatas(FileMetadata fmdo, FileMetadata fmdn) { + Map> fileMetadataChanged = new HashMap<>(); if 
(!StringUtils.equals(StringUtil.nullToEmpty(fmdo.getDescription()), StringUtil.nullToEmpty(fmdn.getDescription()))) { - return false; + fileMetadataChanged.put("Description", + List.of(StringUtil.nullToEmpty(fmdo.getDescription()), StringUtil.nullToEmpty(fmdn.getDescription()))); } if (!StringUtils.equals(fmdo.getCategoriesByName().toString(), fmdn.getCategoriesByName().toString())) { - return false; + fileMetadataChanged.put("Categories", + List.of(fmdo.getCategoriesByName().toString(), fmdn.getCategoriesByName().toString())); } if (!StringUtils.equals(fmdo.getLabel(), fmdn.getLabel())) { - return false; + fileMetadataChanged.put("Label", + List.of(fmdo.getLabel(), fmdn.getLabel())); } if (!StringUtils.equals(fmdo.getProvFreeForm(), fmdn.getProvFreeForm())) { - return false; + fileMetadataChanged.put("ProvFreeForm", + List.of(fmdo.getProvFreeForm(), fmdn.getProvFreeForm())); } - - return fmdo.isRestricted() == fmdn.isRestricted(); + + if (fmdo.isRestricted() != fmdn.isRestricted()) { + fileMetadataChanged.put("isRestricted", + List.of(String.valueOf(fmdo.isRestricted()), String.valueOf(fmdn.isRestricted()))); + } + + return fileMetadataChanged; } private void compareValues(DatasetField originalField, DatasetField newField, boolean compound) { @@ -1829,4 +1633,138 @@ private static boolean fieldsAreDifferent(DatasetField originalField, DatasetFie } return false; } + + List getChangedVariableMetadata() { + return changedVariableMetadata; + } + + List getReplacedFiles() { + return replacedFiles; + } + public JsonObjectBuilder compareVersionsAsJson() { + JsonObjectBuilder job = new NullSafeJsonBuilder(); + JsonObjectBuilder jobVersion = new NullSafeJsonBuilder(); + jobVersion.add("versionNumber", originalVersion.getFriendlyVersionNumber()); + jobVersion.add("lastUpdatedDate", new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss'Z'").format(originalVersion.getLastUpdateTime())); + job.add("oldVersion", jobVersion); + jobVersion = new NullSafeJsonBuilder(); + 
jobVersion.add("versionNumber", newVersion.getFriendlyVersionNumber()); + jobVersion.add("lastUpdatedDate", new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss'Z'").format(newVersion.getLastUpdateTime())); + job.add("newVersion", jobVersion); + + if (!this.detailDataByBlock.isEmpty()) { + JsonArrayBuilder jabMetadata = Json.createArrayBuilder(); + for (List blocks : detailDataByBlock) { + JsonObjectBuilder jobMetadata = new NullSafeJsonBuilder(); + JsonArrayBuilder jab = Json.createArrayBuilder(); + String blockDisplay = blocks.get(0)[0].getDatasetFieldType().getMetadataBlock().getDisplayName(); + for (DatasetField[] dsfArray : blocks) { + JsonObjectBuilder jb = new NullSafeJsonBuilder(); + jb.add("fieldName", dsfArray[0].getDatasetFieldType().getTitle()); + if (dsfArray[0].getDatasetFieldType().isPrimitive()) { + jb.add("oldValue", dsfArray[0].getRawValue()); + } else { + jb.add("oldValue", dsfArray[0].getCompoundRawValue()); + } + if (dsfArray[1].getDatasetFieldType().isPrimitive()) { + jb.add("newValue", dsfArray[1].getRawValue()); + } else { + jb.add("newValue", dsfArray[1].getCompoundRawValue()); + } + jab.add(jb); + } + jobMetadata.add("blockName", blockDisplay); + jobMetadata.add("changed", jab); + jabMetadata.add(jobMetadata); + } + job.add("metadataChanges", jabMetadata); + } + + // Format added, removed, and modified files + JsonArrayBuilder jabDiffFiles = Json.createArrayBuilder(); + if (!addedFiles.isEmpty()) { + JsonArrayBuilder jab = Json.createArrayBuilder(); + addedFiles.forEach(f -> { + jab.add(filesDiffJson(f)); + }); + job.add("filesAdded", jab); + } + if (!removedFiles.isEmpty()) { + JsonArrayBuilder jab = Json.createArrayBuilder(); + removedFiles.forEach(f -> { + jab.add(filesDiffJson(f)); + }); + job.add("filesRemoved", jab); + } + if (!replacedFiles.isEmpty()) { + JsonArrayBuilder jabReplaced = Json.createArrayBuilder(); + replacedFiles.forEach(fm -> { + if (fm.length == 2) { + JsonObjectBuilder jobReplaced = new NullSafeJsonBuilder(); + 
jobReplaced.add("oldFile", filesDiffJson(fm[0])); + jobReplaced.add("newFile", filesDiffJson(fm[1])); + jabReplaced.add(jobReplaced); + } + }); + job.add("filesReplaced", jabReplaced); + } + if (!changedFileMetadata.isEmpty()) { + changedFileMetadataDiff.entrySet().forEach(entry -> { + JsonArrayBuilder jab = Json.createArrayBuilder(); + JsonObjectBuilder jobChanged = new NullSafeJsonBuilder(); + jobChanged.add("fileName", entry.getKey().getDataFile().getDisplayName()); + jobChanged.add(entry.getKey().getDataFile().getChecksumType().name(), entry.getKey().getDataFile().getChecksumValue()); + jobChanged.add("fileId", entry.getKey().getDataFile().getId()); + entry.getValue().entrySet().forEach(e -> { + JsonObjectBuilder jobDiffField = new NullSafeJsonBuilder(); + jobDiffField.add("fieldName",e.getKey()); + jobDiffField.add("oldValue",e.getValue().get(0)); + jobDiffField.add("newValue",e.getValue().get(1)); + jab.add(jobDiffField); + }); + jobChanged.add("changed", jab); + jabDiffFiles.add(jobChanged); + }); + job.add("fileChanges", jabDiffFiles); + } + + // Format Terms Of Access changes + if (!changedTermsAccess.isEmpty()) { + JsonObjectBuilder jobTOA = new NullSafeJsonBuilder(); + JsonArrayBuilder jab = Json.createArrayBuilder(); + changedTermsAccess.forEach(toa -> { + JsonObjectBuilder jobValue = new NullSafeJsonBuilder(); + jobValue.add("fieldName",toa[0]); + jobValue.add("oldValue",toa[1]); + jobValue.add("newValue",toa[2]); + jab.add(jobValue); + }); + jobTOA.add("changed", jab); + job.add("TermsOfAccess", jobTOA); + } + + return job; + } + private JsonObjectBuilder filesDiffJson(FileMetadata fileMetadata) { + NullSafeJsonBuilder job = new NullSafeJsonBuilder(); + DataFile df = fileMetadata.getDataFile(); + job.add("fileName", df.getDisplayName()) + .add("filePath", fileMetadata.getDirectoryLabel()) + .add(df.getChecksumType().name(), df.getChecksumValue()) + .add("type",df.getContentType()) + .add("fileId", df.getId()) + .add("description", 
fileMetadata.getDescription()) + .add("isRestricted", df.isRestricted()); + if (fileMetadata.getCategories() != null && !fileMetadata.getCategories().isEmpty()) { + JsonArrayBuilder jabCategories = Json.createArrayBuilder(); + fileMetadata.getCategories().forEach(c -> jabCategories.add(c.getName())); + job.add("categories", jabCategories); + } + if (df.getTags() != null && !df.getTags().isEmpty()) { + JsonArrayBuilder jabTags = Json.createArrayBuilder(); + df.getTags().forEach(t -> jabTags.add(t.getTypeLabel())); + job.add("tags", jabTags); + } + return job; + } } diff --git a/src/main/java/edu/harvard/iq/dataverse/Dataverse.java b/src/main/java/edu/harvard/iq/dataverse/Dataverse.java index 86e2e0207c1..1f11725e581 100644 --- a/src/main/java/edu/harvard/iq/dataverse/Dataverse.java +++ b/src/main/java/edu/harvard/iq/dataverse/Dataverse.java @@ -595,6 +595,10 @@ public void setMetadataBlocks(List metadataBlocks) { this.metadataBlocks = new ArrayList<>(metadataBlocks); } + public void clearMetadataBlocks() { + this.metadataBlocks.clear(); + } + public List getCitationDatasetFieldTypes() { return citationDatasetFieldTypes; } diff --git a/src/main/java/edu/harvard/iq/dataverse/DataverseFacetServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/DataverseFacetServiceBean.java index 5c77989f6d6..56f522fa816 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DataverseFacetServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/DataverseFacetServiceBean.java @@ -42,23 +42,24 @@ public void delete(DataverseFacet dataverseFacet) { cache.invalidate(); } - public void deleteFacetsFor( Dataverse d ) { - em.createNamedQuery("DataverseFacet.removeByOwnerId") - .setParameter("ownerId", d.getId()) - .executeUpdate(); + public void deleteFacetsFor(Dataverse d) { + em.createNamedQuery("DataverseFacet.removeByOwnerId") + .setParameter("ownerId", d.getId()) + .executeUpdate(); cache.invalidate(d.getId()); - - } - + + } + public DataverseFacet create(int displayOrder, 
DatasetFieldType fieldType, Dataverse ownerDv) { DataverseFacet dataverseFacet = new DataverseFacet(); - + dataverseFacet.setDisplayOrder(displayOrder); dataverseFacet.setDatasetFieldType(fieldType); dataverseFacet.setDataverse(ownerDv); - ownerDv.getDataverseFacets().add(dataverseFacet); em.persist(dataverseFacet); + + cache.invalidate(ownerDv.getId()); return dataverseFacet; } diff --git a/src/main/java/edu/harvard/iq/dataverse/GuestbookPage.java b/src/main/java/edu/harvard/iq/dataverse/GuestbookPage.java index f54b1fb6117..0922af007fb 100644 --- a/src/main/java/edu/harvard/iq/dataverse/GuestbookPage.java +++ b/src/main/java/edu/harvard/iq/dataverse/GuestbookPage.java @@ -322,7 +322,7 @@ public String save() { logger.info("Guestbook Page Command Exception. Dataverse: " + dataverse.getName()); logger.info(ex.toString()); FacesContext.getCurrentInstance().addMessage(null, new FacesMessage(FacesMessage.SEVERITY_FATAL, BundleUtil.getStringFromBundle("guestbook.save.fail"), " - " + ex.toString())); - //logger.severe(ex.getMessage()); + return null; } editMode = null; String msg = (create)? BundleUtil.getStringFromBundle("guestbook.create"): BundleUtil.getStringFromBundle("guestbook.save"); diff --git a/src/main/java/edu/harvard/iq/dataverse/GuestbookResponse.java b/src/main/java/edu/harvard/iq/dataverse/GuestbookResponse.java index 1ea7d02791d..830c7740e34 100644 --- a/src/main/java/edu/harvard/iq/dataverse/GuestbookResponse.java +++ b/src/main/java/edu/harvard/iq/dataverse/GuestbookResponse.java @@ -15,6 +15,8 @@ import java.util.ArrayList; import java.util.Date; import java.util.List; + +import edu.harvard.iq.dataverse.validation.ValidateEmail; import jakarta.persistence.*; import jakarta.validation.constraints.Size; import java.util.Collections; @@ -80,8 +82,8 @@ public class GuestbookResponse implements Serializable { @Size(max = 255, message = "{guestbook.response.nameLength}") private String name; - // TODO: Consider using EMailValidator as well. 
@Size(max = 255, message = "{guestbook.response.nameLength}") + @ValidateEmail(message = "{user.invalidEmail}") private String email; @Size(max = 255, message = "{guestbook.response.nameLength}") @@ -198,7 +200,8 @@ public String getEmail() { } public void setEmail(String email) { - this.email = email; + // ValidateEmail requires NULL or valid email. Empty String will fail validation + this.email = (email == null || email.trim().isEmpty()) ? null : email; } public Guestbook getGuestbook() { diff --git a/src/main/java/edu/harvard/iq/dataverse/MailServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/MailServiceBean.java index 2995c0c5f47..c67a0293847 100644 --- a/src/main/java/edu/harvard/iq/dataverse/MailServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/MailServiceBean.java @@ -283,7 +283,7 @@ public Boolean sendNotificationEmail(UserNotification notification, String comme if (objectOfNotification != null){ String messageText = getMessageTextBasedOnNotification(notification, objectOfNotification, comment, requestor); String subjectText = MailUtil.getSubjectTextBasedOnNotification(notification, objectOfNotification); - if (!(messageText.isEmpty() || subjectText.isEmpty())){ + if (!(StringUtils.isEmpty(messageText) || StringUtils.isEmpty(subjectText))){ retval = sendSystemEmail(emailAddress, subjectText, messageText, isHtmlContent); } else { logger.warning("Skipping " + notification.getType() + " notification, because couldn't get valid message"); diff --git a/src/main/java/edu/harvard/iq/dataverse/api/AbstractApiBean.java b/src/main/java/edu/harvard/iq/dataverse/api/AbstractApiBean.java index 3257a3cc7ac..3c1074b75bb 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/AbstractApiBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/AbstractApiBean.java @@ -14,14 +14,11 @@ import edu.harvard.iq.dataverse.dataset.DatasetTypeServiceBean; import edu.harvard.iq.dataverse.engine.command.Command; import 
edu.harvard.iq.dataverse.engine.command.DataverseRequest; -import edu.harvard.iq.dataverse.engine.command.exception.CommandException; -import edu.harvard.iq.dataverse.engine.command.exception.IllegalCommandException; -import edu.harvard.iq.dataverse.engine.command.exception.PermissionException; +import edu.harvard.iq.dataverse.engine.command.exception.*; import edu.harvard.iq.dataverse.engine.command.impl.GetDraftDatasetVersionCommand; import edu.harvard.iq.dataverse.engine.command.impl.GetLatestAccessibleDatasetVersionCommand; import edu.harvard.iq.dataverse.engine.command.impl.GetLatestPublishedDatasetVersionCommand; import edu.harvard.iq.dataverse.engine.command.impl.GetSpecificPublishedDatasetVersionCommand; -import edu.harvard.iq.dataverse.engine.command.exception.RateLimitCommandException; import edu.harvard.iq.dataverse.externaltools.ExternalToolServiceBean; import edu.harvard.iq.dataverse.license.LicenseServiceBean; import edu.harvard.iq.dataverse.pidproviders.PidUtil; @@ -56,6 +53,7 @@ import java.net.URI; import java.util.Arrays; import java.util.Collections; +import java.util.Map; import java.util.UUID; import java.util.concurrent.Callable; import java.util.logging.Level; @@ -631,10 +629,22 @@ protected T execCommand( Command cmd ) throws WrappedResponse { * sometimes?) doesn't have much information in it: * * "User @jsmith is not permitted to perform requested action." + * + * Update (11/11/2024): + * + * An {@code isDetailedMessageRequired} flag has been added to {@code PermissionException} to selectively return more + * specific error messages when the generic message (e.g. "User :guest is not permitted to perform requested action") + * lacks sufficient context. This approach aims to provide valuable permission-related details in cases where it + * could help users better understand their permission issues without exposing unnecessary internal information. 
*/ - throw new WrappedResponse(error(Response.Status.UNAUTHORIZED, - "User " + cmd.getRequest().getUser().getIdentifier() + " is not permitted to perform requested action.") ); - + if (ex.isDetailedMessageRequired()) { + throw new WrappedResponse(error(Response.Status.UNAUTHORIZED, ex.getMessage())); + } else { + throw new WrappedResponse(error(Response.Status.UNAUTHORIZED, + "User " + cmd.getRequest().getUser().getIdentifier() + " is not permitted to perform requested action.")); + } + } catch (InvalidFieldsCommandException ex) { + throw new WrappedResponse(ex, badRequest(ex.getMessage(), ex.getFieldErrors())); } catch (CommandException ex) { Logger.getLogger(AbstractApiBean.class.getName()).log(Level.SEVERE, "Error while executing command " + cmd, ex); throw new WrappedResponse(ex, error(Status.INTERNAL_SERVER_ERROR, ex.getMessage())); @@ -809,6 +819,18 @@ protected Response badRequest( String msg ) { return error( Status.BAD_REQUEST, msg ); } + protected Response badRequest(String msg, Map fieldErrors) { + return Response.status(Status.BAD_REQUEST) + .entity(NullSafeJsonBuilder.jsonObjectBuilder() + .add("status", ApiConstants.STATUS_ERROR) + .add("message", msg) + .add("fieldErrors", Json.createObjectBuilder(fieldErrors).build()) + .build() + ) + .type(MediaType.APPLICATION_JSON_TYPE) + .build(); + } + protected Response forbidden( String msg ) { return error( Status.FORBIDDEN, msg ); } diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Admin.java b/src/main/java/edu/harvard/iq/dataverse/api/Admin.java index 54e5eaf7b84..152bcf5066e 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/Admin.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/Admin.java @@ -1,28 +1,11 @@ package edu.harvard.iq.dataverse.api; -import edu.harvard.iq.dataverse.BannerMessage; -import edu.harvard.iq.dataverse.BannerMessageServiceBean; -import edu.harvard.iq.dataverse.BannerMessageText; -import edu.harvard.iq.dataverse.DataFile; -import 
edu.harvard.iq.dataverse.DataFileServiceBean; -import edu.harvard.iq.dataverse.Dataset; -import edu.harvard.iq.dataverse.DatasetServiceBean; -import edu.harvard.iq.dataverse.DatasetVersion; -import edu.harvard.iq.dataverse.DatasetVersionServiceBean; -import edu.harvard.iq.dataverse.Dataverse; -import edu.harvard.iq.dataverse.DataverseRequestServiceBean; -import edu.harvard.iq.dataverse.DataverseServiceBean; -import edu.harvard.iq.dataverse.DataverseSession; -import edu.harvard.iq.dataverse.DvObject; -import edu.harvard.iq.dataverse.DvObjectServiceBean; +import edu.harvard.iq.dataverse.*; import edu.harvard.iq.dataverse.api.auth.AuthRequired; import edu.harvard.iq.dataverse.settings.JvmSettings; import edu.harvard.iq.dataverse.util.StringUtil; +import edu.harvard.iq.dataverse.util.json.NullSafeJsonBuilder; import edu.harvard.iq.dataverse.validation.EMailValidator; -import edu.harvard.iq.dataverse.EjbDataverseEngine; -import edu.harvard.iq.dataverse.Template; -import edu.harvard.iq.dataverse.TemplateServiceBean; -import edu.harvard.iq.dataverse.UserServiceBean; import edu.harvard.iq.dataverse.actionlogging.ActionLogRecord; import edu.harvard.iq.dataverse.api.dto.RoleDTO; import edu.harvard.iq.dataverse.authorization.AuthenticatedUserDisplayInfo; @@ -66,8 +49,9 @@ import java.io.InputStream; import java.io.StringReader; import java.nio.charset.StandardCharsets; -import java.util.Map; +import java.util.*; import java.util.Map.Entry; +import java.util.function.Predicate; import java.util.logging.Level; import java.util.logging.Logger; import jakarta.ejb.EJB; @@ -81,7 +65,6 @@ import org.apache.commons.io.IOUtils; -import java.util.List; import edu.harvard.iq.dataverse.authorization.AuthTestDataServiceBean; import edu.harvard.iq.dataverse.authorization.AuthenticationProvidersRegistrationServiceBean; import edu.harvard.iq.dataverse.authorization.DataverseRole; @@ -118,9 +101,7 @@ import static edu.harvard.iq.dataverse.util.json.JsonPrinter.json; import static 
edu.harvard.iq.dataverse.util.json.JsonPrinter.rolesToJson; import static edu.harvard.iq.dataverse.util.json.JsonPrinter.toJsonArray; -import java.util.ArrayList; -import java.util.Arrays; -import java.util.Date; + import jakarta.inject.Inject; import jakarta.json.JsonArray; import jakarta.persistence.Query; @@ -128,7 +109,6 @@ import jakarta.ws.rs.WebApplicationException; import jakarta.ws.rs.core.StreamingOutput; import java.nio.file.Paths; -import java.util.TreeMap; /** * Where the secure, setup API calls live. @@ -139,7 +119,7 @@ @Path("admin") public class Admin extends AbstractApiBean { - private static final Logger logger = Logger.getLogger(Admin.class.getName()); + private static final Logger logger = Logger.getLogger(Admin.class.getName()); @EJB AuthenticationProvidersRegistrationServiceBean authProvidersRegistrationSvc; @@ -184,53 +164,53 @@ public class Admin extends AbstractApiBean { @Inject DataverseSession session; - public static final String listUsersPartialAPIPath = "list-users"; - public static final String listUsersFullAPIPath = "/api/admin/" + listUsersPartialAPIPath; - - @Path("settings") - @GET - public Response listAllSettings() { - JsonObjectBuilder bld = jsonObjectBuilder(); - settingsSvc.listAll().forEach(s -> bld.add(s.getName(), s.getContent())); - return ok(bld); - } - - @Path("settings/{name}") - @PUT - public Response putSetting(@PathParam("name") String name, String content) { - Setting s = settingsSvc.set(name, content); - return ok(jsonObjectBuilder().add(s.getName(), s.getContent())); - } - - @Path("settings/{name}/lang/{lang}") - @PUT - public Response putSettingLang(@PathParam("name") String name, @PathParam("lang") String lang, String content) { - Setting s = settingsSvc.set(name, lang, content); - return ok("Setting " + name + " - " + lang + " - added."); - } - - @Path("settings/{name}") - @GET - public Response getSetting(@PathParam("name") String name) { - String s = settingsSvc.get(name); - - return (s != null) ? 
ok(s) : notFound("Setting " + name + " not found"); - } - - @Path("settings/{name}") - @DELETE - public Response deleteSetting(@PathParam("name") String name) { - settingsSvc.delete(name); - - return ok("Setting " + name + " deleted."); - } - - @Path("settings/{name}/lang/{lang}") - @DELETE - public Response deleteSettingLang(@PathParam("name") String name, @PathParam("lang") String lang) { - settingsSvc.delete(name, lang); - return ok("Setting " + name + " - " + lang + " deleted."); - } + public static final String listUsersPartialAPIPath = "list-users"; + public static final String listUsersFullAPIPath = "/api/admin/" + listUsersPartialAPIPath; + + @Path("settings") + @GET + public Response listAllSettings() { + JsonObjectBuilder bld = jsonObjectBuilder(); + settingsSvc.listAll().forEach(s -> bld.add(s.getName(), s.getContent())); + return ok(bld); + } + + @Path("settings/{name}") + @PUT + public Response putSetting(@PathParam("name") String name, String content) { + Setting s = settingsSvc.set(name, content); + return ok(jsonObjectBuilder().add(s.getName(), s.getContent())); + } + + @Path("settings/{name}/lang/{lang}") + @PUT + public Response putSettingLang(@PathParam("name") String name, @PathParam("lang") String lang, String content) { + Setting s = settingsSvc.set(name, lang, content); + return ok("Setting " + name + " - " + lang + " - added."); + } + + @Path("settings/{name}") + @GET + public Response getSetting(@PathParam("name") String name) { + String s = settingsSvc.get(name); + + return (s != null) ? 
ok(s) : notFound("Setting " + name + " not found"); + } + + @Path("settings/{name}") + @DELETE + public Response deleteSetting(@PathParam("name") String name) { + settingsSvc.delete(name); + + return ok("Setting " + name + " deleted."); + } + + @Path("settings/{name}/lang/{lang}") + @DELETE + public Response deleteSettingLang(@PathParam("name") String name, @PathParam("lang") String lang) { + settingsSvc.delete(name, lang); + return ok("Setting " + name + " - " + lang + " deleted."); + } @Path("template/{id}") @DELETE @@ -301,130 +281,130 @@ public Response findTemplates(@PathParam("alias") String alias) { } - @Path("authenticationProviderFactories") - @GET - public Response listAuthProviderFactories() { - return ok(authSvc.listProviderFactories().stream() - .map(f -> jsonObjectBuilder().add("alias", f.getAlias()).add("info", f.getInfo())) - .collect(toJsonArray())); - } - - @Path("authenticationProviders") - @GET - public Response listAuthProviders() { - return ok(em.createNamedQuery("AuthenticationProviderRow.findAll", AuthenticationProviderRow.class) - .getResultList().stream().map(r -> json(r)).collect(toJsonArray())); - } - - @Path("authenticationProviders") - @POST - public Response addProvider(AuthenticationProviderRow row) { - try { - AuthenticationProviderRow managed = em.find(AuthenticationProviderRow.class, row.getId()); - if (managed != null) { - managed = em.merge(row); - } else { - em.persist(row); - managed = row; - } - if (managed.isEnabled()) { - AuthenticationProvider provider = authProvidersRegistrationSvc.loadProvider(managed); - authProvidersRegistrationSvc.deregisterProvider(provider.getId()); - authProvidersRegistrationSvc.registerProvider(provider); - } - return created("/api/admin/authenticationProviders/" + managed.getId(), json(managed)); - } catch (AuthorizationSetupException e) { - return error(Response.Status.INTERNAL_SERVER_ERROR, e.getMessage()); - } - } - - @Path("authenticationProviders/{id}") - @GET - public Response 
showProvider(@PathParam("id") String id) { - AuthenticationProviderRow row = em.find(AuthenticationProviderRow.class, id); - return (row != null) ? ok(json(row)) - : error(Status.NOT_FOUND, "Can't find authetication provider with id '" + id + "'"); - } - - @POST - @Path("authenticationProviders/{id}/:enabled") - public Response enableAuthenticationProvider_deprecated(@PathParam("id") String id, String body) { - return enableAuthenticationProvider(id, body); - } - - @PUT - @Path("authenticationProviders/{id}/enabled") - @Produces("application/json") - public Response enableAuthenticationProvider(@PathParam("id") String id, String body) { - body = body.trim(); - if (!Util.isBoolean(body)) { - return error(Response.Status.BAD_REQUEST, "Illegal value '" + body + "'. Use 'true' or 'false'"); - } - boolean enable = Util.isTrue(body); - - AuthenticationProviderRow row = em.find(AuthenticationProviderRow.class, id); - if (row == null) { - return notFound("Can't find authentication provider with id '" + id + "'"); - } - - row.setEnabled(enable); - em.merge(row); - - if (enable) { - // enable a provider - if (authSvc.getAuthenticationProvider(id) != null) { - return ok(String.format("Authentication provider '%s' already enabled", id)); - } - try { - authProvidersRegistrationSvc.registerProvider(authProvidersRegistrationSvc.loadProvider(row)); - return ok(String.format("Authentication Provider %s enabled", row.getId())); - - } catch (AuthenticationProviderFactoryNotFoundException ex) { - return notFound(String.format("Can't instantiate provider, as there's no factory with alias %s", - row.getFactoryAlias())); - } catch (AuthorizationSetupException ex) { - logger.log(Level.WARNING, "Error instantiating authentication provider: " + ex.getMessage(), ex); - return error(Status.INTERNAL_SERVER_ERROR, - String.format("Can't instantiate provider: %s", ex.getMessage())); - } - - } else { - // disable a provider - authProvidersRegistrationSvc.deregisterProvider(id); - return 
ok("Authentication Provider '" + id + "' disabled. " - + (authSvc.getAuthenticationProviderIds().isEmpty() - ? "WARNING: no enabled authentication providers left." - : "")); - } - } - - @GET - @Path("authenticationProviders/{id}/enabled") - public Response checkAuthenticationProviderEnabled(@PathParam("id") String id) { - List prvs = em - .createNamedQuery("AuthenticationProviderRow.findById", AuthenticationProviderRow.class) - .setParameter("id", id).getResultList(); - if (prvs.isEmpty()) { - return notFound("Can't find a provider with id '" + id + "'."); - } else { - return ok(Boolean.toString(prvs.get(0).isEnabled())); - } - } - - @DELETE - @Path("authenticationProviders/{id}/") - public Response deleteAuthenticationProvider(@PathParam("id") String id) { - authProvidersRegistrationSvc.deregisterProvider(id); - AuthenticationProviderRow row = em.find(AuthenticationProviderRow.class, id); - if (row != null) { - em.remove(row); - } - - return ok("AuthenticationProvider " + id + " deleted. " - + (authSvc.getAuthenticationProviderIds().isEmpty() - ? "WARNING: no enabled authentication providers left." 
- : "")); - } + @Path("authenticationProviderFactories") + @GET + public Response listAuthProviderFactories() { + return ok(authSvc.listProviderFactories().stream() + .map(f -> jsonObjectBuilder().add("alias", f.getAlias()).add("info", f.getInfo())) + .collect(toJsonArray())); + } + + @Path("authenticationProviders") + @GET + public Response listAuthProviders() { + return ok(em.createNamedQuery("AuthenticationProviderRow.findAll", AuthenticationProviderRow.class) + .getResultList().stream().map(r -> json(r)).collect(toJsonArray())); + } + + @Path("authenticationProviders") + @POST + public Response addProvider(AuthenticationProviderRow row) { + try { + AuthenticationProviderRow managed = em.find(AuthenticationProviderRow.class, row.getId()); + if (managed != null) { + managed = em.merge(row); + } else { + em.persist(row); + managed = row; + } + if (managed.isEnabled()) { + AuthenticationProvider provider = authProvidersRegistrationSvc.loadProvider(managed); + authProvidersRegistrationSvc.deregisterProvider(provider.getId()); + authProvidersRegistrationSvc.registerProvider(provider); + } + return created("/api/admin/authenticationProviders/" + managed.getId(), json(managed)); + } catch (AuthorizationSetupException e) { + return error(Response.Status.INTERNAL_SERVER_ERROR, e.getMessage()); + } + } + + @Path("authenticationProviders/{id}") + @GET + public Response showProvider(@PathParam("id") String id) { + AuthenticationProviderRow row = em.find(AuthenticationProviderRow.class, id); + return (row != null) ? 
ok(json(row)) + : error(Status.NOT_FOUND, "Can't find authetication provider with id '" + id + "'"); + } + + @POST + @Path("authenticationProviders/{id}/:enabled") + public Response enableAuthenticationProvider_deprecated(@PathParam("id") String id, String body) { + return enableAuthenticationProvider(id, body); + } + + @PUT + @Path("authenticationProviders/{id}/enabled") + @Produces("application/json") + public Response enableAuthenticationProvider(@PathParam("id") String id, String body) { + body = body.trim(); + if (!Util.isBoolean(body)) { + return error(Response.Status.BAD_REQUEST, "Illegal value '" + body + "'. Use 'true' or 'false'"); + } + boolean enable = Util.isTrue(body); + + AuthenticationProviderRow row = em.find(AuthenticationProviderRow.class, id); + if (row == null) { + return notFound("Can't find authentication provider with id '" + id + "'"); + } + + row.setEnabled(enable); + em.merge(row); + + if (enable) { + // enable a provider + if (authSvc.getAuthenticationProvider(id) != null) { + return ok(String.format("Authentication provider '%s' already enabled", id)); + } + try { + authProvidersRegistrationSvc.registerProvider(authProvidersRegistrationSvc.loadProvider(row)); + return ok(String.format("Authentication Provider %s enabled", row.getId())); + + } catch (AuthenticationProviderFactoryNotFoundException ex) { + return notFound(String.format("Can't instantiate provider, as there's no factory with alias %s", + row.getFactoryAlias())); + } catch (AuthorizationSetupException ex) { + logger.log(Level.WARNING, "Error instantiating authentication provider: " + ex.getMessage(), ex); + return error(Status.INTERNAL_SERVER_ERROR, + String.format("Can't instantiate provider: %s", ex.getMessage())); + } + + } else { + // disable a provider + authProvidersRegistrationSvc.deregisterProvider(id); + return ok("Authentication Provider '" + id + "' disabled. " + + (authSvc.getAuthenticationProviderIds().isEmpty() + ? 
"WARNING: no enabled authentication providers left." + : "")); + } + } + + @GET + @Path("authenticationProviders/{id}/enabled") + public Response checkAuthenticationProviderEnabled(@PathParam("id") String id) { + List prvs = em + .createNamedQuery("AuthenticationProviderRow.findById", AuthenticationProviderRow.class) + .setParameter("id", id).getResultList(); + if (prvs.isEmpty()) { + return notFound("Can't find a provider with id '" + id + "'."); + } else { + return ok(Boolean.toString(prvs.get(0).isEnabled())); + } + } + + @DELETE + @Path("authenticationProviders/{id}/") + public Response deleteAuthenticationProvider(@PathParam("id") String id) { + authProvidersRegistrationSvc.deregisterProvider(id); + AuthenticationProviderRow row = em.find(AuthenticationProviderRow.class, id); + if (row != null) { + em.remove(row); + } + + return ok("AuthenticationProvider " + id + " deleted. " + + (authSvc.getAuthenticationProviderIds().isEmpty() + ? "WARNING: no enabled authentication providers left." 
+ : "")); + } @GET @Path("authenticatedUsers/{identifier}/") @@ -509,520 +489,520 @@ private Response deactivateAuthenticatedUser(AuthenticatedUser userToDisable) { } } - @POST - @Path("publishDataverseAsCreator/{id}") - public Response publishDataverseAsCreator(@PathParam("id") long id) { - try { - Dataverse dataverse = dataverseSvc.find(id); - if (dataverse != null) { - AuthenticatedUser authenticatedUser = dataverse.getCreator(); - return ok(json(execCommand( - new PublishDataverseCommand(createDataverseRequest(authenticatedUser), dataverse)))); - } else { - return error(Status.BAD_REQUEST, "Could not find dataverse with id " + id); - } - } catch (WrappedResponse wr) { - return wr.getResponse(); - } - } - - @Deprecated - @GET - @AuthRequired - @Path("authenticatedUsers") - public Response listAuthenticatedUsers(@Context ContainerRequestContext crc) { - try { - AuthenticatedUser user = getRequestAuthenticatedUserOrDie(crc); - if (!user.isSuperuser()) { - return error(Response.Status.FORBIDDEN, "Superusers only."); - } - } catch (WrappedResponse ex) { - return error(Response.Status.FORBIDDEN, "Superusers only."); - } - JsonArrayBuilder userArray = Json.createArrayBuilder(); - authSvc.findAllAuthenticatedUsers().stream().forEach((user) -> { - userArray.add(json(user)); - }); - return ok(userArray); - } - - @GET - @AuthRequired - @Path(listUsersPartialAPIPath) - @Produces({ "application/json" }) - public Response filterAuthenticatedUsers( - @Context ContainerRequestContext crc, - @QueryParam("searchTerm") String searchTerm, - @QueryParam("selectedPage") Integer selectedPage, - @QueryParam("itemsPerPage") Integer itemsPerPage, - @QueryParam("sortKey") String sortKey - ) { - - User authUser = getRequestUser(crc); - - if (!authUser.isSuperuser()) { - return error(Response.Status.FORBIDDEN, - BundleUtil.getStringFromBundle("dashboard.list_users.api.auth.not_superuser")); - } - - UserListMaker userListMaker = new UserListMaker(userService); - - // String sortKey = null; 
- UserListResult userListResult = userListMaker.runUserSearch(searchTerm, itemsPerPage, selectedPage, sortKey); - - return ok(userListResult.toJSON()); - } - - /** - * @todo Make this support creation of BuiltInUsers. - * - * @todo Add way more error checking. Only the happy path is tested by AdminIT. - */ - @POST - @Path("authenticatedUsers") - public Response createAuthenicatedUser(JsonObject jsonObject) { - logger.fine("JSON in: " + jsonObject); - String persistentUserId = jsonObject.getString("persistentUserId"); - String identifier = jsonObject.getString("identifier"); - String proposedAuthenticatedUserIdentifier = identifier.replaceFirst("@", ""); - String firstName = jsonObject.getString("firstName"); - String lastName = jsonObject.getString("lastName"); - String emailAddress = jsonObject.getString("email"); - String position = null; - String affiliation = null; - UserRecordIdentifier userRecordId = new UserRecordIdentifier(jsonObject.getString("authenticationProviderId"), - persistentUserId); - AuthenticatedUserDisplayInfo userDisplayInfo = new AuthenticatedUserDisplayInfo(firstName, lastName, - emailAddress, affiliation, position); - boolean generateUniqueIdentifier = true; - AuthenticatedUser authenticatedUser = authSvc.createAuthenticatedUser(userRecordId, - proposedAuthenticatedUserIdentifier, userDisplayInfo, true); - return ok(json(authenticatedUser)); - } + @POST + @Path("publishDataverseAsCreator/{id}") + public Response publishDataverseAsCreator(@PathParam("id") long id) { + try { + Dataverse dataverse = dataverseSvc.find(id); + if (dataverse != null) { + AuthenticatedUser authenticatedUser = dataverse.getCreator(); + return ok(json(execCommand( + new PublishDataverseCommand(createDataverseRequest(authenticatedUser), dataverse)))); + } else { + return error(Status.BAD_REQUEST, "Could not find dataverse with id " + id); + } + } catch (WrappedResponse wr) { + return wr.getResponse(); + } + } + + @Deprecated + @GET + @AuthRequired + 
@Path("authenticatedUsers") + public Response listAuthenticatedUsers(@Context ContainerRequestContext crc) { + try { + AuthenticatedUser user = getRequestAuthenticatedUserOrDie(crc); + if (!user.isSuperuser()) { + return error(Response.Status.FORBIDDEN, "Superusers only."); + } + } catch (WrappedResponse ex) { + return error(Response.Status.FORBIDDEN, "Superusers only."); + } + JsonArrayBuilder userArray = Json.createArrayBuilder(); + authSvc.findAllAuthenticatedUsers().stream().forEach((user) -> { + userArray.add(json(user)); + }); + return ok(userArray); + } + + @GET + @AuthRequired + @Path(listUsersPartialAPIPath) + @Produces({ "application/json" }) + public Response filterAuthenticatedUsers( + @Context ContainerRequestContext crc, + @QueryParam("searchTerm") String searchTerm, + @QueryParam("selectedPage") Integer selectedPage, + @QueryParam("itemsPerPage") Integer itemsPerPage, + @QueryParam("sortKey") String sortKey + ) { + + User authUser = getRequestUser(crc); + + if (!authUser.isSuperuser()) { + return error(Response.Status.FORBIDDEN, + BundleUtil.getStringFromBundle("dashboard.list_users.api.auth.not_superuser")); + } + + UserListMaker userListMaker = new UserListMaker(userService); + + // String sortKey = null; + UserListResult userListResult = userListMaker.runUserSearch(searchTerm, itemsPerPage, selectedPage, sortKey); + + return ok(userListResult.toJSON()); + } + + /** + * @todo Make this support creation of BuiltInUsers. + * + * @todo Add way more error checking. Only the happy path is tested by AdminIT. 
+ */ + @POST + @Path("authenticatedUsers") + public Response createAuthenicatedUser(JsonObject jsonObject) { + logger.fine("JSON in: " + jsonObject); + String persistentUserId = jsonObject.getString("persistentUserId"); + String identifier = jsonObject.getString("identifier"); + String proposedAuthenticatedUserIdentifier = identifier.replaceFirst("@", ""); + String firstName = jsonObject.getString("firstName"); + String lastName = jsonObject.getString("lastName"); + String emailAddress = jsonObject.getString("email"); + String position = null; + String affiliation = null; + UserRecordIdentifier userRecordId = new UserRecordIdentifier(jsonObject.getString("authenticationProviderId"), + persistentUserId); + AuthenticatedUserDisplayInfo userDisplayInfo = new AuthenticatedUserDisplayInfo(firstName, lastName, + emailAddress, affiliation, position); + boolean generateUniqueIdentifier = true; + AuthenticatedUser authenticatedUser = authSvc.createAuthenticatedUser(userRecordId, + proposedAuthenticatedUserIdentifier, userDisplayInfo, true); + return ok(json(authenticatedUser)); + } //TODO: Delete this endpoint after 4.9.3. Was updated with change in docs. --MAD - /** - * curl -X PUT -d "shib@mailinator.com" - * http://localhost:8080/api/admin/authenticatedUsers/id/11/convertShibToBuiltIn - * - * @deprecated We have documented this API endpoint so we'll keep in around for - * a while but we should encourage everyone to switch to the - * "convertRemoteToBuiltIn" endpoint and then remove this - * Shib-specfic one. 
- */ - @PUT - @AuthRequired - @Path("authenticatedUsers/id/{id}/convertShibToBuiltIn") - @Deprecated - public Response convertShibUserToBuiltin(@Context ContainerRequestContext crc, @PathParam("id") Long id, String newEmailAddress) { - try { - AuthenticatedUser user = getRequestAuthenticatedUserOrDie(crc); - if (!user.isSuperuser()) { - return error(Response.Status.FORBIDDEN, "Superusers only."); - } - } catch (WrappedResponse ex) { - return error(Response.Status.FORBIDDEN, "Superusers only."); - } - try { - BuiltinUser builtinUser = authSvc.convertRemoteToBuiltIn(id, newEmailAddress); - if (builtinUser == null) { - return error(Response.Status.BAD_REQUEST, "User id " + id - + " could not be converted from Shibboleth to BuiltIn. An Exception was not thrown."); - } + /** + * curl -X PUT -d "shib@mailinator.com" + * http://localhost:8080/api/admin/authenticatedUsers/id/11/convertShibToBuiltIn + * + * @deprecated We have documented this API endpoint so we'll keep in around for + * a while but we should encourage everyone to switch to the + * "convertRemoteToBuiltIn" endpoint and then remove this + * Shib-specfic one. + */ + @PUT + @AuthRequired + @Path("authenticatedUsers/id/{id}/convertShibToBuiltIn") + @Deprecated + public Response convertShibUserToBuiltin(@Context ContainerRequestContext crc, @PathParam("id") Long id, String newEmailAddress) { + try { + AuthenticatedUser user = getRequestAuthenticatedUserOrDie(crc); + if (!user.isSuperuser()) { + return error(Response.Status.FORBIDDEN, "Superusers only."); + } + } catch (WrappedResponse ex) { + return error(Response.Status.FORBIDDEN, "Superusers only."); + } + try { + BuiltinUser builtinUser = authSvc.convertRemoteToBuiltIn(id, newEmailAddress); + if (builtinUser == null) { + return error(Response.Status.BAD_REQUEST, "User id " + id + + " could not be converted from Shibboleth to BuiltIn. 
An Exception was not thrown."); + } AuthenticatedUser authUser = authSvc.getAuthenticatedUser(builtinUser.getUserName()); - JsonObjectBuilder output = Json.createObjectBuilder(); - output.add("email", authUser.getEmail()); - output.add("username", builtinUser.getUserName()); - return ok(output); - } catch (Throwable ex) { - StringBuilder sb = new StringBuilder(); - sb.append(ex + " "); - while (ex.getCause() != null) { - ex = ex.getCause(); - sb.append(ex + " "); - } - String msg = "User id " + id - + " could not be converted from Shibboleth to BuiltIn. Details from Exception: " + sb; - logger.info(msg); - return error(Response.Status.BAD_REQUEST, msg); - } - } - - @PUT - @AuthRequired - @Path("authenticatedUsers/id/{id}/convertRemoteToBuiltIn") - public Response convertOAuthUserToBuiltin(@Context ContainerRequestContext crc, @PathParam("id") Long id, String newEmailAddress) { - try { - AuthenticatedUser user = getRequestAuthenticatedUserOrDie(crc); - if (!user.isSuperuser()) { - return error(Response.Status.FORBIDDEN, "Superusers only."); - } - } catch (WrappedResponse ex) { - return error(Response.Status.FORBIDDEN, "Superusers only."); - } - try { - BuiltinUser builtinUser = authSvc.convertRemoteToBuiltIn(id, newEmailAddress); + JsonObjectBuilder output = Json.createObjectBuilder(); + output.add("email", authUser.getEmail()); + output.add("username", builtinUser.getUserName()); + return ok(output); + } catch (Throwable ex) { + StringBuilder sb = new StringBuilder(); + sb.append(ex + " "); + while (ex.getCause() != null) { + ex = ex.getCause(); + sb.append(ex + " "); + } + String msg = "User id " + id + + " could not be converted from Shibboleth to BuiltIn. 
Details from Exception: " + sb; + logger.info(msg); + return error(Response.Status.BAD_REQUEST, msg); + } + } + + @PUT + @AuthRequired + @Path("authenticatedUsers/id/{id}/convertRemoteToBuiltIn") + public Response convertOAuthUserToBuiltin(@Context ContainerRequestContext crc, @PathParam("id") Long id, String newEmailAddress) { + try { + AuthenticatedUser user = getRequestAuthenticatedUserOrDie(crc); + if (!user.isSuperuser()) { + return error(Response.Status.FORBIDDEN, "Superusers only."); + } + } catch (WrappedResponse ex) { + return error(Response.Status.FORBIDDEN, "Superusers only."); + } + try { + BuiltinUser builtinUser = authSvc.convertRemoteToBuiltIn(id, newEmailAddress); //AuthenticatedUser authUser = authService.getAuthenticatedUser(aUser.getUserName()); - if (builtinUser == null) { - return error(Response.Status.BAD_REQUEST, "User id " + id - + " could not be converted from remote to BuiltIn. An Exception was not thrown."); - } + if (builtinUser == null) { + return error(Response.Status.BAD_REQUEST, "User id " + id + + " could not be converted from remote to BuiltIn. An Exception was not thrown."); + } AuthenticatedUser authUser = authSvc.getAuthenticatedUser(builtinUser.getUserName()); - JsonObjectBuilder output = Json.createObjectBuilder(); - output.add("email", authUser.getEmail()); - output.add("username", builtinUser.getUserName()); - return ok(output); - } catch (Throwable ex) { - StringBuilder sb = new StringBuilder(); - sb.append(ex + " "); - while (ex.getCause() != null) { - ex = ex.getCause(); - sb.append(ex + " "); - } - String msg = "User id " + id + " could not be converted from remote to BuiltIn. Details from Exception: " - + sb; - logger.info(msg); - return error(Response.Status.BAD_REQUEST, msg); - } - } - - /** - * This is used in testing via AdminIT.java but we don't expect sysadmins to use - * this. 
- */ - @PUT - @AuthRequired - @Path("authenticatedUsers/convert/builtin2shib") - public Response builtin2shib(@Context ContainerRequestContext crc, String content) { - logger.info("entering builtin2shib..."); - try { - AuthenticatedUser userToRunThisMethod = getRequestAuthenticatedUserOrDie(crc); - if (!userToRunThisMethod.isSuperuser()) { - return error(Response.Status.FORBIDDEN, "Superusers only."); - } - } catch (WrappedResponse ex) { - return error(Response.Status.FORBIDDEN, "Superusers only."); - } - boolean disabled = false; - if (disabled) { - return error(Response.Status.BAD_REQUEST, "API endpoint disabled."); - } - AuthenticatedUser builtInUserToConvert = null; - String emailToFind; - String password; - String authuserId = "0"; // could let people specify id on authuser table. probably better to let them - // tell us their - String newEmailAddressToUse; - try { - String[] args = content.split(":"); - emailToFind = args[0]; - password = args[1]; - newEmailAddressToUse = args[2]; - // authuserId = args[666]; - } catch (ArrayIndexOutOfBoundsException ex) { - return error(Response.Status.BAD_REQUEST, "Problem with content <<<" + content + ">>>: " + ex.toString()); - } - AuthenticatedUser existingAuthUserFoundByEmail = shibService.findAuthUserByEmail(emailToFind); - String existing = "NOT FOUND"; - if (existingAuthUserFoundByEmail != null) { - builtInUserToConvert = existingAuthUserFoundByEmail; - existing = existingAuthUserFoundByEmail.getIdentifier(); - } else { - long longToLookup = Long.parseLong(authuserId); - AuthenticatedUser specifiedUserToConvert = authSvc.findByID(longToLookup); - if (specifiedUserToConvert != null) { - builtInUserToConvert = specifiedUserToConvert; - } else { - return error(Response.Status.BAD_REQUEST, - "No user to convert. 
We couldn't find a *single* existing user account based on " + emailToFind - + " and no user was found using specified id " + longToLookup); - } - } - String shibProviderId = ShibAuthenticationProvider.PROVIDER_ID; - Map randomUser = authTestDataService.getRandomUser(); - // String eppn = UUID.randomUUID().toString().substring(0, 8); - String eppn = randomUser.get("eppn"); - String idPEntityId = randomUser.get("idp"); - String notUsed = null; - String separator = "|"; - UserIdentifier newUserIdentifierInLookupTable = new UserIdentifier(idPEntityId + separator + eppn, notUsed); - String overwriteFirstName = randomUser.get("firstName"); - String overwriteLastName = randomUser.get("lastName"); - String overwriteEmail = randomUser.get("email"); - overwriteEmail = newEmailAddressToUse; - logger.info("overwriteEmail: " + overwriteEmail); - boolean validEmail = EMailValidator.isEmailValid(overwriteEmail); - if (!validEmail) { - // See https://github.com/IQSS/dataverse/issues/2998 - return error(Response.Status.BAD_REQUEST, "invalid email: " + overwriteEmail); - } - /** - * @todo If affiliation is not null, put it in RoleAssigneeDisplayInfo - * constructor. - */ - /** - * Here we are exercising (via an API test) shibService.getAffiliation with the - * TestShib IdP and a non-production DevShibAccountType. 
- */ - idPEntityId = ShibUtil.testShibIdpEntityId; - String overwriteAffiliation = shibService.getAffiliation(idPEntityId, - ShibServiceBean.DevShibAccountType.RANDOM); - logger.info("overwriteAffiliation: " + overwriteAffiliation); - /** - * @todo Find a place to put "position" in the authenticateduser table: - * https://github.com/IQSS/dataverse/issues/1444#issuecomment-74134694 - */ - String overwritePosition = "staff;student"; - AuthenticatedUserDisplayInfo displayInfo = new AuthenticatedUserDisplayInfo(overwriteFirstName, - overwriteLastName, overwriteEmail, overwriteAffiliation, overwritePosition); - JsonObjectBuilder response = Json.createObjectBuilder(); - JsonArrayBuilder problems = Json.createArrayBuilder(); - if (password != null) { - response.add("password supplied", password); - boolean knowsExistingPassword = false; - BuiltinUser oldBuiltInUser = builtinUserService.findByUserName(builtInUserToConvert.getUserIdentifier()); - if (oldBuiltInUser != null) { + JsonObjectBuilder output = Json.createObjectBuilder(); + output.add("email", authUser.getEmail()); + output.add("username", builtinUser.getUserName()); + return ok(output); + } catch (Throwable ex) { + StringBuilder sb = new StringBuilder(); + sb.append(ex + " "); + while (ex.getCause() != null) { + ex = ex.getCause(); + sb.append(ex + " "); + } + String msg = "User id " + id + " could not be converted from remote to BuiltIn. Details from Exception: " + + sb; + logger.info(msg); + return error(Response.Status.BAD_REQUEST, msg); + } + } + + /** + * This is used in testing via AdminIT.java but we don't expect sysadmins to use + * this. 
+ */ + @PUT + @AuthRequired + @Path("authenticatedUsers/convert/builtin2shib") + public Response builtin2shib(@Context ContainerRequestContext crc, String content) { + logger.info("entering builtin2shib..."); + try { + AuthenticatedUser userToRunThisMethod = getRequestAuthenticatedUserOrDie(crc); + if (!userToRunThisMethod.isSuperuser()) { + return error(Response.Status.FORBIDDEN, "Superusers only."); + } + } catch (WrappedResponse ex) { + return error(Response.Status.FORBIDDEN, "Superusers only."); + } + boolean disabled = false; + if (disabled) { + return error(Response.Status.BAD_REQUEST, "API endpoint disabled."); + } + AuthenticatedUser builtInUserToConvert = null; + String emailToFind; + String password; + String authuserId = "0"; // could let people specify id on authuser table. probably better to let them + // tell us their + String newEmailAddressToUse; + try { + String[] args = content.split(":"); + emailToFind = args[0]; + password = args[1]; + newEmailAddressToUse = args[2]; + // authuserId = args[666]; + } catch (ArrayIndexOutOfBoundsException ex) { + return error(Response.Status.BAD_REQUEST, "Problem with content <<<" + content + ">>>: " + ex.toString()); + } + AuthenticatedUser existingAuthUserFoundByEmail = shibService.findAuthUserByEmail(emailToFind); + String existing = "NOT FOUND"; + if (existingAuthUserFoundByEmail != null) { + builtInUserToConvert = existingAuthUserFoundByEmail; + existing = existingAuthUserFoundByEmail.getIdentifier(); + } else { + long longToLookup = Long.parseLong(authuserId); + AuthenticatedUser specifiedUserToConvert = authSvc.findByID(longToLookup); + if (specifiedUserToConvert != null) { + builtInUserToConvert = specifiedUserToConvert; + } else { + return error(Response.Status.BAD_REQUEST, + "No user to convert. 
We couldn't find a *single* existing user account based on " + emailToFind + + " and no user was found using specified id " + longToLookup); + } + } + String shibProviderId = ShibAuthenticationProvider.PROVIDER_ID; + Map randomUser = authTestDataService.getRandomUser(); + // String eppn = UUID.randomUUID().toString().substring(0, 8); + String eppn = randomUser.get("eppn"); + String idPEntityId = randomUser.get("idp"); + String notUsed = null; + String separator = "|"; + UserIdentifier newUserIdentifierInLookupTable = new UserIdentifier(idPEntityId + separator + eppn, notUsed); + String overwriteFirstName = randomUser.get("firstName"); + String overwriteLastName = randomUser.get("lastName"); + String overwriteEmail = randomUser.get("email"); + overwriteEmail = newEmailAddressToUse; + logger.info("overwriteEmail: " + overwriteEmail); + boolean validEmail = EMailValidator.isEmailValid(overwriteEmail); + if (!validEmail) { + // See https://github.com/IQSS/dataverse/issues/2998 + return error(Response.Status.BAD_REQUEST, "invalid email: " + overwriteEmail); + } + /** + * @todo If affiliation is not null, put it in RoleAssigneeDisplayInfo + * constructor. + */ + /** + * Here we are exercising (via an API test) shibService.getAffiliation with the + * TestShib IdP and a non-production DevShibAccountType. 
+ */ + idPEntityId = ShibUtil.testShibIdpEntityId; + String overwriteAffiliation = shibService.getAffiliation(idPEntityId, + ShibServiceBean.DevShibAccountType.RANDOM); + logger.info("overwriteAffiliation: " + overwriteAffiliation); + /** + * @todo Find a place to put "position" in the authenticateduser table: + * https://github.com/IQSS/dataverse/issues/1444#issuecomment-74134694 + */ + String overwritePosition = "staff;student"; + AuthenticatedUserDisplayInfo displayInfo = new AuthenticatedUserDisplayInfo(overwriteFirstName, + overwriteLastName, overwriteEmail, overwriteAffiliation, overwritePosition); + JsonObjectBuilder response = Json.createObjectBuilder(); + JsonArrayBuilder problems = Json.createArrayBuilder(); + if (password != null) { + response.add("password supplied", password); + boolean knowsExistingPassword = false; + BuiltinUser oldBuiltInUser = builtinUserService.findByUserName(builtInUserToConvert.getUserIdentifier()); + if (oldBuiltInUser != null) { if (builtInUserToConvert.isDeactivated()) { problems.add("builtin account has been deactivated"); return error(Status.BAD_REQUEST, problems.build().toString()); } - String usernameOfBuiltinAccountToConvert = oldBuiltInUser.getUserName(); - response.add("old username", usernameOfBuiltinAccountToConvert); - AuthenticatedUser authenticatedUser = authSvc.canLogInAsBuiltinUser(usernameOfBuiltinAccountToConvert, - password); - if (authenticatedUser != null) { - knowsExistingPassword = true; - AuthenticatedUser convertedUser = authSvc.convertBuiltInToShib(builtInUserToConvert, shibProviderId, - newUserIdentifierInLookupTable); - if (convertedUser != null) { - /** - * @todo Display name is not being overwritten. 
Logic must be in Shib backing - * bean - */ - AuthenticatedUser updatedInfoUser = authSvc.updateAuthenticatedUser(convertedUser, displayInfo); - if (updatedInfoUser != null) { - response.add("display name overwritten with", updatedInfoUser.getName()); - } else { - problems.add("couldn't update display info"); - } - } else { - problems.add("unable to convert user"); - } - } - } else { - problems.add("couldn't find old username"); - } - if (!knowsExistingPassword) { - String message = "User doesn't know password."; - problems.add(message); - /** - * @todo Someday we should make a errorResponse method that takes JSON arrays - * and objects. - */ - return error(Status.BAD_REQUEST, problems.build().toString()); - } - // response.add("knows existing password", knowsExistingPassword); - } - - response.add("user to convert", builtInUserToConvert.getIdentifier()); - response.add("existing user found by email (prompt to convert)", existing); - response.add("changing to this provider", shibProviderId); - response.add("value to overwrite old first name", overwriteFirstName); - response.add("value to overwrite old last name", overwriteLastName); - response.add("value to overwrite old email address", overwriteEmail); - if (overwriteAffiliation != null) { - response.add("affiliation", overwriteAffiliation); - } - response.add("problems", problems); - return ok(response); - } - - /** - * This is used in testing via AdminIT.java but we don't expect sysadmins to use - * this. 
- */ - @PUT - @AuthRequired - @Path("authenticatedUsers/convert/builtin2oauth") - public Response builtin2oauth(@Context ContainerRequestContext crc, String content) { - logger.info("entering builtin2oauth..."); - try { - AuthenticatedUser userToRunThisMethod = getRequestAuthenticatedUserOrDie(crc); - if (!userToRunThisMethod.isSuperuser()) { - return error(Response.Status.FORBIDDEN, "Superusers only."); - } - } catch (WrappedResponse ex) { - return error(Response.Status.FORBIDDEN, "Superusers only."); - } - boolean disabled = false; - if (disabled) { - return error(Response.Status.BAD_REQUEST, "API endpoint disabled."); - } - AuthenticatedUser builtInUserToConvert = null; - String emailToFind; - String password; - String authuserId = "0"; // could let people specify id on authuser table. probably better to let them - // tell us their - String newEmailAddressToUse; - String newProviderId; - String newPersistentUserIdInLookupTable; - logger.info("content: " + content); - try { - String[] args = content.split(":"); - emailToFind = args[0]; - password = args[1]; - newEmailAddressToUse = args[2]; - newProviderId = args[3]; - newPersistentUserIdInLookupTable = args[4]; - // authuserId = args[666]; - } catch (ArrayIndexOutOfBoundsException ex) { - return error(Response.Status.BAD_REQUEST, "Problem with content <<<" + content + ">>>: " + ex.toString()); - } - AuthenticatedUser existingAuthUserFoundByEmail = shibService.findAuthUserByEmail(emailToFind); - String existing = "NOT FOUND"; - if (existingAuthUserFoundByEmail != null) { - builtInUserToConvert = existingAuthUserFoundByEmail; - existing = existingAuthUserFoundByEmail.getIdentifier(); - } else { - long longToLookup = Long.parseLong(authuserId); - AuthenticatedUser specifiedUserToConvert = authSvc.findByID(longToLookup); - if (specifiedUserToConvert != null) { - builtInUserToConvert = specifiedUserToConvert; - } else { - return error(Response.Status.BAD_REQUEST, - "No user to convert. 
We couldn't find a *single* existing user account based on " + emailToFind - + " and no user was found using specified id " + longToLookup); - } - } - // String shibProviderId = ShibAuthenticationProvider.PROVIDER_ID; - Map randomUser = authTestDataService.getRandomUser(); - // String eppn = UUID.randomUUID().toString().substring(0, 8); - String eppn = randomUser.get("eppn"); - String idPEntityId = randomUser.get("idp"); - String notUsed = null; - String separator = "|"; - // UserIdentifier newUserIdentifierInLookupTable = new - // UserIdentifier(idPEntityId + separator + eppn, notUsed); - UserIdentifier newUserIdentifierInLookupTable = new UserIdentifier(newPersistentUserIdInLookupTable, notUsed); - String overwriteFirstName = randomUser.get("firstName"); - String overwriteLastName = randomUser.get("lastName"); - String overwriteEmail = randomUser.get("email"); - overwriteEmail = newEmailAddressToUse; - logger.info("overwriteEmail: " + overwriteEmail); - boolean validEmail = EMailValidator.isEmailValid(overwriteEmail); - if (!validEmail) { - // See https://github.com/IQSS/dataverse/issues/2998 - return error(Response.Status.BAD_REQUEST, "invalid email: " + overwriteEmail); - } - /** - * @todo If affiliation is not null, put it in RoleAssigneeDisplayInfo - * constructor. - */ - /** - * Here we are exercising (via an API test) shibService.getAffiliation with the - * TestShib IdP and a non-production DevShibAccountType. 
- */ - // idPEntityId = ShibUtil.testShibIdpEntityId; - // String overwriteAffiliation = shibService.getAffiliation(idPEntityId, - // ShibServiceBean.DevShibAccountType.RANDOM); - String overwriteAffiliation = null; - logger.info("overwriteAffiliation: " + overwriteAffiliation); - /** - * @todo Find a place to put "position" in the authenticateduser table: - * https://github.com/IQSS/dataverse/issues/1444#issuecomment-74134694 - */ - String overwritePosition = "staff;student"; - AuthenticatedUserDisplayInfo displayInfo = new AuthenticatedUserDisplayInfo(overwriteFirstName, - overwriteLastName, overwriteEmail, overwriteAffiliation, overwritePosition); - JsonObjectBuilder response = Json.createObjectBuilder(); - JsonArrayBuilder problems = Json.createArrayBuilder(); - if (password != null) { - response.add("password supplied", password); - boolean knowsExistingPassword = false; - BuiltinUser oldBuiltInUser = builtinUserService.findByUserName(builtInUserToConvert.getUserIdentifier()); - if (oldBuiltInUser != null) { - String usernameOfBuiltinAccountToConvert = oldBuiltInUser.getUserName(); - response.add("old username", usernameOfBuiltinAccountToConvert); - AuthenticatedUser authenticatedUser = authSvc.canLogInAsBuiltinUser(usernameOfBuiltinAccountToConvert, - password); - if (authenticatedUser != null) { - knowsExistingPassword = true; - AuthenticatedUser convertedUser = authSvc.convertBuiltInUserToRemoteUser(builtInUserToConvert, - newProviderId, newUserIdentifierInLookupTable); - if (convertedUser != null) { - /** - * @todo Display name is not being overwritten. 
Logic must be in Shib backing - * bean - */ - AuthenticatedUser updatedInfoUser = authSvc.updateAuthenticatedUser(convertedUser, displayInfo); - if (updatedInfoUser != null) { - response.add("display name overwritten with", updatedInfoUser.getName()); - } else { - problems.add("couldn't update display info"); - } - } else { - problems.add("unable to convert user"); - } - } - } else { - problems.add("couldn't find old username"); - } - if (!knowsExistingPassword) { - String message = "User doesn't know password."; - problems.add(message); - /** - * @todo Someday we should make a errorResponse method that takes JSON arrays - * and objects. - */ - return error(Status.BAD_REQUEST, problems.build().toString()); - } - // response.add("knows existing password", knowsExistingPassword); - } - - response.add("user to convert", builtInUserToConvert.getIdentifier()); - response.add("existing user found by email (prompt to convert)", existing); - response.add("changing to this provider", newProviderId); - response.add("value to overwrite old first name", overwriteFirstName); - response.add("value to overwrite old last name", overwriteLastName); - response.add("value to overwrite old email address", overwriteEmail); - if (overwriteAffiliation != null) { - response.add("affiliation", overwriteAffiliation); - } - response.add("problems", problems); - return ok(response); - } - - - - - @Path("roles") - @POST - public Response createNewBuiltinRole(RoleDTO roleDto) { - ActionLogRecord alr = new ActionLogRecord(ActionLogRecord.ActionType.Admin, "createBuiltInRole") - .setInfo(roleDto.getAlias() + ":" + roleDto.getDescription()); - try { - return ok(json(rolesSvc.save(roleDto.asRole()))); - } catch (Exception e) { - alr.setActionResult(ActionLogRecord.Result.InternalError); - alr.setInfo(alr.getInfo() + "// " + e.getMessage()); - return error(Response.Status.INTERNAL_SERVER_ERROR, e.getMessage()); - } finally { - actionLogSvc.log(alr); - } - } - - @Path("roles") - @GET - public 
Response listBuiltinRoles() { - try { - return ok(rolesToJson(rolesSvc.findBuiltinRoles())); - } catch (Exception e) { - return error(Response.Status.INTERNAL_SERVER_ERROR, e.getMessage()); - } - } + String usernameOfBuiltinAccountToConvert = oldBuiltInUser.getUserName(); + response.add("old username", usernameOfBuiltinAccountToConvert); + AuthenticatedUser authenticatedUser = authSvc.canLogInAsBuiltinUser(usernameOfBuiltinAccountToConvert, + password); + if (authenticatedUser != null) { + knowsExistingPassword = true; + AuthenticatedUser convertedUser = authSvc.convertBuiltInToShib(builtInUserToConvert, shibProviderId, + newUserIdentifierInLookupTable); + if (convertedUser != null) { + /** + * @todo Display name is not being overwritten. Logic must be in Shib backing + * bean + */ + AuthenticatedUser updatedInfoUser = authSvc.updateAuthenticatedUser(convertedUser, displayInfo); + if (updatedInfoUser != null) { + response.add("display name overwritten with", updatedInfoUser.getName()); + } else { + problems.add("couldn't update display info"); + } + } else { + problems.add("unable to convert user"); + } + } + } else { + problems.add("couldn't find old username"); + } + if (!knowsExistingPassword) { + String message = "User doesn't know password."; + problems.add(message); + /** + * @todo Someday we should make a errorResponse method that takes JSON arrays + * and objects. 
+ */ + return error(Status.BAD_REQUEST, problems.build().toString()); + } + // response.add("knows existing password", knowsExistingPassword); + } + + response.add("user to convert", builtInUserToConvert.getIdentifier()); + response.add("existing user found by email (prompt to convert)", existing); + response.add("changing to this provider", shibProviderId); + response.add("value to overwrite old first name", overwriteFirstName); + response.add("value to overwrite old last name", overwriteLastName); + response.add("value to overwrite old email address", overwriteEmail); + if (overwriteAffiliation != null) { + response.add("affiliation", overwriteAffiliation); + } + response.add("problems", problems); + return ok(response); + } + + /** + * This is used in testing via AdminIT.java but we don't expect sysadmins to use + * this. + */ + @PUT + @AuthRequired + @Path("authenticatedUsers/convert/builtin2oauth") + public Response builtin2oauth(@Context ContainerRequestContext crc, String content) { + logger.info("entering builtin2oauth..."); + try { + AuthenticatedUser userToRunThisMethod = getRequestAuthenticatedUserOrDie(crc); + if (!userToRunThisMethod.isSuperuser()) { + return error(Response.Status.FORBIDDEN, "Superusers only."); + } + } catch (WrappedResponse ex) { + return error(Response.Status.FORBIDDEN, "Superusers only."); + } + boolean disabled = false; + if (disabled) { + return error(Response.Status.BAD_REQUEST, "API endpoint disabled."); + } + AuthenticatedUser builtInUserToConvert = null; + String emailToFind; + String password; + String authuserId = "0"; // could let people specify id on authuser table. 
probably better to let them + // tell us their + String newEmailAddressToUse; + String newProviderId; + String newPersistentUserIdInLookupTable; + logger.info("content: " + content); + try { + String[] args = content.split(":"); + emailToFind = args[0]; + password = args[1]; + newEmailAddressToUse = args[2]; + newProviderId = args[3]; + newPersistentUserIdInLookupTable = args[4]; + // authuserId = args[666]; + } catch (ArrayIndexOutOfBoundsException ex) { + return error(Response.Status.BAD_REQUEST, "Problem with content <<<" + content + ">>>: " + ex.toString()); + } + AuthenticatedUser existingAuthUserFoundByEmail = shibService.findAuthUserByEmail(emailToFind); + String existing = "NOT FOUND"; + if (existingAuthUserFoundByEmail != null) { + builtInUserToConvert = existingAuthUserFoundByEmail; + existing = existingAuthUserFoundByEmail.getIdentifier(); + } else { + long longToLookup = Long.parseLong(authuserId); + AuthenticatedUser specifiedUserToConvert = authSvc.findByID(longToLookup); + if (specifiedUserToConvert != null) { + builtInUserToConvert = specifiedUserToConvert; + } else { + return error(Response.Status.BAD_REQUEST, + "No user to convert. 
We couldn't find a *single* existing user account based on " + emailToFind + + " and no user was found using specified id " + longToLookup); + } + } + // String shibProviderId = ShibAuthenticationProvider.PROVIDER_ID; + Map randomUser = authTestDataService.getRandomUser(); + // String eppn = UUID.randomUUID().toString().substring(0, 8); + String eppn = randomUser.get("eppn"); + String idPEntityId = randomUser.get("idp"); + String notUsed = null; + String separator = "|"; + // UserIdentifier newUserIdentifierInLookupTable = new + // UserIdentifier(idPEntityId + separator + eppn, notUsed); + UserIdentifier newUserIdentifierInLookupTable = new UserIdentifier(newPersistentUserIdInLookupTable, notUsed); + String overwriteFirstName = randomUser.get("firstName"); + String overwriteLastName = randomUser.get("lastName"); + String overwriteEmail = randomUser.get("email"); + overwriteEmail = newEmailAddressToUse; + logger.info("overwriteEmail: " + overwriteEmail); + boolean validEmail = EMailValidator.isEmailValid(overwriteEmail); + if (!validEmail) { + // See https://github.com/IQSS/dataverse/issues/2998 + return error(Response.Status.BAD_REQUEST, "invalid email: " + overwriteEmail); + } + /** + * @todo If affiliation is not null, put it in RoleAssigneeDisplayInfo + * constructor. + */ + /** + * Here we are exercising (via an API test) shibService.getAffiliation with the + * TestShib IdP and a non-production DevShibAccountType. 
+ */ + // idPEntityId = ShibUtil.testShibIdpEntityId; + // String overwriteAffiliation = shibService.getAffiliation(idPEntityId, + // ShibServiceBean.DevShibAccountType.RANDOM); + String overwriteAffiliation = null; + logger.info("overwriteAffiliation: " + overwriteAffiliation); + /** + * @todo Find a place to put "position" in the authenticateduser table: + * https://github.com/IQSS/dataverse/issues/1444#issuecomment-74134694 + */ + String overwritePosition = "staff;student"; + AuthenticatedUserDisplayInfo displayInfo = new AuthenticatedUserDisplayInfo(overwriteFirstName, + overwriteLastName, overwriteEmail, overwriteAffiliation, overwritePosition); + JsonObjectBuilder response = Json.createObjectBuilder(); + JsonArrayBuilder problems = Json.createArrayBuilder(); + if (password != null) { + response.add("password supplied", password); + boolean knowsExistingPassword = false; + BuiltinUser oldBuiltInUser = builtinUserService.findByUserName(builtInUserToConvert.getUserIdentifier()); + if (oldBuiltInUser != null) { + String usernameOfBuiltinAccountToConvert = oldBuiltInUser.getUserName(); + response.add("old username", usernameOfBuiltinAccountToConvert); + AuthenticatedUser authenticatedUser = authSvc.canLogInAsBuiltinUser(usernameOfBuiltinAccountToConvert, + password); + if (authenticatedUser != null) { + knowsExistingPassword = true; + AuthenticatedUser convertedUser = authSvc.convertBuiltInUserToRemoteUser(builtInUserToConvert, + newProviderId, newUserIdentifierInLookupTable); + if (convertedUser != null) { + /** + * @todo Display name is not being overwritten. 
Logic must be in Shib backing + * bean + */ + AuthenticatedUser updatedInfoUser = authSvc.updateAuthenticatedUser(convertedUser, displayInfo); + if (updatedInfoUser != null) { + response.add("display name overwritten with", updatedInfoUser.getName()); + } else { + problems.add("couldn't update display info"); + } + } else { + problems.add("unable to convert user"); + } + } + } else { + problems.add("couldn't find old username"); + } + if (!knowsExistingPassword) { + String message = "User doesn't know password."; + problems.add(message); + /** + * @todo Someday we should make a errorResponse method that takes JSON arrays + * and objects. + */ + return error(Status.BAD_REQUEST, problems.build().toString()); + } + // response.add("knows existing password", knowsExistingPassword); + } + + response.add("user to convert", builtInUserToConvert.getIdentifier()); + response.add("existing user found by email (prompt to convert)", existing); + response.add("changing to this provider", newProviderId); + response.add("value to overwrite old first name", overwriteFirstName); + response.add("value to overwrite old last name", overwriteLastName); + response.add("value to overwrite old email address", overwriteEmail); + if (overwriteAffiliation != null) { + response.add("affiliation", overwriteAffiliation); + } + response.add("problems", problems); + return ok(response); + } + + + + + @Path("roles") + @POST + public Response createNewBuiltinRole(RoleDTO roleDto) { + ActionLogRecord alr = new ActionLogRecord(ActionLogRecord.ActionType.Admin, "createBuiltInRole") + .setInfo(roleDto.getAlias() + ":" + roleDto.getDescription()); + try { + return ok(json(rolesSvc.save(roleDto.asRole()))); + } catch (Exception e) { + alr.setActionResult(ActionLogRecord.Result.InternalError); + alr.setInfo(alr.getInfo() + "// " + e.getMessage()); + return error(Response.Status.INTERNAL_SERVER_ERROR, e.getMessage()); + } finally { + actionLogSvc.log(alr); + } + } + + @Path("roles") + @GET + public 
Response listBuiltinRoles() { + try { + return ok(rolesToJson(rolesSvc.findBuiltinRoles())); + } catch (Exception e) { + return error(Response.Status.INTERNAL_SERVER_ERROR, e.getMessage()); + } + } @DELETE - @AuthRequired + @AuthRequired @Path("roles/{id}") public Response deleteRole(@Context ContainerRequestContext crc, @PathParam("id") String id) { @@ -1284,77 +1264,77 @@ public void write(OutputStream os) throws IOException, return Response.ok(stream).build(); } - @Path("assignments/assignees/{raIdtf: .*}") - @GET - public Response getAssignmentsFor(@PathParam("raIdtf") String raIdtf) { - - JsonArrayBuilder arr = Json.createArrayBuilder(); - roleAssigneeSvc.getAssignmentsFor(raIdtf).forEach(a -> arr.add(json(a))); - - return ok(arr); - } - - /** - * This method is used in integration tests. - * - * @param userId - * The database id of an AuthenticatedUser. - * @return The confirm email token. - */ - @Path("confirmEmail/{userId}") - @GET - public Response getConfirmEmailToken(@PathParam("userId") long userId) { - AuthenticatedUser user = authSvc.findByID(userId); - if (user != null) { - ConfirmEmailData confirmEmailData = confirmEmailSvc.findSingleConfirmEmailDataByUser(user); - if (confirmEmailData != null) { - return ok(Json.createObjectBuilder().add("token", confirmEmailData.getToken())); - } - } - return error(Status.BAD_REQUEST, "Could not find confirm email token for user " + userId); - } - - /** - * This method is used in integration tests. - * - * @param userId - * The database id of an AuthenticatedUser. 
- */ - @Path("confirmEmail/{userId}") - @POST - public Response startConfirmEmailProcess(@PathParam("userId") long userId) { - AuthenticatedUser user = authSvc.findByID(userId); - if (user != null) { - try { - ConfirmEmailInitResponse confirmEmailInitResponse = confirmEmailSvc.beginConfirm(user); - ConfirmEmailData confirmEmailData = confirmEmailInitResponse.getConfirmEmailData(); - return ok(Json.createObjectBuilder().add("tokenCreated", confirmEmailData.getCreated().toString()) - .add("identifier", user.getUserIdentifier())); - } catch (ConfirmEmailException ex) { - return error(Status.BAD_REQUEST, - "Could not start confirm email process for user " + userId + ": " + ex.getLocalizedMessage()); - } - } - return error(Status.BAD_REQUEST, "Could not find user based on " + userId); - } - - /** - * This method is used by an integration test in UsersIT.java to exercise bug - * https://github.com/IQSS/dataverse/issues/3287 . Not for use by users! - */ - @Path("convertUserFromBcryptToSha1") - @POST - public Response convertUserFromBcryptToSha1(String json) { - JsonReader jsonReader = Json.createReader(new StringReader(json)); - JsonObject object = jsonReader.readObject(); - jsonReader.close(); - BuiltinUser builtinUser = builtinUserService.find(new Long(object.getInt("builtinUserId"))); - builtinUser.updateEncryptedPassword("4G7xxL9z11/JKN4jHPn4g9iIQck=", 0); // password is "sha-1Pass", 0 means - // SHA-1 - BuiltinUser savedUser = builtinUserService.save(builtinUser); - return ok("foo: " + savedUser); - - } + @Path("assignments/assignees/{raIdtf: .*}") + @GET + public Response getAssignmentsFor(@PathParam("raIdtf") String raIdtf) { + + JsonArrayBuilder arr = Json.createArrayBuilder(); + roleAssigneeSvc.getAssignmentsFor(raIdtf).forEach(a -> arr.add(json(a))); + + return ok(arr); + } + + /** + * This method is used in integration tests. + * + * @param userId + * The database id of an AuthenticatedUser. + * @return The confirm email token. 
+ */ + @Path("confirmEmail/{userId}") + @GET + public Response getConfirmEmailToken(@PathParam("userId") long userId) { + AuthenticatedUser user = authSvc.findByID(userId); + if (user != null) { + ConfirmEmailData confirmEmailData = confirmEmailSvc.findSingleConfirmEmailDataByUser(user); + if (confirmEmailData != null) { + return ok(Json.createObjectBuilder().add("token", confirmEmailData.getToken())); + } + } + return error(Status.BAD_REQUEST, "Could not find confirm email token for user " + userId); + } + + /** + * This method is used in integration tests. + * + * @param userId + * The database id of an AuthenticatedUser. + */ + @Path("confirmEmail/{userId}") + @POST + public Response startConfirmEmailProcess(@PathParam("userId") long userId) { + AuthenticatedUser user = authSvc.findByID(userId); + if (user != null) { + try { + ConfirmEmailInitResponse confirmEmailInitResponse = confirmEmailSvc.beginConfirm(user); + ConfirmEmailData confirmEmailData = confirmEmailInitResponse.getConfirmEmailData(); + return ok(Json.createObjectBuilder().add("tokenCreated", confirmEmailData.getCreated().toString()) + .add("identifier", user.getUserIdentifier())); + } catch (ConfirmEmailException ex) { + return error(Status.BAD_REQUEST, + "Could not start confirm email process for user " + userId + ": " + ex.getLocalizedMessage()); + } + } + return error(Status.BAD_REQUEST, "Could not find user based on " + userId); + } + + /** + * This method is used by an integration test in UsersIT.java to exercise bug + * https://github.com/IQSS/dataverse/issues/3287 . Not for use by users! 
+ */ + @Path("convertUserFromBcryptToSha1") + @POST + public Response convertUserFromBcryptToSha1(String json) { + JsonReader jsonReader = Json.createReader(new StringReader(json)); + JsonObject object = jsonReader.readObject(); + jsonReader.close(); + BuiltinUser builtinUser = builtinUserService.find(new Long(object.getInt("builtinUserId"))); + builtinUser.updateEncryptedPassword("4G7xxL9z11/JKN4jHPn4g9iIQck=", 0); // password is "sha-1Pass", 0 means + // SHA-1 + BuiltinUser savedUser = builtinUserService.save(builtinUser); + return ok("foo: " + savedUser); + + } @Path("permissions/{dvo}") @AuthRequired @@ -1375,43 +1355,43 @@ public Response findPermissonsOn(@Context final ContainerRequestContext crc, @Pa } } - @Path("assignee/{idtf}") - @GET - public Response findRoleAssignee(@PathParam("idtf") String idtf) { - RoleAssignee ra = roleAssigneeSvc.getRoleAssignee(idtf); - return (ra == null) ? notFound("Role Assignee '" + idtf + "' not found.") : ok(json(ra.getDisplayInfo())); - } - - @Path("datasets/integrity/{datasetVersionId}/fixmissingunf") - @POST - public Response fixUnf(@PathParam("datasetVersionId") String datasetVersionId, - @QueryParam("forceRecalculate") boolean forceRecalculate) { - JsonObjectBuilder info = datasetVersionSvc.fixMissingUnf(datasetVersionId, forceRecalculate); - return ok(info); - } - - @Path("datafiles/integrity/fixmissingoriginaltypes") - @GET - public Response fixMissingOriginalTypes() { - JsonObjectBuilder info = Json.createObjectBuilder(); - - List affectedFileIds = fileService.selectFilesWithMissingOriginalTypes(); - - if (affectedFileIds.isEmpty()) { - info.add("message", - "All the tabular files in the database already have the original types set correctly; exiting."); - } else { - for (Long fileid : affectedFileIds) { - logger.fine("found file id: " + fileid); - } - info.add("message", "Found " + affectedFileIds.size() - + " tabular files with missing original types. 
Kicking off an async job that will repair the files in the background."); - } - - ingestService.fixMissingOriginalTypes(affectedFileIds); - - return ok(info); - } + @Path("assignee/{idtf}") + @GET + public Response findRoleAssignee(@PathParam("idtf") String idtf) { + RoleAssignee ra = roleAssigneeSvc.getRoleAssignee(idtf); + return (ra == null) ? notFound("Role Assignee '" + idtf + "' not found.") : ok(json(ra.getDisplayInfo())); + } + + @Path("datasets/integrity/{datasetVersionId}/fixmissingunf") + @POST + public Response fixUnf(@PathParam("datasetVersionId") String datasetVersionId, + @QueryParam("forceRecalculate") boolean forceRecalculate) { + JsonObjectBuilder info = datasetVersionSvc.fixMissingUnf(datasetVersionId, forceRecalculate); + return ok(info); + } + + @Path("datafiles/integrity/fixmissingoriginaltypes") + @GET + public Response fixMissingOriginalTypes() { + JsonObjectBuilder info = Json.createObjectBuilder(); + + List affectedFileIds = fileService.selectFilesWithMissingOriginalTypes(); + + if (affectedFileIds.isEmpty()) { + info.add("message", + "All the tabular files in the database already have the original types set correctly; exiting."); + } else { + for (Long fileid : affectedFileIds) { + logger.fine("found file id: " + fileid); + } + info.add("message", "Found " + affectedFileIds.size() + + " tabular files with missing original types. Kicking off an async job that will repair the files in the background."); + } + + ingestService.fixMissingOriginalTypes(affectedFileIds); + + return ok(info); + } @Path("datafiles/integrity/fixmissingoriginalsizes") @GET @@ -1441,60 +1421,60 @@ public Response fixMissingOriginalSizes(@QueryParam("limit") Integer limit) { return ok(info); } - /** - * This method is used in API tests, called from UtilIt.java. 
- */ - @GET - @Path("datasets/thumbnailMetadata/{id}") - public Response getDatasetThumbnailMetadata(@PathParam("id") Long idSupplied) { - Dataset dataset = datasetSvc.find(idSupplied); - if (dataset == null) { - return error(Response.Status.NOT_FOUND, "Could not find dataset based on id supplied: " + idSupplied + "."); - } - JsonObjectBuilder data = Json.createObjectBuilder(); - DatasetThumbnail datasetThumbnail = dataset.getDatasetThumbnail(ImageThumbConverter.DEFAULT_CARDIMAGE_SIZE); - data.add("isUseGenericThumbnail", dataset.isUseGenericThumbnail()); - data.add("datasetLogoPresent", DatasetUtil.isDatasetLogoPresent(dataset, ImageThumbConverter.DEFAULT_CARDIMAGE_SIZE)); - if (datasetThumbnail != null) { - data.add("datasetThumbnailBase64image", datasetThumbnail.getBase64image()); - DataFile dataFile = datasetThumbnail.getDataFile(); - if (dataFile != null) { - /** - * @todo Change this from a String to a long. - */ - data.add("dataFileId", dataFile.getId().toString()); - } - } - return ok(data); - } - - /** - * validatePassword - *

- * Validate a password with an API call - * - * @param password - * The password - * @return A response with the validation result. - */ - @Path("validatePassword") - @POST - public Response validatePassword(String password) { - - final List errors = passwordValidatorService.validate(password, new Date(), false); - final JsonArrayBuilder errorArray = Json.createArrayBuilder(); - errors.forEach(errorArray::add); - return ok(Json.createObjectBuilder().add("password", password).add("errors", errorArray)); - } - - @GET - @Path("/isOrcid") - public Response isOrcidEnabled() { - return authSvc.isOrcidEnabled() ? ok("Orcid is enabled") : ok("no orcid for you."); - } + /** + * This method is used in API tests, called from UtilIt.java. + */ + @GET + @Path("datasets/thumbnailMetadata/{id}") + public Response getDatasetThumbnailMetadata(@PathParam("id") Long idSupplied) { + Dataset dataset = datasetSvc.find(idSupplied); + if (dataset == null) { + return error(Response.Status.NOT_FOUND, "Could not find dataset based on id supplied: " + idSupplied + "."); + } + JsonObjectBuilder data = Json.createObjectBuilder(); + DatasetThumbnail datasetThumbnail = dataset.getDatasetThumbnail(ImageThumbConverter.DEFAULT_CARDIMAGE_SIZE); + data.add("isUseGenericThumbnail", dataset.isUseGenericThumbnail()); + data.add("datasetLogoPresent", DatasetUtil.isDatasetLogoPresent(dataset, ImageThumbConverter.DEFAULT_CARDIMAGE_SIZE)); + if (datasetThumbnail != null) { + data.add("datasetThumbnailBase64image", datasetThumbnail.getBase64image()); + DataFile dataFile = datasetThumbnail.getDataFile(); + if (dataFile != null) { + /** + * @todo Change this from a String to a long. + */ + data.add("dataFileId", dataFile.getId().toString()); + } + } + return ok(data); + } + + /** + * validatePassword + *

+ * Validate a password with an API call + * + * @param password + * The password + * @return A response with the validation result. + */ + @Path("validatePassword") + @POST + public Response validatePassword(String password) { + + final List errors = passwordValidatorService.validate(password, new Date(), false); + final JsonArrayBuilder errorArray = Json.createArrayBuilder(); + errors.forEach(errorArray::add); + return ok(Json.createObjectBuilder().add("password", password).add("errors", errorArray)); + } + + @GET + @Path("/isOrcid") + public Response isOrcidEnabled() { + return authSvc.isOrcidEnabled() ? ok("Orcid is enabled") : ok("no orcid for you."); + } @POST - @AuthRequired + @AuthRequired @Path("{id}/reregisterHDLToPID") public Response reregisterHdlToPID(@Context ContainerRequestContext crc, @PathParam("id") String id) { logger.info("Starting to reregister " + id + " Dataset Id. (from hdl to doi)" + new Date()); @@ -1825,7 +1805,7 @@ public Response updateHashValues(@Context ContainerRequestContext crc, @PathPara } @POST - @AuthRequired + @AuthRequired @Path("/computeDataFileHashValue/{fileId}/algorithm/{alg}") public Response computeDataFileHashValue(@Context ContainerRequestContext crc, @PathParam("fileId") String fileId, @PathParam("alg") String alg) { @@ -1887,7 +1867,7 @@ public Response computeDataFileHashValue(@Context ContainerRequestContext crc, @ } @POST - @AuthRequired + @AuthRequired @Path("/validateDataFileHashValue/{fileId}") public Response validateDataFileHashValue(@Context ContainerRequestContext crc, @PathParam("fileId") String fileId) { @@ -1954,7 +1934,7 @@ public Response validateDataFileHashValue(@Context ContainerRequestContext crc, } @POST - @AuthRequired + @AuthRequired @Path("/submitDatasetVersionToArchive/{id}/{version}") public Response submitDatasetVersionToArchive(@Context ContainerRequestContext crc, @PathParam("id") String dsid, @PathParam("version") String versionNumber) { @@ -2027,7 +2007,7 @@ public void run() { * 
@return */ @POST - @AuthRequired + @AuthRequired @Path("/archiveAllUnarchivedDatasetVersions") public Response archiveAllUnarchivedDatasetVersions(@Context ContainerRequestContext crc, @QueryParam("listonly") boolean listonly, @QueryParam("limit") Integer limit, @QueryParam("latestonly") boolean latestonly) { @@ -2126,7 +2106,7 @@ public Response clearMetricsCacheByName(@PathParam("name") String name) { } @GET - @AuthRequired + @AuthRequired @Path("/dataverse/{alias}/addRoleAssignmentsToChildren") public Response addRoleAssignementsToChildren(@Context ContainerRequestContext crc, @PathParam("alias") String alias) throws WrappedResponse { Dataverse owner = dataverseSvc.findByAlias(alias); @@ -2157,90 +2137,90 @@ public Response addRoleAssignementsToChildren(@Context ContainerRequestContext c } @GET - @AuthRequired + @AuthRequired @Path("/dataverse/{alias}/storageDriver") public Response getStorageDriver(@Context ContainerRequestContext crc, @PathParam("alias") String alias) throws WrappedResponse { - Dataverse dataverse = dataverseSvc.findByAlias(alias); - if (dataverse == null) { - return error(Response.Status.NOT_FOUND, "Could not find dataverse based on alias supplied: " + alias + "."); - } - try { - AuthenticatedUser user = getRequestAuthenticatedUserOrDie(crc); - if (!user.isSuperuser()) { - return error(Response.Status.FORBIDDEN, "Superusers only."); - } - } catch (WrappedResponse wr) { - return wr.getResponse(); - } - //Note that this returns what's set directly on this dataverse. 
If null/DataAccess.UNDEFINED_STORAGE_DRIVER_IDENTIFIER, the user would have to recurse the chain of parents to find the effective storageDriver - return ok(dataverse.getStorageDriverId()); + Dataverse dataverse = dataverseSvc.findByAlias(alias); + if (dataverse == null) { + return error(Response.Status.NOT_FOUND, "Could not find dataverse based on alias supplied: " + alias + "."); + } + try { + AuthenticatedUser user = getRequestAuthenticatedUserOrDie(crc); + if (!user.isSuperuser()) { + return error(Response.Status.FORBIDDEN, "Superusers only."); + } + } catch (WrappedResponse wr) { + return wr.getResponse(); + } + //Note that this returns what's set directly on this dataverse. If null/DataAccess.UNDEFINED_STORAGE_DRIVER_IDENTIFIER, the user would have to recurse the chain of parents to find the effective storageDriver + return ok(dataverse.getStorageDriverId()); } @PUT - @AuthRequired + @AuthRequired @Path("/dataverse/{alias}/storageDriver") public Response setStorageDriver(@Context ContainerRequestContext crc, @PathParam("alias") String alias, String label) throws WrappedResponse { - Dataverse dataverse = dataverseSvc.findByAlias(alias); - if (dataverse == null) { - return error(Response.Status.NOT_FOUND, "Could not find dataverse based on alias supplied: " + alias + "."); - } - try { - AuthenticatedUser user = getRequestAuthenticatedUserOrDie(crc); - if (!user.isSuperuser()) { - return error(Response.Status.FORBIDDEN, "Superusers only."); - } - } catch (WrappedResponse wr) { - return wr.getResponse(); - } - for (Entry store: DataAccess.getStorageDriverLabels().entrySet()) { - if(store.getKey().equals(label)) { - dataverse.setStorageDriverId(store.getValue()); - return ok("Storage set to: " + store.getKey() + "/" + store.getValue()); - } - } - return error(Response.Status.BAD_REQUEST, - "No Storage Driver found for : " + label); + Dataverse dataverse = dataverseSvc.findByAlias(alias); + if (dataverse == null) { + return error(Response.Status.NOT_FOUND, "Could 
not find dataverse based on alias supplied: " + alias + "."); + } + try { + AuthenticatedUser user = getRequestAuthenticatedUserOrDie(crc); + if (!user.isSuperuser()) { + return error(Response.Status.FORBIDDEN, "Superusers only."); + } + } catch (WrappedResponse wr) { + return wr.getResponse(); + } + for (Entry store: DataAccess.getStorageDriverLabels().entrySet()) { + if(store.getKey().equals(label)) { + dataverse.setStorageDriverId(store.getValue()); + return ok("Storage set to: " + store.getKey() + "/" + store.getValue()); + } + } + return error(Response.Status.BAD_REQUEST, + "No Storage Driver found for : " + label); } @DELETE - @AuthRequired + @AuthRequired @Path("/dataverse/{alias}/storageDriver") public Response resetStorageDriver(@Context ContainerRequestContext crc, @PathParam("alias") String alias) throws WrappedResponse { - Dataverse dataverse = dataverseSvc.findByAlias(alias); - if (dataverse == null) { - return error(Response.Status.NOT_FOUND, "Could not find dataverse based on alias supplied: " + alias + "."); - } - try { - AuthenticatedUser user = getRequestAuthenticatedUserOrDie(crc); - if (!user.isSuperuser()) { - return error(Response.Status.FORBIDDEN, "Superusers only."); - } - } catch (WrappedResponse wr) { - return wr.getResponse(); - } - dataverse.setStorageDriverId(""); - return ok("Storage reset to default: " + DataAccess.DEFAULT_STORAGE_DRIVER_IDENTIFIER); + Dataverse dataverse = dataverseSvc.findByAlias(alias); + if (dataverse == null) { + return error(Response.Status.NOT_FOUND, "Could not find dataverse based on alias supplied: " + alias + "."); + } + try { + AuthenticatedUser user = getRequestAuthenticatedUserOrDie(crc); + if (!user.isSuperuser()) { + return error(Response.Status.FORBIDDEN, "Superusers only."); + } + } catch (WrappedResponse wr) { + return wr.getResponse(); + } + dataverse.setStorageDriverId(""); + return ok("Storage reset to default: " + DataAccess.DEFAULT_STORAGE_DRIVER_IDENTIFIER); } @GET - @AuthRequired + 
@AuthRequired @Path("/dataverse/storageDrivers") public Response listStorageDrivers(@Context ContainerRequestContext crc) throws WrappedResponse { - try { - AuthenticatedUser user = getRequestAuthenticatedUserOrDie(crc); - if (!user.isSuperuser()) { - return error(Response.Status.FORBIDDEN, "Superusers only."); - } - } catch (WrappedResponse wr) { - return wr.getResponse(); - } - JsonObjectBuilder bld = jsonObjectBuilder(); - DataAccess.getStorageDriverLabels().entrySet().forEach(s -> bld.add(s.getKey(), s.getValue())); - return ok(bld); + try { + AuthenticatedUser user = getRequestAuthenticatedUserOrDie(crc); + if (!user.isSuperuser()) { + return error(Response.Status.FORBIDDEN, "Superusers only."); + } + } catch (WrappedResponse wr) { + return wr.getResponse(); + } + JsonObjectBuilder bld = jsonObjectBuilder(); + DataAccess.getStorageDriverLabels().entrySet().forEach(s -> bld.add(s.getKey(), s.getValue())); + return ok(bld); } @GET - @AuthRequired + @AuthRequired @Path("/dataverse/{alias}/curationLabelSet") public Response getCurationLabelSet(@Context ContainerRequestContext crc, @PathParam("alias") String alias) throws WrappedResponse { Dataverse dataverse = dataverseSvc.findByAlias(alias); @@ -2262,7 +2242,7 @@ public Response getCurationLabelSet(@Context ContainerRequestContext crc, @PathP } @PUT - @AuthRequired + @AuthRequired @Path("/dataverse/{alias}/curationLabelSet") public Response setCurationLabelSet(@Context ContainerRequestContext crc, @PathParam("alias") String alias, @QueryParam("name") String name) throws WrappedResponse { Dataverse dataverse = dataverseSvc.findByAlias(alias); @@ -2293,7 +2273,7 @@ public Response setCurationLabelSet(@Context ContainerRequestContext crc, @PathP } @DELETE - @AuthRequired + @AuthRequired @Path("/dataverse/{alias}/curationLabelSet") public Response resetCurationLabelSet(@Context ContainerRequestContext crc, @PathParam("alias") String alias) throws WrappedResponse { Dataverse dataverse = 
dataverseSvc.findByAlias(alias); @@ -2313,7 +2293,7 @@ public Response resetCurationLabelSet(@Context ContainerRequestContext crc, @Pat } @GET - @AuthRequired + @AuthRequired @Path("/dataverse/curationLabelSets") public Response listCurationLabelSets(@Context ContainerRequestContext crc) throws WrappedResponse { try { @@ -2423,7 +2403,7 @@ public Response getBannerMessages(@PathParam("id") Long id) throws WrappedRespon } @POST - @AuthRequired + @AuthRequired @Consumes("application/json") @Path("/requestSignedUrl") public Response getSignedUrl(@Context ContainerRequestContext crc, JsonObject urlInfo) { @@ -2541,4 +2521,160 @@ public Response getFeatureFlag(@PathParam("flag") String flagIn) { } } + @GET + @AuthRequired + @Path("/datafiles/auditFiles") + public Response getAuditFiles(@Context ContainerRequestContext crc, + @QueryParam("firstId") Long firstId, @QueryParam("lastId") Long lastId, + @QueryParam("datasetIdentifierList") String datasetIdentifierList) throws WrappedResponse { + try { + AuthenticatedUser user = getRequestAuthenticatedUserOrDie(crc); + if (!user.isSuperuser()) { + return error(Response.Status.FORBIDDEN, "Superusers only."); + } + } catch (WrappedResponse wr) { + return wr.getResponse(); + } + + int datasetsChecked = 0; + long startId = (firstId == null ? 0 : firstId); + long endId = (lastId == null ? 
Long.MAX_VALUE : lastId); + + List datasetIdentifiers; + if (datasetIdentifierList == null || datasetIdentifierList.isEmpty()) { + datasetIdentifiers = Collections.emptyList(); + } else { + startId = 0; + endId = Long.MAX_VALUE; + datasetIdentifiers = List.of(datasetIdentifierList.split(",")); + } + if (endId < startId) { + return badRequest("Invalid Parameters: lastId must be equal to or greater than firstId"); + } + + NullSafeJsonBuilder jsonObjectBuilder = NullSafeJsonBuilder.jsonObjectBuilder(); + JsonArrayBuilder jsonDatasetsArrayBuilder = Json.createArrayBuilder(); + JsonArrayBuilder jsonFailuresArrayBuilder = Json.createArrayBuilder(); + + if (startId > 0) { + jsonObjectBuilder.add("firstId", startId); + } + if (endId < Long.MAX_VALUE) { + jsonObjectBuilder.add("lastId", endId); + } + + // compile the list of ids to process + List datasetIds; + if (datasetIdentifiers.isEmpty()) { + datasetIds = datasetService.findAllLocalDatasetIds(); + } else { + datasetIds = new ArrayList<>(datasetIdentifiers.size()); + JsonArrayBuilder jab = Json.createArrayBuilder(); + datasetIdentifiers.forEach(id -> { + String dId = id.trim(); + jab.add(dId); + Dataset d = datasetService.findByGlobalId(dId); + if (d != null) { + datasetIds.add(d.getId()); + } else { + NullSafeJsonBuilder job = NullSafeJsonBuilder.jsonObjectBuilder(); + job.add("datasetIdentifier",dId); + job.add("reason","Not Found"); + jsonFailuresArrayBuilder.add(job); + } + }); + jsonObjectBuilder.add("datasetIdentifierList", jab); + } + + for (Long datasetId : datasetIds) { + if (datasetId < startId) { + continue; + } else if (datasetId > endId) { + break; + } + Dataset dataset; + try { + dataset = findDatasetOrDie(String.valueOf(datasetId)); + datasetsChecked++; + } catch (WrappedResponse e) { + NullSafeJsonBuilder job = NullSafeJsonBuilder.jsonObjectBuilder(); + job.add("datasetId", datasetId); + job.add("reason", e.getMessage()); + jsonFailuresArrayBuilder.add(job); + continue; + } + + List missingFiles = new 
ArrayList<>(); + List missingFileMetadata = new ArrayList<>(); + try { + Predicate filter = s -> true; + StorageIO datasetIO = DataAccess.getStorageIO(dataset); + final List result = datasetIO.cleanUp(filter, true); + // add files that are in dataset files but not in cleanup result or DataFiles with missing FileMetadata + dataset.getFiles().forEach(df -> { + try { + StorageIO datafileIO = df.getStorageIO(); + String storageId = df.getStorageIdentifier(); + FileMetadata fm = df.getFileMetadata(); + if (!datafileIO.exists()) { + missingFiles.add(storageId + "," + (fm != null ? + (fm.getDirectoryLabel() != null || !fm.getDirectoryLabel().isEmpty() ? "directoryLabel,"+fm.getDirectoryLabel()+"," : "") + +"label,"+fm.getLabel() : "type,"+df.getContentType())); + } + if (fm == null) { + missingFileMetadata.add(storageId + ",dataFileId," + df.getId()); + } + } catch (IOException e) { + NullSafeJsonBuilder job = NullSafeJsonBuilder.jsonObjectBuilder(); + job.add("dataFileId", df.getId()); + job.add("reason", e.getMessage()); + jsonFailuresArrayBuilder.add(job); + } + }); + } catch (IOException e) { + NullSafeJsonBuilder job = NullSafeJsonBuilder.jsonObjectBuilder(); + job.add("datasetId", datasetId); + job.add("reason", e.getMessage()); + jsonFailuresArrayBuilder.add(job); + } + + JsonObjectBuilder job = Json.createObjectBuilder(); + if (!missingFiles.isEmpty() || !missingFileMetadata.isEmpty()) { + job.add("id", dataset.getId()); + job.add("pid", dataset.getProtocol() + ":" + dataset.getAuthority() + "/" + dataset.getIdentifier()); + job.add("persistentURL", dataset.getPersistentURL()); + if (!missingFileMetadata.isEmpty()) { + JsonArrayBuilder jabMissingFileMetadata = Json.createArrayBuilder(); + missingFileMetadata.forEach(mm -> { + String[] missingMetadata = mm.split(","); + NullSafeJsonBuilder jobj = NullSafeJsonBuilder.jsonObjectBuilder() + .add("storageIdentifier", missingMetadata[0]) + .add(missingMetadata[1], missingMetadata[2]); + jabMissingFileMetadata.add(jobj); 
+ }); + job.add("missingFileMetadata", jabMissingFileMetadata); + } + if (!missingFiles.isEmpty()) { + JsonArrayBuilder jabMissingFiles = Json.createArrayBuilder(); + missingFiles.forEach(mf -> { + String[] missingFile = mf.split(","); + NullSafeJsonBuilder jobj = NullSafeJsonBuilder.jsonObjectBuilder() + .add("storageIdentifier", missingFile[0]); + for (int i = 2; i < missingFile.length; i+=2) { + jobj.add(missingFile[i-1], missingFile[i]); + } + jabMissingFiles.add(jobj); + }); + job.add("missingFiles", jabMissingFiles); + } + jsonDatasetsArrayBuilder.add(job); + } + } + + jsonObjectBuilder.add("datasetsChecked", datasetsChecked); + jsonObjectBuilder.add("datasets", jsonDatasetsArrayBuilder); + jsonObjectBuilder.add("failures", jsonFailuresArrayBuilder); + + return ok(jsonObjectBuilder); + } } diff --git a/src/main/java/edu/harvard/iq/dataverse/api/DatasetFieldServiceApi.java b/src/main/java/edu/harvard/iq/dataverse/api/DatasetFieldServiceApi.java index 01c51dc2b4c..907295ad848 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/DatasetFieldServiceApi.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/DatasetFieldServiceApi.java @@ -126,7 +126,7 @@ public Response getByName(@PathParam("name") String name) { String solrFieldSearchable = dsf.getSolrField().getNameSearchable(); String solrFieldFacetable = dsf.getSolrField().getNameFacetable(); String metadataBlock = dsf.getMetadataBlock().getName(); - String uri=dsf.getUri(); + String uri = dsf.getUri(); boolean hasParent = dsf.isHasParent(); boolean allowsMultiples = dsf.isAllowMultiples(); boolean isRequired = dsf.isRequired(); @@ -243,7 +243,9 @@ public Response loadDatasetFields(File file) { br = new BufferedReader(new FileReader("/" + file)); while ((line = br.readLine()) != null) { lineNumber++; - values = line.split(splitBy); + values = Arrays.stream(line.split(splitBy)) + .map(String::trim) + .toArray(String[]::new); if (values[0].startsWith("#")) { // Header row switch (values[0]) { case 
"#metadataBlock": @@ -326,7 +328,7 @@ public Response loadDatasetFields(File file) { */ public String getGeneralErrorMessage(HeaderType header, int lineNumber, String message) { List arguments = new ArrayList<>(); - arguments.add(header.name()); + arguments.add(header != null ? header.name() : "unknown"); arguments.add(String.valueOf(lineNumber)); arguments.add(message); return BundleUtil.getStringFromBundle("api.admin.datasetfield.load.GeneralErrorMessage", arguments); @@ -334,9 +336,9 @@ public String getGeneralErrorMessage(HeaderType header, int lineNumber, String m /** * Turn ArrayIndexOutOfBoundsException into an informative error message - * @param lineNumber * @param header - * @param e + * @param lineNumber + * @param wrongIndex * @return */ public String getArrayIndexOutOfBoundMessage(HeaderType header, diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java b/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java index 369a22fe8d7..2ec10816acc 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java @@ -2171,8 +2171,32 @@ public Response getAssignments(@Context ContainerRequestContext crc, @PathParam( @GET @AuthRequired + @Deprecated(forRemoval = true, since = "2024-10-17") @Path("{id}/privateUrl") public Response getPrivateUrlData(@Context ContainerRequestContext crc, @PathParam("id") String idSupplied) { + return getPreviewUrlData(crc, idSupplied); + } + + @POST + @AuthRequired + @Deprecated(forRemoval = true, since = "2024-10-17") + @Path("{id}/privateUrl") + public Response createPrivateUrl(@Context ContainerRequestContext crc, @PathParam("id") String idSupplied, @DefaultValue("false") @QueryParam("anonymizedAccess") boolean anonymizedAccess) { + return createPreviewUrl(crc, idSupplied, anonymizedAccess); + } + + @DELETE + @AuthRequired + @Deprecated(forRemoval = true, since = "2024-10-17") + @Path("{id}/privateUrl") + public Response deletePrivateUrl(@Context 
ContainerRequestContext crc, @PathParam("id") String idSupplied) { + return deletePreviewUrl(crc, idSupplied); + } + + @GET + @AuthRequired + @Path("{id}/previewUrl") + public Response getPreviewUrlData(@Context ContainerRequestContext crc, @PathParam("id") String idSupplied) { return response( req -> { PrivateUrl privateUrl = execCommand(new GetPrivateUrlCommand(req, findDatasetOrDie(idSupplied))); return (privateUrl != null) ? ok(json(privateUrl)) @@ -2182,8 +2206,8 @@ public Response getPrivateUrlData(@Context ContainerRequestContext crc, @PathPar @POST @AuthRequired - @Path("{id}/privateUrl") - public Response createPrivateUrl(@Context ContainerRequestContext crc, @PathParam("id") String idSupplied,@DefaultValue("false") @QueryParam ("anonymizedAccess") boolean anonymizedAccess) { + @Path("{id}/previewUrl") + public Response createPreviewUrl(@Context ContainerRequestContext crc, @PathParam("id") String idSupplied,@DefaultValue("false") @QueryParam ("anonymizedAccess") boolean anonymizedAccess) { if(anonymizedAccess && settingsSvc.getValueForKey(SettingsServiceBean.Key.AnonymizedFieldTypeNames)==null) { throw new NotAcceptableException("Anonymized Access not enabled"); } @@ -2194,8 +2218,8 @@ public Response createPrivateUrl(@Context ContainerRequestContext crc, @PathPara @DELETE @AuthRequired - @Path("{id}/privateUrl") - public Response deletePrivateUrl(@Context ContainerRequestContext crc, @PathParam("id") String idSupplied) { + @Path("{id}/previewUrl") + public Response deletePreviewUrl(@Context ContainerRequestContext crc, @PathParam("id") String idSupplied) { return response( req -> { Dataset dataset = findDatasetOrDie(idSupplied); PrivateUrl privateUrl = execCommand(new GetPrivateUrlCommand(req, dataset)); @@ -2208,6 +2232,7 @@ public Response deletePrivateUrl(@Context ContainerRequestContext crc, @PathPara }, getRequestUser(crc)); } + @GET @AuthRequired @Path("{id}/thumbnail/candidates") @@ -2992,6 +3017,26 @@ public Response cleanStorage(@Context 
ContainerRequestContext crc, @PathParam("i } + @GET + @AuthRequired + @Path("{id}/versions/{versionId1}/compare/{versionId2}") + public Response getCompareVersions(@Context ContainerRequestContext crc, @PathParam("id") String id, + @PathParam("versionId1") String versionId1, + @PathParam("versionId2") String versionId2, + @Context UriInfo uriInfo, @Context HttpHeaders headers) { + try { + DataverseRequest req = createDataverseRequest(getRequestUser(crc)); + DatasetVersion dsv1 = getDatasetVersionOrDie(req, versionId1, findDatasetOrDie(id), uriInfo, headers); + DatasetVersion dsv2 = getDatasetVersionOrDie(req, versionId2, findDatasetOrDie(id), uriInfo, headers); + if (dsv1.getCreateTime().getTime() > dsv2.getCreateTime().getTime()) { + return error(BAD_REQUEST, BundleUtil.getStringFromBundle("dataset.version.compare.incorrect.order")); + } + return ok(DatasetVersion.compareVersions(dsv1, dsv2)); + } catch (WrappedResponse wr) { + return wr.getResponse(); + } + } + private static Set getDatasetFilenames(Dataset dataset) { Set files = new HashSet<>(); for (DataFile dataFile: dataset.getFiles()) { @@ -4833,6 +4878,33 @@ public Response getPrivateUrlDatasetVersion(@PathParam("privateUrlToken") String } return ok(responseJson); } + + @GET + @Path("previewUrlDatasetVersion/{previewUrlToken}") + public Response getPreviewUrlDatasetVersion(@PathParam("previewUrlToken") String previewUrlToken, @QueryParam("returnOwners") boolean returnOwners) { + PrivateUrlUser privateUrlUser = privateUrlService.getPrivateUrlUserFromToken(previewUrlToken); + if (privateUrlUser == null) { + return notFound("Private URL user not found"); + } + boolean isAnonymizedAccess = privateUrlUser.hasAnonymizedAccess(); + String anonymizedFieldTypeNames = settingsSvc.getValueForKey(SettingsServiceBean.Key.AnonymizedFieldTypeNames); + if(isAnonymizedAccess && anonymizedFieldTypeNames == null) { + throw new NotAcceptableException("Anonymized Access not enabled"); + } + DatasetVersion dsv = 
privateUrlService.getDraftDatasetVersionFromToken(previewUrlToken); + if (dsv == null || dsv.getId() == null) { + return notFound("Dataset version not found"); + } + JsonObjectBuilder responseJson; + if (isAnonymizedAccess) { + List anonymizedFieldTypeNamesList = new ArrayList<>(Arrays.asList(anonymizedFieldTypeNames.split(",\\s"))); + responseJson = json(dsv, anonymizedFieldTypeNamesList, true, returnOwners); + } else { + responseJson = json(dsv, null, true, returnOwners); + } + return ok(responseJson); + } + @GET @Path("privateUrlDatasetVersion/{privateUrlToken}/citation") @@ -4845,6 +4917,18 @@ public Response getPrivateUrlDatasetVersionCitation(@PathParam("privateUrlToken" return (dsv == null || dsv.getId() == null) ? notFound("Dataset version not found") : ok(dsv.getCitation(true, privateUrlUser.hasAnonymizedAccess())); } + + @GET + @Path("previewUrlDatasetVersion/{previewUrlToken}/citation") + public Response getPreviewUrlDatasetVersionCitation(@PathParam("previewUrlToken") String previewUrlToken) { + PrivateUrlUser privateUrlUser = privateUrlService.getPrivateUrlUserFromToken(previewUrlToken); + if (privateUrlUser == null) { + return notFound("Private URL user not found"); + } + DatasetVersion dsv = privateUrlService.getDraftDatasetVersionFromToken(previewUrlToken); + return (dsv == null || dsv.getId() == null) ? 
notFound("Dataset version not found") + : ok(dsv.getCitation(true, privateUrlUser.hasAnonymizedAccess())); + } @GET @AuthRequired diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Dataverses.java b/src/main/java/edu/harvard/iq/dataverse/api/Dataverses.java index f05bba8830e..f864a5a9d1c 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/Dataverses.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/Dataverses.java @@ -195,7 +195,7 @@ public Response updateDataverse(@Context ContainerRequestContext crc, String bod List facets = parseFacets(body); AuthenticatedUser u = getRequestAuthenticatedUserOrDie(crc); - dataverse = execCommand(new UpdateDataverseCommand(dataverse, facets, null, createDataverseRequest(u), inputLevels, metadataBlocks, updatedDataverseDTO)); + dataverse = execCommand(new UpdateDataverseCommand(dataverse, facets, null, createDataverseRequest(u), inputLevels, metadataBlocks, updatedDataverseDTO, true)); return ok(json(dataverse)); } catch (WrappedResponse ww) { diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Info.java b/src/main/java/edu/harvard/iq/dataverse/api/Info.java index 257519677d3..2439c996816 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/Info.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/Info.java @@ -12,12 +12,17 @@ import jakarta.ws.rs.Produces; import org.apache.commons.io.IOUtils; +import edu.harvard.iq.dataverse.export.ExportService; import edu.harvard.iq.dataverse.settings.JvmSettings; import edu.harvard.iq.dataverse.settings.SettingsServiceBean; import edu.harvard.iq.dataverse.util.BundleUtil; import edu.harvard.iq.dataverse.util.SystemConfig; +import io.gdcc.spi.export.Exporter; +import io.gdcc.spi.export.ExportException; +import io.gdcc.spi.export.XMLExporter; import jakarta.ejb.EJB; import jakarta.json.Json; +import jakarta.json.JsonObjectBuilder; import jakarta.json.JsonValue; import jakarta.ws.rs.GET; import jakarta.ws.rs.Path; @@ -92,6 +97,32 @@ public Response getZipDownloadLimit() { 
return ok(zipDownloadLimit); } + @GET + @Path("exportFormats") + public Response getExportFormats() { + JsonObjectBuilder responseModel = Json.createObjectBuilder(); + ExportService instance = ExportService.getInstance(); + for (String[] labels : instance.getExportersLabels()) { + try { + Exporter exporter = instance.getExporter(labels[1]); + JsonObjectBuilder exporterObject = Json.createObjectBuilder().add("displayName", labels[0]) + .add("mediaType", exporter.getMediaType()).add("isHarvestable", exporter.isHarvestable()) + .add("isVisibleInUserInterface", exporter.isAvailableToUsers()); + if (exporter instanceof XMLExporter xmlExporter) { + exporterObject.add("XMLNameSpace", xmlExporter.getXMLNameSpace()) + .add("XMLSchemaLocation", xmlExporter.getXMLSchemaLocation()) + .add("XMLSchemaVersion", xmlExporter.getXMLSchemaVersion()); + } + responseModel.add(labels[1], exporterObject); + } + catch (ExportException ex){ + logger.warning("Failed to get: " + labels[1]); + logger.warning(ex.getLocalizedMessage()); + } + } + return ok(responseModel); + } + private Response getSettingResponseByKey(SettingsServiceBean.Key key) { String setting = settingsService.getValueForKey(key); if (setting != null) { diff --git a/src/main/java/edu/harvard/iq/dataverse/api/SavedSearches.java b/src/main/java/edu/harvard/iq/dataverse/api/SavedSearches.java index 33a11a2df23..e6519c9ff36 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/SavedSearches.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/SavedSearches.java @@ -181,7 +181,7 @@ public Response delete(@PathParam("id") long doomedId, @QueryParam("unlink") boo try { wasDeleted = savedSearchSvc.delete(doomedId, unlink); } catch (Exception e) { - return error(INTERNAL_SERVER_ERROR, "Problem while trying to unlink links of saved search id " + doomedId); + return error(INTERNAL_SERVER_ERROR, "Problem while trying to unlink links of saved search id " + doomedId + ". 
Exception: " + e.getLocalizedMessage()); } if (wasDeleted) { diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Search.java b/src/main/java/edu/harvard/iq/dataverse/api/Search.java index 6b9fcb38305..ba82f8f758b 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/Search.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/Search.java @@ -73,6 +73,7 @@ public Response search( @QueryParam("metadata_fields") List metadataFields, @QueryParam("geo_point") String geoPointRequested, @QueryParam("geo_radius") String geoRadiusRequested, + @QueryParam("show_type_counts") boolean showTypeCounts, @Context HttpServletResponse response ) { @@ -175,7 +176,7 @@ public Response search( JsonArrayBuilder itemsArrayBuilder = Json.createArrayBuilder(); List solrSearchResults = solrQueryResponse.getSolrSearchResults(); for (SolrSearchResult solrSearchResult : solrSearchResults) { - itemsArrayBuilder.add(solrSearchResult.json(showRelevance, showEntityIds, showApiUrls, metadataFields, getDatasetFileCount(solrSearchResult))); + itemsArrayBuilder.add(solrSearchResult.json(showRelevance, showEntityIds, showApiUrls, metadataFields)); } JsonObjectBuilder spelling_alternatives = Json.createObjectBuilder(); @@ -210,6 +211,15 @@ public Response search( } value.add("count_in_response", solrSearchResults.size()); + if (showTypeCounts && !solrQueryResponse.getTypeFacetCategories().isEmpty()) { + JsonObjectBuilder objectTypeCounts = Json.createObjectBuilder(); + for (FacetCategory facetCategory : solrQueryResponse.getTypeFacetCategories()) { + for (FacetLabel facetLabel : facetCategory.getFacetLabel()) { + objectTypeCounts.add(facetLabel.getName(), facetLabel.getCount()); + } + } + value.add("total_count_per_object_type", objectTypeCounts); + } /** * @todo Returning the fq might be useful as a troubleshooting aid * but we don't want to expose the raw dataverse database ids in @@ -229,15 +239,6 @@ public Response search( } } - private Long getDatasetFileCount(SolrSearchResult 
solrSearchResult) { - DvObject dvObject = solrSearchResult.getEntity(); - if (dvObject.isInstanceofDataset()) { - DatasetVersion datasetVersion = ((Dataset) dvObject).getVersionFromId(solrSearchResult.getDatasetVersionId()); - return datasetVersionFilesServiceBean.getFileMetadataCount(datasetVersion); - } - return null; - } - private User getUser(ContainerRequestContext crc) throws WrappedResponse { User userToExecuteSearchAs = GuestUser.get(); try { diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Users.java b/src/main/java/edu/harvard/iq/dataverse/api/Users.java index ecf7839e616..166465115c8 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/Users.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/Users.java @@ -8,29 +8,33 @@ import edu.harvard.iq.dataverse.api.auth.AuthRequired; import edu.harvard.iq.dataverse.authorization.users.ApiToken; import edu.harvard.iq.dataverse.authorization.users.AuthenticatedUser; +import edu.harvard.iq.dataverse.authorization.users.GuestUser; import edu.harvard.iq.dataverse.authorization.users.User; -import edu.harvard.iq.dataverse.engine.command.impl.ChangeUserIdentifierCommand; -import edu.harvard.iq.dataverse.engine.command.impl.GetUserTracesCommand; -import edu.harvard.iq.dataverse.engine.command.impl.MergeInAccountCommand; -import edu.harvard.iq.dataverse.engine.command.impl.RevokeAllRolesCommand; +import edu.harvard.iq.dataverse.engine.command.impl.*; +import edu.harvard.iq.dataverse.settings.FeatureFlags; +import edu.harvard.iq.dataverse.util.BundleUtil; import edu.harvard.iq.dataverse.util.FileUtil; +import static edu.harvard.iq.dataverse.api.auth.AuthUtil.extractBearerTokenFromHeaderParam; import static edu.harvard.iq.dataverse.util.json.JsonPrinter.json; +import java.text.MessageFormat; import java.util.Arrays; import java.util.List; +import java.util.Optional; import java.util.logging.Level; import java.util.logging.Logger; + +import edu.harvard.iq.dataverse.util.json.JsonParseException; +import 
edu.harvard.iq.dataverse.util.json.JsonUtil; import jakarta.ejb.Stateless; import jakarta.json.JsonArray; +import jakarta.json.JsonObject; import jakarta.json.JsonObjectBuilder; +import jakarta.json.stream.JsonParsingException; import jakarta.ws.rs.*; import jakarta.ws.rs.container.ContainerRequestContext; -import jakarta.ws.rs.core.Context; -import jakarta.ws.rs.core.MediaType; -import jakarta.ws.rs.core.Request; -import jakarta.ws.rs.core.Response; -import jakarta.ws.rs.core.Variant; +import jakarta.ws.rs.core.*; /** * @@ -266,4 +270,24 @@ public Response getTracesElement(@Context ContainerRequestContext crc, @Context } } + @POST + @Path("register") + public Response registerOIDCUser(String body) { + if (!FeatureFlags.API_BEARER_AUTH.enabled()) { + return error(Response.Status.INTERNAL_SERVER_ERROR, BundleUtil.getStringFromBundle("users.api.errors.bearerAuthFeatureFlagDisabled")); + } + Optional bearerToken = extractBearerTokenFromHeaderParam(httpRequest.getHeader(HttpHeaders.AUTHORIZATION)); + if (bearerToken.isEmpty()) { + return error(Response.Status.BAD_REQUEST, BundleUtil.getStringFromBundle("users.api.errors.bearerTokenRequired")); + } + try { + JsonObject userJson = JsonUtil.getJsonObject(body); + execCommand(new RegisterOIDCUserCommand(createDataverseRequest(GuestUser.get()), bearerToken.get(), jsonParser().parseUserDTO(userJson))); + } catch (JsonParseException | JsonParsingException e) { + return error(Response.Status.BAD_REQUEST, MessageFormat.format(BundleUtil.getStringFromBundle("users.api.errors.jsonParseToUserDTO"), e.getMessage())); + } catch (WrappedResponse e) { + return e.getResponse(); + } + return ok(BundleUtil.getStringFromBundle("users.api.userRegistered")); + } } diff --git a/src/main/java/edu/harvard/iq/dataverse/api/auth/ApiKeyAuthMechanism.java b/src/main/java/edu/harvard/iq/dataverse/api/auth/ApiKeyAuthMechanism.java index 0dd8a28baca..fbb0b484b58 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/auth/ApiKeyAuthMechanism.java +++ 
b/src/main/java/edu/harvard/iq/dataverse/api/auth/ApiKeyAuthMechanism.java @@ -9,6 +9,7 @@ import jakarta.inject.Inject; import jakarta.ws.rs.container.ContainerRequestContext; + import java.util.logging.Logger; /** @@ -49,7 +50,7 @@ public User findUserFromRequest(ContainerRequestContext containerRequestContext) authUser = userSvc.updateLastApiUseTime(authUser); return authUser; } - throw new WrappedAuthErrorResponse(RESPONSE_MESSAGE_BAD_API_KEY); + throw new WrappedUnauthorizedAuthErrorResponse(RESPONSE_MESSAGE_BAD_API_KEY); } private String getRequestApiKey(ContainerRequestContext containerRequestContext) { @@ -59,7 +60,7 @@ private String getRequestApiKey(ContainerRequestContext containerRequestContext) return headerParamApiKey != null ? headerParamApiKey : queryParamApiKey; } - private void checkAnonymizedAccessToRequestPath(String requestPath, PrivateUrlUser privateUrlUser) throws WrappedAuthErrorResponse { + private void checkAnonymizedAccessToRequestPath(String requestPath, PrivateUrlUser privateUrlUser) throws WrappedUnauthorizedAuthErrorResponse { if (!privateUrlUser.hasAnonymizedAccess()) { return; } @@ -67,7 +68,7 @@ private void checkAnonymizedAccessToRequestPath(String requestPath, PrivateUrlUs // to download the file or image thumbs if (!(requestPath.startsWith(ACCESS_DATAFILE_PATH_PREFIX) && !requestPath.substring(ACCESS_DATAFILE_PATH_PREFIX.length()).contains("/"))) { logger.info("Anonymized access request for " + requestPath); - throw new WrappedAuthErrorResponse(RESPONSE_MESSAGE_BAD_API_KEY); + throw new WrappedUnauthorizedAuthErrorResponse(RESPONSE_MESSAGE_BAD_API_KEY); } } } diff --git a/src/main/java/edu/harvard/iq/dataverse/api/auth/AuthUtil.java b/src/main/java/edu/harvard/iq/dataverse/api/auth/AuthUtil.java new file mode 100644 index 00000000000..36cd7c7f1df --- /dev/null +++ b/src/main/java/edu/harvard/iq/dataverse/api/auth/AuthUtil.java @@ -0,0 +1,24 @@ +package edu.harvard.iq.dataverse.api.auth; + +import java.util.Optional; + +public 
class AuthUtil { + + private static final String BEARER_AUTH_SCHEME = "Bearer"; + + /** + * Extracts the Bearer token from the provided HTTP Authorization header value. + *

+ * Validates that the header value starts with the "Bearer" scheme as defined in RFC 6750. + * If the header is null, empty, or does not start with "Bearer ", an empty {@link Optional} is returned. + * + * @param headerParamBearerToken the raw HTTP Authorization header value containing the Bearer token + * @return An {@link Optional} containing the raw Bearer token if present and valid; otherwise, an empty {@link Optional} + */ + public static Optional extractBearerTokenFromHeaderParam(String headerParamBearerToken) { + if (headerParamBearerToken != null && headerParamBearerToken.toLowerCase().startsWith(BEARER_AUTH_SCHEME.toLowerCase() + " ")) { + return Optional.of(headerParamBearerToken); + } + return Optional.empty(); + } +} diff --git a/src/main/java/edu/harvard/iq/dataverse/api/auth/BearerTokenAuthMechanism.java b/src/main/java/edu/harvard/iq/dataverse/api/auth/BearerTokenAuthMechanism.java index 31f524af3f0..3ee9bb909f2 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/auth/BearerTokenAuthMechanism.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/auth/BearerTokenAuthMechanism.java @@ -1,124 +1,65 @@ package edu.harvard.iq.dataverse.api.auth; -import com.nimbusds.oauth2.sdk.ParseException; -import com.nimbusds.oauth2.sdk.token.BearerAccessToken; import edu.harvard.iq.dataverse.UserServiceBean; import edu.harvard.iq.dataverse.authorization.AuthenticationServiceBean; -import edu.harvard.iq.dataverse.authorization.UserRecordIdentifier; -import edu.harvard.iq.dataverse.authorization.providers.oauth2.oidc.OIDCAuthProvider; +import edu.harvard.iq.dataverse.authorization.exceptions.AuthorizationException; import edu.harvard.iq.dataverse.authorization.users.AuthenticatedUser; import edu.harvard.iq.dataverse.authorization.users.User; import edu.harvard.iq.dataverse.settings.FeatureFlags; +import edu.harvard.iq.dataverse.util.BundleUtil; import jakarta.inject.Inject; import jakarta.ws.rs.container.ContainerRequestContext; import 
jakarta.ws.rs.core.HttpHeaders; -import java.io.IOException; -import java.util.List; + import java.util.Optional; import java.util.logging.Level; import java.util.logging.Logger; -import java.util.stream.Collectors; + +import static edu.harvard.iq.dataverse.api.auth.AuthUtil.extractBearerTokenFromHeaderParam; public class BearerTokenAuthMechanism implements AuthMechanism { - private static final String BEARER_AUTH_SCHEME = "Bearer"; private static final Logger logger = Logger.getLogger(BearerTokenAuthMechanism.class.getCanonicalName()); - - public static final String UNAUTHORIZED_BEARER_TOKEN = "Unauthorized bearer token"; - public static final String INVALID_BEARER_TOKEN = "Could not parse bearer token"; - public static final String BEARER_TOKEN_DETECTED_NO_OIDC_PROVIDER_CONFIGURED = "Bearer token detected, no OIDC provider configured"; @Inject protected AuthenticationServiceBean authSvc; @Inject protected UserServiceBean userSvc; - + @Override public User findUserFromRequest(ContainerRequestContext containerRequestContext) throws WrappedAuthErrorResponse { - if (FeatureFlags.API_BEARER_AUTH.enabled()) { - Optional bearerToken = getRequestApiKey(containerRequestContext); - // No Bearer Token present, hence no user can be authenticated - if (bearerToken.isEmpty()) { - return null; - } - - // Validate and verify provided Bearer Token, and retrieve UserRecordIdentifier - // TODO: Get the identifier from an invalidating cache to avoid lookup bursts of the same token. Tokens in the cache should be removed after some (configurable) time. 
- UserRecordIdentifier userInfo = verifyOidcBearerTokenAndGetUserIdentifier(bearerToken.get()); + if (!FeatureFlags.API_BEARER_AUTH.enabled()) { + return null; + } - // retrieve Authenticated User from AuthService - AuthenticatedUser authUser = authSvc.lookupUser(userInfo); - if (authUser != null) { - // track the API usage - authUser = userSvc.updateLastApiUseTime(authUser); - return authUser; - } else { - // a valid Token was presented, but we have no associated user account. - logger.log(Level.WARNING, "Bearer token detected, OIDC provider {0} validated Token but no linked UserAccount", userInfo.getUserRepoId()); - // TODO: Instead of returning null, we should throw a meaningful error to the client. - // Probably this will be a wrapped auth error response with an error code and a string describing the problem. - return null; - } + Optional bearerToken = getRequestBearerToken(containerRequestContext); + if (bearerToken.isEmpty()) { + return null; } - return null; - } - /** - * Verifies the given Bearer token and obtain information about the corresponding user within respective AuthProvider. - * - * @param token The string containing the encoded JWT - * @return - */ - private UserRecordIdentifier verifyOidcBearerTokenAndGetUserIdentifier(String token) throws WrappedAuthErrorResponse { + AuthenticatedUser authUser; try { - BearerAccessToken accessToken = BearerAccessToken.parse(token); - // Get list of all authentication providers using Open ID Connect - // @TASK: Limited to OIDCAuthProviders, could be widened to OAuth2Providers. 
- List providers = authSvc.getAuthenticationProviderIdsOfType(OIDCAuthProvider.class).stream() - .map(providerId -> (OIDCAuthProvider) authSvc.getAuthenticationProvider(providerId)) - .collect(Collectors.toUnmodifiableList()); - // If not OIDC Provider are configured we cannot validate a Token - if(providers.isEmpty()){ - logger.log(Level.WARNING, "Bearer token detected, no OIDC provider configured"); - throw new WrappedAuthErrorResponse(BEARER_TOKEN_DETECTED_NO_OIDC_PROVIDER_CONFIGURED); - } + authUser = authSvc.lookupUserByOIDCBearerToken(bearerToken.get()); + } catch (AuthorizationException e) { + logger.log(Level.WARNING, "Authorization failed: {0}", e.getMessage()); + throw new WrappedUnauthorizedAuthErrorResponse(e.getMessage()); + } - // Iterate over all OIDC providers if multiple. Sadly needed as do not know which provided the Token. - for (OIDCAuthProvider provider : providers) { - try { - // The OIDCAuthProvider need to verify a Bearer Token and equip the client means to identify the corresponding AuthenticatedUser. - Optional userInfo = provider.getUserIdentifier(accessToken); - if(userInfo.isPresent()) { - logger.log(Level.FINE, "Bearer token detected, provider {0} confirmed validity and provided identifier", provider.getId()); - return userInfo.get(); - } - } catch (IOException e) { - // TODO: Just logging this is not sufficient - if there is an IO error with the one provider - // which would have validated successfully, this is not the users fault. We need to - // take note and refer to that later when occurred. 
- logger.log(Level.FINE, "Bearer token detected, provider " + provider.getId() + " indicates an invalid Token, skipping", e); - } - } - } catch (ParseException e) { - logger.log(Level.FINE, "Bearer token detected, unable to parse bearer token (invalid Token)", e); - throw new WrappedAuthErrorResponse(INVALID_BEARER_TOKEN); + if (authUser == null) { + logger.log(Level.WARNING, "Bearer token detected, OIDC provider validated the token but no linked UserAccount"); + throw new WrappedForbiddenAuthErrorResponse(BundleUtil.getStringFromBundle("bearerTokenAuthMechanism.errors.tokenValidatedButNoRegisteredUser")); } - // No UserInfo returned means we have an invalid access token. - logger.log(Level.FINE, "Bearer token detected, yet no configured OIDC provider validated it."); - throw new WrappedAuthErrorResponse(UNAUTHORIZED_BEARER_TOKEN); + return userSvc.updateLastApiUseTime(authUser); } /** * Retrieve the raw, encoded token value from the Authorization Bearer HTTP header as defined in RFC 6750 + * * @return An {@link Optional} either empty if not present or the raw token from the header */ - private Optional getRequestApiKey(ContainerRequestContext containerRequestContext) { - String headerParamApiKey = containerRequestContext.getHeaderString(HttpHeaders.AUTHORIZATION); - if (headerParamApiKey != null && headerParamApiKey.toLowerCase().startsWith(BEARER_AUTH_SCHEME.toLowerCase() + " ")) { - return Optional.of(headerParamApiKey); - } else { - return Optional.empty(); - } + public static Optional getRequestBearerToken(ContainerRequestContext containerRequestContext) { + String headerParamBearerToken = containerRequestContext.getHeaderString(HttpHeaders.AUTHORIZATION); + return extractBearerTokenFromHeaderParam(headerParamBearerToken); } -} \ No newline at end of file +} diff --git a/src/main/java/edu/harvard/iq/dataverse/api/auth/CompoundAuthMechanism.java b/src/main/java/edu/harvard/iq/dataverse/api/auth/CompoundAuthMechanism.java index 801e2752b9e..e5be5144897 100644 
--- a/src/main/java/edu/harvard/iq/dataverse/api/auth/CompoundAuthMechanism.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/auth/CompoundAuthMechanism.java @@ -5,6 +5,7 @@ import jakarta.inject.Inject; import jakarta.ws.rs.container.ContainerRequestContext; + import java.util.ArrayList; import java.util.Arrays; import java.util.List; @@ -19,9 +20,9 @@ public class CompoundAuthMechanism implements AuthMechanism { private final List authMechanisms = new ArrayList<>(); @Inject - public CompoundAuthMechanism(ApiKeyAuthMechanism apiKeyAuthMechanism, WorkflowKeyAuthMechanism workflowKeyAuthMechanism, SignedUrlAuthMechanism signedUrlAuthMechanism, SessionCookieAuthMechanism sessionCookieAuthMechanism, BearerTokenAuthMechanism bearerTokenAuthMechanism) { + public CompoundAuthMechanism(ApiKeyAuthMechanism apiKeyAuthMechanism, WorkflowKeyAuthMechanism workflowKeyAuthMechanism, SignedUrlAuthMechanism signedUrlAuthMechanism, BearerTokenAuthMechanism bearerTokenAuthMechanism, SessionCookieAuthMechanism sessionCookieAuthMechanism) { // Auth mechanisms should be ordered by priority here - add(apiKeyAuthMechanism, workflowKeyAuthMechanism, signedUrlAuthMechanism, sessionCookieAuthMechanism,bearerTokenAuthMechanism); + add(apiKeyAuthMechanism, workflowKeyAuthMechanism, signedUrlAuthMechanism, bearerTokenAuthMechanism, sessionCookieAuthMechanism); } public CompoundAuthMechanism(AuthMechanism... 
authMechanisms) { diff --git a/src/main/java/edu/harvard/iq/dataverse/api/auth/SignedUrlAuthMechanism.java b/src/main/java/edu/harvard/iq/dataverse/api/auth/SignedUrlAuthMechanism.java index 258661f6495..30e8a3b9ca4 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/auth/SignedUrlAuthMechanism.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/auth/SignedUrlAuthMechanism.java @@ -43,7 +43,7 @@ public User findUserFromRequest(ContainerRequestContext containerRequestContext) if (user != null) { return user; } - throw new WrappedAuthErrorResponse(RESPONSE_MESSAGE_BAD_SIGNED_URL); + throw new WrappedUnauthorizedAuthErrorResponse(RESPONSE_MESSAGE_BAD_SIGNED_URL); } private String getSignedUrlRequestParameter(ContainerRequestContext containerRequestContext) { diff --git a/src/main/java/edu/harvard/iq/dataverse/api/auth/WorkflowKeyAuthMechanism.java b/src/main/java/edu/harvard/iq/dataverse/api/auth/WorkflowKeyAuthMechanism.java index bbd67713e85..df54b69af96 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/auth/WorkflowKeyAuthMechanism.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/auth/WorkflowKeyAuthMechanism.java @@ -30,7 +30,7 @@ public User findUserFromRequest(ContainerRequestContext containerRequestContext) if (authUser != null) { return authUser; } - throw new WrappedAuthErrorResponse(RESPONSE_MESSAGE_BAD_WORKFLOW_KEY); + throw new WrappedUnauthorizedAuthErrorResponse(RESPONSE_MESSAGE_BAD_WORKFLOW_KEY); } private String getRequestWorkflowKey(ContainerRequestContext containerRequestContext) { diff --git a/src/main/java/edu/harvard/iq/dataverse/api/auth/WrappedAuthErrorResponse.java b/src/main/java/edu/harvard/iq/dataverse/api/auth/WrappedAuthErrorResponse.java index 40431557261..da92d882197 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/auth/WrappedAuthErrorResponse.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/auth/WrappedAuthErrorResponse.java @@ -6,18 +6,24 @@ import jakarta.ws.rs.core.MediaType; import 
jakarta.ws.rs.core.Response; -public class WrappedAuthErrorResponse extends Exception { +public abstract class WrappedAuthErrorResponse extends Exception { private final String message; private final Response response; - public WrappedAuthErrorResponse(String message) { + public WrappedAuthErrorResponse(Response.Status status, String message) { this.message = message; - this.response = Response.status(Response.Status.UNAUTHORIZED) + this.response = createErrorResponse(status, message); + } + + protected Response createErrorResponse(Response.Status status, String message) { + return Response.status(status) .entity(NullSafeJsonBuilder.jsonObjectBuilder() .add("status", ApiConstants.STATUS_ERROR) .add("message", message).build() - ).type(MediaType.APPLICATION_JSON_TYPE).build(); + ) + .type(MediaType.APPLICATION_JSON_TYPE) + .build(); } public String getMessage() { diff --git a/src/main/java/edu/harvard/iq/dataverse/api/auth/WrappedForbiddenAuthErrorResponse.java b/src/main/java/edu/harvard/iq/dataverse/api/auth/WrappedForbiddenAuthErrorResponse.java new file mode 100644 index 00000000000..082ed3ca8d8 --- /dev/null +++ b/src/main/java/edu/harvard/iq/dataverse/api/auth/WrappedForbiddenAuthErrorResponse.java @@ -0,0 +1,10 @@ +package edu.harvard.iq.dataverse.api.auth; + +import jakarta.ws.rs.core.Response; + +public class WrappedForbiddenAuthErrorResponse extends WrappedAuthErrorResponse { + + public WrappedForbiddenAuthErrorResponse(String message) { + super(Response.Status.FORBIDDEN, message); + } +} diff --git a/src/main/java/edu/harvard/iq/dataverse/api/auth/WrappedUnauthorizedAuthErrorResponse.java b/src/main/java/edu/harvard/iq/dataverse/api/auth/WrappedUnauthorizedAuthErrorResponse.java new file mode 100644 index 00000000000..1d2eb8f8bd8 --- /dev/null +++ b/src/main/java/edu/harvard/iq/dataverse/api/auth/WrappedUnauthorizedAuthErrorResponse.java @@ -0,0 +1,10 @@ +package edu.harvard.iq.dataverse.api.auth; + +import jakarta.ws.rs.core.Response; + +public class 
WrappedUnauthorizedAuthErrorResponse extends WrappedAuthErrorResponse { + + public WrappedUnauthorizedAuthErrorResponse(String message) { + super(Response.Status.UNAUTHORIZED, message); + } +} diff --git a/src/main/java/edu/harvard/iq/dataverse/api/dto/UserDTO.java b/src/main/java/edu/harvard/iq/dataverse/api/dto/UserDTO.java new file mode 100644 index 00000000000..df1920c4d25 --- /dev/null +++ b/src/main/java/edu/harvard/iq/dataverse/api/dto/UserDTO.java @@ -0,0 +1,67 @@ +package edu.harvard.iq.dataverse.api.dto; + +public class UserDTO { + private String username; + private String firstName; + private String lastName; + private String emailAddress; + private String affiliation; + private String position; + private boolean termsAccepted; + + public String getUsername() { + return username; + } + + public void setUsername(String username) { + this.username = username; + } + + public String getFirstName() { + return firstName; + } + + public void setFirstName(String firstName) { + this.firstName = firstName; + } + + public String getLastName() { + return lastName; + } + + public void setLastName(String lastName) { + this.lastName = lastName; + } + + public String getEmailAddress() { + return emailAddress; + } + + public void setEmailAddress(String emailAddress) { + this.emailAddress = emailAddress; + } + + public String getAffiliation() { + return affiliation; + } + + public void setAffiliation(String affiliation) { + this.affiliation = affiliation; + } + + public String getPosition() { + return position; + } + + public void setPosition(String position) { + this.position = position; + } + + public boolean isTermsAccepted() { + return termsAccepted; + } + + public void setTermsAccepted(boolean termsAccepted) { + this.termsAccepted = termsAccepted; + } +} diff --git a/src/main/java/edu/harvard/iq/dataverse/api/imports/ImportDDIServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/api/imports/ImportDDIServiceBean.java index 35d35316f73..31941d3c8c0 100644 --- 
a/src/main/java/edu/harvard/iq/dataverse/api/imports/ImportDDIServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/imports/ImportDDIServiceBean.java @@ -5,13 +5,21 @@ import edu.harvard.iq.dataverse.DatasetFieldType; import edu.harvard.iq.dataverse.DatasetVersion; import edu.harvard.iq.dataverse.DatasetVersion.VersionState; -import edu.harvard.iq.dataverse.api.dto.*; +import edu.harvard.iq.dataverse.api.dto.LicenseDTO; import edu.harvard.iq.dataverse.api.dto.FieldDTO; import edu.harvard.iq.dataverse.api.dto.MetadataBlockDTO; +import edu.harvard.iq.dataverse.api.dto.DatasetDTO; +import edu.harvard.iq.dataverse.api.dto.DatasetVersionDTO; +import edu.harvard.iq.dataverse.api.dto.FileMetadataDTO; +import edu.harvard.iq.dataverse.api.dto.DataFileDTO; +import edu.harvard.iq.dataverse.api.dto.DataTableDTO; + import edu.harvard.iq.dataverse.api.imports.ImportUtil.ImportType; import static edu.harvard.iq.dataverse.export.ddi.DdiExportUtil.NOTE_TYPE_CONTENTTYPE; import static edu.harvard.iq.dataverse.export.ddi.DdiExportUtil.NOTE_TYPE_TERMS_OF_ACCESS; +import edu.harvard.iq.dataverse.license.License; +import edu.harvard.iq.dataverse.license.LicenseServiceBean; import edu.harvard.iq.dataverse.util.StringUtil; import java.io.File; import java.io.FileInputStream; @@ -32,6 +40,9 @@ import org.apache.commons.lang3.StringUtils; +import java.util.regex.Matcher; +import java.util.regex.Pattern; + /** * * @author ellenk @@ -103,6 +114,8 @@ public class ImportDDIServiceBean { @EJB DatasetFieldServiceBean datasetFieldService; @EJB ImportGenericServiceBean importGenericService; + + @EJB LicenseServiceBean licenseService; // TODO: stop passing the xml source as a string; (it could be huge!) -- L.A. 
4.5 @@ -1180,7 +1193,24 @@ private void processDataAccs(XMLStreamReader xmlr, DatasetVersionDTO dvDTO) thro String noteType = xmlr.getAttributeValue(null, "type"); if (NOTE_TYPE_TERMS_OF_USE.equalsIgnoreCase(noteType) ) { if ( LEVEL_DV.equalsIgnoreCase(xmlr.getAttributeValue(null, "level"))) { - dvDTO.setTermsOfUse(parseText(xmlr, "notes")); + String termsOfUseStr = parseText(xmlr, "notes").trim(); + Pattern pattern = Pattern.compile("(.*)", Pattern.CASE_INSENSITIVE); + Matcher matcher = pattern.matcher(termsOfUseStr); + boolean matchFound = matcher.find(); + if (matchFound) { + String uri = matcher.group(1); + String license = matcher.group(2); + License lic = licenseService.getByNameOrUri(license); + if (lic != null) { + LicenseDTO licenseDTO = new LicenseDTO(); + licenseDTO.setName(license); + licenseDTO.setUri(uri); + dvDTO.setLicense(licenseDTO); + } + + } else { + dvDTO.setTermsOfUse(termsOfUseStr); + } } } else if (NOTE_TYPE_TERMS_OF_ACCESS.equalsIgnoreCase(noteType) ) { if (LEVEL_DV.equalsIgnoreCase(xmlr.getAttributeValue(null, "level"))) { diff --git a/src/main/java/edu/harvard/iq/dataverse/api/imports/ImportGenericServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/api/imports/ImportGenericServiceBean.java index 0f7587671e7..aa5b25e3967 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/imports/ImportGenericServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/imports/ImportGenericServiceBean.java @@ -156,10 +156,10 @@ public DatasetDTO processXML( XMLStreamReader xmlr, ForeignMetadataFormatMapping // Note that arbitrary formatting tags are supported for the outer xml // wrapper. -- L.A. 
4.5 public DatasetDTO processOAIDCxml(String DcXmlToParse) throws XMLStreamException { - return processOAIDCxml(DcXmlToParse, null); + return processOAIDCxml(DcXmlToParse, null, false); } - public DatasetDTO processOAIDCxml(String DcXmlToParse, String oaiIdentifier) throws XMLStreamException { + public DatasetDTO processOAIDCxml(String DcXmlToParse, String oaiIdentifier, boolean preferSuppliedIdentifier) throws XMLStreamException { // look up DC metadata mapping: ForeignMetadataFormatMapping dublinCoreMapping = findFormatMappingByName(DCTERMS); @@ -208,7 +208,7 @@ public DatasetDTO processOAIDCxml(String DcXmlToParse, String oaiIdentifier) thr // can parse and recognize as the global id for the imported dataset // (note that this is the default behavior during harvesting), // so we need to reaassign it accordingly: - String identifier = getOtherIdFromDTO(datasetDTO.getDatasetVersion()); + String identifier = selectIdentifier(datasetDTO.getDatasetVersion(), oaiIdentifier, preferSuppliedIdentifier); logger.fine("Imported identifier: " + identifier); globalIdentifier = reassignIdentifierAsGlobalId(identifier, datasetDTO); @@ -228,8 +228,17 @@ public DatasetDTO processOAIDCxml(String DcXmlToParse, String oaiIdentifier) thr private void processXMLElement(XMLStreamReader xmlr, String currentPath, String openingTag, ForeignMetadataFormatMapping foreignFormatMapping, DatasetDTO datasetDTO) throws XMLStreamException { logger.fine("entering processXMLElement; ("+currentPath+")"); - - for (int event = xmlr.next(); event != XMLStreamConstants.END_DOCUMENT; event = xmlr.next()) { + + while (xmlr.hasNext()) { + + int event; + try { + event = xmlr.next(); + } catch (XMLStreamException ex) { + logger.warning("Error occurred in the XML parsing : " + ex.getMessage()); + continue; // Skip Undeclared namespace prefix and Unexpected close tag related to com.ctc.wstx.exc.WstxParsingException + } + if (event == XMLStreamConstants.START_ELEMENT) { String currentElement = 
xmlr.getLocalName(); @@ -358,8 +367,20 @@ private FieldDTO makeDTO(DatasetFieldType dataverseFieldType, FieldDTO value, St return value; } - private String getOtherIdFromDTO(DatasetVersionDTO datasetVersionDTO) { + public String selectIdentifier(DatasetVersionDTO datasetVersionDTO, String suppliedIdentifier) { + return selectIdentifier(datasetVersionDTO, suppliedIdentifier, false); + } + + private String selectIdentifier(DatasetVersionDTO datasetVersionDTO, String suppliedIdentifier, boolean preferSuppliedIdentifier) { List otherIds = new ArrayList<>(); + + if (suppliedIdentifier != null && preferSuppliedIdentifier) { + // This supplied identifier (in practice, this is likely the OAI-PMH + // identifier from the

section) will be our first + // choice candidate for the pid of the imported dataset: + otherIds.add(suppliedIdentifier); + } + for (Map.Entry entry : datasetVersionDTO.getMetadataBlocks().entrySet()) { String key = entry.getKey(); MetadataBlockDTO value = entry.getValue(); @@ -377,6 +398,16 @@ private String getOtherIdFromDTO(DatasetVersionDTO datasetVersionDTO) { } } } + + if (suppliedIdentifier != null && !preferSuppliedIdentifier) { + // Unless specifically instructed to prefer this extra identifier + // (in practice, this is likely the OAI-PMH identifier from the + //
section), we will try to use it as the *last* + // possible candidate for the pid, so, adding it to the end of the + // list: + otherIds.add(suppliedIdentifier); + } + if (!otherIds.isEmpty()) { // We prefer doi or hdl identifiers like "doi:10.7910/DVN/1HE30F" for (String otherId : otherIds) { diff --git a/src/main/java/edu/harvard/iq/dataverse/api/imports/ImportServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/api/imports/ImportServiceBean.java index d0a0629e1ae..7dc2aed799e 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/imports/ImportServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/imports/ImportServiceBean.java @@ -250,11 +250,8 @@ public Dataset doImportHarvestedDataset(DataverseRequest dataverseRequest, } else if ("dc".equalsIgnoreCase(metadataFormat) || "oai_dc".equals(metadataFormat)) { logger.fine("importing DC "+metadataFile.getAbsolutePath()); try { - String xmlToParse = new String(Files.readAllBytes(metadataFile.toPath())); - String suggestedIdentifier = harvestingClient.isUseOaiIdentifiersAsPids() - ? 
harvestIdentifier - : null; - dsDTO = importGenericService.processOAIDCxml(xmlToParse, suggestedIdentifier); + String xmlToParse = new String(Files.readAllBytes(metadataFile.toPath())); + dsDTO = importGenericService.processOAIDCxml(xmlToParse, harvestIdentifier, harvestingClient.isUseOaiIdentifiersAsPids()); } catch (IOException | XMLStreamException e) { throw new ImportException("Failed to process Dublin Core XML record: "+ e.getClass() + " (" + e.getMessage() + ")"); } diff --git a/src/main/java/edu/harvard/iq/dataverse/authorization/AuthenticationServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/authorization/AuthenticationServiceBean.java index 4a8fb123fd4..032c1dd5164 100644 --- a/src/main/java/edu/harvard/iq/dataverse/authorization/AuthenticationServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/authorization/AuthenticationServiceBean.java @@ -1,11 +1,18 @@ package edu.harvard.iq.dataverse.authorization; +import com.nimbusds.oauth2.sdk.ParseException; +import com.nimbusds.oauth2.sdk.token.BearerAccessToken; +import com.nimbusds.openid.connect.sdk.claims.UserInfo; import edu.harvard.iq.dataverse.DatasetVersionServiceBean; import edu.harvard.iq.dataverse.DvObjectServiceBean; import edu.harvard.iq.dataverse.GuestbookResponseServiceBean; import edu.harvard.iq.dataverse.RoleAssigneeServiceBean; import edu.harvard.iq.dataverse.UserNotificationServiceBean; import edu.harvard.iq.dataverse.UserServiceBean; +import edu.harvard.iq.dataverse.authorization.exceptions.AuthorizationException; +import edu.harvard.iq.dataverse.authorization.providers.oauth2.OAuth2Exception; +import edu.harvard.iq.dataverse.authorization.providers.oauth2.OAuth2UserRecord; +import edu.harvard.iq.dataverse.authorization.providers.oauth2.oidc.OIDCAuthProvider; import edu.harvard.iq.dataverse.search.IndexServiceBean; import edu.harvard.iq.dataverse.actionlogging.ActionLogRecord; import edu.harvard.iq.dataverse.actionlogging.ActionLogServiceBean; @@ -34,21 +41,14 @@ import 
edu.harvard.iq.dataverse.validation.PasswordValidatorServiceBean; import edu.harvard.iq.dataverse.workflow.PendingWorkflowInvocation; import edu.harvard.iq.dataverse.workflows.WorkflowComment; + +import java.io.IOException; import java.sql.Timestamp; -import java.util.ArrayList; -import java.util.Arrays; -import java.util.Calendar; -import java.util.Collection; -import java.util.Date; -import java.util.HashSet; -import java.util.List; -import java.util.Map; -import java.util.Set; -import java.util.TreeSet; +import java.util.*; import java.util.logging.Level; import java.util.logging.Logger; import java.util.stream.Collectors; -import jakarta.annotation.PostConstruct; + import jakarta.ejb.EJB; import jakarta.ejb.EJBException; import jakarta.ejb.Stateless; @@ -126,9 +126,8 @@ public class AuthenticationServiceBean { PrivateUrlServiceBean privateUrlService; @PersistenceContext(unitName = "VDCNet-ejbPU") - private EntityManager em; - - + EntityManager em; + public AbstractOAuth2AuthenticationProvider getOAuth2Provider( String id ) { return authProvidersRegistrationService.getOAuth2AuthProvidersMap().get(id); } @@ -978,4 +977,70 @@ public ApiToken getValidApiTokenForUser(User user) { } return apiToken; } + + /** + * Looks up an authenticated user based on the provided OIDC bearer token. + * + * @param bearerToken The OIDC bearer token. + * @return An instance of {@link AuthenticatedUser} representing the authenticated user. + * @throws AuthorizationException If the token is invalid or no OIDC provider is configured. + */ + public AuthenticatedUser lookupUserByOIDCBearerToken(String bearerToken) throws AuthorizationException { + // TODO: Get the identifier from an invalidating cache to avoid lookup bursts of the same token. + // Tokens in the cache should be removed after some (configurable) time. 
+ OAuth2UserRecord oAuth2UserRecord = verifyOIDCBearerTokenAndGetOAuth2UserRecord(bearerToken); + return lookupUser(oAuth2UserRecord.getUserRecordIdentifier()); + } + + /** + * Verifies the given OIDC bearer token and retrieves the corresponding OAuth2UserRecord. + * + * @param bearerToken The OIDC bearer token. + * @return An {@link OAuth2UserRecord} containing the user's info. + * @throws AuthorizationException If the token is invalid or if no OIDC providers are available. + */ + public OAuth2UserRecord verifyOIDCBearerTokenAndGetOAuth2UserRecord(String bearerToken) throws AuthorizationException { + try { + BearerAccessToken accessToken = BearerAccessToken.parse(bearerToken); + List providers = getAvailableOidcProviders(); + + // Ensure at least one OIDC provider is configured to validate the token. + if (providers.isEmpty()) { + logger.log(Level.WARNING, "Bearer token detected, no OIDC provider configured"); + throw new AuthorizationException(BundleUtil.getStringFromBundle("authenticationServiceBean.errors.bearerTokenDetectedNoOIDCProviderConfigured")); + } + + // Attempt to validate the token with each configured OIDC provider. 
+ for (OIDCAuthProvider provider : providers) { + try { + // Retrieve OAuth2UserRecord if UserInfo is present + Optional userInfo = provider.getUserInfo(accessToken); + if (userInfo.isPresent()) { + logger.log(Level.FINE, "Bearer token detected, provider {0} confirmed validity and provided user info", provider.getId()); + return provider.getUserRecord(userInfo.get()); + } + } catch (IOException | OAuth2Exception e) { + logger.log(Level.FINE, "Bearer token detected, provider " + provider.getId() + " indicates an invalid Token, skipping", e); + } + } + } catch (ParseException e) { + logger.log(Level.FINE, "Bearer token detected, unable to parse bearer token (invalid Token)", e); + throw new AuthorizationException(BundleUtil.getStringFromBundle("authenticationServiceBean.errors.invalidBearerToken")); + } + + // If no provider validated the token, throw an authorization exception. + logger.log(Level.FINE, "Bearer token detected, yet no configured OIDC provider validated it."); + throw new AuthorizationException(BundleUtil.getStringFromBundle("authenticationServiceBean.errors.unauthorizedBearerToken")); + } + + /** + * Retrieves a list of configured OIDC authentication providers. + * + * @return A list of available OIDCAuthProviders. 
+ */ + private List getAvailableOidcProviders() { + return getAuthenticationProviderIdsOfType(OIDCAuthProvider.class).stream() + .map(providerId -> (OIDCAuthProvider) getAuthenticationProvider(providerId)) + .toList(); + } } diff --git a/src/main/java/edu/harvard/iq/dataverse/authorization/providers/oauth2/oidc/OIDCAuthProvider.java b/src/main/java/edu/harvard/iq/dataverse/authorization/providers/oauth2/oidc/OIDCAuthProvider.java index 5eb2b391eb7..f396ebf6487 100644 --- a/src/main/java/edu/harvard/iq/dataverse/authorization/providers/oauth2/oidc/OIDCAuthProvider.java +++ b/src/main/java/edu/harvard/iq/dataverse/authorization/providers/oauth2/oidc/OIDCAuthProvider.java @@ -242,7 +242,7 @@ public OAuth2UserRecord getUserRecord(String code, String state, String redirect * @param userInfo * @return the usable user record for processing ing {@link edu.harvard.iq.dataverse.authorization.providers.oauth2.OAuth2LoginBackingBean} */ - OAuth2UserRecord getUserRecord(UserInfo userInfo) { + public OAuth2UserRecord getUserRecord(UserInfo userInfo) { return new OAuth2UserRecord( this.getId(), userInfo.getSubject().getValue(), @@ -291,7 +291,7 @@ Optional getAccessToken(AuthorizationGrant grant) throws IOEx * Retrieve User Info from provider. Encapsulate for testing. 
* @param accessToken The access token to enable reading data from userinfo endpoint */ - Optional getUserInfo(BearerAccessToken accessToken) throws IOException, OAuth2Exception { + public Optional getUserInfo(BearerAccessToken accessToken) throws IOException, OAuth2Exception { // Retrieve data HTTPResponse response = new UserInfoRequest(this.idpMetadata.getUserInfoEndpointURI(), accessToken) .toHTTPRequest() @@ -316,44 +316,4 @@ Optional getUserInfo(BearerAccessToken accessToken) throws IOException throw new OAuth2Exception(-1, ex.getMessage(), BundleUtil.getStringFromBundle("auth.providers.exception.userinfo", Arrays.asList(this.getTitle()))); } } - - /** - * Trades an access token for an {@link UserRecordIdentifier} (if valid). - * - * @apiNote The resulting {@link UserRecordIdentifier} may be used with - * {@link edu.harvard.iq.dataverse.authorization.AuthenticationServiceBean#lookupUser(UserRecordIdentifier)} - * to look up an {@link edu.harvard.iq.dataverse.authorization.users.AuthenticatedUser} from the database. - * @see edu.harvard.iq.dataverse.api.auth.BearerTokenAuthMechanism - * - * @param accessToken The token to use when requesting user information from the provider - * @return Returns an {@link UserRecordIdentifier} for a valid access token or an empty {@link Optional}. - * @throws IOException In case communication with the endpoint fails to succeed for an I/O reason - */ - public Optional getUserIdentifier(BearerAccessToken accessToken) throws IOException { - OAuth2UserRecord userRecord; - try { - // Try to retrieve with given token (throws if invalid token) - Optional userInfo = getUserInfo(accessToken); - - if (userInfo.isPresent()) { - // Take this detour to avoid code duplication and potentially hard to track conversion errors. - userRecord = getUserRecord(userInfo.get()); - } else { - // This should not happen - an error at the provider side will lead to an exception. 
- logger.log(Level.WARNING, - "User info retrieval from {0} returned empty optional but expected exception for token {1}.", - List.of(getId(), accessToken).toArray() - ); - return Optional.empty(); - } - } catch (OAuth2Exception e) { - logger.log(Level.FINE, - "Could not retrieve user info with token {0} at provider {1}: {2}", - List.of(accessToken, getId(), e.getMessage()).toArray()); - logger.log(Level.FINER, "Retrieval failed, details as follows: ", e); - return Optional.empty(); - } - - return Optional.of(userRecord.getUserRecordIdentifier()); - } } diff --git a/src/main/java/edu/harvard/iq/dataverse/dataaccess/GlobusOverlayAccessIO.java b/src/main/java/edu/harvard/iq/dataverse/dataaccess/GlobusOverlayAccessIO.java index 3bf2107e52b..d0da66c38e0 100644 --- a/src/main/java/edu/harvard/iq/dataverse/dataaccess/GlobusOverlayAccessIO.java +++ b/src/main/java/edu/harvard/iq/dataverse/dataaccess/GlobusOverlayAccessIO.java @@ -215,7 +215,7 @@ public long retrieveSizeFromMedia() { JsonArray dataArray = responseJson.getJsonArray("DATA"); if (dataArray != null && dataArray.size() != 0) { //File found - return (long) responseJson.getJsonArray("DATA").getJsonObject(0).getJsonNumber("size").longValueExact(); + return (long) dataArray.getJsonObject(0).getJsonNumber("size").longValueExact(); } } else { logger.warning("Response from " + get.getURI().toString() + " was " diff --git a/src/main/java/edu/harvard/iq/dataverse/dataaccess/S3AccessIO.java b/src/main/java/edu/harvard/iq/dataverse/dataaccess/S3AccessIO.java index d2fdec7b323..5b9e496281f 100644 --- a/src/main/java/edu/harvard/iq/dataverse/dataaccess/S3AccessIO.java +++ b/src/main/java/edu/harvard/iq/dataverse/dataaccess/S3AccessIO.java @@ -753,6 +753,12 @@ public Path getFileSystemPath() throws UnsupportedDataAccessOperationException { @Override public boolean exists() { + try { + key = getMainFileKey(); + } catch (IOException e) { + logger.warning("Caught an IOException in S3AccessIO.exists(): " + e.getMessage()); + 
return false; + } String destinationKey = null; if (dvObject instanceof DataFile) { destinationKey = key; diff --git a/src/main/java/edu/harvard/iq/dataverse/datasetutility/AddReplaceFileHelper.java b/src/main/java/edu/harvard/iq/dataverse/datasetutility/AddReplaceFileHelper.java index a470f08f736..6b98848021c 100644 --- a/src/main/java/edu/harvard/iq/dataverse/datasetutility/AddReplaceFileHelper.java +++ b/src/main/java/edu/harvard/iq/dataverse/datasetutility/AddReplaceFileHelper.java @@ -136,8 +136,6 @@ public class AddReplaceFileHelper{ private String newFileName; // step 30 private String newFileContentType; // step 30 private String newStorageIdentifier; // step 30 - private String newCheckSum; // step 30 - private ChecksumType newCheckSumType; //step 30 // -- Optional private DataFile fileToReplace; // step 25 @@ -146,6 +144,7 @@ public class AddReplaceFileHelper{ private DatasetVersion clone; List initialFileList; List finalFileList; + private boolean trustSuppliedFileSizes; // ----------------------------------- // Ingested files @@ -610,15 +609,9 @@ private boolean runAddReplacePhase1(Dataset owner, return false; } - if(optionalFileParams != null) { - if(optionalFileParams.hasCheckSum()) { - newCheckSum = optionalFileParams.getCheckSum(); - newCheckSumType = optionalFileParams.getCheckSumType(); - } - } msgt("step_030_createNewFilesViaIngest"); - if (!this.step_030_createNewFilesViaIngest()){ + if (!this.step_030_createNewFilesViaIngest(optionalFileParams)){ return false; } @@ -1191,7 +1184,7 @@ private boolean step_007_auto_isReplacementInLatestVersion(DataFile existingFile } - private boolean step_030_createNewFilesViaIngest(){ + private boolean step_030_createNewFilesViaIngest(OptionalFileParams optionalFileParams){ if (this.hasError()){ return false; @@ -1203,21 +1196,28 @@ private boolean step_030_createNewFilesViaIngest(){ //Don't repeatedly update the clone (losing changes) in multifile case clone = workingVersion.cloneDatasetVersion(); } + + Long 
suppliedFileSize = null; + String newCheckSum = null; + ChecksumType newCheckSumType = null; + + + if (optionalFileParams != null) { + if (optionalFileParams.hasCheckSum()) { + newCheckSum = optionalFileParams.getCheckSum(); + newCheckSumType = optionalFileParams.getCheckSumType(); + } + if (trustSuppliedFileSizes && optionalFileParams.hasFileSize()) { + suppliedFileSize = optionalFileParams.getFileSize(); + } + } + try { - /*CreateDataFileResult result = FileUtil.createDataFiles(workingVersion, - this.newFileInputStream, - this.newFileName, - this.newFileContentType, - this.newStorageIdentifier, - this.newCheckSum, - this.newCheckSumType, - this.systemConfig);*/ - UploadSessionQuotaLimit quota = null; if (systemConfig.isStorageQuotasEnforced()) { quota = fileService.getUploadSessionQuotaLimit(dataset); } - Command cmd = new CreateNewDataFilesCommand(dvRequest, workingVersion, newFileInputStream, newFileName, newFileContentType, newStorageIdentifier, quota, newCheckSum, newCheckSumType); + Command cmd = new CreateNewDataFilesCommand(dvRequest, workingVersion, newFileInputStream, newFileName, newFileContentType, newStorageIdentifier, quota, newCheckSum, newCheckSumType, suppliedFileSize); CreateDataFileResult createDataFilesResult = commandEngine.submit(cmd); initialFileList = createDataFilesResult.getDataFiles(); @@ -2033,9 +2033,15 @@ public void setDuplicateFileWarning(String duplicateFileWarning) { * @param jsonData - an array of jsonData entries (one per file) using the single add file jsonData format * @param dataset * @param authUser + * @param trustSuppliedSizes - whether to accept the fileSize values passed + * in jsonData (we don't want to trust the users of the S3 direct + * upload API with that information - we will verify the status of + * the files in the S3 bucket and confirm the sizes in the process. + * we do want GlobusService to be able to pass the file sizes, since + * they are obtained and verified via a Globus API lookup). 
* @return */ - public Response addFiles(String jsonData, Dataset dataset, User authUser) { + public Response addFiles(String jsonData, Dataset dataset, User authUser, boolean trustSuppliedFileSizes) { msgt("(addFilesToDataset) jsonData: " + jsonData.toString()); JsonArrayBuilder jarr = Json.createArrayBuilder(); @@ -2044,6 +2050,7 @@ public Response addFiles(String jsonData, Dataset dataset, User authUser) { int totalNumberofFiles = 0; int successNumberofFiles = 0; + this.trustSuppliedFileSizes = trustSuppliedFileSizes; // ----------------------------------------------------------- // Read jsonData and Parse files information from jsondata : // ----------------------------------------------------------- @@ -2176,6 +2183,10 @@ public Response addFiles(String jsonData, Dataset dataset, User authUser) { .add("data", Json.createObjectBuilder().add("Files", jarr).add("Result", result)).build() ).build(); } + public Response addFiles(String jsonData, Dataset dataset, User authUser) { + return addFiles(jsonData, dataset, authUser, false); + } + /** * Replace multiple files with prepositioned replacements as listed in the * jsonData. Works with direct upload, Globus, and other out-of-band methods. diff --git a/src/main/java/edu/harvard/iq/dataverse/datasetutility/OptionalFileParams.java b/src/main/java/edu/harvard/iq/dataverse/datasetutility/OptionalFileParams.java index 959dbc4e262..54844160163 100644 --- a/src/main/java/edu/harvard/iq/dataverse/datasetutility/OptionalFileParams.java +++ b/src/main/java/edu/harvard/iq/dataverse/datasetutility/OptionalFileParams.java @@ -39,6 +39,12 @@ * - Provenance related information * * @author rmp553 + * @todo (?) We may want to consider renaming this class to DataFileParams or + * DataFileInfo... 
it was originally created to encode some bits of info - + * the file "tags" specifically, that didn't fit in elsewhere in the normal + * workflow; but it's been expanded to cover pretty much everything else associated + * with DataFiles and it's not really "optional" anymore when, for example, used + * in the direct upload workflow. (?) */ public class OptionalFileParams { @@ -76,6 +82,8 @@ public class OptionalFileParams { public static final String MIME_TYPE_ATTR_NAME = "mimeType"; private String checkSumValue; private ChecksumType checkSumType; + public static final String FILE_SIZE_ATTR_NAME = "fileSize"; + private Long fileSize; public static final String LEGACY_CHECKSUM_ATTR_NAME = "md5Hash"; public static final String CHECKSUM_OBJECT_NAME = "checksum"; public static final String CHECKSUM_OBJECT_TYPE = "@type"; @@ -268,6 +276,18 @@ public String getCheckSum() { public ChecksumType getCheckSumType() { return checkSumType; } + + public boolean hasFileSize() { + return fileSize != null; + } + + public Long getFileSize() { + return fileSize; + } + + public void setFileSize(long fileSize) { + this.fileSize = fileSize; + } /** * Set tags @@ -416,7 +436,13 @@ else if ((jsonObj.has(CHECKSUM_OBJECT_NAME)) && (!jsonObj.get(CHECKSUM_OBJECT_NA this.checkSumType = ChecksumType.fromString(((JsonObject) jsonObj.get(CHECKSUM_OBJECT_NAME)).get(CHECKSUM_OBJECT_TYPE).getAsString()); } - + // ------------------------------- + // get file size as a Long, if supplied + // ------------------------------- + if ((jsonObj.has(FILE_SIZE_ATTR_NAME)) && (!jsonObj.get(FILE_SIZE_ATTR_NAME).isJsonNull())){ + + this.fileSize = jsonObj.get(FILE_SIZE_ATTR_NAME).getAsLong(); + } // ------------------------------- // get tags // ------------------------------- diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/exception/InvalidFieldsCommandException.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/exception/InvalidFieldsCommandException.java new file mode 100644 index 
00000000000..9bd1869f8a9 --- /dev/null +++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/exception/InvalidFieldsCommandException.java @@ -0,0 +1,42 @@ +package edu.harvard.iq.dataverse.engine.command.exception; + +import edu.harvard.iq.dataverse.engine.command.Command; +import java.util.Map; + +public class InvalidFieldsCommandException extends CommandException { + + private final Map fieldErrors; + + /** + * Constructs a new InvalidFieldsCommandException with the specified detail message, + * command, and a map of field errors. + * + * @param message The detail message. + * @param aCommand The command where the exception was encountered. + * @param fieldErrors A map containing the fields as keys and the reasons for their errors as values. + */ + public InvalidFieldsCommandException(String message, Command aCommand, Map fieldErrors) { + super(message, aCommand); + this.fieldErrors = fieldErrors; + } + + /** + * Gets the map of fields and their corresponding error messages. + * + * @return The map of field errors. + */ + public Map getFieldErrors() { + return fieldErrors; + } + + /** + * Returns a string representation of this exception, including the + * message and details of the invalid fields and their errors. + * + * @return A string representation of this exception. 
+ */ + @Override + public String toString() { + return super.toString() + ", fieldErrors=" + fieldErrors; + } +} diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/exception/PermissionException.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/exception/PermissionException.java index a7881fc7b6e..2ca63c9c4aa 100644 --- a/src/main/java/edu/harvard/iq/dataverse/engine/command/exception/PermissionException.java +++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/exception/PermissionException.java @@ -3,6 +3,7 @@ import edu.harvard.iq.dataverse.DvObject; import edu.harvard.iq.dataverse.authorization.Permission; import edu.harvard.iq.dataverse.engine.command.Command; + import java.util.Set; /** @@ -12,22 +13,31 @@ * @author michael */ public class PermissionException extends CommandException { - - private final Set required; - private final DvObject dvObject; - - public PermissionException(String message, Command failedCommand, Set required, DvObject aDvObject ) { - super(message, failedCommand); - this.required = required; - dvObject = aDvObject; - } - - public Set getRequiredPermissions() { - return required; - } - - public DvObject getDvObject() { - return dvObject; - } - + + private final Set required; + private final DvObject dvObject; + private final boolean isDetailedMessageRequired; + + public PermissionException(String message, Command failedCommand, Set required, DvObject dvObject, boolean isDetailedMessageRequired) { + super(message, failedCommand); + this.required = required; + this.dvObject = dvObject; + this.isDetailedMessageRequired = isDetailedMessageRequired; + } + + public PermissionException(String message, Command failedCommand, Set required, DvObject dvObject) { + this(message, failedCommand, required, dvObject, false); + } + + public Set getRequiredPermissions() { + return required; + } + + public DvObject getDvObject() { + return dvObject; + } + + public boolean isDetailedMessageRequired() { + return 
isDetailedMessageRequired; + } } diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/AbstractWriteDataverseCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/AbstractWriteDataverseCommand.java index 40c2abf5d21..91f3a5b823c 100644 --- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/AbstractWriteDataverseCommand.java +++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/AbstractWriteDataverseCommand.java @@ -19,13 +19,15 @@ abstract class AbstractWriteDataverseCommand extends AbstractCommand private final List inputLevels; private final List facets; protected final List metadataBlocks; + private final boolean resetRelationsOnNullValues; public AbstractWriteDataverseCommand(Dataverse dataverse, Dataverse affectedDataverse, DataverseRequest request, List facets, List inputLevels, - List metadataBlocks) { + List metadataBlocks, + boolean resetRelationsOnNullValues) { super(request, affectedDataverse); this.dataverse = dataverse; if (facets != null) { @@ -43,42 +45,61 @@ public AbstractWriteDataverseCommand(Dataverse dataverse, } else { this.metadataBlocks = null; } + this.resetRelationsOnNullValues = resetRelationsOnNullValues; } @Override public Dataverse execute(CommandContext ctxt) throws CommandException { dataverse = innerExecute(ctxt); + processMetadataBlocks(); + processFacets(ctxt); + processInputLevels(ctxt); + + return ctxt.dataverses().save(dataverse); + } + + private void processMetadataBlocks() { if (metadataBlocks != null && !metadataBlocks.isEmpty()) { dataverse.setMetadataBlockRoot(true); dataverse.setMetadataBlocks(metadataBlocks); + } else if (resetRelationsOnNullValues) { + dataverse.setMetadataBlockRoot(false); + dataverse.clearMetadataBlocks(); } + } + private void processFacets(CommandContext ctxt) { if (facets != null) { ctxt.facets().deleteFacetsFor(dataverse); - + dataverse.setDataverseFacets(new ArrayList<>()); + if (!facets.isEmpty()) { dataverse.setFacetRoot(true); } - int i 
= 0; - for (DatasetFieldType df : facets) { - ctxt.facets().create(i++, df, dataverse); + for (int i = 0; i < facets.size(); i++) { + ctxt.facets().create(i, facets.get(i), dataverse); } + } else if (resetRelationsOnNullValues) { + ctxt.facets().deleteFacetsFor(dataverse); + dataverse.setFacetRoot(false); } + } + private void processInputLevels(CommandContext ctxt) { if (inputLevels != null) { if (!inputLevels.isEmpty()) { dataverse.addInputLevelsMetadataBlocksIfNotPresent(inputLevels); } ctxt.fieldTypeInputLevels().deleteFacetsFor(dataverse); - for (DataverseFieldTypeInputLevel inputLevel : inputLevels) { + inputLevels.forEach(inputLevel -> { inputLevel.setDataverse(dataverse); ctxt.fieldTypeInputLevels().create(inputLevel); - } + }); + } else if (resetRelationsOnNullValues) { + ctxt.fieldTypeInputLevels().deleteFacetsFor(dataverse); } - - return ctxt.dataverses().save(dataverse); } abstract protected Dataverse innerExecute(CommandContext ctxt) throws IllegalCommandException; diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CreateDataverseCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CreateDataverseCommand.java index 145cfb6199c..3728f3ee6ce 100644 --- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CreateDataverseCommand.java +++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CreateDataverseCommand.java @@ -39,7 +39,7 @@ public CreateDataverseCommand(Dataverse created, List facets, List inputLevels, List metadataBlocks) { - super(created, created.getOwner(), request, facets, inputLevels, metadataBlocks); + super(created, created.getOwner(), request, facets, inputLevels, metadataBlocks, false); } @Override diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CreateNewDataFilesCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CreateNewDataFilesCommand.java index 76939751899..e9a2025b112 100644 --- 
a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CreateNewDataFilesCommand.java +++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CreateNewDataFilesCommand.java @@ -93,6 +93,10 @@ public CreateNewDataFilesCommand(DataverseRequest aRequest, DatasetVersion versi this(aRequest, version, inputStream, fileName, suppliedContentType, newStorageIdentifier, quota, newCheckSum, newCheckSumType, null, null); } + public CreateNewDataFilesCommand(DataverseRequest aRequest, DatasetVersion version, InputStream inputStream, String fileName, String suppliedContentType, String newStorageIdentifier, UploadSessionQuotaLimit quota, String newCheckSum, DataFile.ChecksumType newCheckSumType, Long newFileSize) { + this(aRequest, version, inputStream, fileName, suppliedContentType, newStorageIdentifier, quota, newCheckSum, newCheckSumType, newFileSize, null); + } + // This version of the command must be used when files are created in the // context of creating a brand new dataset (from the Add Dataset page): diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CuratePublishedDatasetVersionCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CuratePublishedDatasetVersionCommand.java index e6e8279a314..e378e2e2ef7 100644 --- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CuratePublishedDatasetVersionCommand.java +++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CuratePublishedDatasetVersionCommand.java @@ -131,7 +131,7 @@ public Dataset execute(CommandContext ctxt) throws CommandException { throw new IllegalCommandException(BundleUtil.getStringFromBundle("datasetversion.update.failure"), this); } else { - metadataUpdated = DatasetVersionDifference.compareFileMetadatas(publishedFmd, draftFmd); + metadataUpdated = !DatasetVersionDifference.compareFileMetadatas(publishedFmd, draftFmd).isEmpty(); publishedFmd.setLabel(draftFmd.getLabel()); publishedFmd.setDescription(draftFmd.getDescription()); 
publishedFmd.setCategories(draftFmd.getCategories()); diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/PublishDatasetCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/PublishDatasetCommand.java index 1ac41105237..902bea7f833 100644 --- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/PublishDatasetCommand.java +++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/PublishDatasetCommand.java @@ -11,6 +11,9 @@ import edu.harvard.iq.dataverse.engine.command.exception.IllegalCommandException; import edu.harvard.iq.dataverse.workflow.Workflow; import edu.harvard.iq.dataverse.workflow.WorkflowContext.TriggerType; + +import jakarta.persistence.OptimisticLockException; + import java.util.Optional; import java.util.logging.Logger; import static java.util.stream.Collectors.joining; @@ -105,10 +108,15 @@ public PublishDatasetResult execute(CommandContext ctxt) throws CommandException Optional prePubWf = ctxt.workflows().getDefaultWorkflow(TriggerType.PrePublishDataset); if ( prePubWf.isPresent() ) { // We start a workflow - theDataset = ctxt.em().merge(theDataset); - ctxt.em().flush(); - ctxt.workflows().start(prePubWf.get(), buildContext(theDataset, TriggerType.PrePublishDataset, datasetExternallyReleased), true); - return new PublishDatasetResult(theDataset, Status.Workflow); + try { + theDataset = ctxt.em().merge(theDataset); + ctxt.em().flush(); + ctxt.workflows().start(prePubWf.get(), + buildContext(theDataset, TriggerType.PrePublishDataset, datasetExternallyReleased), true); + return new PublishDatasetResult(theDataset, Status.Workflow); + } catch (OptimisticLockException e) { + throw new CommandException(e.getMessage(), e, this); + } } else{ // We will skip trying to register the global identifiers for datafiles @@ -157,7 +165,12 @@ public PublishDatasetResult execute(CommandContext ctxt) throws CommandException lock.setInfo(info); ctxt.datasets().addDatasetLock(theDataset, lock); } - theDataset = 
ctxt.em().merge(theDataset); + try { + theDataset = ctxt.em().merge(theDataset); + } catch (OptimisticLockException e) { + ctxt.datasets().removeDatasetLocks(theDataset, DatasetLock.Reason.finalizePublication); + throw new CommandException(e.getMessage(), e, this); + } // The call to FinalizePublicationCommand has been moved to the new @onSuccess() // method: //ctxt.datasets().callFinalizePublishCommandAsynchronously(theDataset.getId(), ctxt, request, datasetExternallyReleased); diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/RegisterOIDCUserCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/RegisterOIDCUserCommand.java new file mode 100644 index 00000000000..c7745c75aa9 --- /dev/null +++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/RegisterOIDCUserCommand.java @@ -0,0 +1,204 @@ +package edu.harvard.iq.dataverse.engine.command.impl; + +import edu.harvard.iq.dataverse.DvObject; +import edu.harvard.iq.dataverse.api.dto.UserDTO; +import edu.harvard.iq.dataverse.authorization.AuthenticatedUserDisplayInfo; +import edu.harvard.iq.dataverse.authorization.UserRecordIdentifier; +import edu.harvard.iq.dataverse.authorization.exceptions.AuthorizationException; +import edu.harvard.iq.dataverse.authorization.providers.oauth2.OAuth2UserRecord; +import edu.harvard.iq.dataverse.engine.command.*; +import edu.harvard.iq.dataverse.engine.command.exception.CommandException; +import edu.harvard.iq.dataverse.engine.command.exception.IllegalCommandException; +import edu.harvard.iq.dataverse.engine.command.exception.PermissionException; +import edu.harvard.iq.dataverse.engine.command.exception.InvalidFieldsCommandException; +import edu.harvard.iq.dataverse.settings.FeatureFlags; +import edu.harvard.iq.dataverse.util.BundleUtil; + +import java.util.HashMap; +import java.util.List; +import java.util.Map; + +@RequiredPermissions({}) +public class RegisterOIDCUserCommand extends AbstractVoidCommand { + + private static final 
String FIELD_USERNAME = "username"; + private static final String FIELD_FIRST_NAME = "firstName"; + private static final String FIELD_LAST_NAME = "lastName"; + private static final String FIELD_EMAIL_ADDRESS = "emailAddress"; + private static final String FIELD_TERMS_ACCEPTED = "termsAccepted"; + + private final String bearerToken; + private final UserDTO userDTO; + + public RegisterOIDCUserCommand(DataverseRequest aRequest, String bearerToken, UserDTO userDTO) { + super(aRequest, (DvObject) null); + this.bearerToken = bearerToken; + this.userDTO = userDTO; + } + + @Override + protected void executeImpl(CommandContext ctxt) throws CommandException { + try { + OAuth2UserRecord oAuth2UserRecord = ctxt.authentication().verifyOIDCBearerTokenAndGetOAuth2UserRecord(bearerToken); + UserRecordIdentifier userRecordIdentifier = oAuth2UserRecord.getUserRecordIdentifier(); + + if (ctxt.authentication().lookupUser(userRecordIdentifier) != null) { + throw new IllegalCommandException(BundleUtil.getStringFromBundle("registerOidcUserCommand.errors.userAlreadyRegisteredWithToken"), this); + } + + boolean provideMissingClaimsEnabled = FeatureFlags.API_BEARER_AUTH_PROVIDE_MISSING_CLAIMS.enabled(); + + updateUserDTO(oAuth2UserRecord, provideMissingClaimsEnabled); + + AuthenticatedUserDisplayInfo userDisplayInfo = new AuthenticatedUserDisplayInfo( + userDTO.getFirstName(), + userDTO.getLastName(), + userDTO.getEmailAddress(), + userDTO.getAffiliation() != null ? userDTO.getAffiliation() : "", + userDTO.getPosition() != null ? 
userDTO.getPosition() : "" + ); + + validateUserFields(ctxt, provideMissingClaimsEnabled); + + ctxt.authentication().createAuthenticatedUser(userRecordIdentifier, userDTO.getUsername(), userDisplayInfo, true); + + } catch (AuthorizationException ex) { + throw new PermissionException(ex.getMessage(), this, null, null, true); + } + } + + private void updateUserDTO(OAuth2UserRecord oAuth2UserRecord, boolean provideMissingClaimsEnabled) throws InvalidFieldsCommandException { + if (provideMissingClaimsEnabled) { + Map fieldErrors = validateConflictingClaims(oAuth2UserRecord); + throwInvalidFieldsCommandExceptionIfErrorsExist(fieldErrors); + updateUserDTOWithClaims(oAuth2UserRecord); + } else { + Map fieldErrors = validateUserDTOHasNoClaims(); + throwInvalidFieldsCommandExceptionIfErrorsExist(fieldErrors); + overwriteUserDTOWithClaims(oAuth2UserRecord); + } + } + + private Map validateConflictingClaims(OAuth2UserRecord oAuth2UserRecord) { + Map fieldErrors = new HashMap<>(); + + addFieldErrorIfConflict(FIELD_USERNAME, oAuth2UserRecord.getUsername(), userDTO.getUsername(), fieldErrors); + addFieldErrorIfConflict(FIELD_FIRST_NAME, oAuth2UserRecord.getDisplayInfo().getFirstName(), userDTO.getFirstName(), fieldErrors); + addFieldErrorIfConflict(FIELD_LAST_NAME, oAuth2UserRecord.getDisplayInfo().getLastName(), userDTO.getLastName(), fieldErrors); + addFieldErrorIfConflict(FIELD_EMAIL_ADDRESS, oAuth2UserRecord.getDisplayInfo().getEmailAddress(), userDTO.getEmailAddress(), fieldErrors); + + return fieldErrors; + } + + private void addFieldErrorIfConflict(String fieldName, String claimValue, String existingValue, Map fieldErrors) { + if (claimValue != null && !claimValue.trim().isEmpty() && existingValue != null && !claimValue.equals(existingValue)) { + String errorMessage = BundleUtil.getStringFromBundle( + "registerOidcUserCommand.errors.provideMissingClaimsEnabled.fieldAlreadyPresentInProvider", + List.of(fieldName) + ); + fieldErrors.put(fieldName, errorMessage); + } + } + + 
private Map validateUserDTOHasNoClaims() { + Map fieldErrors = new HashMap<>(); + if (userDTO.getUsername() != null) { + String errorMessage = BundleUtil.getStringFromBundle( + "registerOidcUserCommand.errors.provideMissingClaimsDisabled.unableToSetFieldViaJSON", + List.of(FIELD_USERNAME) + ); + fieldErrors.put(FIELD_USERNAME, errorMessage); + } + if (userDTO.getEmailAddress() != null) { + String errorMessage = BundleUtil.getStringFromBundle( + "registerOidcUserCommand.errors.provideMissingClaimsDisabled.unableToSetFieldViaJSON", + List.of(FIELD_EMAIL_ADDRESS) + ); + fieldErrors.put(FIELD_EMAIL_ADDRESS, errorMessage); + } + if (userDTO.getFirstName() != null) { + String errorMessage = BundleUtil.getStringFromBundle( + "registerOidcUserCommand.errors.provideMissingClaimsDisabled.unableToSetFieldViaJSON", + List.of(FIELD_FIRST_NAME) + ); + fieldErrors.put(FIELD_FIRST_NAME, errorMessage); + } + if (userDTO.getLastName() != null) { + String errorMessage = BundleUtil.getStringFromBundle( + "registerOidcUserCommand.errors.provideMissingClaimsDisabled.unableToSetFieldViaJSON", + List.of(FIELD_LAST_NAME) + ); + fieldErrors.put(FIELD_LAST_NAME, errorMessage); + } + return fieldErrors; + } + + private void updateUserDTOWithClaims(OAuth2UserRecord oAuth2UserRecord) { + userDTO.setUsername(getValueOrDefault(oAuth2UserRecord.getUsername(), userDTO.getUsername())); + userDTO.setFirstName(getValueOrDefault(oAuth2UserRecord.getDisplayInfo().getFirstName(), userDTO.getFirstName())); + userDTO.setLastName(getValueOrDefault(oAuth2UserRecord.getDisplayInfo().getLastName(), userDTO.getLastName())); + userDTO.setEmailAddress(getValueOrDefault(oAuth2UserRecord.getDisplayInfo().getEmailAddress(), userDTO.getEmailAddress())); + } + + private void overwriteUserDTOWithClaims(OAuth2UserRecord oAuth2UserRecord) { + userDTO.setUsername(oAuth2UserRecord.getUsername()); + userDTO.setFirstName(oAuth2UserRecord.getDisplayInfo().getFirstName()); + 
userDTO.setLastName(oAuth2UserRecord.getDisplayInfo().getLastName()); + userDTO.setEmailAddress(oAuth2UserRecord.getDisplayInfo().getEmailAddress()); + } + + private void throwInvalidFieldsCommandExceptionIfErrorsExist(Map fieldErrors) throws InvalidFieldsCommandException { + if (!fieldErrors.isEmpty()) { + throw new InvalidFieldsCommandException( + BundleUtil.getStringFromBundle("registerOidcUserCommand.errors.invalidFields"), + this, + fieldErrors + ); + } + } + + private String getValueOrDefault(String oidcValue, String dtoValue) { + return (oidcValue == null || oidcValue.trim().isEmpty()) ? dtoValue : oidcValue; + } + + private void validateUserFields(CommandContext ctxt, boolean provideMissingClaimsEnabled) throws InvalidFieldsCommandException { + Map fieldErrors = new HashMap<>(); + + if (!FeatureFlags.API_BEARER_AUTH_HANDLE_TOS_ACCEPTANCE_IN_IDP.enabled()) { + validateTermsAccepted(fieldErrors); + } + + validateField(fieldErrors, FIELD_EMAIL_ADDRESS, userDTO.getEmailAddress(), ctxt, provideMissingClaimsEnabled); + validateField(fieldErrors, FIELD_USERNAME, userDTO.getUsername(), ctxt, provideMissingClaimsEnabled); + validateField(fieldErrors, FIELD_FIRST_NAME, userDTO.getFirstName(), ctxt, provideMissingClaimsEnabled); + validateField(fieldErrors, FIELD_LAST_NAME, userDTO.getLastName(), ctxt, provideMissingClaimsEnabled); + + throwInvalidFieldsCommandExceptionIfErrorsExist(fieldErrors); + } + + private void validateTermsAccepted(Map fieldErrors) { + if (!userDTO.isTermsAccepted()) { + fieldErrors.put(FIELD_TERMS_ACCEPTED, BundleUtil.getStringFromBundle("registerOidcUserCommand.errors.userShouldAcceptTerms")); + } + } + + private void validateField(Map fieldErrors, String fieldName, String fieldValue, CommandContext ctxt, boolean provideMissingClaimsEnabled) { + if (fieldValue == null || fieldValue.isEmpty()) { + String errorKey = provideMissingClaimsEnabled ? 
+ "registerOidcUserCommand.errors.provideMissingClaimsEnabled.fieldRequired" : + "registerOidcUserCommand.errors.provideMissingClaimsDisabled.fieldRequired"; + fieldErrors.put(fieldName, BundleUtil.getStringFromBundle(errorKey, List.of(fieldName))); + } else if (isFieldInUse(ctxt, fieldName, fieldValue)) { + fieldErrors.put(fieldName, BundleUtil.getStringFromBundle("registerOidcUserCommand.errors." + fieldName + "InUse")); + } + } + + private boolean isFieldInUse(CommandContext ctxt, String fieldName, String value) { + if (FIELD_EMAIL_ADDRESS.equals(fieldName)) { + return ctxt.authentication().getAuthenticatedUserByEmail(value) != null; + } else if (FIELD_USERNAME.equals(fieldName)) { + return ctxt.authentication().getAuthenticatedUser(value) != null; + } + return false; + } +} diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/UpdateDataverseCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/UpdateDataverseCommand.java index 55cc3708097..6dc4ab4d00d 100644 --- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/UpdateDataverseCommand.java +++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/UpdateDataverseCommand.java @@ -32,7 +32,7 @@ public UpdateDataverseCommand(Dataverse dataverse, List featuredDataverses, DataverseRequest request, List inputLevels) { - this(dataverse, facets, featuredDataverses, request, inputLevels, null, null); + this(dataverse, facets, featuredDataverses, request, inputLevels, null, null, false); } public UpdateDataverseCommand(Dataverse dataverse, @@ -41,8 +41,9 @@ public UpdateDataverseCommand(Dataverse dataverse, DataverseRequest request, List inputLevels, List metadataBlocks, - DataverseDTO updatedDataverseDTO) { - super(dataverse, dataverse, request, facets, inputLevels, metadataBlocks); + DataverseDTO updatedDataverseDTO, + boolean resetRelationsOnNullValues) { + super(dataverse, dataverse, request, facets, inputLevels, metadataBlocks, resetRelationsOnNullValues); if 
(featuredDataverses != null) { this.featuredDataverseList = new ArrayList<>(featuredDataverses); } else { diff --git a/src/main/java/edu/harvard/iq/dataverse/export/ddi/DdiExportUtil.java b/src/main/java/edu/harvard/iq/dataverse/export/ddi/DdiExportUtil.java index 05ddbe83e78..8fab6a6704d 100644 --- a/src/main/java/edu/harvard/iq/dataverse/export/ddi/DdiExportUtil.java +++ b/src/main/java/edu/harvard/iq/dataverse/export/ddi/DdiExportUtil.java @@ -5,11 +5,13 @@ import edu.harvard.iq.dataverse.ControlledVocabularyValue; import edu.harvard.iq.dataverse.DatasetFieldConstant; import edu.harvard.iq.dataverse.DvObjectContainer; +import edu.harvard.iq.dataverse.api.dto.MetadataBlockDTO; import edu.harvard.iq.dataverse.api.dto.DatasetDTO; import edu.harvard.iq.dataverse.api.dto.DatasetVersionDTO; -import edu.harvard.iq.dataverse.api.dto.FieldDTO; import edu.harvard.iq.dataverse.api.dto.FileDTO; -import edu.harvard.iq.dataverse.api.dto.MetadataBlockDTO; +import edu.harvard.iq.dataverse.api.dto.FieldDTO; +import edu.harvard.iq.dataverse.api.dto.LicenseDTO; + import static edu.harvard.iq.dataverse.export.DDIExportServiceBean.LEVEL_FILE; import static edu.harvard.iq.dataverse.export.DDIExportServiceBean.NOTE_SUBJECT_TAG; @@ -313,8 +315,16 @@ private static void writeDataAccess(XMLStreamWriter xmlw , DatasetVersionDTO ver XmlWriterUtil.writeFullElement(xmlw, "conditions", version.getConditions()); XmlWriterUtil.writeFullElement(xmlw, "disclaimer", version.getDisclaimer()); xmlw.writeEndElement(); //useStmt - + /* any s: */ + if (version.getTermsOfUse() != null && !version.getTermsOfUse().trim().equals("")) { + xmlw.writeStartElement("notes"); + xmlw.writeAttribute("type", NOTE_TYPE_TERMS_OF_USE); + xmlw.writeAttribute("level", LEVEL_DV); + xmlw.writeCharacters(version.getTermsOfUse()); + xmlw.writeEndElement(); //notes + } + if (version.getTermsOfAccess() != null && !version.getTermsOfAccess().trim().equals("")) { xmlw.writeStartElement("notes"); xmlw.writeAttribute("type", 
NOTE_TYPE_TERMS_OF_ACCESS); @@ -322,6 +332,19 @@ private static void writeDataAccess(XMLStreamWriter xmlw , DatasetVersionDTO ver xmlw.writeCharacters(version.getTermsOfAccess()); xmlw.writeEndElement(); //notes } + + LicenseDTO license = version.getLicense(); + if (license != null) { + String name = license.getName(); + String uri = license.getUri(); + if ((name != null && !name.trim().equals("")) && (uri != null && !uri.trim().equals(""))) { + xmlw.writeStartElement("notes"); + xmlw.writeAttribute("type", NOTE_TYPE_TERMS_OF_USE); + xmlw.writeAttribute("level", LEVEL_DV); + xmlw.writeCharacters("" + name + ""); + xmlw.writeEndElement(); //notes + } + } xmlw.writeEndElement(); //dataAccs } diff --git a/src/main/java/edu/harvard/iq/dataverse/globus/GlobusServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/globus/GlobusServiceBean.java index ac3c81622fc..58992805dc8 100644 --- a/src/main/java/edu/harvard/iq/dataverse/globus/GlobusServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/globus/GlobusServiceBean.java @@ -74,6 +74,7 @@ import edu.harvard.iq.dataverse.util.URLTokenUtil; import edu.harvard.iq.dataverse.util.UrlSignerUtil; import edu.harvard.iq.dataverse.util.json.JsonUtil; +import jakarta.json.JsonNumber; import jakarta.json.JsonReader; import jakarta.persistence.EntityManager; import jakarta.persistence.PersistenceContext; @@ -284,6 +285,52 @@ private int makeDir(GlobusEndpoint endpoint, String dir) { return result.status; } + private Map lookupFileSizes(GlobusEndpoint endpoint, String dir) { + MakeRequestResponse result; + + try { + logger.fine("Attempting to look up the contents of the Globus folder "+dir); + URL url = new URL( + "https://transfer.api.globusonline.org/v0.10/operation/endpoint/" + endpoint.getId() + + "/ls?path=" + dir); + result = makeRequest(url, "Bearer", endpoint.getClientToken(), "GET", null); + + switch (result.status) { + case 200: + logger.fine("Looked up directory " + dir + " successfully."); + break; + default: + 
logger.warning("Status " + result.status + " received when looking up dir " + dir); + logger.fine("Response: " + result.jsonResponse); + return null; + } + } catch (MalformedURLException ex) { + // Misconfiguration + logger.warning("Failed to list the contents of the directory "+ dir + " on endpoint " + endpoint.getId()); + return null; + } + + Map ret = new HashMap<>(); + + JsonObject listObject = JsonUtil.getJsonObject(result.jsonResponse); + JsonArray dataArray = listObject.getJsonArray("DATA"); + + if (dataArray != null && !dataArray.isEmpty()) { + for (int i = 0; i < dataArray.size(); i++) { + String dataType = dataArray.getJsonObject(i).getString("DATA_TYPE", null); + if (dataType != null && dataType.equals("file")) { + // is it safe to assume that any entry with a valid "DATA_TYPE": "file" + // will also have valid "name" and "size" entries? + String fileName = dataArray.getJsonObject(i).getString("name"); + long fileSize = dataArray.getJsonObject(i).getJsonNumber("size").longValueExact(); + ret.put(fileName, fileSize); + } + } + } + + return ret; + } + private int requestPermission(GlobusEndpoint endpoint, Dataset dataset, Permissions permissions) { Gson gson = new GsonBuilder().create(); MakeRequestResponse result = null; @@ -938,9 +985,20 @@ private void processUploadedFiles(JsonArray filesJsonArray, Dataset dataset, Aut inputList.add(fileId + "IDsplit" + fullPath + "IDsplit" + fileName); } + + Map fileSizeMap = null; + + if (filesJsonArray.size() >= systemConfig.getGlobusBatchLookupSize()) { + // Look up the sizes of all the files in the dataset folder, to avoid + // looking them up one by one later: + // @todo: we should only be doing this if this is a managed store, probably (?) 
+ GlobusEndpoint endpoint = getGlobusEndpoint(dataset); + fileSizeMap = lookupFileSizes(endpoint, endpoint.getBasePath()); + } // calculateMissingMetadataFields: checksum, mimetype JsonObject newfilesJsonObject = calculateMissingMetadataFields(inputList, myLogger); + JsonArray newfilesJsonArray = newfilesJsonObject.getJsonArray("files"); logger.fine("Size: " + newfilesJsonArray.size()); logger.fine("Val: " + JsonUtil.prettyPrint(newfilesJsonArray.getJsonObject(0))); @@ -964,20 +1022,26 @@ private void processUploadedFiles(JsonArray filesJsonArray, Dataset dataset, Aut if (newfileJsonObject != null) { logger.fine("List Size: " + newfileJsonObject.size()); // if (!newfileJsonObject.get(0).getString("hash").equalsIgnoreCase("null")) { - JsonPatch path = Json.createPatchBuilder() + JsonPatch patch = Json.createPatchBuilder() .add("/md5Hash", newfileJsonObject.get(0).getString("hash")).build(); - fileJsonObject = path.apply(fileJsonObject); - path = Json.createPatchBuilder() + fileJsonObject = patch.apply(fileJsonObject); + patch = Json.createPatchBuilder() .add("/mimeType", newfileJsonObject.get(0).getString("mime")).build(); - fileJsonObject = path.apply(fileJsonObject); + fileJsonObject = patch.apply(fileJsonObject); + // If we already know the size of this file on the Globus end, + // we'll pass it to /addFiles, to avoid looking up file sizes + // one by one: + if (fileSizeMap != null && fileSizeMap.get(fileId) != null) { + Long uploadedFileSize = fileSizeMap.get(fileId); + myLogger.info("Found size for file " + fileId + ": " + uploadedFileSize + " bytes"); + patch = Json.createPatchBuilder() + .add("/fileSize", Json.createValue(uploadedFileSize)).build(); + fileJsonObject = patch.apply(fileJsonObject); + } else { + logger.fine("No file size entry found for file "+fileId); + } addFilesJsonData.add(fileJsonObject); countSuccess++; - // } else { - // globusLogger.info(fileName - // + " will be skipped from adding to dataset by second API due to missing - // values "); 
- // countError++; - // } } else { myLogger.info(fileName + " will be skipped from adding to dataset in the final AddReplaceFileHelper.addFiles() call. "); @@ -1029,7 +1093,7 @@ private void processUploadedFiles(JsonArray filesJsonArray, Dataset dataset, Aut // The old code had 2 sec. of sleep, so ... Thread.sleep(2000); - Response addFilesResponse = addFileHelper.addFiles(newjsonData, dataset, authUser); + Response addFilesResponse = addFileHelper.addFiles(newjsonData, dataset, authUser, true); if (addFilesResponse == null) { logger.info("null response from addFiles call"); @@ -1211,7 +1275,7 @@ private GlobusTaskState globusStatusCheck(GlobusEndpoint endpoint, String taskId return task; } - public JsonObject calculateMissingMetadataFields(List inputList, Logger globusLogger) + private JsonObject calculateMissingMetadataFields(List inputList, Logger globusLogger) throws InterruptedException, ExecutionException, IOException { List> hashvalueCompletableFutures = inputList.stream() @@ -1230,7 +1294,7 @@ public JsonObject calculateMissingMetadataFields(List inputList, Logger }); JsonArrayBuilder filesObject = (JsonArrayBuilder) completableFuture.get(); - + JsonObject output = Json.createObjectBuilder().add("files", filesObject).build(); return output; diff --git a/src/main/java/edu/harvard/iq/dataverse/ingest/IngestServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/ingest/IngestServiceBean.java index b42fd950528..71c498a4d0b 100644 --- a/src/main/java/edu/harvard/iq/dataverse/ingest/IngestServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/ingest/IngestServiceBean.java @@ -344,10 +344,20 @@ public List saveAndAddFilesToDataset(DatasetVersion version, try { StorageIO dataAccess = DataAccess.getStorageIO(dataFile); //Populate metadata - dataAccess.open(DataAccessOption.READ_ACCESS); - // (the .open() above makes a remote call to check if - // the file exists and obtains its size) - confirmedFileSize = dataAccess.getSize(); + + // There are direct upload 
sub-cases where the file size + // is already known at this point. For example, direct uploads + // to S3 that go through the jsf dataset page. Or the Globus + // uploads, where the file sizes are looked up in bulk on + // the completion of the remote upload task. + if (dataFile.getFilesize() >= 0) { + confirmedFileSize = dataFile.getFilesize(); + } else { + dataAccess.open(DataAccessOption.READ_ACCESS); + // (the .open() above makes a remote call to check if + // the file exists and obtains its size) + confirmedFileSize = dataAccess.getSize(); + } // For directly-uploaded files, we will perform the file size // limit and quota checks here. Perform them *again*, in @@ -362,13 +372,16 @@ public List saveAndAddFilesToDataset(DatasetVersion version, if (fileSizeLimit == null || confirmedFileSize < fileSizeLimit) { //set file size - logger.fine("Setting file size: " + confirmedFileSize); - dataFile.setFilesize(confirmedFileSize); + if (dataFile.getFilesize() < 0) { + logger.fine("Setting file size: " + confirmedFileSize); + dataFile.setFilesize(confirmedFileSize); + } if (dataAccess instanceof S3AccessIO) { ((S3AccessIO) dataAccess).removeTempTag(); } savedSuccess = true; + logger.info("directly uploaded file successfully saved. 
file size: "+dataFile.getFilesize()); } } catch (IOException ioex) { logger.warning("Failed to get file size, storage id, or failed to remove the temp tag on the saved S3 object" + dataFile.getStorageIdentifier() + " (" diff --git a/src/main/java/edu/harvard/iq/dataverse/privateurl/PrivateUrl.java b/src/main/java/edu/harvard/iq/dataverse/privateurl/PrivateUrl.java index beb676f60d1..63b5bf03ea7 100644 --- a/src/main/java/edu/harvard/iq/dataverse/privateurl/PrivateUrl.java +++ b/src/main/java/edu/harvard/iq/dataverse/privateurl/PrivateUrl.java @@ -30,7 +30,7 @@ public class PrivateUrl { public PrivateUrl(RoleAssignment roleAssignment, Dataset dataset, String dataverseSiteUrl) { this.token = roleAssignment.getPrivateUrlToken(); - this.link = dataverseSiteUrl + "/privateurl.xhtml?token=" + token; + this.link = dataverseSiteUrl + "/previewurl.xhtml?token=" + token; this.dataset = dataset; this.roleAssignment = roleAssignment; } diff --git a/src/main/java/edu/harvard/iq/dataverse/privateurl/PrivateUrlPage.java b/src/main/java/edu/harvard/iq/dataverse/privateurl/PrivateUrlPage.java index 9af4bb6af9e..17c622be9e2 100644 --- a/src/main/java/edu/harvard/iq/dataverse/privateurl/PrivateUrlPage.java +++ b/src/main/java/edu/harvard/iq/dataverse/privateurl/PrivateUrlPage.java @@ -1,6 +1,10 @@ package edu.harvard.iq.dataverse.privateurl; +import edu.harvard.iq.dataverse.Dataset; +import edu.harvard.iq.dataverse.DatasetServiceBean; +import edu.harvard.iq.dataverse.DataverseRequestServiceBean; import edu.harvard.iq.dataverse.DataverseSession; +import edu.harvard.iq.dataverse.PermissionsWrapper; import edu.harvard.iq.dataverse.authorization.users.PrivateUrlUser; import java.io.Serializable; import java.util.logging.Logger; @@ -20,8 +24,14 @@ public class PrivateUrlPage implements Serializable { @EJB PrivateUrlServiceBean privateUrlService; + @EJB + DatasetServiceBean datasetServiceBean; @Inject DataverseSession session; + @Inject + PermissionsWrapper permissionsWrapper; + @Inject + 
DataverseRequestServiceBean dvRequestService; /** * The unique string used to look up a PrivateUrlUser and the associated @@ -34,7 +44,16 @@ public String init() { PrivateUrlRedirectData privateUrlRedirectData = privateUrlService.getPrivateUrlRedirectDataFromToken(token); String draftDatasetPageToBeRedirectedTo = privateUrlRedirectData.getDraftDatasetPageToBeRedirectedTo() + "&faces-redirect=true"; PrivateUrlUser privateUrlUser = privateUrlRedirectData.getPrivateUrlUser(); - session.setUser(privateUrlUser); + boolean sessionUserCanViewUnpublishedDataset = false; + if (session.getUser().isAuthenticated()){ + Long datasetId = privateUrlUser.getDatasetId(); + Dataset dataset = datasetServiceBean.find(datasetId); + sessionUserCanViewUnpublishedDataset = permissionsWrapper.canViewUnpublishedDataset(dvRequestService.getDataverseRequest(), dataset); + } + if(!sessionUserCanViewUnpublishedDataset){ + //Only Reset if user cannot view this Draft Version + session.setUser(privateUrlUser); + } logger.info("Redirecting PrivateUrlUser '" + privateUrlUser.getIdentifier() + "' to " + draftDatasetPageToBeRedirectedTo); return draftDatasetPageToBeRedirectedTo; } catch (Exception ex) { diff --git a/src/main/java/edu/harvard/iq/dataverse/privateurl/package-info.java b/src/main/java/edu/harvard/iq/dataverse/privateurl/package-info.java index 6e939c1bb6d..1310e0eb199 100644 --- a/src/main/java/edu/harvard/iq/dataverse/privateurl/package-info.java +++ b/src/main/java/edu/harvard/iq/dataverse/privateurl/package-info.java @@ -1,19 +1,19 @@ /** - * Private URL for unpublished datasets. + * Preview URL for unpublished datasets. *

- * The Private URL feature has been implemented as a specialized role assignment + * The Preview (formerly Private) URL feature has been implemented as a specialized role assignment * with an associated token that permits read-only access to the metadata and * all files (regardless of if the files are restricted or not) of a draft * version of a dataset. *

- * As of this note, a second option - to create a Private URL that provides an + * As of this note, a second option - to create a Preview URL that provides an * anonymized view of the dataset has been added. This option works the same as * the original except that it hides author names in the citation block, hides * the values for an admin specified list of metadata fields, disables citation * downloads, and disables API access (except for file and file thumbnail * downloads which are used by the UI). *

- * The primary use case for a Private URL is for journal editors to send a link + * The primary use case for a Preview URL is for journal editors to send a link * to reviewers of a dataset before publication. In most cases, these journal * editors do not permit depositors to publish on their own, which is to say * they only allow depositors to have the "Contributor" role on the datasets @@ -24,42 +24,42 @@ * the depositor, who is in charge of both the security of the dataset and the * timing of when the dataset is published. *

- * A secondary use case for a Private URL is for depositors who have the ability + * A secondary use case for a Preview URL is for depositors who have the ability * to manage permissions on their dataset (depositors who have the "Curator" or * "Admin" role, which grants much more power than the "Contributor" role) to * send a link to coauthors or other trusted parties to preview the dataset * before the depositors publish the dataset on their own. For better security, * these depositors could ask their coauthors to create Dataverse accounts and - * assign roles to them directly, rather than using a Private URL which requires + * assign roles to them directly, rather than using a Preview URL which requires * no username or password. *

* As of this note, a second option aimed specifically at the review use case - - * to create a Private URL that provides an anonymized view of the dataset - has + * to create a Preview URL that provides an anonymized view of the dataset - has * been added. This option works the same as the original except that it hides * author names in the citation block, hides the values for an admin specified * list of metadata fields, disables citation downloads, and disables API access * (except for file and file thumbnail downloads which are used by the UI). *

- * The token associated with the Private URL role assignment that can be used + * The token associated with the Preview URL role assignment that can be used * either in the GUI or, for the non-anonymized-access option, via the API to * elevate privileges beyond what a "Guest" can see. The ability to use a - * Private URL token via API was added mostly to facilitate automated testing of - * the feature but the far more common case is expected to be use of the Private + * Preview URL token via API was added mostly to facilitate automated testing of + * the feature but the far more common case is expected to be use of the Preview * URL token in a link that is clicked to open a browser, similar to links * shared via Dropbox, Google, etc. *

- * When reviewers click a Private URL their browser sessions are set to the + * When reviewers click a Preview URL their browser sessions are set to the * "{@link edu.harvard.iq.dataverse.authorization.users.PrivateUrlUser}" that * has the "Member" role only on the dataset in question and redirected to that * dataset, where they will see an indication in blue at the top of the page * that they are viewing an unpublished dataset. If the reviewer happens to be * logged into Dataverse already, clicking the link will log them out because * the review is meant to be blind. Because the dataset is always in draft when - * a Private URL is in effect, no downloads or any other activity by the - * reviewer are logged to the guestbook. All reviewers click the same Private + * a Preview URL is in effect, no downloads or any other activity by the + * reviewer are logged to the guestbook. All reviewers click the same Preview * URL containing the same token, and with the exception of an IP address being * logged, it should be impossible to trace which reviewers have clicked a - * Private URL. If the reviewer navigates to the home page, the session is set + * Preview URL. If the reviewer navigates to the home page, the session is set * to the Guest user and they will see what a Guest would see. *

* The "Member" role is used because it contains the necessary read-only @@ -76,51 +76,51 @@ * version. A Member can also download restricted files that have been deleted * from previously published versions. *

- * Likewise, when a Private URL token is used via API, commands are executed + * Likewise, when a Preview URL token is used via API, commands are executed * using the "PrivateUrlUser" that has the "Member" role only on the dataset in * question. This means that read-only operations such as downloads of the - * dataset's files are permitted. The Search API does not respect the Private + * dataset's files are permitted. The Search API does not respect the Preview * URL token but you can download files using the Access API, and, with the * non-anonymized-access option, download unpublished metadata using the Native * API. *

- * A Private URL cannot be created for a published version of a dataset. In the + * A Preview URL cannot be created for a published version of a dataset. In the * GUI, you will be reminded of this fact with a popup. The API will explain * this as well. *

- * An anonymized-access Private URL can't be created if any published dataset + * An anonymized-access Preview URL can't be created if any published dataset * version exists. The primary reason for this is that, since datasets have * DOIs, the full metadata about published versions is available directly from * the DOI provider. (While the metadata for that version could be somewhat * different, in practice it would probably provide a means of identifying * some/all of the authors). *

- * If a draft dataset containing a Private URL is - * published, the Private URL is deleted. This means that reviewers who click + * If a draft dataset containing a Preview URL is + * published, the Preview URL is deleted. This means that reviewers who click * the link after publication will see a 404. *

- * If a post-publication draft containing a Private URL is deleted, the Private + * If a post-publication draft containing a Preview URL is deleted, the Preview * URL is deleted. This is to ensure that if a new draft is created in the * future, a new token will be used. *

- * The creation and deletion of a Private URL are limited to the "Curator" and + * The creation and deletion of a Preview URL are limited to the "Curator" and * "Admin" roles because only those roles have the permission called * "ManageDatasetPermissions", which is the permission used by the * "AssignRoleCommand" and "RevokeRoleCommand" commands. If you have the - * permission to create or delete a Private URL, the fact that a Private URL is + * permission to create or delete a Preview URL, the fact that a Preview URL is * enabled for a dataset will be indicated in blue at the top of the page. * Success messages are shown at the top of the page when you create or delete a - * Private URL. In the GUI, deleting a Private URL is called "disabling" and you + * Preview URL. In the GUI, deleting a Preview URL is called "disabling" and you * will be prompted for a confirmation. No matter what you call it the role is - * revoked. You can also delete a Private URL by revoking the role. + * revoked. You can also delete a Preview URL by revoking the role. *

* A "Contributor" does not have the "ManageDatasetPermissions" permission and - * cannot see "Permissions" nor "Private URL" under the "Edit" menu of their - * dataset. When a Curator or Admin has enabled a Private URL on a Contributor's - * dataset, the Contributor does not see a visual indication that a Private URL + * cannot see "Permissions" nor "Preview URL" under the "Edit" menu of their + * dataset. When a Curator or Admin has enabled a Preview URL on a Contributor's + * dataset, the Contributor does not see a visual indication that a Preview URL * has been enabled for their dataset. *

- * There is no way for an "Admin" or "Curator" to see when a Private URL was + * There is no way for an "Admin" or "Curator" to see when a Preview URL was * created or deleted for a dataset but someone who has access to the database * can see that the following commands are logged to the "actionlogrecord" * database table: @@ -129,7 +129,7 @@ *

  • {@link edu.harvard.iq.dataverse.engine.command.impl.CreatePrivateUrlCommand}
  • *
  • {@link edu.harvard.iq.dataverse.engine.command.impl.DeletePrivateUrlCommand}
  • * - * See also the Private URL To Unpublished Dataset BRD at * https://docs.google.com/document/d/1FT47QkZKcmjSgRnePaJO2g1nzcotLyN3Yb2ORvBr6cs/edit?usp=sharing */ diff --git a/src/main/java/edu/harvard/iq/dataverse/search/IndexServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/search/IndexServiceBean.java index d0dcf3461cf..9b7998b0a8e 100644 --- a/src/main/java/edu/harvard/iq/dataverse/search/IndexServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/search/IndexServiceBean.java @@ -135,6 +135,9 @@ public class IndexServiceBean { @EJB DatasetFieldServiceBean datasetFieldService; + @Inject + DatasetVersionFilesServiceBean datasetVersionFilesServiceBean; + public static final String solrDocIdentifierDataverse = "dataverse_"; public static final String solrDocIdentifierFile = "datafile_"; public static final String solrDocIdentifierDataset = "dataset_"; @@ -1018,6 +1021,8 @@ public SolrInputDocuments toSolrDocs(IndexableDataset indexableDataset, SetFeature Request/Idea: Harvest metadata values that aren't from a list of controlled values #9992 @@ -1296,7 +1299,6 @@ public SolrInputDocuments toSolrDocs(IndexableDataset indexableDataset, Set variables = fileMetadata.getDataFile().getDataTable().getDataVariables(); + Long observations = fileMetadata.getDataFile().getDataTable().getCaseQuantity(); + datafileSolrInputDocument.addField(SearchFields.OBSERVATIONS, observations); + datafileSolrInputDocument.addField(SearchFields.VARIABLE_COUNT, variables.size()); Map variableMap = null; List variablesByMetadata = variableService.findVarMetByFileMetaId(fileMetadata.getId()); @@ -2230,8 +2237,7 @@ public List findPermissionsInSolrOnly() throws SearchException { String dtype = dvObjectService.getDtype(id); if (dtype == null) { permissionInSolrOnly.add(docId); - } - if (dtype.equals(DType.Dataset.getDType())) { + }else if (dtype.equals(DType.Dataset.getDType())) { List states = datasetService.getVersionStates(id); if (states != null) { String latestState = 
states.get(states.size() - 1); @@ -2252,7 +2258,7 @@ public List findPermissionsInSolrOnly() throws SearchException { } else if (dtype.equals(DType.DataFile.getDType())) { List states = dataFileService.findVersionStates(id); Set strings = states.stream().map(VersionState::toString).collect(Collectors.toSet()); - logger.fine("States for " + docId + ": " + String.join(", ", strings)); + logger.finest("States for " + docId + ": " + String.join(", ", strings)); if (docId.endsWith("draft_permission")) { if (!states.contains(VersionState.DRAFT)) { permissionInSolrOnly.add(docId); @@ -2266,7 +2272,7 @@ public List findPermissionsInSolrOnly() throws SearchException { permissionInSolrOnly.add(docId); } else { if (!dataFileService.isInReleasedVersion(id)) { - logger.fine("Adding doc " + docId + " to list of permissions in Solr only"); + logger.finest("Adding doc " + docId + " to list of permissions in Solr only"); permissionInSolrOnly.add(docId); } } diff --git a/src/main/java/edu/harvard/iq/dataverse/search/SearchFields.java b/src/main/java/edu/harvard/iq/dataverse/search/SearchFields.java index ef27a5eefaf..712f90186f5 100644 --- a/src/main/java/edu/harvard/iq/dataverse/search/SearchFields.java +++ b/src/main/java/edu/harvard/iq/dataverse/search/SearchFields.java @@ -171,6 +171,7 @@ public class SearchFields { public static final String FILE_CHECKSUM_TYPE = "fileChecksumType"; public static final String FILE_CHECKSUM_VALUE = "fileChecksumValue"; public static final String FILENAME_WITHOUT_EXTENSION = "fileNameWithoutExtension"; + public static final String FILE_RESTRICTED = "fileRestricted"; /** * Indexed as a string so we can facet on it. */ @@ -270,6 +271,8 @@ more targeted results for just datasets. The format is YYYY (i.e. 
*/ public static final String DATASET_TYPE = "datasetType"; + public static final String OBSERVATIONS = "observations"; + public static final String VARIABLE_COUNT = "variableCount"; public static final String VARIABLE_NAME = "variableName"; public static final String VARIABLE_LABEL = "variableLabel"; public static final String LITERAL_QUESTION = "literalQuestion"; @@ -291,5 +294,6 @@ more targeted results for just datasets. The format is YYYY (i.e. public static final String DATASET_VALID = "datasetValid"; public static final String DATASET_LICENSE = "license"; + public static final String FILE_COUNT = "fileCount"; } diff --git a/src/main/java/edu/harvard/iq/dataverse/search/SearchServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/search/SearchServiceBean.java index de75c88009f..60bcc9f846e 100644 --- a/src/main/java/edu/harvard/iq/dataverse/search/SearchServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/search/SearchServiceBean.java @@ -1,6 +1,7 @@ package edu.harvard.iq.dataverse.search; import edu.harvard.iq.dataverse.*; +import edu.harvard.iq.dataverse.authorization.Permission; import edu.harvard.iq.dataverse.authorization.groups.Group; import edu.harvard.iq.dataverse.authorization.groups.GroupServiceBean; import edu.harvard.iq.dataverse.authorization.users.AuthenticatedUser; @@ -18,6 +19,7 @@ import java.util.Calendar; import java.util.Collections; import java.util.Date; +import java.util.EnumSet; import java.util.HashMap; import java.util.LinkedList; import java.util.List; @@ -75,6 +77,8 @@ public class SearchServiceBean { SystemConfig systemConfig; @EJB SolrClientService solrClientService; + @EJB + PermissionServiceBean permissionService; @Inject ThumbnailServiceWrapper thumbnailServiceWrapper; @@ -497,7 +501,8 @@ public SolrQueryResponse search( Long retentionEndDate = (Long) solrDocument.getFieldValue(SearchFields.RETENTION_END_DATE); // Boolean datasetValid = (Boolean) solrDocument.getFieldValue(SearchFields.DATASET_VALID); - + Long 
fileCount = (Long) solrDocument.getFieldValue(SearchFields.FILE_COUNT); + List matchedFields = new ArrayList<>(); SolrSearchResult solrSearchResult = new SolrSearchResult(query, name); @@ -570,6 +575,7 @@ public SolrQueryResponse search( solrSearchResult.setDeaccessionReason(deaccessionReason); solrSearchResult.setDvTree(dvTree); solrSearchResult.setDatasetValid(datasetValid); + solrSearchResult.setFileCount(fileCount); if (Boolean.TRUE.equals((Boolean) solrDocument.getFieldValue(SearchFields.IS_HARVESTED))) { solrSearchResult.setHarvested(true); @@ -675,6 +681,15 @@ public SolrQueryResponse search( logger.info("Exception setting setFileChecksumType: " + ex); } solrSearchResult.setFileChecksumValue((String) solrDocument.getFieldValue(SearchFields.FILE_CHECKSUM_VALUE)); + + if (solrDocument.getFieldValue(SearchFields.FILE_RESTRICTED) != null) { + solrSearchResult.setFileRestricted((Boolean) solrDocument.getFieldValue(SearchFields.FILE_RESTRICTED)); + } + + if (solrSearchResult.getEntity() != null) { + solrSearchResult.setCanDownloadFile(permissionService.hasPermissionsFor(dataverseRequest, solrSearchResult.getEntity(), EnumSet.of(Permission.DownloadFile))); + } + solrSearchResult.setUnf((String) solrDocument.getFieldValue(SearchFields.UNF)); solrSearchResult.setDatasetVersionId(datasetVersionId); List fileCategories = (List) solrDocument.getFieldValues(SearchFields.FILE_TAG); @@ -686,6 +701,10 @@ public SolrQueryResponse search( Collections.sort(tabularDataTags); solrSearchResult.setTabularDataTags(tabularDataTags); } + Long observations = (Long) solrDocument.getFieldValue(SearchFields.OBSERVATIONS); + solrSearchResult.setObservations(observations); + Long tabCount = (Long) solrDocument.getFieldValue(SearchFields.VARIABLE_COUNT); + solrSearchResult.setTabularDataCount(tabCount); String filePID = (String) solrDocument.getFieldValue(SearchFields.FILE_PERSISTENT_ID); if(null != filePID && !"".equals(filePID) && !"".equals("null")) { 
solrSearchResult.setFilePersistentId(filePID); diff --git a/src/main/java/edu/harvard/iq/dataverse/search/SolrSearchResult.java b/src/main/java/edu/harvard/iq/dataverse/search/SolrSearchResult.java index 27900bac63f..2250a245dab 100644 --- a/src/main/java/edu/harvard/iq/dataverse/search/SolrSearchResult.java +++ b/src/main/java/edu/harvard/iq/dataverse/search/SolrSearchResult.java @@ -78,6 +78,10 @@ public class SolrSearchResult { private String citation; private String citationHtml; private String datasetType; + /** + * Only Dataset can have a file count + */ + private Long fileCount; /** * Files and datasets might have a UNF. Dataverses don't. */ @@ -93,6 +97,8 @@ public class SolrSearchResult { private String fileMd5; private DataFile.ChecksumType fileChecksumType; private String fileChecksumValue; + private Boolean fileRestricted; + private Boolean canDownloadFile; private String dataverseAlias; private String dataverseParentAlias; private String dataverseParentName; @@ -118,6 +124,8 @@ public class SolrSearchResult { private String harvestingDescription = null; private List fileCategories = null; private List tabularDataTags = null; + private Long tabularDataCount; + private Long observations; private String identifierOfDataverse = null; private String nameOfDataverse = null; @@ -456,10 +464,10 @@ public JsonObjectBuilder getJsonForMyData(boolean isValid) { } // getJsonForMydata public JsonObjectBuilder json(boolean showRelevance, boolean showEntityIds, boolean showApiUrls) { - return json(showRelevance, showEntityIds, showApiUrls, null, null); + return json(showRelevance, showEntityIds, showApiUrls, null); } - public JsonObjectBuilder json(boolean showRelevance, boolean showEntityIds, boolean showApiUrls, List metadataFields, Long datasetFileCount) { + public JsonObjectBuilder json(boolean showRelevance, boolean showEntityIds, boolean showApiUrls, List metadataFields) { if (this.type == null) { return jsonObjectBuilder(); } @@ -561,7 +569,12 @@ public 
JsonObjectBuilder json(boolean showRelevance, boolean showEntityIds, bool .add("citationHtml", this.citationHtml) .add("identifier_of_dataverse", this.identifierOfDataverse) .add("name_of_dataverse", this.nameOfDataverse) - .add("citation", this.citation); + .add("citation", this.citation) + .add("restricted", this.fileRestricted) + .add("variables", this.tabularDataCount) + .add("observations", this.observations) + .add("canDownloadFile", this.canDownloadFile); + // Now that nullSafeJsonBuilder has been instatiated, check for null before adding to it! if (showRelevance) { nullSafeJsonBuilder.add("matches", getRelevance()); @@ -575,6 +588,12 @@ public JsonObjectBuilder json(boolean showRelevance, boolean showEntityIds, bool if (!getPublicationStatuses().isEmpty()) { nullSafeJsonBuilder.add("publicationStatuses", getPublicationStatusesAsJSON()); } + if (this.fileCategories != null && !this.fileCategories.isEmpty()) { + nullSafeJsonBuilder.add("categories", JsonPrinter.asJsonArray(this.fileCategories)); + } + if (this.tabularDataTags != null && !this.tabularDataTags.isEmpty()) { + nullSafeJsonBuilder.add("tabularTags", JsonPrinter.asJsonArray(this.tabularDataTags)); + } if (this.entity == null) { @@ -597,7 +616,7 @@ public JsonObjectBuilder json(boolean showRelevance, boolean showEntityIds, bool subjects.add(subject); } nullSafeJsonBuilder.add("subjects", subjects); - nullSafeJsonBuilder.add("fileCount", datasetFileCount); + nullSafeJsonBuilder.add("fileCount", this.fileCount); nullSafeJsonBuilder.add("versionId", dv.getId()); nullSafeJsonBuilder.add("versionState", dv.getVersionState().toString()); if (this.isPublishedState()) { @@ -952,6 +971,18 @@ public List getTabularDataTags() { public void setTabularDataTags(List tabularDataTags) { this.tabularDataTags = tabularDataTags; } + public void setTabularDataCount(Long tabularDataCount) { + this.tabularDataCount = tabularDataCount; + } + public Long getTabularDataCount() { + return tabularDataCount; + } + public Long 
getObservations() { + return observations; + } + public void setObservations(Long observations) { + this.observations = observations; + } public Map getParent() { return parent; @@ -1074,6 +1105,21 @@ public void setFileChecksumValue(String fileChecksumValue) { this.fileChecksumValue = fileChecksumValue; } + public Boolean getFileRestricted() { + return fileRestricted; + } + + public void setFileRestricted(Boolean fileRestricted) { + this.fileRestricted = fileRestricted; + } + public Boolean getCanDownloadFile() { + return canDownloadFile; + } + + public void setCanDownloadFile(Boolean canDownloadFile) { + this.canDownloadFile = canDownloadFile; + } + public String getNameSort() { return nameSort; } @@ -1348,4 +1394,12 @@ public boolean isValid(Predicate canUpdateDataset) { } return !canUpdateDataset.test(this); } + + public Long getFileCount() { + return fileCount; + } + + public void setFileCount(Long fileCount) { + this.fileCount = fileCount; + } } diff --git a/src/main/java/edu/harvard/iq/dataverse/settings/FeatureFlags.java b/src/main/java/edu/harvard/iq/dataverse/settings/FeatureFlags.java index 20632c170e4..2242b0f51c6 100644 --- a/src/main/java/edu/harvard/iq/dataverse/settings/FeatureFlags.java +++ b/src/main/java/edu/harvard/iq/dataverse/settings/FeatureFlags.java @@ -33,9 +33,32 @@ public enum FeatureFlags { /** * Enables API authentication via Bearer Token. * @apiNote Raise flag by setting "dataverse.feature.api-bearer-auth" - * @since Dataverse @TODO: + * @since Dataverse 5.14: */ API_BEARER_AUTH("api-bearer-auth"), + /** + * Enables sending the missing user claims in the request JSON provided during OIDC user registration + * (see API endpoint /users/register) when these claims are not returned by the identity provider + * but are necessary for registering the user in Dataverse. + * + *

    The value of this feature flag is only considered when the feature flag + * {@link #API_BEARER_AUTH} is enabled.

    + * + * @apiNote Raise flag by setting "dataverse.feature.api-bearer-auth-provide-missing-claims" + * @since Dataverse @TODO: + */ + API_BEARER_AUTH_PROVIDE_MISSING_CLAIMS("api-bearer-auth-provide-missing-claims"), + /** + * Specifies that Terms of Service acceptance is handled by the IdP, eliminating the need to include + * ToS acceptance boolean parameter (termsAccepted) in the OIDC user registration request body. + * + *

    The value of this feature flag is only considered when the feature flag + * {@link #API_BEARER_AUTH} is enabled.

    + * + * @apiNote Raise flag by setting "dataverse.feature.api-bearer-auth-handle-tos-acceptance-in-idp" + * @since Dataverse @TODO: + */ + API_BEARER_AUTH_HANDLE_TOS_ACCEPTANCE_IN_IDP("api-bearer-auth-handle-tos-acceptance-in-idp"), /** * For published (public) objects, don't use a join when searching Solr. * Experimental! Requires a reindex with the following feature flag enabled, diff --git a/src/main/java/edu/harvard/iq/dataverse/settings/SettingsServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/settings/SettingsServiceBean.java index 8ed96690e84..b5eb483c2c8 100644 --- a/src/main/java/edu/harvard/iq/dataverse/settings/SettingsServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/settings/SettingsServiceBean.java @@ -539,6 +539,12 @@ Whether Harvesting (OAI) service is enabled * */ GlobusSingleFileTransfer, + /** Lower limit of the number of files in a Globus upload task where + * the batch mode should be utilized in looking up the file information + * on the remote end node (file sizes, primarily), instead of individual + * lookups. 
+ */ + GlobusBatchLookupSize, /** * Optional external executables to run on the metadata for dataverses * and datasets being published; as an extra validation step, to diff --git a/src/main/java/edu/harvard/iq/dataverse/util/BundleUtil.java b/src/main/java/edu/harvard/iq/dataverse/util/BundleUtil.java index 922e6ff5d28..771cf5fd0f0 100644 --- a/src/main/java/edu/harvard/iq/dataverse/util/BundleUtil.java +++ b/src/main/java/edu/harvard/iq/dataverse/util/BundleUtil.java @@ -111,7 +111,7 @@ public static ResourceBundle getResourceBundle(String propertyFileName, Locale c ClassLoader loader = getClassLoader(filesRootDirectory); bundle = ResourceBundle.getBundle(propertyFileName, currentLocale, loader); } catch (MissingResourceException mre) { - logger.warning("No property file named " + propertyFileName + "_" + currentLocale.getLanguage() + logger.fine("No property file named " + propertyFileName + "_" + currentLocale.getLanguage() + " found in " + filesRootDirectory + ", using untranslated values"); bundle = ResourceBundle.getBundle("propertyFiles/" + propertyFileName, currentLocale); } diff --git a/src/main/java/edu/harvard/iq/dataverse/util/SystemConfig.java b/src/main/java/edu/harvard/iq/dataverse/util/SystemConfig.java index 434b3bd8f8f..e769cacfdb1 100644 --- a/src/main/java/edu/harvard/iq/dataverse/util/SystemConfig.java +++ b/src/main/java/edu/harvard/iq/dataverse/util/SystemConfig.java @@ -78,6 +78,7 @@ public class SystemConfig { public static final long defaultZipDownloadLimit = 104857600L; // 100MB private static final int defaultMultipleUploadFilesLimit = 1000; private static final int defaultLoginSessionTimeout = 480; // = 8 hours + private static final int defaultGlobusBatchLookupSize = 50; private String buildNumber = null; @@ -954,6 +955,11 @@ public boolean isGlobusFileDownload() { return (isGlobusDownload() && settingsService.isTrueForKey(SettingsServiceBean.Key.GlobusSingleFileTransfer, false)); } + public int getGlobusBatchLookupSize() { + String 
batchSizeOption = settingsService.getValueForKey(SettingsServiceBean.Key.GlobusBatchLookupSize); + return getIntLimitFromStringOrDefault(batchSizeOption, defaultGlobusBatchLookupSize); + } + private Boolean getMethodAvailable(String method, boolean upload) { String methods = settingsService.getValueForKey( upload ? SettingsServiceBean.Key.UploadMethods : SettingsServiceBean.Key.DownloadMethods); diff --git a/src/main/java/edu/harvard/iq/dataverse/util/json/JsonParser.java b/src/main/java/edu/harvard/iq/dataverse/util/json/JsonParser.java index efbf36e53d9..308213b5cc0 100644 --- a/src/main/java/edu/harvard/iq/dataverse/util/json/JsonParser.java +++ b/src/main/java/edu/harvard/iq/dataverse/util/json/JsonParser.java @@ -21,6 +21,7 @@ import edu.harvard.iq.dataverse.api.Util; import edu.harvard.iq.dataverse.api.dto.DataverseDTO; import edu.harvard.iq.dataverse.api.dto.FieldDTO; +import edu.harvard.iq.dataverse.api.dto.UserDTO; import edu.harvard.iq.dataverse.authorization.groups.impl.ipaddress.IpGroup; import edu.harvard.iq.dataverse.authorization.groups.impl.ipaddress.ip.IpAddress; import edu.harvard.iq.dataverse.authorization.groups.impl.ipaddress.ip.IpAddressRange; @@ -31,6 +32,7 @@ import edu.harvard.iq.dataverse.harvest.client.HarvestingClient; import edu.harvard.iq.dataverse.license.License; import edu.harvard.iq.dataverse.license.LicenseServiceBean; +import edu.harvard.iq.dataverse.settings.FeatureFlags; import edu.harvard.iq.dataverse.settings.SettingsServiceBean; import edu.harvard.iq.dataverse.util.BundleUtil; import edu.harvard.iq.dataverse.workflow.Workflow; @@ -49,6 +51,7 @@ import java.util.Map; import java.util.Optional; import java.util.Set; +import java.util.function.Function; import java.util.function.Consumer; import java.util.logging.Logger; import java.util.stream.Collectors; @@ -76,11 +79,11 @@ public class JsonParser { DatasetTypeServiceBean datasetTypeService; HarvestingClient harvestingClient = null; boolean allowHarvestingMissingCVV = false; 
- + /** * if lenient, we will accept alternate spellings for controlled vocabulary values */ - boolean lenient = false; + boolean lenient = false; @Deprecated public JsonParser(DatasetFieldServiceBean datasetFieldSvc, MetadataBlockServiceBean blockService, SettingsServiceBean settingsService) { @@ -92,7 +95,7 @@ public JsonParser(DatasetFieldServiceBean datasetFieldSvc, MetadataBlockServiceB public JsonParser(DatasetFieldServiceBean datasetFieldSvc, MetadataBlockServiceBean blockService, SettingsServiceBean settingsService, LicenseServiceBean licenseService, DatasetTypeServiceBean datasetTypeService) { this(datasetFieldSvc, blockService, settingsService, licenseService, datasetTypeService, null); } - + public JsonParser(DatasetFieldServiceBean datasetFieldSvc, MetadataBlockServiceBean blockService, SettingsServiceBean settingsService, LicenseServiceBean licenseService, DatasetTypeServiceBean datasetTypeService, HarvestingClient harvestingClient) { this.datasetFieldSvc = datasetFieldSvc; this.blockService = blockService; @@ -106,7 +109,7 @@ public JsonParser(DatasetFieldServiceBean datasetFieldSvc, MetadataBlockServiceB public JsonParser() { this( null,null,null ); } - + public boolean isLenient() { return lenient; } @@ -282,11 +285,19 @@ public DataverseTheme parseDataverseTheme(JsonObject obj) { return theme; } - private static String getMandatoryString(JsonObject jobj, String name) throws JsonParseException { + private static T getMandatoryField(JsonObject jobj, String name, Function getter) throws JsonParseException { if (jobj.containsKey(name)) { - return jobj.getString(name); + return getter.apply(name); } - throw new JsonParseException("Field " + name + " is mandatory"); + throw new JsonParseException("Field '" + name + "' is mandatory"); + } + + private static String getMandatoryString(JsonObject jobj, String name) throws JsonParseException { + return getMandatoryField(jobj, name, jobj::getString); + } + + private static Boolean 
getMandatoryBoolean(JsonObject jobj, String name) throws JsonParseException { + return getMandatoryField(jobj, name, jobj::getBoolean); } public IpGroup parseIpGroup(JsonObject obj) { @@ -318,10 +329,10 @@ public IpGroup parseIpGroup(JsonObject obj) { return retVal; } - + public MailDomainGroup parseMailDomainGroup(JsonObject obj) throws JsonParseException { MailDomainGroup grp = new MailDomainGroup(); - + if (obj.containsKey("id")) { grp.setId(obj.getJsonNumber("id").longValue()); } @@ -345,7 +356,7 @@ public MailDomainGroup parseMailDomainGroup(JsonObject obj) throws JsonParseExce } else { throw new JsonParseException("Field domains is mandatory."); } - + return grp; } @@ -383,7 +394,7 @@ public Dataset parseDataset(JsonObject obj) throws JsonParseException { throw new JsonParseException("Invalid dataset type: " + datasetTypeIn); } - DatasetVersion dsv = new DatasetVersion(); + DatasetVersion dsv = new DatasetVersion(); dsv.setDataset(dataset); dsv = parseDatasetVersion(obj.getJsonObject("datasetVersion"), dsv); List versions = new ArrayList<>(1); @@ -414,7 +425,7 @@ public DatasetVersion parseDatasetVersion(JsonObject obj, DatasetVersion dsv) th if (dsv.getId()==null) { dsv.setId(parseLong(obj.getString("id", null))); } - + String versionStateStr = obj.getString("versionState", null); if (versionStateStr != null) { dsv.setVersionState(DatasetVersion.VersionState.valueOf(versionStateStr)); @@ -427,8 +438,8 @@ public DatasetVersion parseDatasetVersion(JsonObject obj, DatasetVersion dsv) th // Terms of Use related fields TermsOfUseAndAccess terms = new TermsOfUseAndAccess(); - License license = null; - + License license = null; + try { // This method will attempt to parse the license in the format // in which it appears in our json exports, as a compound @@ -447,7 +458,7 @@ public DatasetVersion parseDatasetVersion(JsonObject obj, DatasetVersion dsv) th // "license" : "CC0 1.0" license = parseLicense(obj.getString("license", null)); } - + if (license == null) { 
terms.setLicense(license); terms.setTermsOfUse(obj.getString("termsOfUse", null)); @@ -485,13 +496,13 @@ public DatasetVersion parseDatasetVersion(JsonObject obj, DatasetVersion dsv) th dsv.setFileMetadatas(parseFiles(filesJson, dsv)); } return dsv; - } catch (ParseException ex) { + } catch (ParseException ex) { throw new JsonParseException(BundleUtil.getStringFromBundle("jsonparser.error.parsing.date", Arrays.asList(ex.getMessage())) , ex); } catch (NumberFormatException ex) { throw new JsonParseException(BundleUtil.getStringFromBundle("jsonparser.error.parsing.number", Arrays.asList(ex.getMessage())), ex); } } - + private edu.harvard.iq.dataverse.license.License parseLicense(String licenseNameOrUri) throws JsonParseException { if (licenseNameOrUri == null){ boolean safeDefaultIfKeyNotFound = true; @@ -505,7 +516,7 @@ private edu.harvard.iq.dataverse.license.License parseLicense(String licenseName if (license == null) throw new JsonParseException("Invalid license: " + licenseNameOrUri); return license; } - + private edu.harvard.iq.dataverse.license.License parseLicense(JsonObject licenseObj) throws JsonParseException { if (licenseObj == null){ boolean safeDefaultIfKeyNotFound = true; @@ -515,12 +526,12 @@ private edu.harvard.iq.dataverse.license.License parseLicense(JsonObject license return licenseService.getDefault(); } } - + String licenseName = licenseObj.getString("name", null); String licenseUri = licenseObj.getString("uri", null); - - License license = null; - + + License license = null; + // If uri is provided, we'll try that first. This is an easier lookup // method; the uri is always the same. 
The name may have been customized // (translated) on this instance, so we may be dealing with such translated @@ -530,17 +541,17 @@ private edu.harvard.iq.dataverse.license.License parseLicense(JsonObject license if (licenseUri != null) { license = licenseService.getByNameOrUri(licenseUri); } - + if (license != null) { return license; } - + if (licenseName == null) { - String exMsg = "Invalid or unsupported license section submitted" + String exMsg = "Invalid or unsupported license section submitted" + (licenseUri != null ? ": " + licenseUri : "."); - throw new JsonParseException("Invalid or unsupported license section submitted."); + throw new JsonParseException("Invalid or unsupported license section submitted."); } - + license = licenseService.getByPotentiallyLocalizedName(licenseName); if (license == null) { throw new JsonParseException("Invalid or unsupported license: " + licenseName); @@ -559,13 +570,13 @@ public List parseMetadataBlocks(JsonObject json) throws JsonParseE } return fields; } - + public List parseMultipleFields(JsonObject json) throws JsonParseException { JsonArray fieldsJson = json.getJsonArray("fields"); List fields = parseFieldsFromArray(fieldsJson, false); return fields; } - + public List parseMultipleFieldsForDelete(JsonObject json) throws JsonParseException { List fields = new LinkedList<>(); for (JsonObject fieldJson : json.getJsonArray("fields").getValuesAs(JsonObject.class)) { @@ -573,7 +584,7 @@ public List parseMultipleFieldsForDelete(JsonObject json) throws J } return fields; } - + private List parseFieldsFromArray(JsonArray fieldsArray, Boolean testType) throws JsonParseException { List fields = new LinkedList<>(); for (JsonObject fieldJson : fieldsArray.getValuesAs(JsonObject.class)) { @@ -585,18 +596,18 @@ private List parseFieldsFromArray(JsonArray fieldsArray, Boolean t } catch (CompoundVocabularyException ex) { DatasetFieldType fieldType = datasetFieldSvc.findByNameOpt(fieldJson.getString("typeName", "")); if (lenient && 
(DatasetFieldConstant.geographicCoverage).equals(fieldType.getName())) { - fields.add(remapGeographicCoverage( ex)); + fields.add(remapGeographicCoverage( ex)); } else { // if not lenient mode, re-throw exception throw ex; } - } + } } return fields; - + } - + public List parseFiles(JsonArray metadatasJson, DatasetVersion dsv) throws JsonParseException { List fileMetadatas = new LinkedList<>(); if (metadatasJson != null) { @@ -610,7 +621,7 @@ public List parseFiles(JsonArray metadatasJson, DatasetVersion dsv fileMetadata.setDirectoryLabel(directoryLabel); fileMetadata.setDescription(description); fileMetadata.setDatasetVersion(dsv); - + if ( filemetadataJson.containsKey("dataFile") ) { DataFile dataFile = parseDataFile(filemetadataJson.getJsonObject("dataFile")); dataFile.getFileMetadatas().add(fileMetadata); @@ -623,7 +634,7 @@ public List parseFiles(JsonArray metadatasJson, DatasetVersion dsv dsv.getDataset().getFiles().add(dataFile); } } - + fileMetadatas.add(fileMetadata); fileMetadata.setCategories(getCategories(filemetadataJson, dsv.getDataset())); } @@ -631,19 +642,19 @@ public List parseFiles(JsonArray metadatasJson, DatasetVersion dsv return fileMetadatas; } - + public DataFile parseDataFile(JsonObject datafileJson) { DataFile dataFile = new DataFile(); - + Timestamp timestamp = new Timestamp(new Date().getTime()); dataFile.setCreateDate(timestamp); dataFile.setModificationTime(timestamp); dataFile.setPermissionModificationTime(timestamp); - + if ( datafileJson.containsKey("filesize") ) { dataFile.setFilesize(datafileJson.getJsonNumber("filesize").longValueExact()); } - + String contentType = datafileJson.getString("contentType", null); if (contentType == null) { contentType = "application/octet-stream"; @@ -706,21 +717,21 @@ public DataFile parseDataFile(JsonObject datafileJson) { // TODO: // unf (if available)... etc.? 
- + dataFile.setContentType(contentType); dataFile.setStorageIdentifier(storageIdentifier); - + return dataFile; } /** * Special processing for GeographicCoverage compound field: * Handle parsing exceptions caused by invalid controlled vocabulary in the "country" field by * putting the invalid data in "otherGeographicCoverage" in a new compound value. - * + * * @param ex - contains the invalid values to be processed - * @return a compound DatasetField that contains the newly created values, in addition to + * @return a compound DatasetField that contains the newly created values, in addition to * the original valid values. - * @throws JsonParseException + * @throws JsonParseException */ private DatasetField remapGeographicCoverage(CompoundVocabularyException ex) throws JsonParseException{ List> geoCoverageList = new ArrayList<>(); @@ -747,23 +758,23 @@ private DatasetField remapGeographicCoverage(CompoundVocabularyException ex) thr } return geoCoverageField; } - - + + public DatasetField parseFieldForDelete(JsonObject json) throws JsonParseException{ DatasetField ret = new DatasetField(); - DatasetFieldType type = datasetFieldSvc.findByNameOpt(json.getString("typeName", "")); + DatasetFieldType type = datasetFieldSvc.findByNameOpt(json.getString("typeName", "")); if (type == null) { throw new JsonParseException("Can't find type '" + json.getString("typeName", "") + "'"); } return ret; } - - + + public DatasetField parseField(JsonObject json) throws JsonParseException{ return parseField(json, true); } - - + + public DatasetField parseField(JsonObject json, Boolean testType) throws JsonParseException { if (json == null) { return null; @@ -771,7 +782,7 @@ public DatasetField parseField(JsonObject json, Boolean testType) throws JsonPar DatasetField ret = new DatasetField(); DatasetFieldType type = datasetFieldSvc.findByNameOpt(json.getString("typeName", "")); - + if (type == null) { logger.fine("Can't find type '" + json.getString("typeName", "") + "'"); @@ -789,8 
+800,8 @@ public DatasetField parseField(JsonObject json, Boolean testType) throws JsonPar if (testType && type.isControlledVocabulary() && !json.getString("typeClass").equals("controlledVocabulary")) { throw new JsonParseException("incorrect typeClass for field " + json.getString("typeName", "") + ", should be controlledVocabulary"); } - - + + ret.setDatasetFieldType(type); if (type.isCompound()) { @@ -803,11 +814,11 @@ public DatasetField parseField(JsonObject json, Boolean testType) throws JsonPar return ret; } - + public void parseCompoundValue(DatasetField dsf, DatasetFieldType compoundType, JsonObject json) throws JsonParseException { parseCompoundValue(dsf, compoundType, json, true); } - + public void parseCompoundValue(DatasetField dsf, DatasetFieldType compoundType, JsonObject json, Boolean testType) throws JsonParseException { List vocabExceptions = new ArrayList<>(); List vals = new LinkedList<>(); @@ -829,7 +840,7 @@ public void parseCompoundValue(DatasetField dsf, DatasetFieldType compoundType, } catch(ControlledVocabularyException ex) { vocabExceptions.add(ex); } - + if (f!=null) { if (!compoundType.getChildDatasetFieldTypes().contains(f.getDatasetFieldType())) { throw new JsonParseException("field " + f.getDatasetFieldType().getName() + " is not a child of " + compoundType.getName()); @@ -846,10 +857,10 @@ public void parseCompoundValue(DatasetField dsf, DatasetFieldType compoundType, order++; } - + } else { - + DatasetFieldCompoundValue cv = new DatasetFieldCompoundValue(); List fields = new LinkedList<>(); JsonObject value = json.getJsonObject("value"); @@ -870,7 +881,7 @@ public void parseCompoundValue(DatasetField dsf, DatasetFieldType compoundType, cv.setChildDatasetFields(fields); vals.add(cv); } - + } if (!vocabExceptions.isEmpty()) { throw new CompoundVocabularyException( "Invalid controlled vocabulary in compound field ", vocabExceptions, vals); @@ -909,7 +920,7 @@ public void parsePrimitiveValue(DatasetField dsf, DatasetFieldType dft , 
JsonObj try {json.getString("value");} catch (ClassCastException cce) { throw new JsonParseException("Invalid value submitted for " + dft.getName() + ". It should be a single value."); - } + } DatasetFieldValue datasetFieldValue = new DatasetFieldValue(); datasetFieldValue.setValue(json.getString("value", "").trim()); datasetFieldValue.setDatasetField(dsf); @@ -923,7 +934,7 @@ public void parsePrimitiveValue(DatasetField dsf, DatasetFieldType dft , JsonObj dsf.setDatasetFieldValues(vals); } - + public Workflow parseWorkflow(JsonObject json) throws JsonParseException { Workflow retVal = new Workflow(); validate("", json, "name", ValueType.STRING); @@ -937,12 +948,12 @@ public Workflow parseWorkflow(JsonObject json) throws JsonParseException { retVal.setSteps(steps); return retVal; } - + public WorkflowStepData parseStepData( JsonObject json ) throws JsonParseException { WorkflowStepData wsd = new WorkflowStepData(); validate("step", json, "provider", ValueType.STRING); validate("step", json, "stepType", ValueType.STRING); - + wsd.setProviderId(json.getString("provider")); wsd.setStepType(json.getString("stepType")); if ( json.containsKey("parameters") ) { @@ -959,7 +970,7 @@ public WorkflowStepData parseStepData( JsonObject json ) throws JsonParseExcepti } return wsd; } - + private String jsonValueToString(JsonValue jv) { switch ( jv.getValueType() ) { case STRING: return ((JsonString)jv).getString(); @@ -1007,7 +1018,6 @@ public void parseControlledVocabularyValue(DatasetField dsf, DatasetFieldType cv if (cvv == null) { if (allowHarvestingMissingCVV) { // we need to process this as a primitive value - logger.warning(">>>> Value '" + strValue + "' does not exist in type '" + cvvType.getName() + "'. Processing as primitive per setting override."); parsePrimitiveValue(dsf, cvvType , json); return; } else { @@ -1039,11 +1049,11 @@ Long parseLong(String str) throws NumberFormatException { int parsePrimitiveInt(String str, int defaultValue) { return str == null ? 
defaultValue : Integer.parseInt(str); } - + public String parseHarvestingClient(JsonObject obj, HarvestingClient harvestingClient) throws JsonParseException { - + String dataverseAlias = obj.getString("dataverseAlias",null); - + harvestingClient.setName(obj.getString("nickName",null)); harvestingClient.setHarvestStyle(obj.getString("style", "default")); harvestingClient.setHarvestingUrl(obj.getString("harvestUrl",null)); @@ -1080,7 +1090,7 @@ private List getCategories(JsonObject filemetadataJson, Datase } return dataFileCategories; } - + /** * Validate than a JSON object has a field of an expected type, or throw an * inforamtive exception. @@ -1088,12 +1098,29 @@ private List getCategories(JsonObject filemetadataJson, Datase * @param jobject * @param fieldName * @param expectedValueType - * @throws JsonParseException + * @throws JsonParseException */ private void validate(String objectName, JsonObject jobject, String fieldName, ValueType expectedValueType) throws JsonParseException { - if ( (!jobject.containsKey(fieldName)) + if ( (!jobject.containsKey(fieldName)) || (jobject.get(fieldName).getValueType()!=expectedValueType) ) { throw new JsonParseException( objectName + " missing a field named '"+fieldName+"' of type " + expectedValueType ); } } + + public UserDTO parseUserDTO(JsonObject jobj) throws JsonParseException { + UserDTO userDTO = new UserDTO(); + + userDTO.setUsername(jobj.getString("username", null)); + userDTO.setEmailAddress(jobj.getString("emailAddress", null)); + userDTO.setFirstName(jobj.getString("firstName", null)); + userDTO.setLastName(jobj.getString("lastName", null)); + userDTO.setAffiliation(jobj.getString("affiliation", null)); + userDTO.setPosition(jobj.getString("position", null)); + + if (!FeatureFlags.API_BEARER_AUTH_HANDLE_TOS_ACCEPTANCE_IN_IDP.enabled()) { + userDTO.setTermsAccepted(getMandatoryBoolean(jobj, "termsAccepted")); + } + + return userDTO; + } } diff --git 
a/src/main/java/edu/harvard/iq/dataverse/util/json/JsonPrinter.java b/src/main/java/edu/harvard/iq/dataverse/util/json/JsonPrinter.java index 6666a7f0e7d..06ccd2769cd 100644 --- a/src/main/java/edu/harvard/iq/dataverse/util/json/JsonPrinter.java +++ b/src/main/java/edu/harvard/iq/dataverse/util/json/JsonPrinter.java @@ -56,6 +56,7 @@ import jakarta.ejb.Singleton; import jakarta.json.JsonArray; import jakarta.json.JsonObject; +import java.util.function.Predicate; /** * Convert objects to Json. @@ -642,22 +643,31 @@ public static JsonObjectBuilder json(MetadataBlock metadataBlock, boolean printO .add("displayName", metadataBlock.getDisplayName()) .add("displayOnCreate", metadataBlock.isDisplayOnCreate()); - Set datasetFieldTypes; - - if (ownerDataverse != null) { - datasetFieldTypes = new TreeSet<>(datasetFieldService.findAllInMetadataBlockAndDataverse( - metadataBlock, ownerDataverse, printOnlyDisplayedOnCreateDatasetFieldTypes)); - } else { - datasetFieldTypes = printOnlyDisplayedOnCreateDatasetFieldTypes - ? 
new TreeSet<>(datasetFieldService.findAllDisplayedOnCreateInMetadataBlock(metadataBlock)) - : new TreeSet<>(metadataBlock.getDatasetFieldTypes()); - } - JsonObjectBuilder fieldsBuilder = Json.createObjectBuilder(); - for (DatasetFieldType datasetFieldType : datasetFieldTypes) { - fieldsBuilder.add(datasetFieldType.getName(), json(datasetFieldType, ownerDataverse)); + + Predicate isNoChild = element -> element.isChild() == false; + List childLessList = metadataBlock.getDatasetFieldTypes().stream().filter(isNoChild).toList(); + Set datasetFieldTypesNoChildSorted = new TreeSet<>(childLessList); + + for (DatasetFieldType datasetFieldType : datasetFieldTypesNoChildSorted) { + + Long datasetFieldTypeId = datasetFieldType.getId(); + boolean requiredAsInputLevelInOwnerDataverse = ownerDataverse != null && ownerDataverse.isDatasetFieldTypeRequiredAsInputLevel(datasetFieldTypeId); + boolean includedAsInputLevelInOwnerDataverse = ownerDataverse != null && ownerDataverse.isDatasetFieldTypeIncludedAsInputLevel(datasetFieldTypeId); + boolean isNotInputLevelInOwnerDataverse = ownerDataverse != null && !ownerDataverse.isDatasetFieldTypeInInputLevels(datasetFieldTypeId); + + DatasetFieldType parentDatasetFieldType = datasetFieldType.getParentDatasetFieldType(); + boolean isRequired = parentDatasetFieldType == null ? datasetFieldType.isRequired() : parentDatasetFieldType.isRequired(); + + boolean displayCondition = printOnlyDisplayedOnCreateDatasetFieldTypes + ? 
(datasetFieldType.isDisplayOnCreate() || isRequired || requiredAsInputLevelInOwnerDataverse) + : ownerDataverse == null || includedAsInputLevelInOwnerDataverse || isNotInputLevelInOwnerDataverse; + + if (displayCondition) { + fieldsBuilder.add(datasetFieldType.getName(), json(datasetFieldType, ownerDataverse)); + } } - + jsonObjectBuilder.add("fields", fieldsBuilder); return jsonObjectBuilder; } diff --git a/src/main/java/edu/harvard/iq/dataverse/util/json/NullSafeJsonBuilder.java b/src/main/java/edu/harvard/iq/dataverse/util/json/NullSafeJsonBuilder.java index ef8ab39122f..21360fcd708 100644 --- a/src/main/java/edu/harvard/iq/dataverse/util/json/NullSafeJsonBuilder.java +++ b/src/main/java/edu/harvard/iq/dataverse/util/json/NullSafeJsonBuilder.java @@ -85,7 +85,10 @@ public NullSafeJsonBuilder add(String name, boolean value) { delegate.add(name, value); return this; } - + public NullSafeJsonBuilder add(String name, Boolean value) { + return (value != null) ? add(name, value.booleanValue()) : this; + } + @Override public NullSafeJsonBuilder addNull(String name) { delegate.addNull(name); diff --git a/src/main/java/propertyFiles/Bundle.properties b/src/main/java/propertyFiles/Bundle.properties index 012b389ce32..2e1dbeae767 100644 --- a/src/main/java/propertyFiles/Bundle.properties +++ b/src/main/java/propertyFiles/Bundle.properties @@ -307,7 +307,13 @@ notification.typeDescription.WORKFLOW_FAILURE=External workflow run has failed notification.typeDescription.STATUSUPDATED=Status of dataset has been updated notification.typeDescription.DATASETCREATED=Dataset was created by user notification.typeDescription.DATASETMENTIONED=Dataset was referenced in remote system - +notification.typeDescription.GLOBUSUPLOADCOMPLETED=Globus upload is completed +notification.typeDescription.GLOBUSUPLOADCOMPLETEDWITHERRORS=Globus upload completed with errors +notification.typeDescription.GLOBUSDOWNLOADCOMPLETED=Globus download is completed 
+notification.typeDescription.GLOBUSDOWNLOADCOMPLETEDWITHERRORS=Globus download completed with errors +notification.typeDescription.GLOBUSUPLOADLOCALFAILURE=Globus upload failed, internal error +notification.typeDescription.GLOBUSUPLOADREMOTEFAILURE=Globus upload failed, remote transfer error +notification.typeDescription.REQUESTEDFILEACCESS=File access requested groupAndRoles.manageTips=Here is where you can access and manage all the groups you belong to, and the roles you have been assigned. user.message.signup.label=Create Account user.message.signup.tip=Why have a Dataverse account? To create your own dataverse and customize it, add datasets, or request access to restricted files. @@ -837,7 +843,8 @@ notification.email.datasetWasMentioned=Hello {0},

    The {1} has just been notification.email.datasetWasMentioned.subject={0}: A Dataset Relationship has been reported! notification.email.globus.uploadCompleted.subject={0}: Files uploaded successfully via Globus and verified notification.email.globus.downloadCompleted.subject={0}: Files downloaded successfully via Globus -notification.email.globus.uploadCompletedWithErrors.subject={0}: Uploaded files via Globus with errors +notification.email.globus.downloadCompletedWithErrors.subject={0}: Globus download task completed, errors encountered +notification.email.globus.uploadCompletedWithErrors.subject={0}: Globus upload task completed with errors notification.email.globus.uploadFailedRemotely.subject={0}: Failed to upload files via Globus notification.email.globus.uploadFailedLocally.subject={0}: Failed to add files uploaded via Globus to dataset # dataverse.xhtml @@ -1457,7 +1464,7 @@ dataset.editBtn.itemLabel.metadata=Metadata dataset.editBtn.itemLabel.terms=Terms dataset.editBtn.itemLabel.permissions=Permissions dataset.editBtn.itemLabel.thumbnailsAndWidgets=Thumbnails + Widgets -dataset.editBtn.itemLabel.privateUrl=Private URL +dataset.editBtn.itemLabel.privateUrl=Preview URL dataset.editBtn.itemLabel.permissionsDataset=Dataset dataset.editBtn.itemLabel.permissionsFile=Restricted Files dataset.editBtn.itemLabel.deleteDataset=Delete Dataset @@ -1669,6 +1676,8 @@ dataset.message.createFailure=The dataset could not be created. dataset.message.termsFailure=The dataset terms could not be updated. dataset.message.label.fileAccess=Publicly-accessible storage dataset.message.publicInstall=Files in this dataset may be readable outside Dataverse, restricted and embargoed access are disabled +dataset.message.parallelUpdateError=Changes cannot be saved. This dataset has been edited since this page was opened. To continue, copy your changes, refresh the page to see the recent updates, and re-enter any changes you want to save. 
+dataset.message.parallelPublishError=Publishing is blocked. This dataset has been edited since this page was opened. To publish it, refresh the page to see the recent updates, and publish again. dataset.metadata.publicationDate=Publication Date dataset.metadata.publicationDate.tip=The publication date of a Dataset. dataset.metadata.citationDate=Citation Date @@ -1721,23 +1730,34 @@ dataset.transferUnrestricted=Click Continue to transfer the elligible files. dataset.requestAccessToRestrictedFiles=You may request access to any restricted file(s) by clicking the Request Access button. dataset.requestAccessToRestrictedFilesWithEmbargo=Embargoed files cannot be accessed during the embargo period. If your selection contains restricted files, you may request access to them by clicking the Request Access button. dataset.privateurl.infoMessageAuthor=Privately share this dataset before it is published: {0} -dataset.privateurl.infoMessageReviewer=This unpublished dataset is being privately shared. -dataset.privateurl.header=Unpublished Dataset Private URL -dataset.privateurl.tip=Use a Private URL to allow those without Dataverse accounts to access your unpublished dataset. For more information about the Private URL feature, please refer to the User Guide. -dataset.privateurl.absent=Private URL has not been created. -dataset.privateurl.createPrivateUrl=Create Private URL +dataset.privateurl.infoMessageReviewer=You are viewing a preview of this unpublished dataset version. +dataset.privateurl.header=Unpublished Dataset Preview URL +dataset.privateurl.tip=To cite this data in publications, use the dataset's persistent ID instead of this URL. For more information about the Preview URL feature, please refer to the User Guide. +dataset.privateurl.onlyone=Only one Preview URL can be active for a single dataset. +dataset.privateurl.absent=Preview URL has not been created. 
+dataset.privateurl.general.button.label=Create General Preview URL +dataset.privateurl.general.description=Create a URL that others can use to review this dataset version before it is published. They will be able to access all files in the dataset and see all metadata, including metadata that may identify the dataset's authors. +dataset.privateurl.general.title=General Preview +dataset.privateurl.anonymous.title=Anonymous Preview +dataset.privateurl.anonymous.button.label=Create Anonymous Preview URL +dataset.privateurl.anonymous.description=Create a URL that others can use to access an anonymized view of this unpublished dataset version. Metadata that could identify the dataset author will not be displayed. Non-identifying metadata will be visible. +dataset.privateurl.anonymous.description.paragraph.two=The dataset's files are not changed and users of the Anonymous Preview URL will be able to access them. Users of the Anonymous Preview URL will not be able to see the name of the Dataverse that this dataset is in but will be able to see the name of the repository, which might expose the dataset authors' identities. +dataset.privateurl.createPrivateUrl=Create Preview URL +dataset.privateurl.introduction=You can create a Preview URL to copy and share with others who will not need a repository account to review this unpublished dataset version. Once the dataset is published or if the URL is disabled, the URL will no longer work and will point to a "Page not found" page. dataset.privateurl.createPrivateUrl.anonymized=Create URL for Anonymized Access -dataset.privateurl.createPrivateUrl.anonymized.unavailable=Anonymized Access is not available once a version of the dataset has been published -dataset.privateurl.disablePrivateUrl=Disable Private URL -dataset.privateurl.disablePrivateUrlConfirm=Yes, Disable Private URL -dataset.privateurl.disableConfirmationText=Are you sure you want to disable the Private URL? 
If you have shared the Private URL with others they will no longer be able to use it to access your unpublished dataset. -dataset.privateurl.cannotCreate=Private URL can only be used with unpublished versions of datasets. -dataset.privateurl.roleassigeeTitle=Private URL Enabled +dataset.privateurl.createPrivateUrl.anonymized.unavailable=You won't be able to create an Anonymous Preview URL once a version of this dataset has been published. +dataset.privateurl.disableGeneralPreviewUrl=Disable General Preview URL +dataset.privateurl.disableAnonPreviewUrl=Disable Anonymous Preview URL +dataset.privateurl.disableGeneralPreviewUrlConfirm=Yes, Disable General Preview URL +dataset.privateurl.disableAnonPreviewUrlConfirm=Yes, Disable Anonymous Preview URL +dataset.privateurl.disableConfirmationText=Are you sure you want to disable the Preview URL? If you have shared the Preview URL with others they will no longer be able to use it to access your unpublished dataset. +dataset.privateurl.cannotCreate=Preview URL can only be used with unpublished versions of datasets. +dataset.privateurl.roleassigeeTitle=Preview URL Enabled dataset.privateurl.createdSuccess=Success! -dataset.privateurl.full=This Private URL provides full read access to the dataset -dataset.privateurl.anonymized=This Private URL provides access to the anonymized dataset -dataset.privateurl.disabledSuccess=You have successfully disabled the Private URL for this unpublished dataset. -dataset.privateurl.noPermToCreate=To create a Private URL you must have the following permissions: {0}. +dataset.privateurl.full=This Preview URL provides full read access to the dataset +dataset.privateurl.anonymized=This Preview URL provides access to the anonymized dataset +dataset.privateurl.disabledSuccess=You have successfully disabled the Preview URL for this unpublished dataset. +dataset.privateurl.noPermToCreate=To create a Preview URL you must have the following permissions: {0}. 
dataset.externalstatus.header=Curation Status Changed dataset.externalstatus.removed=Curation Status Removed dataset.externalstatus.info=Curation Status is now "{0}" @@ -2498,6 +2518,7 @@ dataset.version.file.changed=Files (Changed File Metadata: {0} dataset.version.file.changed2=; Changed File Metadata: {0} dataset.version.variablemetadata.changed=Variable Metadata (Changed Variable Metadata: {0} dataset.version.variablemetadata.changed2=; Changed Variable Metadata: {0} +dataset.version.compare.incorrect.order=Compare requires the older dataset version to be listed first. #DataversePage.java dataverse.item.required=Required @@ -2719,8 +2740,8 @@ datasets.api.grant.role.assignee.has.role.error=User already has this role for t datasets.api.revoke.role.not.found.error="Role assignment {0} not found" datasets.api.revoke.role.success=Role {0} revoked for assignee {1} in {2} datasets.api.privateurl.error.datasetnotfound=Could not find dataset. -datasets.api.privateurl.error.alreadyexists=Private URL already exists for this dataset. -datasets.api.privateurl.error.notdraft=Can't create Private URL because the latest version of this dataset is not a draft. +datasets.api.privateurl.error.alreadyexists=Preview URL already exists for this dataset. +datasets.api.privateurl.error.notdraft=Can't create Preview URL because the latest version of this dataset is not a draft. datasets.api.privateurl.anonymized.error.released=Can't create a URL for anonymized access because this dataset has been published. datasets.api.creationdate=Date Created datasets.api.modificationdate=Last Modified Date @@ -3062,3 +3083,27 @@ openapi.exception.invalid.format=Invalid format {0}, currently supported formats openapi.exception=Supported format definition not found. openapi.exception.unaligned=Unaligned parameters on Headers [{0}] and Request [{1}] +#Users.java +users.api.errors.bearerAuthFeatureFlagDisabled=This endpoint is only available when bearer authentication feature flag is enabled. 
+users.api.errors.bearerTokenRequired=Bearer token required. +users.api.errors.jsonParseToUserDTO=Error parsing the POSTed User json: {0} +users.api.userRegistered=User registered. + +#RegisterOidcUserCommand.java +registerOidcUserCommand.errors.userAlreadyRegisteredWithToken=User is already registered with this token. +registerOidcUserCommand.errors.invalidFields=The provided fields are invalid for registering a new user. +registerOidcUserCommand.errors.userShouldAcceptTerms=Terms should be accepted. +registerOidcUserCommand.errors.provideMissingClaimsEnabled.fieldAlreadyPresentInProvider=Unable to set {0} because it conflicts with an existing claim from the OIDC identity provider. +registerOidcUserCommand.errors.provideMissingClaimsEnabled.fieldRequired=It is required to include the field {0} in the request JSON for registering the user. +registerOidcUserCommand.errors.provideMissingClaimsDisabled.unableToSetFieldViaJSON=Unable to set field {0} via JSON because the api-bearer-auth-provide-missing-claims feature flag is disabled. +registerOidcUserCommand.errors.provideMissingClaimsDisabled.fieldRequired=The OIDC identity provider does not provide the user claim {0}, which is required for user registration. Please contact an administrator. +registerOidcUserCommand.errors.emailAddressInUse=Email already in use. +registerOidcUserCommand.errors.usernameInUse=Username already in use. + +#BearerTokenAuthMechanism.java +bearerTokenAuthMechanism.errors.tokenValidatedButNoRegisteredUser=Bearer token is validated, but there is no linked user account. + +#AuthenticationServiceBean.java +authenticationServiceBean.errors.unauthorizedBearerToken=Unauthorized bearer token. +authenticationServiceBean.errors.invalidBearerToken=Could not parse bearer token. +authenticationServiceBean.errors.bearerTokenDetectedNoOIDCProviderConfigured=Bearer token detected, no OIDC provider configured. 
diff --git a/src/main/resources/db/migration/V6.4.0.2.sql b/src/main/resources/db/migration/V6.4.0.2.sql new file mode 100644 index 00000000000..bc4a85b278f --- /dev/null +++ b/src/main/resources/db/migration/V6.4.0.2.sql @@ -0,0 +1,2 @@ +-- #10118 +ALTER TABLE customquestion ALTER COLUMN questionstring TYPE text; diff --git a/src/main/resources/db/migration/V6.4.0.3.sql b/src/main/resources/db/migration/V6.4.0.3.sql index 792996414b4..307d8ed206c 100644 --- a/src/main/resources/db/migration/V6.4.0.3.sql +++ b/src/main/resources/db/migration/V6.4.0.3.sql @@ -1,4 +1,2 @@ --- Add these boolean flags to accommodate new harvesting client features +-- Add this boolean flag to accommodate a new harvesting client feature ALTER TABLE harvestingclient ADD COLUMN IF NOT EXISTS useOaiIdAsPid BOOLEAN DEFAULT FALSE; -ALTER TABLE harvestingclient ADD COLUMN IF NOT EXISTS useListRecords BOOLEAN DEFAULT FALSE; -ALTER TABLE harvestingclient ALTER COLUMN harvestingSet TYPE TEXT; diff --git a/src/main/webapp/dataset.xhtml b/src/main/webapp/dataset.xhtml index 6de0f00e94e..9426884d349 100644 --- a/src/main/webapp/dataset.xhtml +++ b/src/main/webapp/dataset.xhtml @@ -20,6 +20,7 @@ + - + @@ -834,7 +835,6 @@ /> @@ -995,7 +995,6 @@

    +

    #{bundle['dataset.privateurl.introduction']}

    +

    +

    +

    #{bundle['dataset.privateurl.onlyone']}

    +

    + +

    +

    +

    +

    #{bundle['dataset.privateurl.general.description']}

    + + +
    +
    + +

    + #{privateUrlLink} +

    +
    +
    +
    + + +
    +
    + +

    + +

    +

    +

    +

    #{bundle['dataset.privateurl.anonymous.description']}

    +

    #{bundle['dataset.privateurl.anonymous.description.paragraph.two']} #{bundle['dataset.privateurl.createPrivateUrl.anonymized.unavailable']}.

    + + + +

    + +
    +
    + +

    + #{privateUrlLink} +

    +
    +
    +
    + + +
    +
    +

    #{bundle['dataset.privateurl.absent']}

    @@ -1200,17 +1273,11 @@

    -
    - - - - - -
    +

    #{bundle['dataset.privateurl.cannotCreate']}

    @@ -1224,7 +1291,10 @@

    #{bundle['dataset.privateurl.disableConfirmationText']}

    - + + + + diff --git a/src/main/webapp/metadataFragment.xhtml b/src/main/webapp/metadataFragment.xhtml index 723f95148cd..f8367ce01f8 100755 --- a/src/main/webapp/metadataFragment.xhtml +++ b/src/main/webapp/metadataFragment.xhtml @@ -130,7 +130,7 @@ - +
    -
    +
    #{bundle['mydataFragment.resultsByUserName']} - +
    @@ -150,4 +150,4 @@
    - \ No newline at end of file + diff --git a/src/main/webapp/previewurl.xhtml b/src/main/webapp/previewurl.xhtml new file mode 100644 index 00000000000..980d775506b --- /dev/null +++ b/src/main/webapp/previewurl.xhtml @@ -0,0 +1,24 @@ + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/src/main/webapp/resources/iqbs/messages.xhtml b/src/main/webapp/resources/iqbs/messages.xhtml index bd17cf34d21..f8e1f5e8e9d 100644 --- a/src/main/webapp/resources/iqbs/messages.xhtml +++ b/src/main/webapp/resources/iqbs/messages.xhtml @@ -63,7 +63,7 @@ Server: - #{systemConfig.dataverseServer} + #{systemConfig.dataverseSiteUrl} #{msg.rendered()} diff --git a/src/main/webapp/resources/js/mydata.js b/src/main/webapp/resources/js/mydata.js index 899ba6637e2..c731d6772ac 100644 --- a/src/main/webapp/resources/js/mydata.js +++ b/src/main/webapp/resources/js/mydata.js @@ -391,7 +391,7 @@ function submit_my_data_search(){ // -------------------------------- // ah, but with the horribly coded xhtml page, we can't use form tags... //var formData = $('#mydata_filter_form').serialize(); - var formData = $("#my_data_filter_column :input").serialize() + '&' + $("#my_data_filter_column2 :input").serialize() ; + var formData = $("#my_data_filter_column :input").serialize() + '&' + $("#my_data_filter_column3 :input").serialize()+ '&' + $("#my_data_filter_column2 :input").serialize() ; // For debugging, show the search params if (MYDATA_DEBUG_ON){ diff --git a/src/main/webapp/search-include-fragment.xhtml b/src/main/webapp/search-include-fragment.xhtml index 505fe681363..fc224443a8e 100644 --- a/src/main/webapp/search-include-fragment.xhtml +++ b/src/main/webapp/search-include-fragment.xhtml @@ -582,7 +582,7 @@ - +
    diff --git a/src/test/java/edu/harvard/iq/dataverse/DatasetFieldTypeTest.java b/src/test/java/edu/harvard/iq/dataverse/DatasetFieldTypeTest.java index a235c9b0061..588bf5294e5 100644 --- a/src/test/java/edu/harvard/iq/dataverse/DatasetFieldTypeTest.java +++ b/src/test/java/edu/harvard/iq/dataverse/DatasetFieldTypeTest.java @@ -63,17 +63,17 @@ public void testIsSanitizeHtml() { //if textbox then sanitize - allow tags instance.setFieldType(DatasetFieldType.FieldType.TEXTBOX); result = instance.isSanitizeHtml(); - assertEquals(true, result); + assertTrue(result); //if textbox then don't sanitize - allow tags instance.setFieldType(DatasetFieldType.FieldType.EMAIL); result = instance.isSanitizeHtml(); - assertEquals(false, result); + assertFalse(result); //URL, too instance.setFieldType(DatasetFieldType.FieldType.URL); result = instance.isSanitizeHtml(); - assertEquals(true, result); + assertTrue(result); } @Test @@ -102,7 +102,7 @@ public void testIsEscapeOutputText(){ //URL, too instance.setFieldType(DatasetFieldType.FieldType.URL); result = instance.isEscapeOutputText(); - assertEquals(false, result); + assertFalse(result); } @@ -121,7 +121,7 @@ public void testGetSolrField(){ parent.setAllowMultiples(true); instance.setParentDatasetFieldType(parent); solrField = instance.getSolrField(); - assertEquals(true, solrField.isAllowedToBeMultivalued()); + assertTrue(solrField.isAllowedToBeMultivalued()); } diff --git a/src/test/java/edu/harvard/iq/dataverse/DatasetFieldValueValidatorTest.java b/src/test/java/edu/harvard/iq/dataverse/DatasetFieldValueValidatorTest.java index 3f85acc1f87..b753f534c6b 100644 --- a/src/test/java/edu/harvard/iq/dataverse/DatasetFieldValueValidatorTest.java +++ b/src/test/java/edu/harvard/iq/dataverse/DatasetFieldValueValidatorTest.java @@ -52,63 +52,63 @@ public void testIsValid() { //Make string too long - should fail. 
value.setValue("asdfgX"); result = instance.isValid(value, ctx); - assertEquals(false, result); + assertFalse(result); //Make string too long - should fail. value.setValue("asdf"); result = instance.isValid(value, ctx); - assertEquals(false, result); + assertFalse(result); //Now lets try Dates dft.setFieldType(DatasetFieldType.FieldType.DATE); dft.setValidationFormat(null); value.setValue("1999AD"); result = instance.isValid(value, ctx); - assertEquals(true, result); + assertTrue(result); value.setValue("44BCE"); result = instance.isValid(value, ctx); - assertEquals(true, result); + assertTrue(result); value.setValue("2004-10-27"); result = instance.isValid(value, ctx); - assertEquals(true, result); + assertTrue(result); value.setValue("2002-08"); result = instance.isValid(value, ctx); - assertEquals(true, result); + assertTrue(result); value.setValue("[1999?]"); result = instance.isValid(value, ctx); - assertEquals(true, result); + assertTrue(result); value.setValue("Blergh"); result = instance.isValid(value, ctx); - assertEquals(false, result); + assertFalse(result); //Float dft.setFieldType(DatasetFieldType.FieldType.FLOAT); value.setValue("44"); result = instance.isValid(value, ctx); - assertEquals(true, result); + assertTrue(result); value.setValue("44 1/2"); result = instance.isValid(value, ctx); - assertEquals(false, result); + assertFalse(result); //Integer dft.setFieldType(DatasetFieldType.FieldType.INT); value.setValue("44"); result = instance.isValid(value, ctx); - assertEquals(true, result); + assertTrue(result); value.setValue("-44"); result = instance.isValid(value, ctx); - assertEquals(true, result); + assertTrue(result); value.setValue("12.14"); result = instance.isValid(value, ctx); - assertEquals(false, result); + assertFalse(result); } @Test diff --git a/src/test/java/edu/harvard/iq/dataverse/DatasetVersionDifferenceTest.java b/src/test/java/edu/harvard/iq/dataverse/DatasetVersionDifferenceTest.java new file mode 100644 index 
00000000000..0ba8dde8aa0 --- /dev/null +++ b/src/test/java/edu/harvard/iq/dataverse/DatasetVersionDifferenceTest.java @@ -0,0 +1,460 @@ +package edu.harvard.iq.dataverse; + +import edu.harvard.iq.dataverse.branding.BrandingUtilTest; +import edu.harvard.iq.dataverse.datavariable.VariableMetadata; +import edu.harvard.iq.dataverse.datavariable.VariableMetadataUtil; +import edu.harvard.iq.dataverse.license.License; +import edu.harvard.iq.dataverse.pidproviders.doi.AbstractDOIProvider; +import edu.harvard.iq.dataverse.util.BundleUtil; + +import java.net.URI; +import java.sql.Timestamp; +import java.text.ParseException; +import java.text.SimpleDateFormat; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Date; +import java.util.List; +import java.util.logging.Logger; + +import static org.assertj.core.util.DateUtil.now; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertTrue; + +import edu.harvard.iq.dataverse.util.json.JsonUtil; +import io.restassured.path.json.JsonPath; +import jakarta.json.JsonObject; +import jakarta.json.JsonObjectBuilder; +import org.junit.jupiter.api.BeforeAll; +import org.junit.jupiter.api.AfterAll; +import org.junit.jupiter.api.Test; + +public class DatasetVersionDifferenceTest { + + private static final Logger logger = Logger.getLogger(DatasetVersion.class.getCanonicalName()); + + private static List addedFiles; + private static List removedFiles; + private static List changedFileMetadata; + private static List changedVariableMetadata; + private static List replacedFiles; + private static Long fileId = Long.valueOf(0); + + @BeforeAll + public static void setUp() { + BrandingUtilTest.setupMocks(); + } + + @AfterAll + public static void tearDown() { + BrandingUtilTest.setupMocks(); + } + + @Test + public void testDifferencing() { + Dataset dataset = new Dataset(); + License license = new License("CC0 1.0", + "You can copy, modify, distribute and perform the 
work, even for commercial purposes, all without asking permission.", + URI.create("http://creativecommons.org/publicdomain/zero/1.0"), URI.create("/resources/images/cc0.png"), + true, 1l); + license.setDefault(true); + dataset.setProtocol("doi"); + dataset.setAuthority("10.5072/FK2"); + dataset.setIdentifier("LK0D1H"); + DatasetVersion datasetVersion = new DatasetVersion(); + datasetVersion.setDataset(dataset); + datasetVersion.setVersionState(DatasetVersion.VersionState.RELEASED); + datasetVersion.setVersionNumber(1L); + datasetVersion.setTermsOfUseAndAccess(new TermsOfUseAndAccess()); + DatasetVersion datasetVersion2 = new DatasetVersion(); + datasetVersion2.setDataset(dataset); + datasetVersion2.setVersionState(DatasetVersion.VersionState.DRAFT); + + // Published version's two files + DataFile dataFile = new DataFile(); + dataFile.setId(1L); + DataFile dataFile2 = new DataFile(); + dataFile2.setId(2L); + + FileMetadata fileMetadata1 = createFileMetadata(10L, datasetVersion, dataFile, "file1.txt"); + fileMetadata1.setLabel("file1.txt"); + + FileMetadata fileMetadata2 = createFileMetadata(20L, datasetVersion, dataFile2, "file2.txt"); + + // Draft version - same two files with one label change + FileMetadata fileMetadata3 = fileMetadata1.createCopy(); + fileMetadata3.setId(30L); + + FileMetadata fileMetadata4 = fileMetadata2.createCopy(); + fileMetadata4.setLabel("file3.txt"); + fileMetadata4.setId(40L); + + List fileMetadatas = new ArrayList<>(Arrays.asList(fileMetadata1, fileMetadata2)); + datasetVersion.setFileMetadatas(fileMetadatas); + List fileMetadatas2 = new ArrayList<>(Arrays.asList(fileMetadata3, fileMetadata4)); + datasetVersion2.setFileMetadatas(fileMetadatas2); + + SimpleDateFormat dateFmt = new SimpleDateFormat("yyyyMMdd"); + Date publicationDate; + try { + publicationDate = dateFmt.parse("19551105"); + datasetVersion.setReleaseTime(publicationDate); + dataset.setPublicationDate(new Timestamp(publicationDate.getTime())); + } catch (ParseException e) { 
+ // TODO Auto-generated catch block + e.printStackTrace(); + } + List versionList = new ArrayList<>(Arrays.asList(datasetVersion, datasetVersion2)); + dataset.setVersions(versionList); + + // One file has a changed label + List expectedAddedFiles = new ArrayList<>(); + List expectedRemovedFiles = new ArrayList<>(); + ; + List expectedChangedFileMetadata = Arrays.asList(fileMetadata2, fileMetadata4); + List expectedChangedVariableMetadata = new ArrayList<>(); + List expectedReplacedFiles = new ArrayList<>(); + List changedTerms = new ArrayList<>(); + + compareResults(datasetVersion, datasetVersion2, expectedAddedFiles, expectedRemovedFiles, + expectedChangedFileMetadata, expectedChangedVariableMetadata, expectedReplacedFiles, changedTerms); + // change label for first file as well + fileMetadata3.setLabel("file1_updated.txt"); + expectedChangedFileMetadata = Arrays.asList(fileMetadata1, fileMetadata3, fileMetadata2, fileMetadata4); + compareResults(datasetVersion, datasetVersion2, expectedAddedFiles, expectedRemovedFiles, + expectedChangedFileMetadata, expectedChangedVariableMetadata, expectedReplacedFiles, changedTerms); + // Add one change to variable metadata + fileMetadata3.setVariableMetadatas(Arrays.asList(new VariableMetadata())); + expectedChangedVariableMetadata = Arrays.asList(fileMetadata1, fileMetadata3); + compareResults(datasetVersion, datasetVersion2, expectedAddedFiles, expectedRemovedFiles, + expectedChangedFileMetadata, expectedChangedVariableMetadata, expectedReplacedFiles, changedTerms); + // Replaced File + DataFile replacingFile = new DataFile(); + replacingFile.setId(3L); + replacingFile.setPreviousDataFileId(1L); + fileMetadata3.setDataFile(replacingFile); + expectedChangedFileMetadata = Arrays.asList(fileMetadata2, fileMetadata4); + expectedChangedVariableMetadata = new ArrayList<>(); + + FileMetadata[] filePair = new FileMetadata[2]; + filePair[0] = fileMetadata1; + filePair[1] = fileMetadata3; + expectedReplacedFiles = new ArrayList<>(); 
+ expectedReplacedFiles.add(filePair); + compareResults(datasetVersion, datasetVersion2, expectedAddedFiles, expectedRemovedFiles, + expectedChangedFileMetadata, expectedChangedVariableMetadata, expectedReplacedFiles, changedTerms); + + // Add a new file + DataFile newFile = new DataFile(); + newFile.setId(3L); + FileMetadata fileMetadata5 = createFileMetadata(50L, datasetVersion2, newFile, "newFile.txt"); + datasetVersion2.getFileMetadatas().add(fileMetadata5); + expectedAddedFiles = Arrays.asList(fileMetadata5); + compareResults(datasetVersion, datasetVersion2, expectedAddedFiles, expectedRemovedFiles, + expectedChangedFileMetadata, expectedChangedVariableMetadata, expectedReplacedFiles, changedTerms); + + // Remove a file + datasetVersion2.getFileMetadatas().remove(fileMetadata4); + expectedRemovedFiles = Arrays.asList(fileMetadata2); + expectedChangedFileMetadata = new ArrayList<>(); + compareResults(datasetVersion, datasetVersion2, expectedAddedFiles, expectedRemovedFiles, + expectedChangedFileMetadata, expectedChangedVariableMetadata, expectedReplacedFiles, changedTerms); + + // Set the published version's TermsOfUseAndAccess to a non-null value + TermsOfUseAndAccess termsOfUseAndAccess = new TermsOfUseAndAccess(); + datasetVersion.setTermsOfUseAndAccess(termsOfUseAndAccess); + + compareResults(datasetVersion, datasetVersion2, expectedAddedFiles, expectedRemovedFiles, + expectedChangedFileMetadata, expectedChangedVariableMetadata, expectedReplacedFiles, changedTerms); + + // Set the draft version's TermsOfUseAndAccess to a non-null value + + datasetVersion2.setTermsOfUseAndAccess(new TermsOfUseAndAccess()); + + compareResults(datasetVersion, datasetVersion2, expectedAddedFiles, expectedRemovedFiles, + expectedChangedFileMetadata, expectedChangedVariableMetadata, expectedReplacedFiles, changedTerms); + + // Set a term field + + datasetVersion2.getTermsOfUseAndAccess().setTermsOfUse("Terms o' Use"); + String[] termField = new String[] { + 
BundleUtil.getStringFromBundle("file.dataFilesTab.terms.list.termsOfUse.header"), "", "Terms o' Use" }; + changedTerms.add(termField); + + compareResults(datasetVersion, datasetVersion2, expectedAddedFiles, expectedRemovedFiles, + expectedChangedFileMetadata, expectedChangedVariableMetadata, expectedReplacedFiles, changedTerms); + + // Set a term field in the original version + + datasetVersion.getTermsOfUseAndAccess().setDisclaimer("Not our fault"); + String[] termField2 = new String[] { + BundleUtil.getStringFromBundle("file.dataFilesTab.terms.list.termsOfUse.addInfo.disclaimer"), + "Not our fault", "" }; + changedTerms.add(termField2); + + compareResults(datasetVersion, datasetVersion2, expectedAddedFiles, expectedRemovedFiles, + expectedChangedFileMetadata, expectedChangedVariableMetadata, expectedReplacedFiles, changedTerms); + + } + + private FileMetadata createFileMetadata(long id, DatasetVersion datasetVersion, DataFile dataFile, String label) { + FileMetadata fileMetadata = new FileMetadata(); + fileMetadata.setId(id); + fileMetadata.setDatasetVersion(datasetVersion); + fileMetadata.setDataFile(dataFile); + fileMetadata.setLabel(label); + fileMetadata.setCategories(new ArrayList<>()); + return fileMetadata; + } + + /** + * CompareResults is currently testing the output of the + * DatasetVersionDifference class with the manually created expected results + * included as parameters and with the results of the less efficient algorithm + * it is replacing. Once we're collectively convinced that the tests here are + * correct (i.e. the manually created expected* parameters are set correctly for + * each use case), we could drop running the originalCalculateDifference method + * and just compare with the expected* results. 
+ * + * @param changedTerms + */ + private void compareResults(DatasetVersion datasetVersion, DatasetVersion datasetVersion2, + List expectedAddedFiles, List expectedRemovedFiles, + List expectedChangedFileMetadata, List expectedChangedVariableMetadata, + List expectedReplacedFiles, List changedTerms) { + DatasetVersionDifference diff = new DatasetVersionDifference(datasetVersion2, datasetVersion); + // Run the original algorithm + originalCalculateDifference(datasetVersion2, datasetVersion); + // Compare the old and new algorithms + assertEquals(addedFiles, diff.getAddedFiles()); + assertEquals(removedFiles, diff.getRemovedFiles()); + assertEquals(changedFileMetadata, diff.getChangedFileMetadata()); + assertEquals(changedVariableMetadata, diff.getChangedVariableMetadata()); + assertEquals(replacedFiles.size(), diff.getReplacedFiles().size()); + for (int i = 0; i < replacedFiles.size(); i++) { + assertEquals(replacedFiles.get(i)[0], diff.getReplacedFiles().get(i)[0]); + assertEquals(replacedFiles.get(i)[1], diff.getReplacedFiles().get(i)[1]); + } + + // Also compare the new algorithm with the manually created expected* values for + // the test cases + assertEquals(expectedAddedFiles, diff.getAddedFiles()); + assertEquals(expectedRemovedFiles, diff.getRemovedFiles()); + assertEquals(expectedChangedFileMetadata, diff.getChangedFileMetadata()); + assertEquals(expectedChangedVariableMetadata, diff.getChangedVariableMetadata()); + assertEquals(expectedReplacedFiles.size(), diff.getReplacedFiles().size()); + for (int i = 0; i < expectedReplacedFiles.size(); i++) { + assertEquals(expectedReplacedFiles.get(i)[0], diff.getReplacedFiles().get(i)[0]); + assertEquals(expectedReplacedFiles.get(i)[1], diff.getReplacedFiles().get(i)[1]); + } + + assertEquals(changedTerms.size(), diff.getChangedTermsAccess().size()); + for (int i = 0; i < changedTerms.size(); i++) { + String[] diffArray = diff.getChangedTermsAccess().get(i); + assertEquals(changedTerms.get(i)[0], diffArray[0]); + 
assertEquals(changedTerms.get(i)[1], diffArray[1]); + assertEquals(changedTerms.get(i)[2], diffArray[2]); + } + } + + @Deprecated + // This is the "Original" difference calculation from DatasetVersionDifference + // It is included here to help verify that the new implementation is the same as + // the original + private static void originalCalculateDifference(DatasetVersion newVersion, DatasetVersion originalVersion) { + + addedFiles = new ArrayList<>(); + removedFiles = new ArrayList<>(); + changedFileMetadata = new ArrayList<>(); + changedVariableMetadata = new ArrayList<>(); + replacedFiles = new ArrayList<>(); + long startTime = System.currentTimeMillis(); + // TODO: ? + // It looks like we are going through the filemetadatas in both versions, + // *sequentially* (i.e. at the cost of O(N*M)), to select the lists of + // changed, deleted and added files between the 2 versions... But why + // are we doing it, if we are doing virtually the same thing inside + // the initDatasetFilesDifferenceList(), below - but in a more efficient + // way (sorting both lists, then goint through them in parallel, at the + // cost of (N+M) max.? + // -- 4.6 Nov. 
2016 + + for (FileMetadata fmdo : originalVersion.getFileMetadatas()) { + boolean deleted = true; + for (FileMetadata fmdn : newVersion.getFileMetadatas()) { + if (fmdo.getDataFile().equals(fmdn.getDataFile())) { + deleted = false; + if (!DatasetVersionDifference.compareFileMetadatas(fmdo, fmdn).isEmpty()) { + changedFileMetadata.add(fmdo); + changedFileMetadata.add(fmdn); + } + if (!VariableMetadataUtil.compareVariableMetadata(fmdo, fmdn) + || !DatasetVersionDifference.compareVarGroup(fmdo, fmdn)) { + changedVariableMetadata.add(fmdo); + changedVariableMetadata.add(fmdn); + } + break; + } + } + if (deleted) { + removedFiles.add(fmdo); + } + } + for (FileMetadata fmdn : newVersion.getFileMetadatas()) { + boolean added = true; + for (FileMetadata fmdo : originalVersion.getFileMetadatas()) { + if (fmdo.getDataFile().equals(fmdn.getDataFile())) { + added = false; + break; + } + } + if (added) { + addedFiles.add(fmdn); + } + } + + getReplacedFiles(); + logger.info("Difference Loop Execution time: " + (System.currentTimeMillis() - startTime) + " ms"); + + } + + @Deprecated + // This is used only in the original algorithm and was removed from + // DatasetVersionDifference + private static void getReplacedFiles() { + if (addedFiles.isEmpty() || removedFiles.isEmpty()) { + return; + } + List addedToReplaced = new ArrayList<>(); + List removedToReplaced = new ArrayList<>(); + for (FileMetadata added : addedFiles) { + DataFile addedDF = added.getDataFile(); + Long replacedId = addedDF.getPreviousDataFileId(); + if (added.getDataFile().getPreviousDataFileId() != null) { + } + for (FileMetadata removed : removedFiles) { + DataFile test = removed.getDataFile(); + if (test.getId().equals(replacedId)) { + addedToReplaced.add(added); + removedToReplaced.add(removed); + FileMetadata[] replacedArray = new FileMetadata[2]; + replacedArray[0] = removed; + replacedArray[1] = added; + replacedFiles.add(replacedArray); + } + } + } + if (addedToReplaced.isEmpty()) { + } else { + 
addedToReplaced.stream().forEach((delete) -> { + addedFiles.remove(delete); + }); + removedToReplaced.stream().forEach((delete) -> { + removedFiles.remove(delete); + }); + } + } + + @Test + public void testCompareVersionsAsJson() { + + Dataverse dv = new Dataverse(); + Dataset ds = new Dataset(); + ds.setOwner(dv); + ds.setGlobalId(new GlobalId(AbstractDOIProvider.DOI_PROTOCOL,"10.5072","FK2/BYM3IW", "/", AbstractDOIProvider.DOI_RESOLVER_URL, null)); + + DatasetVersion dv1 = initDatasetVersion(0L, ds, DatasetVersion.VersionState.RELEASED); + DatasetVersion dv2 = initDatasetVersion(1L, ds, DatasetVersion.VersionState.DRAFT); + ds.setVersions(List.of(dv1, dv2)); + + TermsOfUseAndAccess toa = new TermsOfUseAndAccess(); + toa.setDisclaimer("disclaimer"); + dv2.setTermsOfUseAndAccess(toa); + DatasetField dsf = new DatasetField(); + dsf.setDatasetFieldType(new DatasetFieldType("Author", DatasetFieldType.FieldType.TEXT, true)); + MetadataBlock mb = new MetadataBlock(); + mb.setDisplayName("testMetadataBlock"); + dsf.getDatasetFieldType().setMetadataBlock(mb); + dsf.setSingleValue("TEST"); + dv2.getDatasetFields().add(dsf); + // modify file at index 0 + dv2.getFileMetadatas().get(0).setRestricted(!dv2.getFileMetadatas().get(2).isRestricted()); + + FileMetadata addedFile = initFile(dv2); // add a new file + FileMetadata removedFile = dv2.getFileMetadatas().get(1); // remove the second file + dv2.getFileMetadatas().remove(1); + FileMetadata replacedFile = dv2.getFileMetadatas().get(1); // the third file is now at index 1 since the second file was removed + FileMetadata replacementFile = initFile(dv2, replacedFile.getDataFile().getId()); // replace the third file with a new file + dv2.getFileMetadatas().remove(1); + + DatasetVersionDifference dvd = new DatasetVersionDifference(dv2, dv1); + + JsonObjectBuilder json = dvd.compareVersionsAsJson(); + JsonObject obj = json.build(); + System.out.println(JsonUtil.prettyPrint(obj)); + + JsonPath dataFile = 
JsonPath.from(JsonUtil.prettyPrint(obj)); + assertTrue("TEST".equalsIgnoreCase(dataFile.getString("metadataChanges[0].changed[0].newValue"))); + assertTrue(addedFile.getLabel().equalsIgnoreCase(dataFile.getString("filesAdded[0].fileName"))); + assertTrue(removedFile.getLabel().equalsIgnoreCase(dataFile.getString("filesRemoved[0].fileName"))); + assertTrue(replacedFile.getLabel().equalsIgnoreCase(dataFile.getString("filesReplaced[0].oldFile.fileName"))); + assertTrue(replacementFile.getLabel().equalsIgnoreCase(dataFile.getString("filesReplaced[0].newFile.fileName"))); + assertTrue("true".equalsIgnoreCase(dataFile.getString("fileChanges[0].changed[0].newValue"))); + assertTrue("disclaimer".equalsIgnoreCase(dataFile.getString("TermsOfAccess.changed[0].newValue"))); + } + private DatasetVersion initDatasetVersion(Long id, Dataset ds, DatasetVersion.VersionState vs) { + DatasetVersion dv = new DatasetVersion(); + dv.setDataset(ds); + dv.setVersion(1L); + dv.setVersionState(vs); + dv.setMinorVersionNumber(0L); + if (vs == DatasetVersion.VersionState.RELEASED) { + dv.setVersionNumber(1L); + dv.setVersion(1L); + dv.setReleaseTime(now()); + } + dv.setId(id); + dv.setCreateTime(now()); + dv.setLastUpdateTime(now()); + dv.setTermsOfUseAndAccess(new TermsOfUseAndAccess()); + dv.setFileMetadatas(initFiles(dv)); + return dv; + } + private List initFiles(DatasetVersion dsv) { + List fileMetadatas = new ArrayList<>(); + fileId = 0L; + for (int i=0; i < 10; i++) { + FileMetadata fm = initFile(dsv); + fileMetadatas.add(fm); + } + return fileMetadatas; + } + private FileMetadata initFile(DatasetVersion dsv) { + return initFile(dsv, null); + } + private FileMetadata initFile(DatasetVersion dsv, Long prevId) { + Long id = fileId++; + FileMetadata fm = new FileMetadata(); + DataFile df = new DataFile(); + fm.setDatasetVersion(dsv); + DataTable dt = new DataTable(); + dt.setOriginalFileName("filename"+id+".txt"); + df.setId(id); + df.setDescription("Desc"+id); + df.setRestricted(false); 
+ df.setFilesize(100 + id); + df.setChecksumType(DataFile.ChecksumType.MD5); + df.setChecksumValue("value"+id); + df.setDataTable(dt); + df.setOwner(dsv.getDataset()); + df.getFileMetadatas().add(fm); + df.setPreviousDataFileId(prevId); + fm.setId(id); + fm.setDataFile(df); + fm.setLabel("Label"+id); + fm.setDirectoryLabel("/myFilePath/"); + fm.setDescription("Desc"+id); + dsv.getFileMetadatas().add(fm); + return fm; + } +} diff --git a/src/test/java/edu/harvard/iq/dataverse/DatasetVersionTest.java b/src/test/java/edu/harvard/iq/dataverse/DatasetVersionTest.java index 4cd6c4dfaa7..b36d8907472 100644 --- a/src/test/java/edu/harvard/iq/dataverse/DatasetVersionTest.java +++ b/src/test/java/edu/harvard/iq/dataverse/DatasetVersionTest.java @@ -80,7 +80,7 @@ public void testIsInReview() { DatasetVersion nonDraft = new DatasetVersion(); nonDraft.setVersionState(DatasetVersion.VersionState.RELEASED); - assertEquals(false, nonDraft.isInReview()); + assertFalse(nonDraft.isInReview()); ds.addLock(null); assertFalse(nonDraft.isInReview()); diff --git a/src/test/java/edu/harvard/iq/dataverse/api/AdminIT.java b/src/test/java/edu/harvard/iq/dataverse/api/AdminIT.java index 6d7dd2eae29..94aece95861 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/AdminIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/AdminIT.java @@ -16,6 +16,8 @@ import java.util.HashMap; import java.util.List; +import jakarta.json.Json; +import jakarta.json.JsonArray; import org.junit.jupiter.api.Disabled; import org.junit.jupiter.api.Test; import org.junit.jupiter.api.BeforeAll; @@ -26,13 +28,11 @@ import java.util.Map; import java.util.UUID; -import java.util.logging.Level; import java.util.logging.Logger; import static jakarta.ws.rs.core.Response.Status.*; +import static org.hamcrest.CoreMatchers.*; import static org.junit.jupiter.api.Assertions.assertEquals; -import static org.hamcrest.CoreMatchers.equalTo; -import static org.hamcrest.CoreMatchers.notNullValue; import static 
org.junit.jupiter.api.Assertions.assertTrue; public class AdminIT { @@ -901,6 +901,50 @@ public void testDownloadTmpFile() throws IOException { .body("message", equalTo("Path must begin with '/tmp' but after normalization was '/etc/passwd'.")); } + @Test + public void testFindMissingFiles() { + Response createUserResponse = UtilIT.createRandomUser(); + createUserResponse.then().assertThat().statusCode(OK.getStatusCode()); + String username = UtilIT.getUsernameFromResponse(createUserResponse); + String apiToken = UtilIT.getApiTokenFromResponse(createUserResponse); + UtilIT.setSuperuserStatus(username, true); + + String dataverseAlias = ":root"; + Response createDatasetResponse = UtilIT.createRandomDatasetViaNativeApi(dataverseAlias, apiToken); + createDatasetResponse.prettyPrint(); + createDatasetResponse.then().assertThat().statusCode(CREATED.getStatusCode()); + int datasetId = JsonPath.from(createDatasetResponse.body().asString()).getInt("data.id"); + String datasetPersistentId = JsonPath.from(createDatasetResponse.body().asString()).getString("data.persistentId"); + + // Upload file + Response uploadResponse = UtilIT.uploadRandomFile(datasetPersistentId, apiToken); + uploadResponse.then().assertThat().statusCode(CREATED.getStatusCode()); + + // Audit files + Response resp = UtilIT.auditFiles(apiToken, null, 100L, null); + resp.prettyPrint(); + JsonArray emptyArray = Json.createArrayBuilder().build(); + resp.then().assertThat() + .statusCode(OK.getStatusCode()) + .body("data.lastId", equalTo(100)); + + // Audit files with invalid parameters + resp = UtilIT.auditFiles(apiToken, 100L, 0L, null); + resp.prettyPrint(); + resp.then().assertThat() + .statusCode(BAD_REQUEST.getStatusCode()) + .body("status", equalTo("ERROR")) + .body("message", equalTo("Invalid Parameters: lastId must be equal to or greater than firstId")); + + // Audit files with list of dataset identifiers parameter + resp = UtilIT.auditFiles(apiToken, 1L, null, "bad/id, " + datasetPersistentId); + 
resp.prettyPrint(); + resp.then().assertThat() + .statusCode(OK.getStatusCode()) + .body("data.failures[0].datasetIdentifier", equalTo("bad/id")) + .body("data.failures[0].reason", equalTo("Not Found")); + } + private String createTestNonSuperuserApiToken() { Response createUserResponse = UtilIT.createRandomUser(); createUserResponse.then().assertThat().statusCode(OK.getStatusCode()); diff --git a/src/test/java/edu/harvard/iq/dataverse/api/DatasetFieldServiceApiTest.java b/src/test/java/edu/harvard/iq/dataverse/api/DatasetFieldServiceApiTest.java index ca99960f240..5f00d34b276 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/DatasetFieldServiceApiTest.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/DatasetFieldServiceApiTest.java @@ -1,15 +1,61 @@ package edu.harvard.iq.dataverse.api; +import edu.harvard.iq.dataverse.ControlledVocabularyValueServiceBean; +import edu.harvard.iq.dataverse.DatasetFieldServiceBean; +import edu.harvard.iq.dataverse.DataverseServiceBean; +import edu.harvard.iq.dataverse.MetadataBlockServiceBean; +import edu.harvard.iq.dataverse.actionlogging.ActionLogServiceBean; import edu.harvard.iq.dataverse.util.BundleUtil; +import jakarta.json.Json; +import jakarta.json.JsonObject; +import jakarta.json.JsonReader; +import jakarta.ws.rs.core.Response; +import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; +import org.mockito.Mock; +import org.mockito.junit.jupiter.MockitoExtension; +import java.io.File; +import java.io.StringReader; +import java.nio.file.Path; +import java.nio.file.Paths; import java.util.ArrayList; import java.util.List; +import static org.assertj.core.api.Assertions.assertThat; import static org.junit.jupiter.api.Assertions.assertEquals; +@ExtendWith(MockitoExtension.class) public class DatasetFieldServiceApiTest { + @Mock + private ActionLogServiceBean actionLogSvc; + + @Mock + private MetadataBlockServiceBean metadataBlockService; + + @Mock + 
private DataverseServiceBean dataverseService; + + @Mock + private DatasetFieldServiceBean datasetFieldService; + + @Mock + private ControlledVocabularyValueServiceBean controlledVocabularyValueService; + + private DatasetFieldServiceApi api; + + @BeforeEach + public void setup(){ + api = new DatasetFieldServiceApi(); + api.actionLogSvc = actionLogSvc; + api.metadataBlockService = metadataBlockService; + api.dataverseService = dataverseService; + api.datasetFieldService = datasetFieldService; + api.controlledVocabularyValueService = controlledVocabularyValueService; + } + @Test public void testArrayIndexOutOfBoundMessageBundle() { List arguments = new ArrayList<>(); @@ -59,4 +105,41 @@ public void testGetGeneralErrorMessage() { message ); } + + @Test + public void testGetGeneralErrorMessageEmptyHeader() { + DatasetFieldServiceApi api = new DatasetFieldServiceApi(); + String message = api.getGeneralErrorMessage(null, 5, "some error"); + assertEquals( + "Error parsing metadata block in unknown part, line #5: some error", + message + ); + } + + @Test + public void testLoadDatasetFieldsWhitespaceTrimming() { + + Path resourceDirectory = Paths.get("src/test/resources/tsv/whitespace-test.tsv"); + File testfile = new File(resourceDirectory.toFile().getAbsolutePath()); + JsonReader jsonReader; + try (Response response = api.loadDatasetFields(testfile)) { + assertEquals(200, response.getStatus()); + jsonReader = Json.createReader(new StringReader(response.getEntity().toString())); + } + JsonObject jsonObject = jsonReader.readObject(); + + final List metadataNames = jsonObject.getJsonObject("data").getJsonArray("added") + .getValuesAs(e -> e.asJsonObject().getString("name")); + assertThat(metadataNames).contains("whitespaceDemo") + .contains("whitespaceDemoOne") + .contains("whitespaceDemoTwo") + .contains("whitespaceDemoThree") + .contains("CV1") + .contains("CV2") + .contains("CV3"); + assertThat(metadataNames).doesNotContain(" whitespaceDemo") + 
.doesNotContain("whitespaceDemoOne ") + .doesNotContain("CV1 ") + .doesNotContain(" CV2"); + } } diff --git a/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java b/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java index 93f1024ae7a..34afbb404f0 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java @@ -1676,7 +1676,7 @@ public void testPrivateUrl() { List assignments = with(roleAssignments.body().asString()).param("member", "member").getJsonObject("data.findAll { data -> data._roleAlias == member }"); assertEquals(1, assignments.size()); PrivateUrlUser privateUrlUser = new PrivateUrlUser(datasetId); - assertEquals("Private URL Enabled", privateUrlUser.getDisplayInfo().getTitle()); + assertEquals("Preview URL Enabled", privateUrlUser.getDisplayInfo().getTitle()); List assigneeShouldExistForPrivateUrlUser = with(roleAssignments.body().asString()).param("assigneeString", privateUrlUser.getIdentifier()).getJsonObject("data.findAll { data -> data.assignee == assigneeString }"); logger.info(assigneeShouldExistForPrivateUrlUser + " found for " + privateUrlUser.getIdentifier()); assertEquals(1, assigneeShouldExistForPrivateUrlUser.size()); @@ -1757,7 +1757,7 @@ public void testPrivateUrl() { Response privateUrlRoleAssignmentShouldBeGoneAfterDraftDeleted = UtilIT.getRoleAssignmentsOnDataset(datasetId.toString(), null, apiToken); privateUrlRoleAssignmentShouldBeGoneAfterDraftDeleted.prettyPrint(); - assertEquals(false, privateUrlRoleAssignmentShouldBeGoneAfterDraftDeleted.body().asString().contains(privateUrlUser.getIdentifier())); + assertFalse(privateUrlRoleAssignmentShouldBeGoneAfterDraftDeleted.body().asString().contains(privateUrlUser.getIdentifier())); String newTitleAgain = "I am changing the title again"; Response draftCreatedAgainPostPub = UtilIT.updateDatasetTitleViaSword(dataset1PersistentId, newTitleAgain, apiToken); @@ -4242,7 +4242,7 @@ public void 
testCitationDate() throws IOException { .statusCode(OK.getStatusCode()) .body("data.message", is(expectedCitation)); - Response exportDatasetAsDublinCore = UtilIT.exportDataset(datasetPid, "oai_dc", apiToken); + Response exportDatasetAsDublinCore = UtilIT.exportDataset(datasetPid, "oai_dc", apiToken, true); exportDatasetAsDublinCore.prettyPrint(); exportDatasetAsDublinCore.then().assertThat() .body("oai_dc.type", equalTo("Dataset")) @@ -4259,7 +4259,7 @@ public void testCitationDate() throws IOException { rexport.then().assertThat().statusCode(OK.getStatusCode()); String todayDate = LocalDate.now().format(DateTimeFormatter.ofPattern("yyyy-MM-dd")); - Response exportPostClear = UtilIT.exportDataset(datasetPid, "oai_dc", apiToken); + Response exportPostClear = UtilIT.exportDataset(datasetPid, "oai_dc", apiToken, true); exportPostClear.prettyPrint(); exportPostClear.then().assertThat() .body("oai_dc.type", equalTo("Dataset")) @@ -5168,4 +5168,134 @@ public void testGetCanDownloadAtLeastOneFile() { Response getUserPermissionsOnDatasetInvalidIdResponse = UtilIT.getCanDownloadAtLeastOneFile("testInvalidId", DS_VERSION_LATEST, secondUserApiToken); getUserPermissionsOnDatasetInvalidIdResponse.then().assertThat().statusCode(BAD_REQUEST.getStatusCode()); } + + @Test + public void testCompareDatasetVersionsAPI() throws InterruptedException { + + Response createUser = UtilIT.createRandomUser(); + assertEquals(200, createUser.getStatusCode()); + String username = UtilIT.getUsernameFromResponse(createUser); + String apiToken = UtilIT.getApiTokenFromResponse(createUser); + Response makeSuperUser = UtilIT.makeSuperUser(username); + assertEquals(200, makeSuperUser.getStatusCode()); + + Response createDataverseResponse = UtilIT.createRandomDataverse(apiToken); + String dataverseAlias = UtilIT.getAliasFromResponse(createDataverseResponse); + + Response createDatasetResponse = UtilIT.createRandomDatasetViaNativeApi(dataverseAlias, apiToken); + Integer datasetId = 
JsonPath.from(createDatasetResponse.body().asString()).getInt("data.id"); + + Response getDatasetJsonBeforePublishing = UtilIT.nativeGet(datasetId, apiToken); + String protocol = JsonPath.from(getDatasetJsonBeforePublishing.getBody().asString()).getString("data.protocol"); + String authority = JsonPath.from(getDatasetJsonBeforePublishing.getBody().asString()).getString("data.authority"); + String identifier = JsonPath.from(getDatasetJsonBeforePublishing.getBody().asString()).getString("data.identifier"); + String datasetPersistentId = protocol + ":" + authority + "/" + identifier; + // used for all added files + JsonObjectBuilder json = Json.createObjectBuilder() + .add("description", "my description") + .add("directoryLabel", "/data/subdir1/") + .add("categories", Json.createArrayBuilder() + .add("Data") + ); + JsonObject jsonObj = json.build(); + String pathToFile = "src/main/webapp/resources/images/dataverse-icon-1200.png"; + Response uploadResponse = UtilIT.uploadFileViaNative(String.valueOf(datasetId), pathToFile, jsonObj, apiToken); + uploadResponse.prettyPrint(); + uploadResponse.then().assertThat() + .statusCode(OK.getStatusCode()); + Integer modifyFileId = UtilIT.getDataFileIdFromResponse(uploadResponse); + pathToFile = "src/main/webapp/resources/images/dataverseproject_logo.jpg"; + uploadResponse = UtilIT.uploadFileViaNative(String.valueOf(datasetId), pathToFile, jsonObj, apiToken); + uploadResponse.then().assertThat() + .statusCode(OK.getStatusCode()); + Integer deleteFileId = UtilIT.getDataFileIdFromResponse(uploadResponse); + + pathToFile = "src/main/webapp/resources/images/fav/favicon-16x16.png"; + uploadResponse = UtilIT.uploadFileViaNative(String.valueOf(datasetId), pathToFile, jsonObj, apiToken); + uploadResponse.then().assertThat() + .statusCode(OK.getStatusCode()); + Integer replaceFileId = UtilIT.getDataFileIdFromResponse(uploadResponse); + + Response publishDataverse = UtilIT.publishDataverseViaSword(dataverseAlias, apiToken); + 
assertEquals(200, publishDataverse.getStatusCode()); + + Response publishDataset = UtilIT.publishDatasetViaNativeApi(datasetPersistentId, "major", apiToken); + assertEquals(200, publishDataset.getStatusCode()); + + // post publish update to create DRAFT version + String pathToJsonFilePostPub = "doc/sphinx-guides/source/_static/api/dataset-add-metadata-after-pub.json"; + Response addDataToPublishedVersion = UtilIT.addDatasetMetadataViaNative(datasetPersistentId, pathToJsonFilePostPub, apiToken); + addDataToPublishedVersion.then().assertThat().statusCode(OK.getStatusCode()); + + // Test adding a file + pathToFile = "src/test/resources/tab/test.tab"; + Response uploadTabularFileResponse = UtilIT.uploadFileViaNative(Integer.toString(datasetId), pathToFile, jsonObj, apiToken); + uploadTabularFileResponse.prettyPrint(); + uploadTabularFileResponse.then().assertThat() + .statusCode(OK.getStatusCode()); + Integer addedFileId = UtilIT.getDataFileIdFromResponse(uploadTabularFileResponse); + + // Ensure tabular file is ingested + sleep(2000); + + String tabularTagName = "Survey"; + Response setFileTabularTagsResponse = UtilIT.setFileTabularTags(String.valueOf(addedFileId), apiToken, List.of(tabularTagName)); + setFileTabularTagsResponse.prettyPrint(); + setFileTabularTagsResponse.then().assertThat() + .statusCode(OK.getStatusCode()); + + // Test removing a file + uploadResponse = UtilIT.deleteFile(deleteFileId, apiToken); + uploadResponse.prettyPrint(); + uploadResponse.then().assertThat() + .statusCode(NO_CONTENT.getStatusCode()); + + // Test Replacing a file + Response replaceResponse = UtilIT.replaceFile(String.valueOf(replaceFileId), "src/main/webapp/resources/images/fav/favicon-32x32.png", jsonObj, apiToken); + replaceResponse.prettyPrint(); + replaceResponse.then().assertThat() + .statusCode(OK.getStatusCode()); + + // Test modify by restricting the file + Response restrictResponse = UtilIT.restrictFile(modifyFileId.toString(), true, apiToken); + 
restrictResponse.prettyPrint(); + restrictResponse.then().assertThat() + .statusCode(OK.getStatusCode()); + + // Also test a terms of access change + String jsonLDTerms = "{\"https://dataverse.org/schema/core#fileTermsOfAccess\":{\"https://dataverse.org/schema/core#dataAccessPlace\":\"Somewhere\"}}"; + Response updateTerms = UtilIT.updateDatasetJsonLDMetadata(datasetId, apiToken, jsonLDTerms, true); + updateTerms.then().assertThat() + .statusCode(OK.getStatusCode()); + + Response compareResponse = UtilIT.compareDatasetVersions(datasetPersistentId, ":latest-published", ":draft", apiToken); + compareResponse.prettyPrint(); + compareResponse.then().assertThat() + .body("data.oldVersion.versionNumber", CoreMatchers.equalTo("1.0")) + .body("data.newVersion.versionNumber", CoreMatchers.equalTo("DRAFT")) + .body("data.metadataChanges[0].blockName", CoreMatchers.equalTo("Citation Metadata")) + .body("data.metadataChanges[0].changed[0].fieldName", CoreMatchers.equalTo("Author")) + .body("data.metadataChanges[0].changed[0].oldValue", CoreMatchers.containsString("Finch, Fiona; (Birds Inc.)")) + .body("data.metadataChanges[1].blockName", CoreMatchers.equalTo("Life Sciences Metadata")) + .body("data.metadataChanges[1].changed[0].fieldName", CoreMatchers.equalTo("Design Type")) + .body("data.metadataChanges[1].changed[0].oldValue", CoreMatchers.containsString("")) + .body("data.metadataChanges[1].changed[0].newValue", CoreMatchers.containsString("Parallel Group Design; Nested Case Control Design")) + .body("data.filesAdded[0].fileName", CoreMatchers.equalTo("test.tab")) + .body("data.filesAdded[0].filePath", CoreMatchers.equalTo("data/subdir1")) + .body("data.filesAdded[0].description", CoreMatchers.equalTo("my description")) + .body("data.filesAdded[0].tags[0]", CoreMatchers.equalTo("Survey")) + .body("data.filesRemoved[0].fileName", CoreMatchers.equalTo("dataverseproject_logo.jpg")) + .body("data.fileChanges[0].fileName", CoreMatchers.equalTo("dataverse-icon-1200.png")) + 
.body("data.fileChanges[0].changed[0].newValue", CoreMatchers.equalTo("true")) + .body("data.filesReplaced[0].oldFile.fileName", CoreMatchers.equalTo("favicon-16x16.png")) + .body("data.filesReplaced[0].newFile.fileName", CoreMatchers.equalTo("favicon-32x32.png")) + .body("data.TermsOfAccess", CoreMatchers.notNullValue()) + .statusCode(OK.getStatusCode()); + + compareResponse = UtilIT.compareDatasetVersions(datasetPersistentId, ":draft", ":latest-published", apiToken); + compareResponse.prettyPrint(); + compareResponse.then().assertThat() + .body("message", CoreMatchers.equalTo(BundleUtil.getStringFromBundle("dataset.version.compare.incorrect.order"))) + .statusCode(BAD_REQUEST.getStatusCode()); + } } diff --git a/src/test/java/edu/harvard/iq/dataverse/api/DataversesIT.java b/src/test/java/edu/harvard/iq/dataverse/api/DataversesIT.java index 9567cf3910a..13c4c30190b 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/DataversesIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/DataversesIT.java @@ -927,7 +927,8 @@ public void testListMetadataBlocks() { .body("data.size()", equalTo(1)) .body("data[0].name", is("citation")) .body("data[0].fields.title.displayOnCreate", equalTo(true)) - .body("data[0].fields.size()", is(28)); + .body("data[0].fields.size()", is(10)) + .body("data[0].fields.author.childFields.size()", is(4)); Response setMetadataBlocksResponse = UtilIT.setMetadataBlocks(dataverseAlias, Json.createArrayBuilder().add("citation").add("astrophysics"), apiToken); setMetadataBlocksResponse.then().assertThat().statusCode(OK.getStatusCode()); @@ -1007,17 +1008,23 @@ public void testListMetadataBlocks() { // Since the included property of notesText is set to false, we should retrieve the total number of fields minus one int citationMetadataBlockIndex = geospatialMetadataBlockIndex == 0 ? 
1 : 0; listMetadataBlocksResponse.then().assertThat() - .body(String.format("data[%d].fields.size()", citationMetadataBlockIndex), equalTo(79)); + .body(String.format("data[%d].fields.size()", citationMetadataBlockIndex), equalTo(34)); // Since the included property of geographicCoverage is set to false, we should retrieve the total number of fields minus one listMetadataBlocksResponse.then().assertThat() - .body(String.format("data[%d].fields.size()", geospatialMetadataBlockIndex), equalTo(10)); + .body(String.format("data[%d].fields.size()", geospatialMetadataBlockIndex), equalTo(2)); + + listMetadataBlocksResponse = UtilIT.getMetadataBlock("geospatial"); - String actualGeospatialMetadataField1 = listMetadataBlocksResponse.then().extract().path(String.format("data[%d].fields.geographicCoverage.name", geospatialMetadataBlockIndex)); - String actualGeospatialMetadataField2 = listMetadataBlocksResponse.then().extract().path(String.format("data[%d].fields.country.name", geospatialMetadataBlockIndex)); - String actualGeospatialMetadataField3 = listMetadataBlocksResponse.then().extract().path(String.format("data[%d].fields.city.name", geospatialMetadataBlockIndex)); + String actualGeospatialMetadataField1 = listMetadataBlocksResponse.then().extract().path(String.format("data.fields['geographicCoverage'].name")); + String actualGeospatialMetadataField2 = listMetadataBlocksResponse.then().extract().path(String.format("data.fields['geographicCoverage'].childFields['country'].name")); + String actualGeospatialMetadataField3 = listMetadataBlocksResponse.then().extract().path(String.format("data.fields['geographicCoverage'].childFields['city'].name")); + + listMetadataBlocksResponse.then().assertThat().statusCode(OK.getStatusCode()) + .body("data.fields['geographicCoverage'].childFields.size()", equalTo(4)) + .body("data.fields['geographicBoundingBox'].childFields.size()", equalTo(4)); - assertNull(actualGeospatialMetadataField1); + 
assertNotNull(actualGeospatialMetadataField1); assertNotNull(actualGeospatialMetadataField2); assertNotNull(actualGeospatialMetadataField3); @@ -1040,21 +1047,21 @@ public void testListMetadataBlocks() { geospatialMetadataBlockIndex = actualMetadataBlockDisplayName2.equals("Geospatial Metadata") ? 1 : 0; listMetadataBlocksResponse.then().assertThat() - .body(String.format("data[%d].fields.size()", geospatialMetadataBlockIndex), equalTo(1)); + .body(String.format("data[%d].fields.size()", geospatialMetadataBlockIndex), equalTo(0)); - actualGeospatialMetadataField1 = listMetadataBlocksResponse.then().extract().path(String.format("data[%d].fields.geographicCoverage.name", geospatialMetadataBlockIndex)); - actualGeospatialMetadataField2 = listMetadataBlocksResponse.then().extract().path(String.format("data[%d].fields.country.name", geospatialMetadataBlockIndex)); - actualGeospatialMetadataField3 = listMetadataBlocksResponse.then().extract().path(String.format("data[%d].fields.city.name", geospatialMetadataBlockIndex)); +// actualGeospatialMetadataField1 = listMetadataBlocksResponse.then().extract().path(String.format("data[%d].fields.geographicCoverage.name", geospatialMetadataBlockIndex)); +// actualGeospatialMetadataField2 = listMetadataBlocksResponse.then().extract().path(String.format("data[%d].fields.geographicCoverage.childFields['country'].name", geospatialMetadataBlockIndex)); +// actualGeospatialMetadataField3 = listMetadataBlocksResponse.then().extract().path(String.format("data[%d].fields.geographicCoverage.childFields['city'].name", geospatialMetadataBlockIndex)); - assertNull(actualGeospatialMetadataField1); - assertNotNull(actualGeospatialMetadataField2); - assertNull(actualGeospatialMetadataField3); +// assertNull(actualGeospatialMetadataField1); +// assertNotNull(actualGeospatialMetadataField2); +// assertNull(actualGeospatialMetadataField3); citationMetadataBlockIndex = geospatialMetadataBlockIndex == 0 ? 
1 : 0; // notesText has displayOnCreate=true but has include=false, so should not be retrieved String notesTextCitationMetadataField = listMetadataBlocksResponse.then().extract().path(String.format("data[%d].fields.notesText.name", citationMetadataBlockIndex)); - assertNull(notesTextCitationMetadataField); + assertNotNull(notesTextCitationMetadataField); // producerName is a conditionally required field, so should not be retrieved String producerNameCitationMetadataField = listMetadataBlocksResponse.then().extract().path(String.format("data[%d].fields.producerName.name", citationMetadataBlockIndex)); @@ -1083,6 +1090,16 @@ public void testListMetadataBlocks() { .body("data[0].displayName", equalTo("Citation Metadata")) .body("data[0].fields", not(equalTo(null))) .body("data.size()", equalTo(1)); + + // Checking child / parent logic + listMetadataBlocksResponse = UtilIT.getMetadataBlock("citation"); + listMetadataBlocksResponse.then().assertThat().statusCode(OK.getStatusCode()); + listMetadataBlocksResponse.then().assertThat() + .statusCode(OK.getStatusCode()) + .body("data.displayName", equalTo("Citation Metadata")) + .body("data.fields", not(equalTo(null))) + .body("data.fields.otherIdAgency", equalTo(null)) + .body("data.fields.otherId.childFields.size()", equalTo(2)); } @Test @@ -1379,6 +1396,48 @@ public void testUpdateDataverse() { Response getDataverseResponse = UtilIT.listDataverseFacets(oldDataverseAlias, apiToken); getDataverseResponse.then().assertThat().statusCode(NOT_FOUND.getStatusCode()); + // Update the dataverse without setting metadata blocks, facets, or input levels + updateDataverseResponse = UtilIT.updateDataverse( + newAlias, + newAlias, + newName, + newAffiliation, + newDataverseType, + newContactEmails, + null, + null, + null, + apiToken + ); + updateDataverseResponse.then().assertThat().statusCode(OK.getStatusCode()); + + // Assert that the metadata blocks are inherited from the parent + listMetadataBlocksResponse = 
UtilIT.listMetadataBlocks(newAlias, false, false, apiToken); + listMetadataBlocksResponse + .then().assertThat() + .statusCode(OK.getStatusCode()) + .body("data.size()", equalTo(1)) + .body("data[0].name", equalTo("citation")); + + // Assert that the facets are inherited from the parent + String[] rootFacetIds = new String[]{"authorName", "subject", "keywordValue", "dateOfDeposit"}; + listDataverseFacetsResponse = UtilIT.listDataverseFacets(newAlias, apiToken); + String actualFacetName1 = listDataverseFacetsResponse.then().extract().path("data[0]"); + String actualFacetName2 = listDataverseFacetsResponse.then().extract().path("data[1]"); + String actualFacetName3 = listDataverseFacetsResponse.then().extract().path("data[2]"); + String actualFacetName4 = listDataverseFacetsResponse.then().extract().path("data[3]"); + assertThat(rootFacetIds, hasItemInArray(actualFacetName1)); + assertThat(rootFacetIds, hasItemInArray(actualFacetName2)); + assertThat(rootFacetIds, hasItemInArray(actualFacetName3)); + assertThat(rootFacetIds, hasItemInArray(actualFacetName4)); + + // Assert that the dataverse should not have any input level + listDataverseInputLevelsResponse = UtilIT.listDataverseInputLevels(newAlias, apiToken); + listDataverseInputLevelsResponse + .then().assertThat() + .statusCode(OK.getStatusCode()) + .body("data.size()", equalTo(0)); + // Should return error when the dataverse to edit does not exist updateDataverseResponse = UtilIT.updateDataverse( "unexistingDataverseAlias", diff --git a/src/test/java/edu/harvard/iq/dataverse/api/FilesIT.java b/src/test/java/edu/harvard/iq/dataverse/api/FilesIT.java index e3c26284d55..98107eca33a 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/FilesIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/FilesIT.java @@ -2275,7 +2275,6 @@ public void testDeleteFile() { Response v1 = UtilIT.getDatasetVersion(datasetPid, "1.0", apiToken); v1.prettyPrint(); v1.then().assertThat() - .body("data.files[0].dataFile.filename", 
equalTo("cc0.png")) .statusCode(OK.getStatusCode()); Map v1files1 = with(v1.body().asString()).param("fileToFind", "cc0.png") @@ -2290,7 +2289,6 @@ public void testDeleteFile() { Response postv1draft2 = UtilIT.getDatasetVersion(datasetPid, DS_VERSION_DRAFT, apiToken); postv1draft2.prettyPrint(); postv1draft2.then().assertThat() - .body("data.files[0].dataFile.filename", equalTo("orcid_16x16.png")) .statusCode(OK.getStatusCode()); Map v1files2 = with(postv1draft2.body().asString()).param("fileToFind", "orcid_16x16.png") diff --git a/src/test/java/edu/harvard/iq/dataverse/api/InfoIT.java b/src/test/java/edu/harvard/iq/dataverse/api/InfoIT.java index 5e436dd0e98..b198d2769a0 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/InfoIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/InfoIT.java @@ -6,11 +6,14 @@ import org.junit.jupiter.api.AfterAll; import org.junit.jupiter.api.BeforeAll; import org.junit.jupiter.api.Test; -import static jakarta.ws.rs.core.Response.Status.BAD_REQUEST; import static jakarta.ws.rs.core.Response.Status.NOT_FOUND; import static jakarta.ws.rs.core.Response.Status.OK; +import java.io.IOException; +import java.nio.charset.StandardCharsets; +import java.nio.file.Paths; import static org.hamcrest.CoreMatchers.equalTo; import static org.hamcrest.CoreMatchers.notNullValue; +import org.skyscreamer.jsonassert.JSONAssert; public class InfoIT { @@ -81,6 +84,22 @@ public void testGetZipDownloadLimit() { .body("data", notNullValue()); } + @Test + public void testGetExportFormats() throws IOException { + Response response = given().urlEncodingEnabled(false) + .get("/api/info/exportFormats"); + response.prettyPrint(); + response.then().assertThat().statusCode(OK.getStatusCode()); + + String actual = response.getBody().asString(); + String expected = + java.nio.file.Files.readString( + Paths.get("src/test/resources/json/export-formats.json"), + StandardCharsets.UTF_8); + JSONAssert.assertEquals(expected, actual, true); + + } + private void 
testSettingEndpoint(SettingsServiceBean.Key settingKey, String testSettingValue) { String endpoint = "/api/info/settings/" + settingKey; diff --git a/src/test/java/edu/harvard/iq/dataverse/api/MetadataBlocksIT.java b/src/test/java/edu/harvard/iq/dataverse/api/MetadataBlocksIT.java index 6e7061961f0..3b0b56740eb 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/MetadataBlocksIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/MetadataBlocksIT.java @@ -1,6 +1,7 @@ package edu.harvard.iq.dataverse.api; import io.restassured.RestAssured; + import io.restassured.response.Response; import org.hamcrest.CoreMatchers; import org.junit.jupiter.api.BeforeAll; @@ -9,6 +10,7 @@ import static jakarta.ws.rs.core.Response.Status.CREATED; import static jakarta.ws.rs.core.Response.Status.OK; import static org.hamcrest.CoreMatchers.equalTo; +import static org.hamcrest.CoreMatchers.is; import static org.hamcrest.CoreMatchers.not; import static org.junit.jupiter.api.Assertions.assertEquals; import static org.junit.jupiter.api.Assumptions.assumeFalse; @@ -42,22 +44,27 @@ void testListMetadataBlocks() { // returnDatasetFieldTypes=true listMetadataBlocksResponse = UtilIT.listMetadataBlocks(false, true); - int expectedNumberOfMetadataFields = 80; + int expectedNumberOfMetadataFields = 35; + listMetadataBlocksResponse.prettyPrint(); listMetadataBlocksResponse.then().assertThat() .statusCode(OK.getStatusCode()) .body("data[0].fields", not(equalTo(null))) .body("data[0].fields.size()", equalTo(expectedNumberOfMetadataFields)) - .body("data.size()", equalTo(expectedDefaultNumberOfMetadataBlocks)); + .body("data.size()", equalTo(expectedDefaultNumberOfMetadataBlocks)) + .body("data[1].fields.geographicCoverage.childFields.size()", is(4)) + .body("data[0].fields.publication.childFields.size()", is(5)); // onlyDisplayedOnCreate=true and returnDatasetFieldTypes=true listMetadataBlocksResponse = UtilIT.listMetadataBlocks(true, true); - expectedNumberOfMetadataFields = 28; + 
listMetadataBlocksResponse.prettyPrint(); + expectedNumberOfMetadataFields = 10; listMetadataBlocksResponse.then().assertThat() .statusCode(OK.getStatusCode()) .body("data[0].fields", not(equalTo(null))) .body("data[0].fields.size()", equalTo(expectedNumberOfMetadataFields)) .body("data[0].displayName", equalTo("Citation Metadata")) - .body("data.size()", equalTo(expectedOnlyDisplayedOnCreateNumberOfMetadataBlocks)); + .body("data.size()", equalTo(expectedOnlyDisplayedOnCreateNumberOfMetadataBlocks)) + .body("data[0].fields.author.childFields.size()", is(4)); } @Test diff --git a/src/test/java/edu/harvard/iq/dataverse/api/SavedSearchIT.java b/src/test/java/edu/harvard/iq/dataverse/api/SavedSearchIT.java index 90357596c25..08ebec31cd6 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/SavedSearchIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/SavedSearchIT.java @@ -22,7 +22,7 @@ public class SavedSearchIT { @BeforeAll public static void setUpClass() { - + RestAssured.baseURI = UtilIT.getRestAssuredBaseUri(); } @AfterAll @@ -53,81 +53,55 @@ public void testSavedSearches() { Integer datasetId2 = UtilIT.getDatasetIdFromResponse(createDatasetResponse2); // missing body - Response resp = RestAssured.given() - .contentType("application/json") - .post("/api/admin/savedsearches"); + Response resp = UtilIT.setSavedSearch(); resp.prettyPrint(); resp.then().assertThat() .statusCode(INTERNAL_SERVER_ERROR.getStatusCode()); // creatorId null - resp = RestAssured.given() - .body(createSavedSearchJson("*", null, dataverseId, "subject_ss:Medicine, Health and Life Sciences")) - .contentType("application/json") - .post("/api/admin/savedsearches"); + resp = UtilIT.setSavedSearch(createSavedSearchJson("*", null, dataverseId, "subject_ss:Medicine, Health and Life Sciences")); resp.prettyPrint(); resp.then().assertThat() .statusCode(BAD_REQUEST.getStatusCode()); // creatorId string - resp = RestAssured.given() - .body(createSavedSearchJson("*", "1", dataverseId.toString(), 
"subject_ss:Medicine, Health and Life Sciences")) - .contentType("application/json") - .post("/api/admin/savedsearches"); + resp = UtilIT.setSavedSearch(createSavedSearchJson("*", "1", dataverseId.toString(), "subject_ss:Medicine, Health and Life Sciences")); resp.prettyPrint(); resp.then().assertThat() .statusCode(BAD_REQUEST.getStatusCode()); // creatorId not found - resp = RestAssured.given() - .body(createSavedSearchJson("*", 9999, dataverseId, "subject_ss:Medicine, Health and Life Sciences")) - .contentType("application/json") - .post("/api/admin/savedsearches"); + resp = UtilIT.setSavedSearch(createSavedSearchJson("*", 9999, dataverseId, "subject_ss:Medicine, Health and Life Sciences")); resp.prettyPrint(); resp.then().assertThat() .statusCode(NOT_FOUND.getStatusCode()); // definitionPointId null - resp = RestAssured.given() - .body(createSavedSearchJson("*", 1, null, "subject_ss:Medicine, Health and Life Sciences")) - .contentType("application/json") - .post("/api/admin/savedsearches"); + resp = UtilIT.setSavedSearch(createSavedSearchJson("*", 1, null, "subject_ss:Medicine, Health and Life Sciences")); resp.prettyPrint(); resp.then().assertThat() .statusCode(BAD_REQUEST.getStatusCode()); // definitionPointId string - resp = RestAssured.given() - .body(createSavedSearchJson("*", "1", "9999", "subject_ss:Medicine, Health and Life Sciences")) - .contentType("application/json") - .post("/api/admin/savedsearches"); + resp = UtilIT.setSavedSearch(createSavedSearchJson("*", "1", "9999", "subject_ss:Medicine, Health and Life Sciences")); resp.prettyPrint(); resp.then().assertThat() .statusCode(BAD_REQUEST.getStatusCode()); // definitionPointId not found - resp = RestAssured.given() - .body(createSavedSearchJson("*", 1, 9999, "subject_ss:Medicine, Health and Life Sciences")) - .contentType("application/json") - .post("/api/admin/savedsearches"); + resp = UtilIT.setSavedSearch(createSavedSearchJson("*", 1, 9999, "subject_ss:Medicine, Health and Life Sciences")); 
resp.prettyPrint(); resp.then().assertThat() .statusCode(NOT_FOUND.getStatusCode()); // missing filter - resp = RestAssured.given() - .body(createSavedSearchJson("*", 1, dataverseId)) - .contentType("application/json") - .post("/api/admin/savedsearches"); + resp = UtilIT.setSavedSearch(createSavedSearchJson("*", 1, dataverseId)); resp.prettyPrint(); resp.then().assertThat() .statusCode(OK.getStatusCode()); // create a saved search as superuser : OK - resp = RestAssured.given() - .body(createSavedSearchJson("*", 1, dataverseId, "subject_ss:Medicine, Health and Life Sciences")) - .contentType("application/json") - .post("/api/admin/savedsearches"); + resp = UtilIT.setSavedSearch(createSavedSearchJson("*", 1, dataverseId, "subject_ss:Medicine, Health and Life Sciences")); resp.prettyPrint(); resp.then().assertThat() .statusCode(OK.getStatusCode()); @@ -136,8 +110,7 @@ public void testSavedSearches() { Integer createdSavedSearchId = path.getInt("data.id"); // get list as non superuser : OK - Response getListReponse = RestAssured.given() - .get("/api/admin/savedsearches/list"); + Response getListReponse = UtilIT.getSavedSearchList(); getListReponse.prettyPrint(); getListReponse.then().assertThat() .statusCode(OK.getStatusCode()); @@ -146,22 +119,19 @@ public void testSavedSearches() { List listBeforeDelete = path2.getList("data.savedSearches"); // makelinks/all as non superuser : OK - Response makelinksAll = RestAssured.given() - .put("/api/admin/savedsearches/makelinks/all"); + Response makelinksAll = UtilIT.setSavedSearchMakelinksAll(); makelinksAll.prettyPrint(); makelinksAll.then().assertThat() .statusCode(OK.getStatusCode()); //delete a saved search as non superuser : OK - Response deleteReponse = RestAssured.given() - .delete("/api/admin/savedsearches/" + createdSavedSearchId); + Response deleteReponse = UtilIT.deleteSavedSearchById(createdSavedSearchId); deleteReponse.prettyPrint(); deleteReponse.then().assertThat() .statusCode(OK.getStatusCode()); // check list 
count minus 1 - getListReponse = RestAssured.given() - .get("/api/admin/savedsearches/list"); + getListReponse = UtilIT.getSavedSearchList(); getListReponse.prettyPrint(); JsonPath path3 = JsonPath.from(getListReponse.body().asString()); List listAfterDelete = path3.getList("data.savedSearches"); diff --git a/src/test/java/edu/harvard/iq/dataverse/api/SearchIT.java b/src/test/java/edu/harvard/iq/dataverse/api/SearchIT.java index b03c23cd1e2..c97762526b0 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/SearchIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/SearchIT.java @@ -4,6 +4,9 @@ import io.restassured.path.json.JsonPath; import io.restassured.response.Response; import edu.harvard.iq.dataverse.settings.SettingsServiceBean; + +import java.util.List; +import java.util.UUID; import java.util.logging.Level; import java.util.logging.Logger; import jakarta.json.Json; @@ -29,6 +32,7 @@ import jakarta.json.JsonObjectBuilder; import static jakarta.ws.rs.core.Response.Status.*; +import static java.lang.Thread.sleep; import static org.junit.jupiter.api.Assertions.assertEquals; import static org.junit.jupiter.api.Assertions.assertFalse; import static org.junit.jupiter.api.Assertions.assertNotEquals; @@ -175,6 +179,7 @@ public void testSearchCitation() { Response createDatasetResponse = UtilIT.createRandomDatasetViaNativeApi(dataverseAlias, apiToken); createDatasetResponse.prettyPrint(); Integer datasetId = UtilIT.getDatasetIdFromResponse(createDatasetResponse); + String datasetPersistentId = UtilIT.getDatasetPersistentIdFromResponse(createDatasetResponse); Response searchResponse = UtilIT.search("id:dataset_" + datasetId + "_draft", apiToken); searchResponse.prettyPrint(); @@ -185,20 +190,49 @@ public void testSearchCitation() { .body("data.items[0].citationHtml", Matchers.containsString("href")) .statusCode(200); - Response deleteDatasetResponse = UtilIT.deleteDatasetViaNativeApi(datasetId, apiToken); - deleteDatasetResponse.prettyPrint(); - 
deleteDatasetResponse.then().assertThat() + String pathToFile = "src/main/webapp/resources/images/dataverseproject.png"; + Response uploadImage = UtilIT.uploadFileViaNative(datasetId.toString(), pathToFile, apiToken); + uploadImage.prettyPrint(); + uploadImage.then().assertThat() + .statusCode(200); + + Response publishResponse = UtilIT.publishDataverseViaNativeApi(dataverseAlias, apiToken); + publishResponse.prettyPrint(); + publishResponse.then().assertThat() + .statusCode(OK.getStatusCode()); + publishResponse = UtilIT.publishDatasetViaNativeApi(datasetId, "major", apiToken); + publishResponse.prettyPrint(); + publishResponse.then().assertThat() + .statusCode(OK.getStatusCode()); - Response deleteDataverseResponse = UtilIT.deleteDataverse(dataverseAlias, apiToken); - deleteDataverseResponse.prettyPrint(); - deleteDataverseResponse.then().assertThat() + Response updateTitleResponseAuthor = UtilIT.updateDatasetTitleViaSword(datasetPersistentId, "New Title", apiToken); + updateTitleResponseAuthor.prettyPrint(); + updateTitleResponseAuthor.then().assertThat() + .statusCode(OK.getStatusCode()); - Response deleteUserResponse = UtilIT.deleteUser(username); - deleteUserResponse.prettyPrint(); - assertEquals(200, deleteUserResponse.getStatusCode()); + // search descending will get the latest 100. + // This could fail if more than 100 get created between our update and the search. Highly unlikely + searchResponse = UtilIT.search("*&type=file&sort=date&order=desc&per_page=100&start=0&subtree=root" , apiToken); + searchResponse.prettyPrint(); + int i=0; + String parentCitation = ""; + String datasetName = ""; + // most likely ours is in index 0, but it's not a guarantee. 
+ while (i < 100) { + String dataset_persistent_id = searchResponse.body().jsonPath().getString("data.items[" + i + "].dataset_persistent_id"); + if (datasetPersistentId.equalsIgnoreCase(dataset_persistent_id)) { + parentCitation = searchResponse.body().jsonPath().getString("data.items[" + i + "].dataset_citation"); + datasetName = searchResponse.body().jsonPath().getString("data.items[" + i + "].dataset_name"); + break; + } + i++; + } + // see https://github.com/IQSS/dataverse/issues/10735 + // was showing the citation of the draft version and not the released parent + assertFalse(parentCitation.contains("New Title")); + assertTrue(parentCitation.contains(datasetName)); + assertFalse(parentCitation.contains("DRAFT")); } @Test @@ -1284,7 +1318,7 @@ public static void cleanup() { } @Test - public void testSearchFilesAndUrlImages() { + public void testSearchFilesAndUrlImages() throws InterruptedException { Response createUser = UtilIT.createRandomUser(); createUser.prettyPrint(); String username = UtilIT.getUsernameFromResponse(createUser); @@ -1300,8 +1334,12 @@ public void testSearchFilesAndUrlImages() { System.out.println("id: " + datasetId); String datasetPid = JsonPath.from(createDatasetResponse.getBody().asString()).getString("data.persistentId"); System.out.println("datasetPid: " + datasetPid); - String pathToFile = "src/main/webapp/resources/images/dataverseproject.png"; + Response logoResponse = UtilIT.uploadDatasetLogo(datasetPid, pathToFile, apiToken); + logoResponse.prettyPrint(); + logoResponse.then().assertThat() + .statusCode(200); + Response uploadImage = UtilIT.uploadFileViaNative(datasetId.toString(), pathToFile, apiToken); uploadImage.prettyPrint(); uploadImage.then().assertThat() @@ -1311,6 +1349,23 @@ public void testSearchFilesAndUrlImages() { uploadFile.prettyPrint(); uploadFile.then().assertThat() .statusCode(200); + pathToFile = "src/test/resources/tab/test.tab"; + String searchableUniqueId = "testtab"+ 
UUID.randomUUID().toString().substring(0, 8); // so the search only returns 1 file + JsonObjectBuilder json = Json.createObjectBuilder() + .add("description", searchableUniqueId) + .add("restrict", "true") + .add("categories", Json.createArrayBuilder().add("Data")); + Response uploadTabFile = UtilIT.uploadFileViaNative(datasetId.toString(), pathToFile, json.build(), apiToken); + uploadTabFile.prettyPrint(); + uploadTabFile.then().assertThat() + .statusCode(200); + // Ensure tabular file is ingested + sleep(2000); + // Set tabular tags + String tabularFileId = uploadTabFile.getBody().jsonPath().getString("data.files[0].dataFile.id"); + List testTabularTags = List.of("Survey", "Genomics"); + Response setFileTabularTagsResponse = UtilIT.setFileTabularTags(tabularFileId, apiToken, testTabularTags); + setFileTabularTagsResponse.then().assertThat().statusCode(OK.getStatusCode()); Response publishDataverse = UtilIT.publishDataverseViaSword(dataverseAlias, apiToken); publishDataverse.prettyPrint(); @@ -1339,6 +1394,13 @@ public void testSearchFilesAndUrlImages() { .body("data.items[0].url", CoreMatchers.containsString("/dataverse/")) .body("data.items[0]", CoreMatchers.not(CoreMatchers.hasItem("image_url"))); + searchResp = UtilIT.search(datasetPid, apiToken); + searchResp.prettyPrint(); + searchResp.then().assertThat() + .statusCode(OK.getStatusCode()) + .body("data.items[0].type", CoreMatchers.is("dataset")) + .body("data.items[0].image_url", CoreMatchers.containsString("/logo")); + searchResp = UtilIT.search("mydata", apiToken); searchResp.prettyPrint(); searchResp.then().assertThat() @@ -1346,5 +1408,78 @@ public void testSearchFilesAndUrlImages() { .body("data.items[0].type", CoreMatchers.is("file")) .body("data.items[0].url", CoreMatchers.containsString("/datafile/")) .body("data.items[0]", CoreMatchers.not(CoreMatchers.hasItem("image_url"))); + searchResp = UtilIT.search(searchableUniqueId, apiToken); + searchResp.prettyPrint(); + searchResp.then().assertThat() + 
.statusCode(OK.getStatusCode()) + .body("data.items[0].type", CoreMatchers.is("file")) + .body("data.items[0].url", CoreMatchers.containsString("/datafile/")) + .body("data.items[0].variables", CoreMatchers.is(3)) + .body("data.items[0].observations", CoreMatchers.is(10)) + .body("data.items[0].restricted", CoreMatchers.is(true)) + .body("data.items[0].canDownloadFile", CoreMatchers.is(true)) + .body("data.items[0].tabularTags", CoreMatchers.hasItem("Genomics")) + .body("data.items[0]", CoreMatchers.not(CoreMatchers.hasItem("image_url"))); + } + + @Test + public void testShowTypeCounts() { + //Create 1 user and 1 Dataverse/Collection + Response createUser = UtilIT.createRandomUser(); + String username = UtilIT.getUsernameFromResponse(createUser); + String apiToken = UtilIT.getApiTokenFromResponse(createUser); + String affiliation = "testAffiliation"; + + // test total_count_per_object_type is not included because the results are empty + Response searchResp = UtilIT.search(username, apiToken, "&show_type_counts=true"); + searchResp.then().assertThat() + .statusCode(OK.getStatusCode()) + .body("data.total_count_per_object_type", CoreMatchers.equalTo(null)); + + Response createDataverseResponse = UtilIT.createRandomDataverse(apiToken, affiliation); + assertEquals(201, createDataverseResponse.getStatusCode()); + String dataverseAlias = UtilIT.getAliasFromResponse(createDataverseResponse); + + // create 3 Datasets, each with 2 Datafiles + for (int i = 0; i < 3; i++) { + Response createDatasetResponse = UtilIT.createRandomDatasetViaNativeApi(dataverseAlias, apiToken); + createDatasetResponse.then().assertThat() + .statusCode(CREATED.getStatusCode()); + String datasetId = UtilIT.getDatasetIdFromResponse(createDatasetResponse).toString(); + + // putting the dataverseAlias in the description of each file so the search q={dataverseAlias} will return dataverse, dataset, and files for this test only + String jsonAsString = "{\"description\":\"" + dataverseAlias + 
"\",\"directoryLabel\":\"data/subdir1\",\"categories\":[\"Data\"], \"restrict\":\"false\" }"; + + String pathToFile = "src/main/webapp/resources/images/dataverseproject.png"; + Response uploadImage = UtilIT.uploadFileViaNative(datasetId, pathToFile, jsonAsString, apiToken); + uploadImage.then().assertThat() + .statusCode(200); + pathToFile = "src/main/webapp/resources/js/mydata.js"; + Response uploadFile = UtilIT.uploadFileViaNative(datasetId, pathToFile, jsonAsString, apiToken); + uploadFile.then().assertThat() + .statusCode(200); + + // This call forces a wait for dataset indexing to finish and gives time for file uploads to complete + UtilIT.search("id:dataset_" + datasetId, apiToken); + } + + // Test Search without show_type_counts + searchResp = UtilIT.search(dataverseAlias, apiToken); + searchResp.then().assertThat() + .statusCode(OK.getStatusCode()) + .body("data.total_count_per_object_type", CoreMatchers.equalTo(null)); + // Test Search with show_type_counts = FALSE + searchResp = UtilIT.search(dataverseAlias, apiToken, "&show_type_counts=false"); + searchResp.then().assertThat() + .statusCode(OK.getStatusCode()) + .body("data.total_count_per_object_type", CoreMatchers.equalTo(null)); + // Test Search with show_type_counts = TRUE + searchResp = UtilIT.search(dataverseAlias, apiToken, "&show_type_counts=true"); + searchResp.prettyPrint(); + searchResp.then().assertThat() + .statusCode(OK.getStatusCode()) + .body("data.total_count_per_object_type.Dataverses", CoreMatchers.is(1)) + .body("data.total_count_per_object_type.Datasets", CoreMatchers.is(3)) + .body("data.total_count_per_object_type.Files", CoreMatchers.is(6)); } } diff --git a/src/test/java/edu/harvard/iq/dataverse/api/SwordIT.java b/src/test/java/edu/harvard/iq/dataverse/api/SwordIT.java index 518431bfa2d..709908ac6eb 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/SwordIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/SwordIT.java @@ -850,7 +850,7 @@ public void testDeleteFiles() { 
String citation = atomEntryDraftV2.body().xmlPath().getString("entry.bibliographicCitation"); logger.info("citation (should contain 'DRAFT'): " + citation); boolean draftStringFoundInCitation = citation.matches(".*DRAFT.*"); - assertEquals(true, draftStringFoundInCitation); + assertTrue(draftStringFoundInCitation); List oneFileLeftInV2Draft = statement3.getBody().xmlPath().getList("feed.entry.id"); logger.info("Number of files remaining in this post version 1 draft:" + oneFileLeftInV2Draft.size()); diff --git a/src/test/java/edu/harvard/iq/dataverse/api/UsersIT.java b/src/test/java/edu/harvard/iq/dataverse/api/UsersIT.java index ce3b8bf75ff..eb78a216626 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/UsersIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/UsersIT.java @@ -1,31 +1,33 @@ package edu.harvard.iq.dataverse.api; +import edu.harvard.iq.dataverse.util.BundleUtil; import io.restassured.RestAssured; + import static io.restassured.RestAssured.given; + import io.restassured.http.ContentType; import io.restassured.path.json.JsonPath; import io.restassured.response.Response; import edu.harvard.iq.dataverse.authorization.DataverseRole; import edu.harvard.iq.dataverse.settings.SettingsServiceBean; -import java.util.ArrayList; + import java.util.Arrays; import java.util.List; import java.util.UUID; + import jakarta.json.Json; import jakarta.json.JsonObjectBuilder; -import static jakarta.ws.rs.core.Response.Status.BAD_REQUEST; -import static jakarta.ws.rs.core.Response.Status.CREATED; -import static jakarta.ws.rs.core.Response.Status.NOT_FOUND; -import static jakarta.ws.rs.core.Response.Status.OK; -import static jakarta.ws.rs.core.Response.Status.UNAUTHORIZED; + +import static jakarta.ws.rs.core.Response.Status.*; +import static org.hamcrest.Matchers.notNullValue; import static org.junit.jupiter.api.Assertions.assertEquals; import static org.hamcrest.CoreMatchers.containsString; import static org.hamcrest.CoreMatchers.equalTo; -import static 
org.hamcrest.Matchers.contains; import static org.junit.jupiter.api.Assertions.assertTrue; import org.hamcrest.CoreMatchers; import org.junit.jupiter.api.BeforeAll; +import org.junit.jupiter.api.Disabled; import org.junit.jupiter.api.Test; public class UsersIT { @@ -515,6 +517,177 @@ public void testDeleteAuthenticatedUser() { } + @Test + // This test is disabled because it is only compatible with the containerized development environment and would cause the Jenkins job to fail. + @Disabled + public void testRegisterOIDCUser() { + // Set Up - Get the admin access token from the OIDC provider + Response adminOidcLoginResponse = UtilIT.performKeycloakROPCLogin("admin", "admin"); + adminOidcLoginResponse.then().assertThat() + .statusCode(OK.getStatusCode()) + .body("access_token", notNullValue()); + String adminOidcAccessToken = adminOidcLoginResponse.jsonPath().getString("access_token"); + + // Set Up - Create random user in the OIDC provider without some necessary claims (email, firstName and lastName) + String randomUsername = UUID.randomUUID().toString().substring(0, 8); + + String newKeycloakUserWithoutClaimsJson = "{" + + "\"username\":\"" + randomUsername + "\"," + + "\"enabled\":true," + + "\"credentials\":[" + + " {" + + " \"type\":\"password\"," + + " \"value\":\"password\"," + + " \"temporary\":false" + + " }" + + "]" + + "}"; + + Response createKeycloakOidcUserResponse = UtilIT.createKeycloakUser(adminOidcAccessToken, newKeycloakUserWithoutClaimsJson); + createKeycloakOidcUserResponse.then().assertThat().statusCode(CREATED.getStatusCode()); + + Response newUserOidcLoginResponse = UtilIT.performKeycloakROPCLogin(randomUsername, "password"); + String userWithoutClaimsAccessToken = newUserOidcLoginResponse.jsonPath().getString("access_token"); + + // Set Up - Create a second random user in the OIDC provider with all necessary claims (including email, firstName and lastName) + randomUsername = UUID.randomUUID().toString().substring(0, 8); + String email = 
randomUsername + "@dataverse.org"; + String firstName = "John"; + String lastName = "Doe"; + + String newKeycloakUserWithClaimsJson = "{" + + "\"username\":\"" + randomUsername + "\"," + + "\"enabled\":true," + + "\"email\":\"" + email + "\"," + + "\"firstName\":\"" + firstName + "\"," + + "\"lastName\":\"" + lastName + "\"," + + "\"credentials\":[" + + " {" + + " \"type\":\"password\"," + + " \"value\":\"password\"," + + " \"temporary\":false" + + " }" + + "]" + + "}"; + + Response createKeycloakOidcUserWithClaimsResponse = UtilIT.createKeycloakUser(adminOidcAccessToken, newKeycloakUserWithClaimsJson); + createKeycloakOidcUserWithClaimsResponse.then().assertThat().statusCode(CREATED.getStatusCode()); + + Response newUserWithClaimsOidcLoginResponse = UtilIT.performKeycloakROPCLogin(randomUsername, "password"); + String userWithClaimsAccessToken = newUserWithClaimsOidcLoginResponse.jsonPath().getString("access_token"); + + // Should return error when empty token is passed + Response registerOidcUserResponse = UtilIT.registerOidcUser( + "{}", + "" + ); + registerOidcUserResponse.then().assertThat() + .statusCode(BAD_REQUEST.getStatusCode()) + .body("message", equalTo(BundleUtil.getStringFromBundle("users.api.errors.bearerTokenRequired"))); + + // Should return error when a malformed User JSON is sent + registerOidcUserResponse = UtilIT.registerOidcUser( + "{{{user:abcde}", + "Bearer testBearerToken" + ); + registerOidcUserResponse.then().assertThat() + .statusCode(BAD_REQUEST.getStatusCode()) + .body("message", equalTo("Error parsing the POSTed User json: Invalid token=CURLYOPEN at (line no=1, column no=2, offset=1). 
Expected tokens are: [STRING]")); + + // Should return error when the provided User JSON is valid but the provided Bearer token is invalid + registerOidcUserResponse = UtilIT.registerOidcUser( + "{" + + "\"termsAccepted\":true" + + "}", + "Bearer testBearerToken" + ); + registerOidcUserResponse.then().assertThat() + .statusCode(UNAUTHORIZED.getStatusCode()) + .body("message", equalTo("Unauthorized bearer token.")); + + // Should return an error when the termsAccepted field is missing in the User JSON + registerOidcUserResponse = UtilIT.registerOidcUser( + "{" + + "\"affiliation\":\"YourAffiliation\"," + + "\"position\":\"YourPosition\"" + + "}", + "Bearer testBearerToken" + ); + registerOidcUserResponse.then().assertThat() + .statusCode(BAD_REQUEST.getStatusCode()) + .body("message", equalTo("Error parsing the POSTed User json: Field 'termsAccepted' is mandatory")); + + // Should return an error when the Bearer token is valid but required claims are missing in the IdP, needing completion from the request JSON + registerOidcUserResponse = UtilIT.registerOidcUser( + "{" + + "\"termsAccepted\":true" + + "}", + "Bearer " + userWithoutClaimsAccessToken + ); + registerOidcUserResponse.then().assertThat() + .statusCode(BAD_REQUEST.getStatusCode()) + .body("message", equalTo(BundleUtil.getStringFromBundle("registerOidcUserCommand.errors.invalidFields"))) + .body("fieldErrors.firstName", equalTo(BundleUtil.getStringFromBundle("registerOidcUserCommand.errors.provideMissingClaimsEnabled.fieldRequired", List.of("firstName")))) + .body("fieldErrors.lastName", equalTo(BundleUtil.getStringFromBundle("registerOidcUserCommand.errors.provideMissingClaimsEnabled.fieldRequired", List.of("lastName")))) + .body("fieldErrors.emailAddress", equalTo(BundleUtil.getStringFromBundle("registerOidcUserCommand.errors.provideMissingClaimsEnabled.fieldRequired", List.of("emailAddress")))); + + // Should register user when the Bearer token is valid and the provided User JSON contains the missing 
claims in the IdP + registerOidcUserResponse = UtilIT.registerOidcUser( + "{" + + "\"firstName\":\"testFirstName\"," + + "\"lastName\":\"testLastName\"," + + "\"emailAddress\":\"" + UUID.randomUUID().toString().substring(0, 8) + "@dataverse.org\"," + + "\"termsAccepted\":true" + + "}", + "Bearer " + userWithoutClaimsAccessToken + ); + registerOidcUserResponse.then().assertThat() + .statusCode(OK.getStatusCode()) + .body("data.message", equalTo("User registered.")); + + // Should return error when attempting to re-register with the same Bearer token but different User data + String newUserJson = "{" + + "\"firstName\":\"newFirstName\"," + + "\"lastName\":\"newLastName\"," + + "\"emailAddress\":\"newEmail@dataverse.com\"," + + "\"termsAccepted\":true" + + "}"; + registerOidcUserResponse = UtilIT.registerOidcUser( + newUserJson, + "Bearer " + userWithoutClaimsAccessToken + ); + registerOidcUserResponse.then().assertThat() + .statusCode(FORBIDDEN.getStatusCode()) + .body("message", equalTo("User is already registered with this token.")); + + // Should return an error when the Bearer token is valid and attempting to set JSON properties that conflict with existing claims in the IdP + registerOidcUserResponse = UtilIT.registerOidcUser( + "{" + + "\"firstName\":\"testFirstName\"," + + "\"lastName\":\"testLastName\"," + + "\"emailAddress\":\"" + UUID.randomUUID().toString().substring(0, 8) + "@dataverse.org\"," + + "\"termsAccepted\":true" + + "}", + "Bearer " + userWithClaimsAccessToken + ); + registerOidcUserResponse.then().assertThat() + .statusCode(BAD_REQUEST.getStatusCode()) + .body("fieldErrors.firstName", equalTo(BundleUtil.getStringFromBundle("registerOidcUserCommand.errors.provideMissingClaimsEnabled.fieldAlreadyPresentInProvider", List.of("firstName")))) + .body("fieldErrors.lastName", equalTo(BundleUtil.getStringFromBundle("registerOidcUserCommand.errors.provideMissingClaimsEnabled.fieldAlreadyPresentInProvider", List.of("lastName")))) + 
.body("fieldErrors.emailAddress", equalTo(BundleUtil.getStringFromBundle("registerOidcUserCommand.errors.provideMissingClaimsEnabled.fieldAlreadyPresentInProvider", List.of("emailAddress")))); + + // Should register user when the Bearer token is valid and all required claims are present in the IdP, requiring only minimal data in the User JSON + registerOidcUserResponse = UtilIT.registerOidcUser( + "{" + + "\"termsAccepted\":true" + + "}", + "Bearer " + userWithClaimsAccessToken + ); + registerOidcUserResponse.then().assertThat() + .statusCode(OK.getStatusCode()) + .body("data.message", equalTo("User registered.")); + } + private Response convertUserFromBcryptToSha1(long idOfBcryptUserToConvert, String password) { JsonObjectBuilder data = Json.createObjectBuilder(); data.add("builtinUserId", idOfBcryptUserToConvert); diff --git a/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java b/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java index 502f1ecb0a8..2b8b2ce45e3 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java @@ -24,6 +24,7 @@ import edu.harvard.iq.dataverse.api.datadeposit.SwordConfigurationImpl; import io.restassured.path.xml.XmlPath; import edu.harvard.iq.dataverse.mydata.MyDataFilterParams; +import jakarta.ws.rs.core.HttpHeaders; import org.apache.commons.lang3.StringUtils; import org.junit.jupiter.api.Test; import edu.harvard.iq.dataverse.settings.SettingsServiceBean; @@ -241,6 +242,22 @@ public static Response clearThumbnailFailureFlag(long fileId) { return response; } + public static Response auditFiles(String apiToken, Long firstId, Long lastId, String csvList) { + String params = ""; + if (firstId != null) { + params = "?firstId="+ firstId; + } + if (lastId != null) { + params = params + (params.isEmpty() ? "?" : "&") + "lastId="+ lastId; + } + if (csvList != null) { + params = params + (params.isEmpty() ? "?" 
: "&") + "datasetIdentifierList="+ csvList; + } + return given() + .header(API_TOKEN_HTTP_HEADER, apiToken) + .get("/api/admin/datafiles/auditFiles" + params); + } + private static String getAuthenticatedUserAsJsonString(String persistentUserId, String firstName, String lastName, String authenticationProviderId, String identifier) { JsonObjectBuilder builder = Json.createObjectBuilder(); builder.add("authenticationProviderId", authenticationProviderId); @@ -325,7 +342,14 @@ static Integer getDatasetIdFromResponse(Response createDatasetResponse) { logger.info("Id found in create dataset response: " + datasetId); return datasetId; } - + + static Integer getDataFileIdFromResponse(Response uploadDataFileResponse) { + JsonPath dataFile = JsonPath.from(uploadDataFileResponse.body().asString()); + int dataFileId = dataFile.getInt("data.files[0].dataFile.id"); + logger.info("Id found in upload DataFile response: " + dataFileId); + return dataFileId; + } + static Integer getSearchCountFromResponse(Response searchResponse) { JsonPath createdDataset = JsonPath.from(searchResponse.body().asString()); int searchCount = createdDataset.getInt("data.total_count"); @@ -1608,7 +1632,16 @@ static Response getDatasetVersion(String persistentId, String versionNumber, Str + persistentId + (excludeFiles ? 
"&excludeFiles=true" : "")); } - + static Response compareDatasetVersions(String persistentId, String versionNumber1, String versionNumber2, String apiToken) { + return given() + .header(API_TOKEN_HTTP_HEADER, apiToken) + .get("/api/datasets/:persistentId/versions/" + + versionNumber1 + + "/compare/" + + versionNumber2 + + "?persistentId=" + + persistentId); + } static Response getDatasetWithOwners(String persistentId, String apiToken, boolean returnOwners) { return given() .header(API_TOKEN_HTTP_HEADER, apiToken) @@ -2161,19 +2194,22 @@ static Response uploadProvFreeForm(String idOrPersistentId, JsonObject jsonObjec // return requestSpecification.delete("/api/files/" + idInPath + "/prov-freeform" + optionalQueryParam); // } static Response exportDataset(String datasetPersistentId, String exporter) { - return exportDataset(datasetPersistentId, exporter, null); + return exportDataset(datasetPersistentId, exporter, null, false); } - static Response exportDataset(String datasetPersistentId, String exporter, String apiToken) { -// http://localhost:8080/api/datasets/export?exporter=dataverse_json&persistentId=doi%3A10.5072/FK2/W6WIMQ + return exportDataset(datasetPersistentId, exporter, apiToken, false); + } + static Response exportDataset(String datasetPersistentId, String exporter, String apiToken, boolean wait) { + // Wait for the Async call to finish to get the updated data + if (wait) { + sleepForReexport(datasetPersistentId, apiToken, 10); + } RequestSpecification requestSpecification = given(); if (apiToken != null) { requestSpecification = given() .header(UtilIT.API_TOKEN_HTTP_HEADER, apiToken); } return requestSpecification - // .header(API_TOKEN_HTTP_HEADER, apiToken) - // .get("/api/datasets/:persistentId/export" + "?persistentId=" + datasetPersistentId + "&exporter=" + exporter); .get("/api/datasets/export" + "?persistentId=" + datasetPersistentId + "&exporter=" + exporter); } @@ -4131,8 +4167,37 @@ static Response setDatasetStorageDriver(Integer datasetId, 
String driverLabel, S .body(driverLabel) .put("/api/datasets/" + datasetId + "/storageDriver"); } - - + + /** GET on /api/admin/savedsearches/list */ + static Response getSavedSearchList() { + return given().get("/api/admin/savedsearches/list"); + } + + /** POST on /api/admin/savedsearches without body */ + static Response setSavedSearch() { + return given() + .contentType("application/json") + .post("/api/admin/savedsearches"); + } + + /** POST on /api/admin/savedsearches with body */ + static Response setSavedSearch(String body) { + return given() + .body(body) + .contentType("application/json") + .post("/api/admin/savedsearches"); + } + + /** PUT on /api/admin/savedsearches/makelinks/all */ + static Response setSavedSearchMakelinksAll() { + return given().put("/api/admin/savedsearches/makelinks/all"); + } + + /** DELETE on /api/admin/savedsearches/{id} with identifier */ + static Response deleteSavedSearchById(Integer id) { + return given().delete("/api/admin/savedsearches/" + id); + } + //Globus Store related - not currently used static Response getDatasetGlobusUploadParameters(Integer datasetId, String locale, String apiToken) { @@ -4241,4 +4306,57 @@ static Response deleteDatasetTypes(long doomed, String apiToken) { .delete("/api/datasets/datasetTypes/" + doomed); } + static Response registerOidcUser(String jsonIn, String bearerToken) { + return given() + .header(HttpHeaders.AUTHORIZATION, bearerToken) + .body(jsonIn) + .contentType(ContentType.JSON) + .post("/api/users/register"); + } + + /** + * Creates a new user in the development Keycloak instance. + *

    This method is specifically designed for use in the containerized Keycloak development + * environment. The configured Keycloak instance must be accessible at the specified URL. + * The method sends a request to the Keycloak Admin API to create a new user in the given realm. + * + *

    Refer to the {@code testRegisterOIDCUser()} method in the {@code UsersIT} class for an example + * of this method in action. + * + * @param bearerToken The Bearer token used for authenticating the request to the Keycloak Admin API. + * @param userJson The JSON representation of the user to be created. + * @return A {@link Response} containing the result of the user creation request. + */ + static Response createKeycloakUser(String bearerToken, String userJson) { + return given() + .contentType(ContentType.JSON) + .header(HttpHeaders.AUTHORIZATION, "Bearer " + bearerToken) + .body(userJson) + .post("http://keycloak.mydomain.com:8090/admin/realms/test/users"); + } + + /** + * Performs an OIDC login in the development Keycloak instance using the Resource Owner Password Credentials (ROPC) + * grant type to retrieve authentication tokens from a Keycloak instance. + * + *

    This method is specifically designed for use in the containerized Keycloak development + * environment. The configured Keycloak instance must be accessible at the specified URL. + * + *

    Refer to the {@code testRegisterOIDCUser()} method in the {@code UsersIT} class for an example + * of this method in action. + * + * @return A {@link Response} containing authentication tokens, including access and refresh tokens, + * if the login is successful. + */ + static Response performKeycloakROPCLogin(String username, String password) { + return given() + .contentType(ContentType.URLENC) + .formParam("client_id", "test") + .formParam("client_secret", "94XHrfNRwXsjqTqApRrwWmhDLDHpIYV8") + .formParam("username", username) + .formParam("password", password) + .formParam("grant_type", "password") + .formParam("scope", "openid") + .post("http://keycloak.mydomain.com:8090/realms/test/protocol/openid-connect/token"); + } + } diff --git a/src/test/java/edu/harvard/iq/dataverse/api/auth/ApiKeyAuthMechanismTest.java b/src/test/java/edu/harvard/iq/dataverse/api/auth/ApiKeyAuthMechanismTest.java index 486697664e6..12216819cf8 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/auth/ApiKeyAuthMechanismTest.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/auth/ApiKeyAuthMechanismTest.java @@ -84,9 +84,9 @@ public void testFindUserFromRequest_ApiKeyProvided_AnonymizedPrivateUrlUserAuthe sut.userSvc = Mockito.mock(UserServiceBean.class); ContainerRequestContext testContainerRequest = new ApiKeyContainerRequestTestFake(TEST_API_KEY, TEST_PATH); - WrappedAuthErrorResponse wrappedAuthErrorResponse = assertThrows(WrappedAuthErrorResponse.class, () -> sut.findUserFromRequest(testContainerRequest)); + WrappedUnauthorizedAuthErrorResponse wrappedUnauthorizedAuthErrorResponse = assertThrows(WrappedUnauthorizedAuthErrorResponse.class, () -> sut.findUserFromRequest(testContainerRequest)); - assertEquals(RESPONSE_MESSAGE_BAD_API_KEY, wrappedAuthErrorResponse.getMessage()); + assertEquals(RESPONSE_MESSAGE_BAD_API_KEY, wrappedUnauthorizedAuthErrorResponse.getMessage()); } @Test @@ -123,8 +123,8 @@ public void testFindUserFromRequest_ApiKeyProvided_CanNotAuthenticateUserWithAny
sut.userSvc = Mockito.mock(UserServiceBean.class); ContainerRequestContext testContainerRequest = new ApiKeyContainerRequestTestFake(TEST_API_KEY, TEST_PATH); - WrappedAuthErrorResponse wrappedAuthErrorResponse = assertThrows(WrappedAuthErrorResponse.class, () -> sut.findUserFromRequest(testContainerRequest)); + WrappedUnauthorizedAuthErrorResponse wrappedUnauthorizedAuthErrorResponse = assertThrows(WrappedUnauthorizedAuthErrorResponse.class, () -> sut.findUserFromRequest(testContainerRequest)); - assertEquals(RESPONSE_MESSAGE_BAD_API_KEY, wrappedAuthErrorResponse.getMessage()); + assertEquals(RESPONSE_MESSAGE_BAD_API_KEY, wrappedUnauthorizedAuthErrorResponse.getMessage()); } } diff --git a/src/test/java/edu/harvard/iq/dataverse/api/auth/BearerTokenAuthMechanismTest.java b/src/test/java/edu/harvard/iq/dataverse/api/auth/BearerTokenAuthMechanismTest.java index 7e1c23d26f4..ab4090eb0a0 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/auth/BearerTokenAuthMechanismTest.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/auth/BearerTokenAuthMechanismTest.java @@ -1,15 +1,13 @@ package edu.harvard.iq.dataverse.api.auth; -import com.nimbusds.oauth2.sdk.ParseException; -import com.nimbusds.oauth2.sdk.token.BearerAccessToken; import edu.harvard.iq.dataverse.UserServiceBean; import edu.harvard.iq.dataverse.api.auth.doubles.BearerTokenKeyContainerRequestTestFake; import edu.harvard.iq.dataverse.authorization.AuthenticationServiceBean; -import edu.harvard.iq.dataverse.authorization.UserRecordIdentifier; -import edu.harvard.iq.dataverse.authorization.providers.oauth2.oidc.OIDCAuthProvider; +import edu.harvard.iq.dataverse.authorization.exceptions.AuthorizationException; import edu.harvard.iq.dataverse.authorization.users.AuthenticatedUser; import edu.harvard.iq.dataverse.authorization.users.User; import edu.harvard.iq.dataverse.settings.JvmSettings; +import edu.harvard.iq.dataverse.util.BundleUtil; import edu.harvard.iq.dataverse.util.testing.JvmSetting; import 
edu.harvard.iq.dataverse.util.testing.LocalJvmSettings; import org.junit.jupiter.api.BeforeEach; @@ -18,18 +16,13 @@ import jakarta.ws.rs.container.ContainerRequestContext; -import java.io.IOException; -import java.util.Collections; -import java.util.Optional; - -import static edu.harvard.iq.dataverse.api.auth.BearerTokenAuthMechanism.*; import static org.junit.jupiter.api.Assertions.*; @LocalJvmSettings @JvmSetting(key = JvmSettings.FEATURE_FLAG, value = "true", varArgs = "api-bearer-auth") class BearerTokenAuthMechanismTest { - private static final String TEST_API_KEY = "test-api-key"; + private static final String TEST_BEARER_TOKEN = "Bearer test"; private BearerTokenAuthMechanism sut; @@ -49,119 +42,42 @@ void testFindUserFromRequest_no_token() throws WrappedAuthErrorResponse { } @Test - void testFindUserFromRequest_invalid_token() { - Mockito.when(sut.authSvc.getAuthenticationProviderIdsOfType(OIDCAuthProvider.class)).thenReturn(Collections.emptySet()); - - ContainerRequestContext testContainerRequest = new BearerTokenKeyContainerRequestTestFake("Bearer "); - WrappedAuthErrorResponse wrappedAuthErrorResponse = assertThrows(WrappedAuthErrorResponse.class, () -> sut.findUserFromRequest(testContainerRequest)); - - //then - assertEquals(INVALID_BEARER_TOKEN, wrappedAuthErrorResponse.getMessage()); - } - @Test - void testFindUserFromRequest_no_OidcProvider() { - Mockito.when(sut.authSvc.getAuthenticationProviderIdsOfType(OIDCAuthProvider.class)).thenReturn(Collections.emptySet()); - - ContainerRequestContext testContainerRequest = new BearerTokenKeyContainerRequestTestFake("Bearer " +TEST_API_KEY); - WrappedAuthErrorResponse wrappedAuthErrorResponse = assertThrows(WrappedAuthErrorResponse.class, () -> sut.findUserFromRequest(testContainerRequest)); - - //then - assertEquals(BEARER_TOKEN_DETECTED_NO_OIDC_PROVIDER_CONFIGURED, wrappedAuthErrorResponse.getMessage()); - } - - @Test - void testFindUserFromRequest_oneProvider_invalidToken_1() throws ParseException, 
IOException { - OIDCAuthProvider oidcAuthProvider = Mockito.mock(OIDCAuthProvider.class); - String providerID = "OIEDC"; - Mockito.when(oidcAuthProvider.getId()).thenReturn(providerID); - // ensure that a valid OIDCAuthProvider is available within the AuthenticationServiceBean - Mockito.when(sut.authSvc.getAuthenticationProviderIdsOfType(OIDCAuthProvider.class)).thenReturn(Collections.singleton(providerID)); - Mockito.when(sut.authSvc.getAuthenticationProvider(providerID)).thenReturn(oidcAuthProvider); - - // ensure that the OIDCAuthProvider returns a valid UserRecordIdentifier for a given Token - BearerAccessToken token = BearerAccessToken.parse("Bearer " + TEST_API_KEY); - Mockito.when(oidcAuthProvider.getUserIdentifier(token)).thenReturn(Optional.empty()); + void testFindUserFromRequest_invalid_token() throws AuthorizationException { + String testErrorMessage = "test error"; + Mockito.when(sut.authSvc.lookupUserByOIDCBearerToken(TEST_BEARER_TOKEN)).thenThrow(new AuthorizationException(testErrorMessage)); // when - ContainerRequestContext testContainerRequest = new BearerTokenKeyContainerRequestTestFake("Bearer " + TEST_API_KEY); - WrappedAuthErrorResponse wrappedAuthErrorResponse = assertThrows(WrappedAuthErrorResponse.class, () -> sut.findUserFromRequest(testContainerRequest)); + ContainerRequestContext testContainerRequest = new BearerTokenKeyContainerRequestTestFake(TEST_BEARER_TOKEN); + WrappedUnauthorizedAuthErrorResponse wrappedUnauthorizedAuthErrorResponse = assertThrows(WrappedUnauthorizedAuthErrorResponse.class, () -> sut.findUserFromRequest(testContainerRequest)); //then - assertEquals(UNAUTHORIZED_BEARER_TOKEN, wrappedAuthErrorResponse.getMessage()); + assertEquals(testErrorMessage, wrappedUnauthorizedAuthErrorResponse.getMessage()); } @Test - void testFindUserFromRequest_oneProvider_invalidToken_2() throws ParseException, IOException { - OIDCAuthProvider oidcAuthProvider = Mockito.mock(OIDCAuthProvider.class); - String providerID = "OIEDC"; - 
Mockito.when(oidcAuthProvider.getId()).thenReturn(providerID); - // ensure that a valid OIDCAuthProvider is available within the AuthenticationServiceBean - Mockito.when(sut.authSvc.getAuthenticationProviderIdsOfType(OIDCAuthProvider.class)).thenReturn(Collections.singleton(providerID)); - Mockito.when(sut.authSvc.getAuthenticationProvider(providerID)).thenReturn(oidcAuthProvider); - - // ensure that the OIDCAuthProvider returns a valid UserRecordIdentifier for a given Token - BearerAccessToken token = BearerAccessToken.parse("Bearer " + TEST_API_KEY); - Mockito.when(oidcAuthProvider.getUserIdentifier(token)).thenThrow(IOException.class); - - // when - ContainerRequestContext testContainerRequest = new BearerTokenKeyContainerRequestTestFake("Bearer " + TEST_API_KEY); - WrappedAuthErrorResponse wrappedAuthErrorResponse = assertThrows(WrappedAuthErrorResponse.class, () -> sut.findUserFromRequest(testContainerRequest)); - - //then - assertEquals(UNAUTHORIZED_BEARER_TOKEN, wrappedAuthErrorResponse.getMessage()); - } - @Test - void testFindUserFromRequest_oneProvider_validToken() throws WrappedAuthErrorResponse, ParseException, IOException { - OIDCAuthProvider oidcAuthProvider = Mockito.mock(OIDCAuthProvider.class); - String providerID = "OIEDC"; - Mockito.when(oidcAuthProvider.getId()).thenReturn(providerID); - // ensure that a valid OIDCAuthProvider is available within the AuthenticationServiceBean - Mockito.when(sut.authSvc.getAuthenticationProviderIdsOfType(OIDCAuthProvider.class)).thenReturn(Collections.singleton(providerID)); - Mockito.when(sut.authSvc.getAuthenticationProvider(providerID)).thenReturn(oidcAuthProvider); - - // ensure that the OIDCAuthProvider returns a valid UserRecordIdentifier for a given Token - UserRecordIdentifier userinfo = new UserRecordIdentifier(providerID, "KEY"); - BearerAccessToken token = BearerAccessToken.parse("Bearer " + TEST_API_KEY); - Mockito.when(oidcAuthProvider.getUserIdentifier(token)).thenReturn(Optional.of(userinfo)); - - 
// ensures that the AuthenticationServiceBean can retrieve an Authenticated user based on the UserRecordIdentifier + void testFindUserFromRequest_validToken_accountExists() throws WrappedAuthErrorResponse, AuthorizationException { AuthenticatedUser testAuthenticatedUser = new AuthenticatedUser(); - Mockito.when(sut.authSvc.lookupUser(userinfo)).thenReturn(testAuthenticatedUser); + Mockito.when(sut.authSvc.lookupUserByOIDCBearerToken(TEST_BEARER_TOKEN)).thenReturn(testAuthenticatedUser); Mockito.when(sut.userSvc.updateLastApiUseTime(testAuthenticatedUser)).thenReturn(testAuthenticatedUser); // when - ContainerRequestContext testContainerRequest = new BearerTokenKeyContainerRequestTestFake("Bearer " + TEST_API_KEY); + ContainerRequestContext testContainerRequest = new BearerTokenKeyContainerRequestTestFake(TEST_BEARER_TOKEN); User actual = sut.findUserFromRequest(testContainerRequest); //then assertEquals(testAuthenticatedUser, actual); Mockito.verify(sut.userSvc, Mockito.atLeastOnce()).updateLastApiUseTime(testAuthenticatedUser); - } - @Test - void testFindUserFromRequest_oneProvider_validToken_noAccount() throws WrappedAuthErrorResponse, ParseException, IOException { - OIDCAuthProvider oidcAuthProvider = Mockito.mock(OIDCAuthProvider.class); - String providerID = "OIEDC"; - Mockito.when(oidcAuthProvider.getId()).thenReturn(providerID); - // ensure that a valid OIDCAuthProvider is available within the AuthenticationServiceBean - Mockito.when(sut.authSvc.getAuthenticationProviderIdsOfType(OIDCAuthProvider.class)).thenReturn(Collections.singleton(providerID)); - Mockito.when(sut.authSvc.getAuthenticationProvider(providerID)).thenReturn(oidcAuthProvider); - - // ensure that the OIDCAuthProvider returns a valid UserRecordIdentifier for a given Token - UserRecordIdentifier userinfo = new UserRecordIdentifier(providerID, "KEY"); - BearerAccessToken token = BearerAccessToken.parse("Bearer " + TEST_API_KEY); - 
Mockito.when(oidcAuthProvider.getUserIdentifier(token)).thenReturn(Optional.of(userinfo)); - - // ensures that the AuthenticationServiceBean can retrieve an Authenticated user based on the UserRecordIdentifier - Mockito.when(sut.authSvc.lookupUser(userinfo)).thenReturn(null); + @Test + void testFindUserFromRequest_validToken_noAccount() throws AuthorizationException { + Mockito.when(sut.authSvc.lookupUserByOIDCBearerToken(TEST_BEARER_TOKEN)).thenReturn(null); // when - ContainerRequestContext testContainerRequest = new BearerTokenKeyContainerRequestTestFake("Bearer " + TEST_API_KEY); - User actual = sut.findUserFromRequest(testContainerRequest); + ContainerRequestContext testContainerRequest = new BearerTokenKeyContainerRequestTestFake(TEST_BEARER_TOKEN); + WrappedForbiddenAuthErrorResponse wrappedForbiddenAuthErrorResponse = assertThrows(WrappedForbiddenAuthErrorResponse.class, () -> sut.findUserFromRequest(testContainerRequest)); //then - assertNull(actual); - + assertEquals(BundleUtil.getStringFromBundle("bearerTokenAuthMechanism.errors.tokenValidatedButNoRegisteredUser"), wrappedForbiddenAuthErrorResponse.getMessage()); } } diff --git a/src/test/java/edu/harvard/iq/dataverse/api/auth/SignedUrlAuthMechanismTest.java b/src/test/java/edu/harvard/iq/dataverse/api/auth/SignedUrlAuthMechanismTest.java index 74db6e544da..6fd7d2e1d8e 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/auth/SignedUrlAuthMechanismTest.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/auth/SignedUrlAuthMechanismTest.java @@ -65,9 +65,9 @@ public void testFindUserFromRequest_SignedUrlTokenProvided_UserExists_InvalidSig sut.authSvc = authenticationServiceBeanStub; ContainerRequestContext testContainerRequest = new SignedUrlContainerRequestTestFake(TEST_SIGNED_URL_TOKEN, TEST_SIGNED_URL_USER_ID); - WrappedAuthErrorResponse wrappedAuthErrorResponse = assertThrows(WrappedAuthErrorResponse.class, () -> sut.findUserFromRequest(testContainerRequest)); + 
WrappedUnauthorizedAuthErrorResponse wrappedUnauthorizedAuthErrorResponse = assertThrows(WrappedUnauthorizedAuthErrorResponse.class, () -> sut.findUserFromRequest(testContainerRequest)); - assertEquals(RESPONSE_MESSAGE_BAD_SIGNED_URL, wrappedAuthErrorResponse.getMessage()); + assertEquals(RESPONSE_MESSAGE_BAD_SIGNED_URL, wrappedUnauthorizedAuthErrorResponse.getMessage()); } @Test @@ -79,9 +79,9 @@ public void testFindUserFromRequest_SignedUrlTokenProvided_UserExists_UserApiTok sut.authSvc = authenticationServiceBeanStub; ContainerRequestContext testContainerRequest = new SignedUrlContainerRequestTestFake(TEST_SIGNED_URL_TOKEN, TEST_SIGNED_URL_USER_ID); - WrappedAuthErrorResponse wrappedAuthErrorResponse = assertThrows(WrappedAuthErrorResponse.class, () -> sut.findUserFromRequest(testContainerRequest)); + WrappedUnauthorizedAuthErrorResponse wrappedUnauthorizedAuthErrorResponse = assertThrows(WrappedUnauthorizedAuthErrorResponse.class, () -> sut.findUserFromRequest(testContainerRequest)); - assertEquals(RESPONSE_MESSAGE_BAD_SIGNED_URL, wrappedAuthErrorResponse.getMessage()); + assertEquals(RESPONSE_MESSAGE_BAD_SIGNED_URL, wrappedUnauthorizedAuthErrorResponse.getMessage()); } @Test @@ -92,8 +92,8 @@ public void testFindUserFromRequest_SignedUrlTokenProvided_UserDoesNotExistForTh sut.authSvc = authenticationServiceBeanStub; ContainerRequestContext testContainerRequest = new SignedUrlContainerRequestTestFake(TEST_SIGNED_URL_TOKEN, TEST_SIGNED_URL_USER_ID); - WrappedAuthErrorResponse wrappedAuthErrorResponse = assertThrows(WrappedAuthErrorResponse.class, () -> sut.findUserFromRequest(testContainerRequest)); + WrappedUnauthorizedAuthErrorResponse wrappedUnauthorizedAuthErrorResponse = assertThrows(WrappedUnauthorizedAuthErrorResponse.class, () -> sut.findUserFromRequest(testContainerRequest)); - assertEquals(RESPONSE_MESSAGE_BAD_SIGNED_URL, wrappedAuthErrorResponse.getMessage()); + assertEquals(RESPONSE_MESSAGE_BAD_SIGNED_URL, 
wrappedUnauthorizedAuthErrorResponse.getMessage()); } } diff --git a/src/test/java/edu/harvard/iq/dataverse/api/auth/WorkflowKeyAuthMechanismTest.java b/src/test/java/edu/harvard/iq/dataverse/api/auth/WorkflowKeyAuthMechanismTest.java index 3f90fa73fa9..22c3abffe2b 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/auth/WorkflowKeyAuthMechanismTest.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/auth/WorkflowKeyAuthMechanismTest.java @@ -54,8 +54,8 @@ public void testFindUserFromRequest_WorkflowKeyProvided_UserNotAuthenticated() { sut.authSvc = authenticationServiceBeanStub; ContainerRequestContext testContainerRequest = new WorkflowKeyContainerRequestTestFake(TEST_WORKFLOW_KEY); - WrappedAuthErrorResponse wrappedAuthErrorResponse = assertThrows(WrappedAuthErrorResponse.class, () -> sut.findUserFromRequest(testContainerRequest)); + WrappedUnauthorizedAuthErrorResponse wrappedUnauthorizedAuthErrorResponse = assertThrows(WrappedUnauthorizedAuthErrorResponse.class, () -> sut.findUserFromRequest(testContainerRequest)); - assertEquals(RESPONSE_MESSAGE_BAD_WORKFLOW_KEY, wrappedAuthErrorResponse.getMessage()); + assertEquals(RESPONSE_MESSAGE_BAD_WORKFLOW_KEY, wrappedUnauthorizedAuthErrorResponse.getMessage()); } } diff --git a/src/test/java/edu/harvard/iq/dataverse/api/imports/ImportGenericServiceBeanTest.java b/src/test/java/edu/harvard/iq/dataverse/api/imports/ImportGenericServiceBeanTest.java index 44739f3f62a..acf5d970358 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/imports/ImportGenericServiceBeanTest.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/imports/ImportGenericServiceBeanTest.java @@ -1,6 +1,13 @@ package edu.harvard.iq.dataverse.api.imports; import edu.harvard.iq.dataverse.api.dto.DatasetDTO; +import edu.harvard.iq.dataverse.api.dto.DatasetVersionDTO; + +import org.apache.commons.io.FileUtils; +import com.google.gson.Gson; +import java.io.File; +import java.io.IOException; + import org.junit.jupiter.api.Test; import 
org.junit.jupiter.api.extension.ExtendWith; import org.mockito.InjectMocks; @@ -8,6 +15,8 @@ import static org.junit.jupiter.api.Assertions.assertEquals; import static org.junit.jupiter.api.Assertions.assertNull; +import java.nio.charset.StandardCharsets; + @ExtendWith(MockitoExtension.class) public class ImportGenericServiceBeanTest { @@ -15,7 +24,47 @@ public class ImportGenericServiceBeanTest { private ImportGenericServiceBean importGenericService; @Test - public void testReassignIdentifierAsGlobalId() { + void testIdentifierHarvestableWithOtherID() throws IOException { + // "otherIdValue" containing the value : doi:10.7910/DVN/TJCLKP + File file = new File("src/test/resources/json/importGenericWithOtherId.json"); + String text = FileUtils.readFileToString(file, StandardCharsets.UTF_8); + DatasetVersionDTO dto = new Gson().fromJson(text, DatasetVersionDTO.class); + + assertEquals("doi:10.7910/DVN/TJCLKP", importGenericService.selectIdentifier(dto, "https://doi.org/10.7910/DVN/TJCLKP")); + // junk or null + assertEquals("doi:10.7910/DVN/TJCLKP", importGenericService.selectIdentifier(dto, "junk")); + assertEquals("doi:10.7910/DVN/TJCLKP", importGenericService.selectIdentifier(dto, null)); + assertEquals("doi:10.7910/DVN/TJCLKP", importGenericService.selectIdentifier(dto, "http://www.example.com")); + assertEquals("doi:10.7910/DVN/TJCLKP", importGenericService.selectIdentifier(dto, "https://dataverse.org")); + } + + @Test + void testIdentifierHarvestableWithoutOtherID() throws IOException { + // Does not contain data of type "otherIdValue" + File file = new File("src/test/resources/json/importGenericWithoutOtherId.json"); + String text = FileUtils.readFileToString(file, StandardCharsets.UTF_8); + DatasetVersionDTO dto = new Gson().fromJson(text, DatasetVersionDTO.class); + + // non-URL + assertEquals("doi:10.7910/DVN/TJCLKP", importGenericService.selectIdentifier(dto, "doi:10.7910/DVN/TJCLKP")); + assertEquals("hdl:10.7910/DVN/TJCLKP", 
importGenericService.selectIdentifier(dto, "hdl:10.7910/DVN/TJCLKP")); + // HTTPS + assertEquals("https://doi.org/10.7910/DVN/TJCLKP", importGenericService.selectIdentifier(dto, "https://doi.org/10.7910/DVN/TJCLKP")); + assertEquals("https://dx.doi.org/10.7910/DVN/TJCLKP", importGenericService.selectIdentifier(dto, "https://dx.doi.org/10.7910/DVN/TJCLKP")); + assertEquals("https://hdl.handle.net/10.7910/DVN/TJCLKP", importGenericService.selectIdentifier(dto, "https://hdl.handle.net/10.7910/DVN/TJCLKP")); + // HTTP (no S) + assertEquals("http://doi.org/10.7910/DVN/TJCLKP", importGenericService.selectIdentifier(dto, "http://doi.org/10.7910/DVN/TJCLKP")); + assertEquals("http://dx.doi.org/10.7910/DVN/TJCLKP", importGenericService.selectIdentifier(dto, "http://dx.doi.org/10.7910/DVN/TJCLKP")); + assertEquals("http://hdl.handle.net/10.7910/DVN/TJCLKP", importGenericService.selectIdentifier(dto, "http://hdl.handle.net/10.7910/DVN/TJCLKP")); + // junk or null + assertNull(importGenericService.selectIdentifier(dto, "junk")); + assertNull(importGenericService.selectIdentifier(dto, null)); + assertNull(importGenericService.selectIdentifier(dto, "http://www.example.com")); + assertNull(importGenericService.selectIdentifier(dto, "https://dataverse.org")); + } + + @Test + void testReassignIdentifierAsGlobalId() { // non-URL assertEquals("doi:10.7910/DVN/TJCLKP", importGenericService.reassignIdentifierAsGlobalId("doi:10.7910/DVN/TJCLKP", new DatasetDTO())); assertEquals("hdl:10.7910/DVN/TJCLKP", importGenericService.reassignIdentifierAsGlobalId("hdl:10.7910/DVN/TJCLKP", new DatasetDTO())); @@ -29,6 +78,8 @@ public void testReassignIdentifierAsGlobalId() { assertEquals("hdl:10.7910/DVN/TJCLKP", importGenericService.reassignIdentifierAsGlobalId("http://hdl.handle.net/10.7910/DVN/TJCLKP", new DatasetDTO())); // junk assertNull(importGenericService.reassignIdentifierAsGlobalId("junk", new DatasetDTO())); + 
assertNull(importGenericService.reassignIdentifierAsGlobalId("http://www.example.com", new DatasetDTO())); + assertNull(importGenericService.reassignIdentifierAsGlobalId("https://dataverse.org", new DatasetDTO())); } } diff --git a/src/test/java/edu/harvard/iq/dataverse/authorization/AuthenticationServiceBeanTest.java b/src/test/java/edu/harvard/iq/dataverse/authorization/AuthenticationServiceBeanTest.java new file mode 100644 index 00000000000..56ac4eefb3d --- /dev/null +++ b/src/test/java/edu/harvard/iq/dataverse/authorization/AuthenticationServiceBeanTest.java @@ -0,0 +1,152 @@ +package edu.harvard.iq.dataverse.authorization; + +import com.nimbusds.oauth2.sdk.ParseException; +import com.nimbusds.oauth2.sdk.token.BearerAccessToken; +import com.nimbusds.openid.connect.sdk.claims.UserInfo; +import edu.harvard.iq.dataverse.authorization.exceptions.AuthorizationException; +import edu.harvard.iq.dataverse.authorization.providers.oauth2.OAuth2Exception; +import edu.harvard.iq.dataverse.authorization.providers.oauth2.OAuth2UserRecord; +import edu.harvard.iq.dataverse.authorization.providers.oauth2.oidc.OIDCAuthProvider; +import edu.harvard.iq.dataverse.authorization.users.AuthenticatedUser; +import edu.harvard.iq.dataverse.authorization.users.User; +import edu.harvard.iq.dataverse.util.BundleUtil; +import jakarta.persistence.EntityManager; +import jakarta.persistence.NoResultException; +import jakarta.persistence.TypedQuery; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.mockito.Mockito; + +import java.io.IOException; +import java.util.Map; +import java.util.Optional; + +import static org.junit.jupiter.api.Assertions.*; + +public class AuthenticationServiceBeanTest { + + private AuthenticationServiceBean sut; + private static final String TEST_BEARER_TOKEN = "Bearer test"; + + @BeforeEach + public void setUp() { + sut = new AuthenticationServiceBean(); + sut.authProvidersRegistrationService = 
Mockito.mock(AuthenticationProvidersRegistrationServiceBean.class); + sut.em = Mockito.mock(EntityManager.class); + } + + @Test + void testLookupUserByOIDCBearerToken_no_OIDCProvider() { + // Given no OIDC providers are configured + Mockito.when(sut.authProvidersRegistrationService.getAuthenticationProvidersMap()).thenReturn(Map.of()); + + // When invoking lookupUserByOIDCBearerToken + AuthorizationException exception = assertThrows(AuthorizationException.class, + () -> sut.lookupUserByOIDCBearerToken(TEST_BEARER_TOKEN)); + + // Then the exception message should indicate no OIDC provider is configured + assertEquals(BundleUtil.getStringFromBundle("authenticationServiceBean.errors.bearerTokenDetectedNoOIDCProviderConfigured"), exception.getMessage()); + } + + @Test + void testLookupUserByOIDCBearerToken_oneProvider_invalidToken_1() throws ParseException, OAuth2Exception, IOException { + // Given a single OIDC provider that cannot find a user + OIDCAuthProvider oidcAuthProviderStub = stubOIDCAuthProvider("OIEDC"); + BearerAccessToken token = BearerAccessToken.parse(TEST_BEARER_TOKEN); + Mockito.when(oidcAuthProviderStub.getUserInfo(token)).thenReturn(Optional.empty()); + + // When invoking lookupUserByOIDCBearerToken + AuthorizationException exception = assertThrows(AuthorizationException.class, + () -> sut.lookupUserByOIDCBearerToken(TEST_BEARER_TOKEN)); + + // Then the exception message should indicate an unauthorized token + assertEquals(BundleUtil.getStringFromBundle("authenticationServiceBean.errors.unauthorizedBearerToken"), exception.getMessage()); + } + + @Test + void testLookupUserByOIDCBearerToken_oneProvider_invalidToken_2() throws ParseException, IOException, OAuth2Exception { + // Given a single OIDC provider that throws an IOException + OIDCAuthProvider oidcAuthProviderStub = stubOIDCAuthProvider("OIEDC"); + BearerAccessToken token = BearerAccessToken.parse(TEST_BEARER_TOKEN); + 
Mockito.when(oidcAuthProviderStub.getUserInfo(token)).thenThrow(IOException.class); + + // When invoking lookupUserByOIDCBearerToken + AuthorizationException exception = assertThrows(AuthorizationException.class, + () -> sut.lookupUserByOIDCBearerToken(TEST_BEARER_TOKEN)); + + // Then the exception message should indicate an unauthorized token + assertEquals(BundleUtil.getStringFromBundle("authenticationServiceBean.errors.unauthorizedBearerToken"), exception.getMessage()); + } + + @Test + void testLookupUserByOIDCBearerToken_oneProvider_validToken() throws ParseException, IOException, AuthorizationException, OAuth2Exception { + // Given a single OIDC provider that returns a valid user identifier + setUpOIDCProviderWhichValidatesToken(); + + // Setting up an authenticated user is found + AuthenticatedUser authenticatedUser = setupAuthenticatedUserQueryWithResult(new AuthenticatedUser()); + + // When invoking lookupUserByOIDCBearerToken + User actualUser = sut.lookupUserByOIDCBearerToken(TEST_BEARER_TOKEN); + + // Then the actual user should match the expected authenticated user + assertEquals(authenticatedUser, actualUser); + } + + @Test + void testLookupUserByOIDCBearerToken_oneProvider_validToken_noAccount() throws ParseException, IOException, AuthorizationException, OAuth2Exception { + // Given a single OIDC provider that returns a valid user identifier + setUpOIDCProviderWhichValidatesToken(); + + // Setting up an authenticated user is not found + setupAuthenticatedUserQueryWithNoResult(); + + // When invoking lookupUserByOIDCBearerToken + User actualUser = sut.lookupUserByOIDCBearerToken(TEST_BEARER_TOKEN); + + // Then no user should be found, and result should be null + assertNull(actualUser); + } + + private AuthenticatedUser setupAuthenticatedUserQueryWithResult(AuthenticatedUser authenticatedUser) { + TypedQuery queryStub = Mockito.mock(TypedQuery.class); + AuthenticatedUserLookup lookupStub = Mockito.mock(AuthenticatedUserLookup.class); + 
Mockito.when(lookupStub.getAuthenticatedUser()).thenReturn(authenticatedUser); + Mockito.when(queryStub.getSingleResult()).thenReturn(lookupStub); + Mockito.when(sut.em.createNamedQuery("AuthenticatedUserLookup.findByAuthPrvID_PersUserId", AuthenticatedUserLookup.class)).thenReturn(queryStub); + return authenticatedUser; + } + + private void setupAuthenticatedUserQueryWithNoResult() { + TypedQuery queryStub = Mockito.mock(TypedQuery.class); + Mockito.when(queryStub.getSingleResult()).thenThrow(new NoResultException()); + Mockito.when(sut.em.createNamedQuery("AuthenticatedUserLookup.findByAuthPrvID_PersUserId", AuthenticatedUserLookup.class)).thenReturn(queryStub); + } + + private void setUpOIDCProviderWhichValidatesToken() throws ParseException, IOException, OAuth2Exception { + OIDCAuthProvider oidcAuthProviderStub = stubOIDCAuthProvider("OIDC"); + + BearerAccessToken token = BearerAccessToken.parse(TEST_BEARER_TOKEN); + + // Stub the UserInfo returned by the provider + UserInfo userInfoStub = Mockito.mock(UserInfo.class); + Mockito.when(oidcAuthProviderStub.getUserInfo(token)).thenReturn(Optional.of(userInfoStub)); + + // Stub OAuth2UserRecord and its associated UserRecordIdentifier + OAuth2UserRecord oAuth2UserRecordStub = Mockito.mock(OAuth2UserRecord.class); + UserRecordIdentifier userRecordIdentifierStub = Mockito.mock(UserRecordIdentifier.class); + Mockito.when(userRecordIdentifierStub.getUserIdInRepo()).thenReturn("testUserId"); + Mockito.when(userRecordIdentifierStub.getUserRepoId()).thenReturn("testRepoId"); + Mockito.when(oAuth2UserRecordStub.getUserRecordIdentifier()).thenReturn(userRecordIdentifierStub); + + // Stub the OIDCAuthProvider to return OAuth2UserRecord + Mockito.when(oidcAuthProviderStub.getUserRecord(userInfoStub)).thenReturn(oAuth2UserRecordStub); + } + + private OIDCAuthProvider stubOIDCAuthProvider(String providerID) { + OIDCAuthProvider oidcAuthProviderStub = Mockito.mock(OIDCAuthProvider.class); + 
Mockito.when(oidcAuthProviderStub.getId()).thenReturn(providerID); + Mockito.when(sut.authProvidersRegistrationService.getAuthenticationProvidersMap()).thenReturn(Map.of(providerID, oidcAuthProviderStub)); + return oidcAuthProviderStub; + } +} diff --git a/src/test/java/edu/harvard/iq/dataverse/authorization/providers/oauth2/oidc/OIDCAuthenticationProviderFactoryIT.java b/src/test/java/edu/harvard/iq/dataverse/authorization/providers/oauth2/oidc/OIDCAuthenticationProviderFactoryIT.java index ee6823ef98a..58b792691b9 100644 --- a/src/test/java/edu/harvard/iq/dataverse/authorization/providers/oauth2/oidc/OIDCAuthenticationProviderFactoryIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/authorization/providers/oauth2/oidc/OIDCAuthenticationProviderFactoryIT.java @@ -7,7 +7,6 @@ import edu.harvard.iq.dataverse.api.auth.BearerTokenAuthMechanism; import edu.harvard.iq.dataverse.api.auth.doubles.BearerTokenKeyContainerRequestTestFake; import edu.harvard.iq.dataverse.authorization.AuthenticationServiceBean; -import edu.harvard.iq.dataverse.authorization.UserRecordIdentifier; import edu.harvard.iq.dataverse.authorization.providers.oauth2.OAuth2Exception; import edu.harvard.iq.dataverse.authorization.providers.oauth2.OAuth2UserRecord; import edu.harvard.iq.dataverse.authorization.users.AuthenticatedUser; @@ -38,16 +37,12 @@ import java.util.Map; import java.util.Optional; -import java.util.Set; import java.util.regex.Pattern; import java.util.stream.Collectors; import static edu.harvard.iq.dataverse.authorization.providers.oauth2.oidc.OIDCAuthenticationProviderFactoryIT.clientId; import static edu.harvard.iq.dataverse.authorization.providers.oauth2.oidc.OIDCAuthenticationProviderFactoryIT.clientSecret; -import static org.junit.jupiter.api.Assertions.assertEquals; -import static org.junit.jupiter.api.Assertions.assertNotNull; -import static org.junit.jupiter.api.Assertions.assertThrows; -import static org.junit.jupiter.api.Assertions.assertTrue; +import static 
org.junit.jupiter.api.Assertions.*; import static org.junit.jupiter.api.Assumptions.assumeFalse; import static org.junit.jupiter.api.Assumptions.assumeTrue; import static org.mockito.Mockito.when; @@ -143,7 +138,7 @@ void testCreateProvider() throws Exception { /** * This test covers using an OIDC provider as authorization party when accessing the Dataverse API with a - * Bearer Token. See {@link BearerTokenAuthMechanism}. It needs to mock the auth services to avoid adding + * Bearer Token. See {@link BearerTokenAuthMechanism}. It needs to mock the auth service to avoid adding * more dependencies. */ @Test @@ -158,19 +153,15 @@ void testApiBearerAuth() throws Exception { String accessToken = getBearerTokenViaKeycloakAdminClient(); assumeFalse(accessToken == null); - OIDCAuthProvider oidcAuthProvider = getProvider(); // This will also receive the details from the remote Keycloak in the container - UserRecordIdentifier identifier = oidcAuthProvider.getUserIdentifier(new BearerAccessToken(accessToken)).get(); String token = "Bearer " + accessToken; BearerTokenKeyContainerRequestTestFake request = new BearerTokenKeyContainerRequestTestFake(token); AuthenticatedUser user = new MockAuthenticatedUser(); // setup mocks (we don't want or need a database here) - when(authService.getAuthenticationProviderIdsOfType(OIDCAuthProvider.class)).thenReturn(Set.of(oidcAuthProvider.getId())); - when(authService.getAuthenticationProvider(oidcAuthProvider.getId())).thenReturn(oidcAuthProvider); - when(authService.lookupUser(identifier)).thenReturn(user); + when(authService.lookupUserByOIDCBearerToken(token)).thenReturn(user); when(userService.updateLastApiUseTime(user)).thenReturn(user); - + // when (let's do this again, but now with the actual subject under test!) 
User lookedUpUser = bearerTokenAuthMechanism.findUserFromRequest(request); diff --git a/src/test/java/edu/harvard/iq/dataverse/authorization/users/AuthenticatedUserTest.java b/src/test/java/edu/harvard/iq/dataverse/authorization/users/AuthenticatedUserTest.java index 7bd802b3b02..bd3bfcc1a60 100644 --- a/src/test/java/edu/harvard/iq/dataverse/authorization/users/AuthenticatedUserTest.java +++ b/src/test/java/edu/harvard/iq/dataverse/authorization/users/AuthenticatedUserTest.java @@ -378,20 +378,20 @@ public void testMutingInJson() { public void testHasEmailMuted() { testUser.setMutedEmails(mutedTypes); System.out.println("hasEmailMuted"); - assertEquals(true, testUser.hasEmailMuted(Type.ASSIGNROLE)); - assertEquals(true, testUser.hasEmailMuted(Type.REVOKEROLE)); - assertEquals(false, testUser.hasEmailMuted(Type.CREATEDV)); - assertEquals(false, testUser.hasEmailMuted(null)); + assertTrue(testUser.hasEmailMuted(Type.ASSIGNROLE)); + assertTrue(testUser.hasEmailMuted(Type.REVOKEROLE)); + assertFalse(testUser.hasEmailMuted(Type.CREATEDV)); + assertFalse(testUser.hasEmailMuted(null)); } @Test public void testHasNotificationsMutedMuted() { testUser.setMutedNotifications(mutedTypes); System.out.println("hasNotificationMuted"); - assertEquals(true, testUser.hasNotificationMuted(Type.ASSIGNROLE)); - assertEquals(true, testUser.hasNotificationMuted(Type.REVOKEROLE)); - assertEquals(false, testUser.hasNotificationMuted(Type.CREATEDV)); - assertEquals(false, testUser.hasNotificationMuted(null)); + assertTrue(testUser.hasNotificationMuted(Type.ASSIGNROLE)); + assertTrue(testUser.hasNotificationMuted(Type.REVOKEROLE)); + assertFalse(testUser.hasNotificationMuted(Type.CREATEDV)); + assertFalse(testUser.hasNotificationMuted(null)); } @Test diff --git a/src/test/java/edu/harvard/iq/dataverse/authorization/users/PrivateUrlUserTest.java b/src/test/java/edu/harvard/iq/dataverse/authorization/users/PrivateUrlUserTest.java index a8dda2f6a7e..d3c5cdca470 100644 --- 
a/src/test/java/edu/harvard/iq/dataverse/authorization/users/PrivateUrlUserTest.java +++ b/src/test/java/edu/harvard/iq/dataverse/authorization/users/PrivateUrlUserTest.java @@ -38,7 +38,7 @@ void getIdentifier() { @Test void testGetDisplayInfo() { RoleAssigneeDisplayInfo displayInfo = privateUrlUser.getDisplayInfo(); - assertEquals("Private URL Enabled", displayInfo.getTitle()); + assertEquals("Preview URL Enabled", displayInfo.getTitle()); assertNull(displayInfo.getEmailAddress()); } } diff --git a/src/test/java/edu/harvard/iq/dataverse/dataaccess/FileAccessIOTest.java b/src/test/java/edu/harvard/iq/dataverse/dataaccess/FileAccessIOTest.java index 552d76b74e8..ea5cc4b66a8 100644 --- a/src/test/java/edu/harvard/iq/dataverse/dataaccess/FileAccessIOTest.java +++ b/src/test/java/edu/harvard/iq/dataverse/dataaccess/FileAccessIOTest.java @@ -89,20 +89,20 @@ public void tearDownClass() throws IOException { */ @Test public void testOpen() throws IOException { - assertEquals(false, datasetAccess.canRead()); - assertEquals(false, datasetAccess.canWrite()); + assertFalse(datasetAccess.canRead()); + assertFalse(datasetAccess.canWrite()); datasetAccess.open(DataAccessOption.READ_ACCESS); - assertEquals(true, datasetAccess.canRead()); - assertEquals(false, datasetAccess.canWrite()); + assertTrue(datasetAccess.canRead()); + assertFalse(datasetAccess.canWrite()); datasetAccess.open(DataAccessOption.WRITE_ACCESS); - assertEquals(false, datasetAccess.canRead()); - assertEquals(true, datasetAccess.canWrite()); + assertFalse(datasetAccess.canRead()); + assertTrue(datasetAccess.canWrite()); dataFileAccess.open(DataAccessOption.READ_ACCESS); - assertEquals(true, dataFileAccess.canRead()); - assertEquals(false, dataFileAccess.canWrite()); + assertTrue(dataFileAccess.canRead()); + assertFalse(dataFileAccess.canWrite()); } /** @@ -133,7 +133,7 @@ public void testOpenAuxChannel() throws IOException { */ @Test public void testIsAuxObjectCached() throws IOException { - assertEquals(true, 
datasetAccess.isAuxObjectCached("Dataset")); + assertTrue(datasetAccess.isAuxObjectCached("Dataset")); } /** diff --git a/src/test/java/edu/harvard/iq/dataverse/dataaccess/StorageIOTest.java b/src/test/java/edu/harvard/iq/dataverse/dataaccess/StorageIOTest.java index 84a241b90f6..3aab66dc63b 100644 --- a/src/test/java/edu/harvard/iq/dataverse/dataaccess/StorageIOTest.java +++ b/src/test/java/edu/harvard/iq/dataverse/dataaccess/StorageIOTest.java @@ -189,48 +189,48 @@ public void testResponseHeaders() { @Test public void testFileLocation() { - assertEquals(true, instance.isLocalFile()); + assertTrue(instance.isLocalFile()); instance.setIsLocalFile(false); - assertEquals(false, instance.isLocalFile()); + assertFalse(instance.isLocalFile()); - assertEquals(false, instance.isRemoteAccess()); + assertFalse(instance.isRemoteAccess()); instance.setIsRemoteAccess(true); - assertEquals(true, instance.isRemoteAccess()); + assertTrue(instance.isRemoteAccess()); } @Test public void testHttpAccess() { - assertEquals(false, instance.isHttpAccess()); + assertFalse(instance.isHttpAccess()); instance.setIsHttpAccess(true); - assertEquals(true, instance.isHttpAccess()); + assertTrue(instance.isHttpAccess()); }*/ @Test public void testDownloadSupported() { - assertEquals(true, instance.isDownloadSupported()); + assertTrue(instance.isDownloadSupported()); instance.setIsDownloadSupported(false); - assertEquals(false, instance.isDownloadSupported()); + assertFalse(instance.isDownloadSupported()); } @Test public void testSubsetSupported() { - assertEquals(false, instance.isSubsetSupported()); + assertFalse(instance.isSubsetSupported()); instance.setIsSubsetSupported(true); - assertEquals(true, instance.isSubsetSupported()); + assertTrue(instance.isSubsetSupported()); } @Test public void testZippedStream() { - assertEquals(false, instance.isZippedStream()); + assertFalse(instance.isZippedStream()); instance.setIsZippedStream(true); - assertEquals(true, instance.isZippedStream()); + 
assertTrue(instance.isZippedStream()); } @Test public void testNoVarHeader() { - assertEquals(false, instance.noVarHeader()); + assertFalse(instance.noVarHeader()); instance.setNoVarHeader(true); - assertEquals(true, instance.noVarHeader()); + assertTrue(instance.noVarHeader()); } @Test diff --git a/src/test/java/edu/harvard/iq/dataverse/dataaccess/SwiftAccessIOTest.java b/src/test/java/edu/harvard/iq/dataverse/dataaccess/SwiftAccessIOTest.java index 942e4329384..27e0ac758e0 100644 --- a/src/test/java/edu/harvard/iq/dataverse/dataaccess/SwiftAccessIOTest.java +++ b/src/test/java/edu/harvard/iq/dataverse/dataaccess/SwiftAccessIOTest.java @@ -51,14 +51,14 @@ public void setUpClass() throws IOException { */ @Test public void testPerms() throws IOException { - assertEquals(false, datasetAccess.canRead()); - assertEquals(false, datasetAccess.canWrite()); + assertFalse(datasetAccess.canRead()); + assertFalse(datasetAccess.canWrite()); } @Test public void testIsExpiryExpired() { long currentTime = 1502221467; - assertEquals(false, swiftAccess.isExpiryExpired(60, 1502281, currentTime)); + assertFalse(swiftAccess.isExpiryExpired(60, 1502281, currentTime)); } @Test diff --git a/src/test/java/edu/harvard/iq/dataverse/datacapturemodule/DataCaptureModuleUtilTest.java b/src/test/java/edu/harvard/iq/dataverse/datacapturemodule/DataCaptureModuleUtilTest.java index eb19f22df63..148d34dc5f7 100644 --- a/src/test/java/edu/harvard/iq/dataverse/datacapturemodule/DataCaptureModuleUtilTest.java +++ b/src/test/java/edu/harvard/iq/dataverse/datacapturemodule/DataCaptureModuleUtilTest.java @@ -18,6 +18,7 @@ import org.apache.http.message.BasicStatusLine; import static org.junit.jupiter.api.Assertions.assertEquals; import static org.junit.jupiter.api.Assertions.assertTrue; +import static org.junit.jupiter.api.Assertions.assertFalse; import org.junit.jupiter.api.Test; public class DataCaptureModuleUtilTest { @@ -25,13 +26,13 @@ public class DataCaptureModuleUtilTest { @Test public void 
testRsyncSupportEnabled() { System.out.println("rsyncSupportEnabled"); - assertEquals(false, DataCaptureModuleUtil.rsyncSupportEnabled(null)); - assertEquals(true, DataCaptureModuleUtil.rsyncSupportEnabled("dcm/rsync+ssh")); + assertFalse(DataCaptureModuleUtil.rsyncSupportEnabled(null)); + assertTrue(DataCaptureModuleUtil.rsyncSupportEnabled("dcm/rsync+ssh")); // Comma sepratated lists of upload methods are supported. - assertEquals(false, DataCaptureModuleUtil.rsyncSupportEnabled("native/http:dcm/rsync+ssh")); - assertEquals(true, DataCaptureModuleUtil.rsyncSupportEnabled("native/http,dcm/rsync+ssh")); - assertEquals(false, DataCaptureModuleUtil.rsyncSupportEnabled("native/http")); - assertEquals(false, DataCaptureModuleUtil.rsyncSupportEnabled("junk")); + assertFalse(DataCaptureModuleUtil.rsyncSupportEnabled("native/http:dcm/rsync+ssh")); + assertTrue(DataCaptureModuleUtil.rsyncSupportEnabled("native/http,dcm/rsync+ssh")); + assertFalse(DataCaptureModuleUtil.rsyncSupportEnabled("native/http")); + assertFalse(DataCaptureModuleUtil.rsyncSupportEnabled("junk")); } @Test diff --git a/src/test/java/edu/harvard/iq/dataverse/dataset/DatasetUtilTest.java b/src/test/java/edu/harvard/iq/dataverse/dataset/DatasetUtilTest.java index 8eed2a33c5a..2db8851c48a 100644 --- a/src/test/java/edu/harvard/iq/dataverse/dataset/DatasetUtilTest.java +++ b/src/test/java/edu/harvard/iq/dataverse/dataset/DatasetUtilTest.java @@ -70,8 +70,8 @@ public void testGetThumbnailRestricted() { */ @Test public void testDeleteDatasetLogo() { - assertEquals(false, DatasetUtil.deleteDatasetLogo(null)); - assertEquals(false, DatasetUtil.deleteDatasetLogo(new Dataset())); + assertFalse(DatasetUtil.deleteDatasetLogo(null)); + assertFalse(DatasetUtil.deleteDatasetLogo(new Dataset())); } /** @@ -106,7 +106,7 @@ public void testGetThumbnailAsInputStream() { @Test public void testIsDatasetLogoPresent() { Dataset dataset = MocksFactory.makeDataset(); - assertEquals(false, 
DatasetUtil.isDatasetLogoPresent(dataset, ImageThumbConverter.DEFAULT_CARDIMAGE_SIZE)); + assertFalse(DatasetUtil.isDatasetLogoPresent(dataset, ImageThumbConverter.DEFAULT_CARDIMAGE_SIZE)); } @Test diff --git a/src/test/java/edu/harvard/iq/dataverse/datavariable/VariableMetadataDDIParserTest.java b/src/test/java/edu/harvard/iq/dataverse/datavariable/VariableMetadataDDIParserTest.java index bfb9134cfca..475b4c1cff5 100644 --- a/src/test/java/edu/harvard/iq/dataverse/datavariable/VariableMetadataDDIParserTest.java +++ b/src/test/java/edu/harvard/iq/dataverse/datavariable/VariableMetadataDDIParserTest.java @@ -66,16 +66,16 @@ void variableTest(Map vmMap) { assertEquals(vm.getLiteralquestion(), "This is a literal question."); assertEquals(vm.getNotes(), "These are notes.\nA lot of them."); assertEquals(vm.getUniverse(),"Our universe"); - assertEquals(false, vm.isIsweightvar()); - assertEquals(false, vm.isWeighted()); + assertFalse(vm.isIsweightvar()); + assertFalse(vm.isWeighted()); testCategoriesVar1(vm); vm = vmMap.get(1169L); assertNotNull(vm); - assertEquals(false, vm.isIsweightvar()); - assertEquals(true, vm.isWeighted()); + assertFalse(vm.isIsweightvar()); + assertTrue(vm.isWeighted()); assertEquals(vm.getLabel(), "age_rollup" ); assertEquals(vm.getInterviewinstruction(), null); @@ -90,8 +90,8 @@ void variableTest(Map vmMap) { vm = vmMap.get(1168L); assertNotNull(vm); - assertEquals(true, vm.isIsweightvar()); - assertEquals(false, vm.isWeighted()); + assertTrue(vm.isIsweightvar()); + assertFalse(vm.isWeighted()); assertEquals(vm.getLabel(), "weight" ); assertEquals(vm.getInterviewinstruction(), null); assertEquals(vm.getLiteralquestion(), "Literal question for weight"); diff --git a/src/test/java/edu/harvard/iq/dataverse/engine/command/impl/CreatePrivateUrlCommandTest.java b/src/test/java/edu/harvard/iq/dataverse/engine/command/impl/CreatePrivateUrlCommandTest.java index 508eac46cb4..0ba29f74774 100644 --- 
a/src/test/java/edu/harvard/iq/dataverse/engine/command/impl/CreatePrivateUrlCommandTest.java +++ b/src/test/java/edu/harvard/iq/dataverse/engine/command/impl/CreatePrivateUrlCommandTest.java @@ -18,7 +18,9 @@ import edu.harvard.iq.dataverse.search.IndexServiceBean; import edu.harvard.iq.dataverse.search.SolrIndexServiceBean; import edu.harvard.iq.dataverse.util.SystemConfig; +import java.sql.Timestamp; import java.util.ArrayList; +import java.util.Date; import java.util.List; import java.util.concurrent.Future; @@ -171,9 +173,9 @@ public void testCreatePrivateUrlSuccessfully() throws CommandException { assertEquals(expectedUser.getIdentifier(), privateUrl.getRoleAssignment().getAssigneeIdentifier()); assertEquals(expectedUser.isSuperuser(), false); assertEquals(expectedUser.isAuthenticated(), false); - assertEquals(expectedUser.getDisplayInfo().getTitle(), "Private URL Enabled"); + assertEquals(expectedUser.getDisplayInfo().getTitle(), "Preview URL Enabled"); assertNotNull(privateUrl.getToken()); - assertEquals("https://dataverse.example.edu/privateurl.xhtml?token=" + privateUrl.getToken(), privateUrl.getLink()); + assertEquals("https://dataverse.example.edu/previewurl.xhtml?token=" + privateUrl.getToken(), privateUrl.getLink()); } @Test @@ -188,22 +190,24 @@ public void testCreateAnonymizedAccessPrivateUrlSuccessfully() throws CommandExc assertEquals(expectedUser.getIdentifier(), privateUrl.getRoleAssignment().getAssigneeIdentifier()); assertEquals(expectedUser.isSuperuser(), false); assertEquals(expectedUser.isAuthenticated(), false); - assertEquals(expectedUser.getDisplayInfo().getTitle(), "Private URL Enabled"); + assertEquals(expectedUser.getDisplayInfo().getTitle(), "Preview URL Enabled"); assertNotNull(privateUrl.getToken()); assertTrue(privateUrl.isAnonymizedAccess()); - assertEquals("https://dataverse.example.edu/privateurl.xhtml?token=" + privateUrl.getToken(), privateUrl.getLink()); + assertEquals("https://dataverse.example.edu/previewurl.xhtml?token=" 
+ privateUrl.getToken(), privateUrl.getLink()); } @Test - public void testAttemptCreateAnonymizedAccessPrivateUrlOnReleased() { + public void testAttemptCreateAnonymizedAccessPrivateUrlOnReleased() throws CommandException { dataset = new Dataset(); List versions = new ArrayList<>(); + dataset.setPublicationDate(new Timestamp(new Date().getTime())); DatasetVersion datasetVersion = new DatasetVersion(); datasetVersion.setVersionState(DatasetVersion.VersionState.RELEASED); DatasetVersion datasetVersion2 = new DatasetVersion(); - - versions.add(datasetVersion); + datasetVersion2.setVersionState(DatasetVersion.VersionState.DRAFT); + versions.add(datasetVersion2); + versions.add(datasetVersion); dataset.setVersions(versions); dataset.setId(versionIsReleased); PrivateUrl privateUrl = null; @@ -211,6 +215,7 @@ public void testAttemptCreateAnonymizedAccessPrivateUrlOnReleased() { privateUrl = testEngine.submit(new CreatePrivateUrlCommand(null, dataset, true)); assertTrue(false); } catch (CommandException ex) { + } assertNull(privateUrl); } diff --git a/src/test/java/edu/harvard/iq/dataverse/engine/command/impl/RegisterOIDCUserCommandTest.java b/src/test/java/edu/harvard/iq/dataverse/engine/command/impl/RegisterOIDCUserCommandTest.java new file mode 100644 index 00000000000..3f6b3b0f393 --- /dev/null +++ b/src/test/java/edu/harvard/iq/dataverse/engine/command/impl/RegisterOIDCUserCommandTest.java @@ -0,0 +1,371 @@ +package edu.harvard.iq.dataverse.engine.command.impl; + +import edu.harvard.iq.dataverse.api.dto.UserDTO; +import edu.harvard.iq.dataverse.authorization.AuthenticatedUserDisplayInfo; +import edu.harvard.iq.dataverse.authorization.AuthenticationServiceBean; +import edu.harvard.iq.dataverse.authorization.UserRecordIdentifier; +import edu.harvard.iq.dataverse.authorization.exceptions.AuthorizationException; +import edu.harvard.iq.dataverse.authorization.providers.oauth2.OAuth2UserRecord; +import edu.harvard.iq.dataverse.authorization.users.AuthenticatedUser; +import 
edu.harvard.iq.dataverse.engine.command.CommandContext; +import edu.harvard.iq.dataverse.engine.command.exception.CommandException; +import edu.harvard.iq.dataverse.engine.command.exception.IllegalCommandException; +import edu.harvard.iq.dataverse.engine.command.exception.InvalidFieldsCommandException; +import edu.harvard.iq.dataverse.engine.command.exception.PermissionException; +import edu.harvard.iq.dataverse.settings.JvmSettings; +import edu.harvard.iq.dataverse.util.BundleUtil; +import edu.harvard.iq.dataverse.util.testing.JvmSetting; +import edu.harvard.iq.dataverse.util.testing.LocalJvmSettings; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.params.ParameterizedTest; +import org.junit.jupiter.params.provider.ValueSource; +import org.mockito.InjectMocks; +import org.mockito.Mock; +import org.mockito.MockitoAnnotations; + +import java.util.List; + +import static edu.harvard.iq.dataverse.mocks.MocksFactory.makeRequest; +import static org.assertj.core.api.Assertions.assertThat; +import static org.assertj.core.api.Assertions.assertThatThrownBy; +import static org.junit.jupiter.api.Assertions.assertDoesNotThrow; +import static org.mockito.Mockito.*; + +@LocalJvmSettings +class RegisterOIDCUserCommandTest { + + private static final String TEST_BEARER_TOKEN = "Bearer test"; + private static final String TEST_USERNAME = "username"; + private static final AuthenticatedUserDisplayInfo TEST_MISSING_CLAIMS_DISPLAY_INFO = new AuthenticatedUserDisplayInfo( + null, + null, + null, + "", + "" + ); + private static final AuthenticatedUserDisplayInfo TEST_VALID_DISPLAY_INFO = new AuthenticatedUserDisplayInfo( + "FirstName", + "LastName", + "user@example.com", + "", + "" + ); + + private UserDTO testUserDTO; + + @Mock + private CommandContext contextStub; + + @Mock + private AuthenticationServiceBean authServiceStub; + + @InjectMocks + private RegisterOIDCUserCommand sut; + + private OAuth2UserRecord oAuth2UserRecordStub; 
+ private UserRecordIdentifier userRecordIdentifierMock; + private AuthenticatedUser existingTestUser; + + @BeforeEach + void setUp() { + MockitoAnnotations.openMocks(this); + setUpDefaultUserDTO(); + + userRecordIdentifierMock = mock(UserRecordIdentifier.class); + oAuth2UserRecordStub = mock(OAuth2UserRecord.class); + existingTestUser = new AuthenticatedUser(); + + when(oAuth2UserRecordStub.getUserRecordIdentifier()).thenReturn(userRecordIdentifierMock); + when(contextStub.authentication()).thenReturn(authServiceStub); + + sut = new RegisterOIDCUserCommand(makeRequest(), TEST_BEARER_TOKEN, testUserDTO); + } + + private void setUpDefaultUserDTO() { + testUserDTO = new UserDTO(); + testUserDTO.setTermsAccepted(true); + testUserDTO.setFirstName("FirstName"); + testUserDTO.setLastName("LastName"); + testUserDTO.setUsername("username"); + testUserDTO.setEmailAddress("user@example.com"); + } + + @Test + public void execute_completedUserDTOWithUnacceptedTerms_missingClaimsInProvider_provideMissingClaimsFeatureFlagDisabled() throws AuthorizationException { + testUserDTO.setTermsAccepted(false); + testUserDTO.setEmailAddress(null); + testUserDTO.setUsername(null); + testUserDTO.setFirstName(null); + testUserDTO.setLastName(null); + + when(authServiceStub.getAuthenticatedUserByEmail(testUserDTO.getEmailAddress())).thenReturn(null); + when(authServiceStub.getAuthenticatedUser(testUserDTO.getUsername())).thenReturn(null); + when(authServiceStub.verifyOIDCBearerTokenAndGetOAuth2UserRecord(TEST_BEARER_TOKEN)).thenReturn(oAuth2UserRecordStub); + + when(oAuth2UserRecordStub.getUsername()).thenReturn(null); + when(oAuth2UserRecordStub.getDisplayInfo()).thenReturn(TEST_MISSING_CLAIMS_DISPLAY_INFO); + + assertThatThrownBy(() -> sut.execute(contextStub)) + .isInstanceOf(InvalidFieldsCommandException.class) + .satisfies(exception -> { + InvalidFieldsCommandException ex = (InvalidFieldsCommandException) exception; + assertThat(ex.getFieldErrors()) + .containsEntry("termsAccepted", 
BundleUtil.getStringFromBundle("registerOidcUserCommand.errors.userShouldAcceptTerms")) + .containsEntry("emailAddress", BundleUtil.getStringFromBundle("registerOidcUserCommand.errors.provideMissingClaimsDisabled.fieldRequired", List.of("emailAddress"))) + .containsEntry("username", BundleUtil.getStringFromBundle("registerOidcUserCommand.errors.provideMissingClaimsDisabled.fieldRequired", List.of("username"))) + .containsEntry("firstName", BundleUtil.getStringFromBundle("registerOidcUserCommand.errors.provideMissingClaimsDisabled.fieldRequired", List.of("firstName"))) + .containsEntry("lastName", BundleUtil.getStringFromBundle("registerOidcUserCommand.errors.provideMissingClaimsDisabled.fieldRequired", List.of("lastName"))); + }); + } + + @Test + @JvmSetting(key = JvmSettings.FEATURE_FLAG, value = "true", varArgs = "api-bearer-auth-provide-missing-claims") + public void execute_uncompletedUserDTOWithUnacceptedTerms_missingClaimsInProvider_provideMissingClaimsFeatureFlagEnabled() throws AuthorizationException { + testUserDTO.setTermsAccepted(false); + testUserDTO.setEmailAddress(null); + testUserDTO.setUsername(null); + testUserDTO.setFirstName(null); + testUserDTO.setLastName(null); + + when(oAuth2UserRecordStub.getUsername()).thenReturn(null); + when(oAuth2UserRecordStub.getDisplayInfo()).thenReturn(TEST_MISSING_CLAIMS_DISPLAY_INFO); + + when(authServiceStub.getAuthenticatedUserByEmail(testUserDTO.getEmailAddress())).thenReturn(null); + when(authServiceStub.getAuthenticatedUser(testUserDTO.getUsername())).thenReturn(null); + when(authServiceStub.verifyOIDCBearerTokenAndGetOAuth2UserRecord(TEST_BEARER_TOKEN)).thenReturn(oAuth2UserRecordStub); + + assertThatThrownBy(() -> sut.execute(contextStub)) + .isInstanceOf(InvalidFieldsCommandException.class) + .satisfies(exception -> { + InvalidFieldsCommandException ex = (InvalidFieldsCommandException) exception; + assertThat(ex.getFieldErrors()) + .containsEntry("termsAccepted", 
BundleUtil.getStringFromBundle("registerOidcUserCommand.errors.userShouldAcceptTerms")) + .containsEntry("emailAddress", BundleUtil.getStringFromBundle("registerOidcUserCommand.errors.provideMissingClaimsEnabled.fieldRequired", List.of("emailAddress"))) + .containsEntry("username", BundleUtil.getStringFromBundle("registerOidcUserCommand.errors.provideMissingClaimsEnabled.fieldRequired", List.of("username"))) + .containsEntry("firstName", BundleUtil.getStringFromBundle("registerOidcUserCommand.errors.provideMissingClaimsEnabled.fieldRequired", List.of("firstName"))) + .containsEntry("lastName", BundleUtil.getStringFromBundle("registerOidcUserCommand.errors.provideMissingClaimsEnabled.fieldRequired", List.of("lastName"))); + }); + } + + @Test + @JvmSetting(key = JvmSettings.FEATURE_FLAG, value = "true", varArgs = "api-bearer-auth-provide-missing-claims") + public void execute_acceptedTerms_unavailableEmailAndUsername_missingClaimsInProvider_provideMissingClaimsFeatureFlagEnabled() throws AuthorizationException { + when(authServiceStub.getAuthenticatedUserByEmail(testUserDTO.getEmailAddress())).thenReturn(existingTestUser); + when(authServiceStub.getAuthenticatedUser(testUserDTO.getUsername())).thenReturn(existingTestUser); + when(authServiceStub.verifyOIDCBearerTokenAndGetOAuth2UserRecord(TEST_BEARER_TOKEN)).thenReturn(oAuth2UserRecordStub); + + when(oAuth2UserRecordStub.getUsername()).thenReturn(null); + when(oAuth2UserRecordStub.getDisplayInfo()).thenReturn(TEST_MISSING_CLAIMS_DISPLAY_INFO); + + assertThatThrownBy(() -> sut.execute(contextStub)) + .isInstanceOf(InvalidFieldsCommandException.class) + .satisfies(exception -> { + InvalidFieldsCommandException ex = (InvalidFieldsCommandException) exception; + assertThat(ex.getFieldErrors()) + .containsEntry("emailAddress", BundleUtil.getStringFromBundle("registerOidcUserCommand.errors.emailAddressInUse")) + .containsEntry("username", BundleUtil.getStringFromBundle("registerOidcUserCommand.errors.usernameInUse")) + 
.doesNotContainKey("termsAccepted"); + }); + } + + @Test + void execute_throwsPermissionException_onAuthorizationException() throws AuthorizationException { + String testAuthorizationExceptionMessage = "Authorization failed"; + when(contextStub.authentication().verifyOIDCBearerTokenAndGetOAuth2UserRecord(TEST_BEARER_TOKEN)) + .thenThrow(new AuthorizationException(testAuthorizationExceptionMessage)); + + assertThatThrownBy(() -> sut.execute(contextStub)) + .isInstanceOf(PermissionException.class) + .hasMessageContaining(testAuthorizationExceptionMessage); + + verify(contextStub.authentication(), times(1)).verifyOIDCBearerTokenAndGetOAuth2UserRecord(TEST_BEARER_TOKEN); + } + + @Test + void execute_throwsIllegalCommandException_ifUserAlreadyRegisteredWithToken() throws AuthorizationException { + when(contextStub.authentication().verifyOIDCBearerTokenAndGetOAuth2UserRecord(TEST_BEARER_TOKEN)) + .thenReturn(oAuth2UserRecordStub); + when(contextStub.authentication().lookupUser(userRecordIdentifierMock)).thenReturn(new AuthenticatedUser()); + + assertThatThrownBy(() -> sut.execute(contextStub)) + .isInstanceOf(IllegalCommandException.class) + .hasMessageContaining(BundleUtil.getStringFromBundle("registerOidcUserCommand.errors.userAlreadyRegisteredWithToken")); + + verify(contextStub.authentication(), times(1)).lookupUser(userRecordIdentifierMock); + } + + @Test + void execute_throwsInvalidFieldsCommandException_ifUserDTOHasClaimsAndProvideMissingClaimsFeatureFlagIsDisabled() throws AuthorizationException { + when(contextStub.authentication().verifyOIDCBearerTokenAndGetOAuth2UserRecord(TEST_BEARER_TOKEN)) + .thenReturn(oAuth2UserRecordStub); + + assertThatThrownBy(() -> sut.execute(contextStub)) + .isInstanceOf(InvalidFieldsCommandException.class) + .satisfies(exception -> { + InvalidFieldsCommandException ex = (InvalidFieldsCommandException) exception; + assertThat(ex.getFieldErrors()) + .containsEntry("username", 
BundleUtil.getStringFromBundle("registerOidcUserCommand.errors.provideMissingClaimsDisabled.unableToSetFieldViaJSON", List.of("username"))) + .containsEntry("emailAddress", BundleUtil.getStringFromBundle("registerOidcUserCommand.errors.provideMissingClaimsDisabled.unableToSetFieldViaJSON", List.of("emailAddress"))) + .containsEntry("firstName", BundleUtil.getStringFromBundle("registerOidcUserCommand.errors.provideMissingClaimsDisabled.unableToSetFieldViaJSON", List.of("firstName"))) + .containsEntry("lastName", BundleUtil.getStringFromBundle("registerOidcUserCommand.errors.provideMissingClaimsDisabled.unableToSetFieldViaJSON", List.of("lastName"))) + .containsEntry("emailAddress", BundleUtil.getStringFromBundle("registerOidcUserCommand.errors.provideMissingClaimsDisabled.unableToSetFieldViaJSON", List.of("emailAddress"))); + }); + } + + @Test + @JvmSetting(key = JvmSettings.FEATURE_FLAG, value = "true", varArgs = "api-bearer-auth-provide-missing-claims") + void execute_happyPath_withoutAffiliationAndPosition_missingClaimsInProvider_provideMissingClaimsFeatureFlagEnabled() throws AuthorizationException, CommandException { + when(authServiceStub.verifyOIDCBearerTokenAndGetOAuth2UserRecord(TEST_BEARER_TOKEN)).thenReturn(oAuth2UserRecordStub); + + when(oAuth2UserRecordStub.getUsername()).thenReturn(null); + when(oAuth2UserRecordStub.getDisplayInfo()).thenReturn(TEST_MISSING_CLAIMS_DISPLAY_INFO); + + sut.execute(contextStub); + + verify(authServiceStub, times(1)).createAuthenticatedUser( + eq(userRecordIdentifierMock), + eq(testUserDTO.getUsername()), + eq(new AuthenticatedUserDisplayInfo( + testUserDTO.getFirstName(), + testUserDTO.getLastName(), + testUserDTO.getEmailAddress(), + "", + "") + ), + eq(true) + ); + } + + @Test + @JvmSetting(key = JvmSettings.FEATURE_FLAG, value = "true", varArgs = "api-bearer-auth-provide-missing-claims") + void execute_happyPath_withAffiliationAndPosition_missingClaimsInProvider_provideMissingClaimsFeatureFlagEnabled() throws 
AuthorizationException, CommandException { + testUserDTO.setPosition("test position"); + testUserDTO.setAffiliation("test affiliation"); + + when(authServiceStub.verifyOIDCBearerTokenAndGetOAuth2UserRecord(TEST_BEARER_TOKEN)).thenReturn(oAuth2UserRecordStub); + + when(oAuth2UserRecordStub.getUsername()).thenReturn(null); + when(oAuth2UserRecordStub.getDisplayInfo()).thenReturn(TEST_MISSING_CLAIMS_DISPLAY_INFO); + + sut.execute(contextStub); + + verify(authServiceStub, times(1)).createAuthenticatedUser( + eq(userRecordIdentifierMock), + eq(testUserDTO.getUsername()), + eq(new AuthenticatedUserDisplayInfo( + testUserDTO.getFirstName(), + testUserDTO.getLastName(), + testUserDTO.getEmailAddress(), + testUserDTO.getAffiliation(), + testUserDTO.getPosition()) + ), + eq(true) + ); + } + + @Test + @JvmSetting(key = JvmSettings.FEATURE_FLAG, value = "true", varArgs = "api-bearer-auth-provide-missing-claims") + void execute_conflictingClaimsInProvider_provideMissingClaimsFeatureFlagEnabled() throws AuthorizationException { + when(authServiceStub.verifyOIDCBearerTokenAndGetOAuth2UserRecord(TEST_BEARER_TOKEN)).thenReturn(oAuth2UserRecordStub); + + when(oAuth2UserRecordStub.getUsername()).thenReturn(TEST_USERNAME); + when(oAuth2UserRecordStub.getDisplayInfo()).thenReturn(TEST_VALID_DISPLAY_INFO); + + testUserDTO.setUsername("conflictingUsername"); + testUserDTO.setFirstName("conflictingFirstName"); + testUserDTO.setLastName("conflictingLastName"); + testUserDTO.setEmailAddress("conflictingemail@example.com"); + + assertThatThrownBy(() -> sut.execute(contextStub)) + .isInstanceOf(InvalidFieldsCommandException.class) + .satisfies(exception -> { + InvalidFieldsCommandException ex = (InvalidFieldsCommandException) exception; + assertThat(ex.getFieldErrors()) + .containsEntry("username", BundleUtil.getStringFromBundle("registerOidcUserCommand.errors.provideMissingClaimsEnabled.fieldAlreadyPresentInProvider", List.of("username"))) + .containsEntry("emailAddress", 
BundleUtil.getStringFromBundle("registerOidcUserCommand.errors.provideMissingClaimsEnabled.fieldAlreadyPresentInProvider", List.of("emailAddress"))) + .containsEntry("firstName", BundleUtil.getStringFromBundle("registerOidcUserCommand.errors.provideMissingClaimsEnabled.fieldAlreadyPresentInProvider", List.of("firstName"))) + .containsEntry("lastName", BundleUtil.getStringFromBundle("registerOidcUserCommand.errors.provideMissingClaimsEnabled.fieldAlreadyPresentInProvider", List.of("lastName"))) + .containsEntry("emailAddress", BundleUtil.getStringFromBundle("registerOidcUserCommand.errors.provideMissingClaimsEnabled.fieldAlreadyPresentInProvider", List.of("emailAddress"))); + }); + } + + @Test + @JvmSetting(key = JvmSettings.FEATURE_FLAG, value = "true", varArgs = "api-bearer-auth-provide-missing-claims") + void execute_happyPath_withoutAffiliationAndPosition_claimsInProvider_provideMissingClaimsFeatureFlagEnabled() throws AuthorizationException, CommandException { + testUserDTO.setTermsAccepted(true); + testUserDTO.setEmailAddress(null); + testUserDTO.setUsername(null); + testUserDTO.setFirstName(null); + testUserDTO.setLastName(null); + + when(authServiceStub.verifyOIDCBearerTokenAndGetOAuth2UserRecord(TEST_BEARER_TOKEN)).thenReturn(oAuth2UserRecordStub); + + when(oAuth2UserRecordStub.getUsername()).thenReturn(TEST_USERNAME); + when(oAuth2UserRecordStub.getDisplayInfo()).thenReturn(TEST_VALID_DISPLAY_INFO); + + sut.execute(contextStub); + + verify(authServiceStub, times(1)).createAuthenticatedUser( + eq(userRecordIdentifierMock), + eq(TEST_USERNAME), + eq(new AuthenticatedUserDisplayInfo( + TEST_VALID_DISPLAY_INFO.getFirstName(), + TEST_VALID_DISPLAY_INFO.getLastName(), + TEST_VALID_DISPLAY_INFO.getEmailAddress(), + "", + "") + ), + eq(true) + ); + } + + @ParameterizedTest + @ValueSource(strings = {" ", ""}) + @JvmSetting(key = JvmSettings.FEATURE_FLAG, value = "true", varArgs = "api-bearer-auth-provide-missing-claims") + void 
execute_happyPath_withoutAffiliationAndPosition_blankClaimInProviderProvidedInJson_provideMissingClaimsFeatureFlagEnabled(String testBlankUsername) throws AuthorizationException, CommandException { + String testUsernameNotBlank = "usernameNotBlank"; + testUserDTO.setUsername(testUsernameNotBlank); + testUserDTO.setTermsAccepted(true); + testUserDTO.setEmailAddress(null); + testUserDTO.setFirstName(null); + testUserDTO.setLastName(null); + + when(authServiceStub.verifyOIDCBearerTokenAndGetOAuth2UserRecord(TEST_BEARER_TOKEN)).thenReturn(oAuth2UserRecordStub); + + when(oAuth2UserRecordStub.getUsername()).thenReturn(testBlankUsername); + when(oAuth2UserRecordStub.getDisplayInfo()).thenReturn(TEST_VALID_DISPLAY_INFO); + + sut.execute(contextStub); + + verify(authServiceStub, times(1)).createAuthenticatedUser( + eq(userRecordIdentifierMock), + eq(testUsernameNotBlank), + eq(new AuthenticatedUserDisplayInfo( + TEST_VALID_DISPLAY_INFO.getFirstName(), + TEST_VALID_DISPLAY_INFO.getLastName(), + TEST_VALID_DISPLAY_INFO.getEmailAddress(), + "", + "") + ), + eq(true) + ); + } + + @Test + @JvmSetting(key = JvmSettings.FEATURE_FLAG, value = "true", varArgs = "api-bearer-auth-handle-tos-acceptance-in-idp") + void execute_doNotThrowUnacceptedTermsError_unacceptedTermsInUserDTOAndAllClaimsInProvider_handleTosAcceptanceInIdpFeatureFlagEnabled() throws AuthorizationException { + testUserDTO.setTermsAccepted(false); + testUserDTO.setEmailAddress(null); + testUserDTO.setUsername(null); + testUserDTO.setFirstName(null); + testUserDTO.setLastName(null); + + when(authServiceStub.verifyOIDCBearerTokenAndGetOAuth2UserRecord(TEST_BEARER_TOKEN)).thenReturn(oAuth2UserRecordStub); + + when(oAuth2UserRecordStub.getUsername()).thenReturn(TEST_USERNAME); + when(oAuth2UserRecordStub.getDisplayInfo()).thenReturn(TEST_VALID_DISPLAY_INFO); + + assertDoesNotThrow(() -> sut.execute(contextStub)); + } +} diff --git a/src/test/java/edu/harvard/iq/dataverse/export/SchemaDotOrgExporterTest.java 
b/src/test/java/edu/harvard/iq/dataverse/export/SchemaDotOrgExporterTest.java index 9850e9d80e9..2121aa4d9f9 100644 --- a/src/test/java/edu/harvard/iq/dataverse/export/SchemaDotOrgExporterTest.java +++ b/src/test/java/edu/harvard/iq/dataverse/export/SchemaDotOrgExporterTest.java @@ -253,7 +253,7 @@ public void testGetDisplayName() { */ @Test public void testIsXMLFormat() { - assertEquals(false, schemaDotOrgExporter instanceof XMLExporter); + assertFalse(schemaDotOrgExporter instanceof XMLExporter); } /** @@ -261,7 +261,7 @@ public void testIsXMLFormat() { */ @Test public void testIsHarvestable() { - assertEquals(false, schemaDotOrgExporter.isHarvestable()); + assertFalse(schemaDotOrgExporter.isHarvestable()); } /** @@ -269,7 +269,7 @@ public void testIsHarvestable() { */ @Test public void testIsAvailableToUsers() { - assertEquals(true, schemaDotOrgExporter.isAvailableToUsers()); + assertTrue(schemaDotOrgExporter.isAvailableToUsers()); } /** diff --git a/src/test/java/edu/harvard/iq/dataverse/export/ddi/DdiExportUtilTest.java b/src/test/java/edu/harvard/iq/dataverse/export/ddi/DdiExportUtilTest.java index 41e6be61bb8..f594de4757d 100644 --- a/src/test/java/edu/harvard/iq/dataverse/export/ddi/DdiExportUtilTest.java +++ b/src/test/java/edu/harvard/iq/dataverse/export/ddi/DdiExportUtilTest.java @@ -64,6 +64,23 @@ public void testJson2DdiNoFiles() throws Exception { XmlAssert.assertThat(result).and(datasetAsDdi).ignoreWhitespace().areSimilar(); } + @Test + public void testJson2DdiNoFilesTermsOfUse() throws Exception { + // given + Path datasetVersionJson = Path.of("src/test/java/edu/harvard/iq/dataverse/export/ddi/dataset-finch-terms-of-use.json"); + String datasetVersionAsJson = Files.readString(datasetVersionJson, StandardCharsets.UTF_8); + Path ddiFile = Path.of("src/test/java/edu/harvard/iq/dataverse/export/ddi/dataset-finch-terms-of-use.xml"); + String datasetAsDdi = XmlPrinter.prettyPrintXml(Files.readString(ddiFile, StandardCharsets.UTF_8)); + 
logger.fine(datasetAsDdi); + + // when + String result = DdiExportUtil.datasetDtoAsJson2ddi(datasetVersionAsJson); + logger.fine(result); + + // then + XmlAssert.assertThat(result).and(datasetAsDdi).ignoreWhitespace().areSimilar(); + } + @Test public void testExportDDI() throws Exception { // given diff --git a/src/test/java/edu/harvard/iq/dataverse/export/ddi/dataset-finch-terms-of-use.json b/src/test/java/edu/harvard/iq/dataverse/export/ddi/dataset-finch-terms-of-use.json new file mode 100644 index 00000000000..b3d6caff2e9 --- /dev/null +++ b/src/test/java/edu/harvard/iq/dataverse/export/ddi/dataset-finch-terms-of-use.json @@ -0,0 +1,404 @@ +{ + "id": 11, + "identifier": "PCA2E3", + "persistentUrl": "https://doi.org/10.5072/FK2/PCA2E3", + "protocol": "doi", + "authority": "10.5072/FK2", + "metadataLanguage": "en", + "datasetVersion": { + "id": 2, + "versionNumber": 1, + "versionMinorNumber": 0, + "versionState": "RELEASED", + "productionDate": "Production Date", + "lastUpdateTime": "2015-09-24T17:07:57Z", + "releaseTime": "2015-09-24T17:07:57Z", + "createTime": "2015-09-24T16:47:51Z", + "termsOfUse":"This dataset is made available without information on how it can be used. 
You should communicate with the Contact(s) specified before use.", + "metadataBlocks": { + "citation": { + "displayName": "Citation Metadata", + "name":"citation", + "fields": [ + { + "typeName": "title", + "multiple": false, + "typeClass": "primitive", + "value": "Darwin's Finches" + }, + { + "typeName": "alternativeTitle", + "multiple": true, + "typeClass": "primitive", + "value": ["Darwin's Finches Alternative Title1", "Darwin's Finches Alternative Title2"] + }, + { + "typeName": "author", + "multiple": true, + "typeClass": "compound", + "value": [ + { + "authorName": { + "typeName": "authorName", + "multiple": false, + "typeClass": "primitive", + "value": "Finch, Fiona" + }, + "authorAffiliation": { + "typeName": "authorAffiliation", + "multiple": false, + "typeClass": "primitive", + "value": "Birds Inc." + } + } + ] + }, + { + "typeName": "timePeriodCovered", + "multiple": true, + "typeClass": "compound", + "value": [ + { + "timePeriodStart": { + "typeName": "timePeriodCoveredStart", + "multiple": false, + "typeClass": "primitive", + "value": "20020816" + }, + "timePeriodEnd": { + "typeName": "timePeriodCoveredEnd", + "multiple": false, + "typeClass": "primitive", + "value": "20160630" + } + } + ] + }, + { + "typeName": "dateOfCollection", + "multiple": true, + "typeClass": "compound", + "value": [ + { + "timePeriodStart": { + "typeName": "dateOfCollectionStart", + "multiple": false, + "typeClass": "primitive", + "value": "20070831" + }, + "timePeriodEnd": { + "typeName": "dateOfCollectionEnd", + "multiple": false, + "typeClass": "primitive", + "value": "20130630" + } + } + ] + }, + { + "typeName": "datasetContact", + "multiple": true, + "typeClass": "compound", + "value": [ + { + "datasetContactEmail": { + "typeName": "datasetContactEmail", + "multiple": false, + "typeClass": "primitive", + "value": "finch@mailinator.com" + }, + "datasetContactName": { + "typeName": "datasetContactName", + "multiple": false, + "typeClass": "primitive", + "value": "Jimmy 
Finch" + }, + "datasetContactAffiliation": { + "typeName": "datasetContactAffiliation", + "multiple": false, + "typeClass": "primitive", + "value": "Finch Academy" + } + } + ] + }, + { + "typeName": "producer", + "multiple": true, + "typeClass": "compound", + "value": [ + { + "producerAbbreviation": { + "typeName": "producerAbbreviation", + "multiple": false, + "typeClass": "primitive", + "value": "ProdAbb" + }, + "producerName": { + "typeName": "producerName", + "multiple": false, + "typeClass": "primitive", + "value": "Johnny Hawk" + }, + "producerAffiliation": { + "typeName": "producerAffiliation", + "multiple": false, + "typeClass": "primitive", + "value": "Hawk Institute" + }, + "producerURL": { + "typeName": "producerURL", + "multiple": false, + "typeClass": "primitive", + "value": "http://www.hawk.edu/url" + }, + "producerLogoURL": { + "typeName": "producerLogoURL", + "multiple": false, + "typeClass": "primitive", + "value": "http://www.hawk.edu/logo" + } + } + ] + }, + { + "typeName": "distributor", + "multiple": true, + "typeClass": "compound", + "value": [ + { + "distributorAbbreviation": { + "typeName": "distributorAbbreviation", + "multiple": false, + "typeClass": "primitive", + "value": "Dist-Abb" + }, + "producerName": { + "typeName": "distributorName", + "multiple": false, + "typeClass": "primitive", + "value": "Odin Raven" + }, + "distributorAffiliation": { + "typeName": "distributorAffiliation", + "multiple": false, + "typeClass": "primitive", + "value": "Valhalla Polytechnic" + }, + "distributorURL": { + "typeName": "distributorURL", + "multiple": false, + "typeClass": "primitive", + "value": "http://www.valhalla.edu/url" + }, + "distributorLogoURL": { + "typeName": "distributorLogoURL", + "multiple": false, + "typeClass": "primitive", + "value": "http://www.valhalla.edu/logo" + } + } + ] + }, + { + "typeName": "dsDescription", + "multiple": true, + "typeClass": "compound", + "value": [ + { + "dsDescriptionValue": { + "typeName": 
"dsDescriptionValue", + "multiple": false, + "typeClass": "primitive", + "value": "Darwin's finches (also known as the Galápagos finches) are a group of about fifteen species of passerine birds." + } + } + ] + }, + { + "typeName": "subject", + "multiple": true, + "typeClass": "controlledVocabulary", + "value": [ + "Medicine, Health and Life Sciences" + ] + }, + { + "typeName": "keyword", + "multiple": true, + "typeClass": "compound", + "value": [ + { + "keywordValue": { + "typeName": "keywordValue", + "multiple": false, + "typeClass": "primitive", + "value": "Keyword Value 1" + }, + "keywordTermURI": { + "typeName": "keywordTermURI", + "multiple": false, + "typeClass": "primitive", + "value": "http://keywordTermURI1.org" + }, + "keywordVocabulary": { + "typeName": "keywordVocabulary", + "multiple": false, + "typeClass": "primitive", + "value": "Keyword Vocabulary" + }, + "keywordVocabularyURI": { + "typeName": "keywordVocabularyURI", + "multiple": false, + "typeClass": "primitive", + "value": "http://www.keyword.com/one" + } + }, + { + "keywordValue": { + "typeName": "keywordValue", + "multiple": false, + "typeClass": "primitive", + "value": "Keyword Value Two" + }, + "keywordTermURI": { + "typeName": "keywordTermURI", + "multiple": false, + "typeClass": "primitive", + "value": "http://keywordTermURI1.org" + }, + "keywordVocabulary": { + "typeName": "keywordVocabulary", + "multiple": false, + "typeClass": "primitive", + "value": "Keyword Vocabulary" + }, + "keywordVocabularyURI": { + "typeName": "keywordVocabularyURI", + "multiple": false, + "typeClass": "primitive", + "value": "http://www.keyword.com/one" + } + } + ] + }, + { + "typeName": "topicClassification", + "multiple": true, + "typeClass": "compound", + "value": [ + { + "topicClassValue": { + "typeName": "topicClassValue", + "multiple": false, + "typeClass": "primitive", + "value": "TC Value 1" + }, + "topicClassVocab": { + "typeName": "topicClassVocab", + "multiple": false, + "typeClass": "primitive", + 
"value": "TC Vocabulary" + }, + "topicClassVocabURI": { + "typeName": "topicClassVocabURI", + "multiple": false, + "typeClass": "primitive", + "value": "http://www.topicClass.com/one" + } + } + ] + }, + { + "typeName": "kindOfData", + "multiple": true, + "typeClass": "primitive", + "value": [ + "Kind of Data" + ] + }, + { + "typeName": "depositor", + "multiple": false, + "typeClass": "primitive", + "value": "Added, Depositor" + } + ] + }, + "geospatial": { + "displayName": "Geospatial", + "name":"geospatial", + "fields": [ + { + "typeName": "geographicCoverage", + "multiple": true, + "typeClass": "compound", + "value": [ + { + "country": { + "typeName": "country", + "multiple": false, + "typeClass": "primitive", + "value": "USA" + }, + "state": { + "typeName": "state", + "multiple": false, + "typeClass": "primitive", + "value": "MA" + }, + "city": { + "typeName": "city", + "multiple": false, + "typeClass": "primitive", + "value": "Cambridge" + }, + "otherGeographicCoverage": { + "typeName": "otherGeographicCoverage", + "multiple": false, + "typeClass": "primitive", + "value": "Other Geographic Coverage" + } + } + ] + }, + { + "typeName": "geographicBoundingBox", + "multiple": true, + "typeClass": "compound", + "value": [ + { + "westLongitude": { + "typeName": "westLongitude", + "multiple": false, + "typeClass": "primitive", + "value": "60.3" + }, + "eastLongitude": { + "typeName": "eastLongitude", + "multiple": false, + "typeClass": "primitive", + "value": "59.8" + }, + "southLatitude": { + "typeName": "southLatitude", + "multiple": false, + "typeClass": "primitive", + "value": "41.6" + }, + "northLatitude": { + "typeName": "northLatitude", + "multiple": false, + "typeClass": "primitive", + "value": "43.8" + } + } + ] + } + ] + } + }, + "files": [], + "citation": "Finch, Fiona, 2015, \"Darwin's Finches\", https://doi.org/10.5072/FK2/PCA2E3, Root Dataverse, V1" + } +} \ No newline at end of file diff --git 
a/src/test/java/edu/harvard/iq/dataverse/export/ddi/dataset-finch-terms-of-use.xml b/src/test/java/edu/harvard/iq/dataverse/export/ddi/dataset-finch-terms-of-use.xml new file mode 100644 index 00000000000..d813d155a90 --- /dev/null +++ b/src/test/java/edu/harvard/iq/dataverse/export/ddi/dataset-finch-terms-of-use.xml @@ -0,0 +1,78 @@ + + + + + + Darwin's Finches + doi:10.5072/FK2/PCA2E3 + + + + 1 + + Finch, Fiona, 2015, "Darwin's Finches", https://doi.org/10.5072/FK2/PCA2E3, Root Dataverse, V1 + + + + + + Darwin's Finches + Darwin's Finches Alternative Title1 + Darwin's Finches Alternative Title2 + doi:10.5072/FK2/PCA2E3 + + + Finch, Fiona + + + Johnny Hawk + + + Odin Raven + Jimmy Finch + Added, Depositor + + + + + + Medicine, Health and Life Sciences + Keyword Value 1 + Keyword Value Two + TC Value 1 + + Darwin's finches (also known as the Galápagos finches) are a group of about fifteen species of passerine birds. + + 20020816 + 20160630 + 20070831 + 20130630 + USA + Cambridge + MA + Other Geographic Coverage + + 60.3 + 59.8 + 41.6 + 43.8 + + Kind of Data + + + + + + + + + + + + + This dataset is made available without information on how it can be used. You should communicate with the Contact(s) specified before use. 
+ + + + + diff --git a/src/test/java/edu/harvard/iq/dataverse/export/ddi/dataset-finch1.xml b/src/test/java/edu/harvard/iq/dataverse/export/ddi/dataset-finch1.xml index 6730c44603a..010a5db4f2b 100644 --- a/src/test/java/edu/harvard/iq/dataverse/export/ddi/dataset-finch1.xml +++ b/src/test/java/edu/harvard/iq/dataverse/export/ddi/dataset-finch1.xml @@ -69,6 +69,7 @@ + <a href="http://creativecommons.org/publicdomain/zero/1.0">CC0 1.0</a> diff --git a/src/test/java/edu/harvard/iq/dataverse/export/ddi/exportfull.xml b/src/test/java/edu/harvard/iq/dataverse/export/ddi/exportfull.xml index 507d752192d..e865dc0ffe4 100644 --- a/src/test/java/edu/harvard/iq/dataverse/export/ddi/exportfull.xml +++ b/src/test/java/edu/harvard/iq/dataverse/export/ddi/exportfull.xml @@ -161,6 +161,7 @@ Disclaimer Terms of Access + <a href="http://creativecommons.org/publicdomain/zero/1.0">CC0 1.0</a> RelatedMaterial1 diff --git a/src/test/java/edu/harvard/iq/dataverse/ingest/IngestUtilTest.java b/src/test/java/edu/harvard/iq/dataverse/ingest/IngestUtilTest.java index 4dfedf5aa17..955070a662a 100644 --- a/src/test/java/edu/harvard/iq/dataverse/ingest/IngestUtilTest.java +++ b/src/test/java/edu/harvard/iq/dataverse/ingest/IngestUtilTest.java @@ -112,8 +112,8 @@ public void testCheckForDuplicateFileNamesNoDirectories() throws Exception { } // check filenames are unique and altered - assertEquals(true, file1NameAltered); - assertEquals(true, file2NameAltered); + assertTrue(file1NameAltered); + assertTrue(file2NameAltered); // try to add data files with "-1" duplicates and see if it gets incremented to "-2" IngestUtil.checkForDuplicateFileNamesFinal(datasetVersion, dataFileList, null); @@ -128,8 +128,8 @@ public void testCheckForDuplicateFileNamesNoDirectories() throws Exception { } // check filenames are unique and altered - assertEquals(true, file1NameAltered); - assertEquals(true, file2NameAltered); + assertTrue(file1NameAltered); + assertTrue(file2NameAltered); } @Test @@ -218,8 +218,8 @@ 
public void testCheckForDuplicateFileNamesWithEmptyDirectoryLabels() throws Exce } // check filenames are unique and altered - assertEquals(true, file1NameAltered); - assertEquals(true, file2NameAltered); + assertTrue(file1NameAltered); + assertTrue(file2NameAltered); // try to add data files with "-1" duplicates and see if it gets incremented to "-2" IngestUtil.checkForDuplicateFileNamesFinal(datasetVersion, dataFileList, null); @@ -234,8 +234,8 @@ public void testCheckForDuplicateFileNamesWithEmptyDirectoryLabels() throws Exce } // check filenames are unique and altered - assertEquals(true, file1NameAltered); - assertEquals(true, file2NameAltered); + assertTrue(file1NameAltered); + assertTrue(file2NameAltered); } @Test @@ -347,9 +347,9 @@ public void testCheckForDuplicateFileNamesWithDirectories() throws Exception { } // check filenames are unique - assertEquals(true, file1NameAltered); - assertEquals(true, file2NameAltered); - assertEquals(false, file3NameAltered); + assertTrue(file1NameAltered); + assertTrue(file2NameAltered); + assertFalse(file3NameAltered); // add duplicate file in root datasetVersion.getFileMetadatas().add(fmd3); @@ -371,9 +371,9 @@ public void testCheckForDuplicateFileNamesWithDirectories() throws Exception { } // check filenames are unique - assertEquals(true, file1NameAltered); - assertEquals(true, file2NameAltered); - assertEquals(true, file3NameAltered); + assertTrue(file1NameAltered); + assertTrue(file2NameAltered); + assertTrue(file3NameAltered); } @Test @@ -457,7 +457,7 @@ public void testCheckForDuplicateFileNamesTabular() throws Exception { } // check filename is altered since tabular and will change to .tab after ingest - assertEquals(true, file2NameAltered); + assertTrue(file2NameAltered); } @@ -553,8 +553,8 @@ public void testCheckForDuplicateFileNamesWhenReplacing() throws Exception { } // check filenames are unique and unaltered - assertEquals(true, file1NameAltered); - assertEquals(false, file2NameAltered); + 
assertTrue(file1NameAltered); + assertFalse(file2NameAltered); } @Test @@ -657,7 +657,7 @@ public void testRecalculateDatasetVersionUNF() { DataTable dataTable = new DataTable(); dataTable.setUnf("unfOnDataTable"); datafile1.setDataTable(dataTable); - assertEquals(true, datafile1.isTabularData()); + assertTrue(datafile1.isTabularData()); FileMetadata fmd1 = new FileMetadata(); fmd1.setId(1L); @@ -692,7 +692,7 @@ public void testGetUnfValuesOfFiles() { @Test public void testshouldHaveUnf() { - assertEquals(false, IngestUtil.shouldHaveUnf(null)); + assertFalse(IngestUtil.shouldHaveUnf(null)); } @Test diff --git a/src/test/java/edu/harvard/iq/dataverse/mydata/PagerTest.java b/src/test/java/edu/harvard/iq/dataverse/mydata/PagerTest.java index a0ac22f99f3..a2729ce7514 100644 --- a/src/test/java/edu/harvard/iq/dataverse/mydata/PagerTest.java +++ b/src/test/java/edu/harvard/iq/dataverse/mydata/PagerTest.java @@ -2,6 +2,8 @@ import static org.junit.jupiter.api.Assertions.assertArrayEquals; import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertTrue; +import static org.junit.jupiter.api.Assertions.assertFalse; import static org.junit.jupiter.api.Assertions.assertThrows; import java.util.Arrays; @@ -171,13 +173,13 @@ public void testBasics() { pager1 = new Pager(102, 10, 1); msgt("Test: 102 results, 10 per page, page 1"); - assertEquals(true, pager1.isPagerNecessary()); + assertTrue(pager1.isPagerNecessary()); assertEquals(102, pager1.getNumResults()); assertEquals(1, pager1.getPreviousPageNumber()); assertEquals(2, pager1.getNextPageNumber()); - assertEquals(false, pager1.hasPreviousPageNumber()); - assertEquals(true, pager1.hasNextPageNumber()); + assertFalse(pager1.hasPreviousPageNumber()); + assertTrue(pager1.hasNextPageNumber()); msg("page list: " + Arrays.toString(pager1.getPageNumberList())); //assertEquals(new int[]{1, 2, 3, 4, 5}, pager1.getPageNumberList()); @@ -232,13 +234,13 @@ public void 
testNoResults() { System.out.println("getNumResults"); Pager pager1 = new Pager(0, 10, 1); - assertEquals(false, pager1.isPagerNecessary()); + assertFalse(pager1.isPagerNecessary()); assertEquals(0, pager1.getNumResults()); assertEquals(0, pager1.getPreviousPageNumber()); assertEquals(0, pager1.getNextPageNumber()); - assertEquals(false, pager1.hasPreviousPageNumber()); - assertEquals(false, pager1.hasNextPageNumber()); + assertFalse(pager1.hasPreviousPageNumber()); + assertFalse(pager1.hasNextPageNumber()); msgt("page list: " + Arrays.toString(pager1.getPageNumberList())); //assertEquals(null, pager1.getPageNumberList()); diff --git a/src/test/java/edu/harvard/iq/dataverse/privateurl/PrivateUrlUtilTest.java b/src/test/java/edu/harvard/iq/dataverse/privateurl/PrivateUrlUtilTest.java index da94b288bee..f06be37578d 100644 --- a/src/test/java/edu/harvard/iq/dataverse/privateurl/PrivateUrlUtilTest.java +++ b/src/test/java/edu/harvard/iq/dataverse/privateurl/PrivateUrlUtilTest.java @@ -277,7 +277,7 @@ public void testGetPrivateUrlFromRoleAssignmentSuccess() { PrivateUrl privateUrl = PrivateUrlUtil.getPrivateUrlFromRoleAssignment(ra, dataverseSiteUrl); assertNotNull(privateUrl); assertEquals(new Long(42), privateUrl.getDataset().getId()); - assertEquals("https://dataverse.example.edu/privateurl.xhtml?token=cd71e9d7-73a7-4ec8-b890-3d00499e8693", privateUrl.getLink()); + assertEquals("https://dataverse.example.edu/previewurl.xhtml?token=cd71e9d7-73a7-4ec8-b890-3d00499e8693", privateUrl.getLink()); } @Test diff --git a/src/test/java/edu/harvard/iq/dataverse/search/IndexServiceBeanTest.java b/src/test/java/edu/harvard/iq/dataverse/search/IndexServiceBeanTest.java index 124ce19369c..8e24c546556 100644 --- a/src/test/java/edu/harvard/iq/dataverse/search/IndexServiceBeanTest.java +++ b/src/test/java/edu/harvard/iq/dataverse/search/IndexServiceBeanTest.java @@ -53,6 +53,7 @@ public void setUp() { indexService.dataverseService = Mockito.mock(DataverseServiceBean.class); 
indexService.datasetFieldService = Mockito.mock(DatasetFieldServiceBean.class); indexService.datasetVersionService = Mockito.mock(DatasetVersionServiceBean.class); + indexService.datasetVersionFilesServiceBean = Mockito.mock(DatasetVersionFilesServiceBean.class); BrandingUtil.injectServices(indexService.dataverseService, indexService.settingsService); Mockito.when(indexService.dataverseService.findRootDataverse()).thenReturn(dataverse); diff --git a/src/test/java/edu/harvard/iq/dataverse/util/json/JsonPrinterTest.java b/src/test/java/edu/harvard/iq/dataverse/util/json/JsonPrinterTest.java index 11da71e1980..7ec8e0b25f3 100644 --- a/src/test/java/edu/harvard/iq/dataverse/util/json/JsonPrinterTest.java +++ b/src/test/java/edu/harvard/iq/dataverse/util/json/JsonPrinterTest.java @@ -133,7 +133,7 @@ public void testJson_PrivateUrl() { assertNotNull(job); JsonObject jsonObject = job.build(); assertEquals("e1d53cf6-794a-457a-9709-7c07629a8267", jsonObject.getString("token")); - assertEquals("https://dataverse.example.edu/privateurl.xhtml?token=e1d53cf6-794a-457a-9709-7c07629a8267", jsonObject.getString("link")); + assertEquals("https://dataverse.example.edu/previewurl.xhtml?token=e1d53cf6-794a-457a-9709-7c07629a8267", jsonObject.getString("link")); assertEquals("e1d53cf6-794a-457a-9709-7c07629a8267", jsonObject.getJsonObject("roleAssignment").getString("privateUrlToken")); assertEquals(PrivateUrlUser.PREFIX + "42", jsonObject.getJsonObject("roleAssignment").getString("assignee")); } @@ -290,7 +290,7 @@ public void testDataversePrinter() { assertEquals("42 Inc.", jsonObject.getString("affiliation")); assertEquals(0, jsonObject.getJsonArray("dataverseContacts").getJsonObject(0).getInt("displayOrder")); assertEquals("dv42@mailinator.com", jsonObject.getJsonArray("dataverseContacts").getJsonObject(0).getString("contactEmail")); - assertEquals(false, jsonObject.getBoolean("permissionRoot")); + assertFalse(jsonObject.getBoolean("permissionRoot")); assertEquals("Description for 
Dataverse 42.", jsonObject.getString("description")); assertEquals("UNCATEGORIZED", jsonObject.getString("dataverseType")); } diff --git a/src/test/resources/json/export-formats.json b/src/test/resources/json/export-formats.json new file mode 100644 index 00000000000..b4dc0168629 --- /dev/null +++ b/src/test/resources/json/export-formats.json @@ -0,0 +1,83 @@ +{ + "status": "OK", + "data": { + "OAI_ORE": { + "displayName": "OAI_ORE", + "mediaType": "application/json", + "isHarvestable": false, + "isVisibleInUserInterface": true + }, + "Datacite": { + "displayName": "DataCite", + "mediaType": "application/xml", + "isHarvestable": true, + "isVisibleInUserInterface": true, + "XMLNameSpace": "http://datacite.org/schema/kernel-4", + "XMLSchemaLocation": "http://datacite.org/schema/kernel-4 http://schema.datacite.org/meta/kernel-4.5/metadata.xsd", + "XMLSchemaVersion": "4.5" + }, + "oai_dc": { + "displayName": "Dublin Core", + "mediaType": "application/xml", + "isHarvestable": true, + "isVisibleInUserInterface": false, + "XMLNameSpace": "http://www.openarchives.org/OAI/2.0/oai_dc/", + "XMLSchemaLocation": "http://www.openarchives.org/OAI/2.0/oai_dc.xsd", + "XMLSchemaVersion": "2.0" + }, + "oai_datacite": { + "displayName": "OpenAIRE", + "mediaType": "application/xml", + "isHarvestable": true, + "isVisibleInUserInterface": true, + "XMLNameSpace": "http://datacite.org/schema/kernel-4", + "XMLSchemaLocation": "http://schema.datacite.org/meta/kernel-4.1/metadata.xsd", + "XMLSchemaVersion": "4.1" + }, + "schema.org": { + "displayName": "Schema.org JSON-LD", + "mediaType": "application/json", + "isHarvestable": false, + "isVisibleInUserInterface": true + }, + "ddi": { + "displayName": "DDI Codebook v2", + "mediaType": "application/xml", + "isHarvestable": false, + "isVisibleInUserInterface": true, + "XMLNameSpace": "ddi:codebook:2_5", + "XMLSchemaLocation": "https://ddialliance.org/Specification/DDI-Codebook/2.5/XMLSchema/codebook.xsd", + "XMLSchemaVersion": "2.5" + }, + 
"dcterms": { + "displayName": "Dublin Core", + "mediaType": "application/xml", + "isHarvestable": false, + "isVisibleInUserInterface": true, + "XMLNameSpace": "http://purl.org/dc/terms/", + "XMLSchemaLocation": "http://dublincore.org/schemas/xmls/qdc/dcterms.xsd", + "XMLSchemaVersion": "2.0" + }, + "html": { + "displayName": "DDI HTML Codebook", + "mediaType": "text/html", + "isHarvestable": false, + "isVisibleInUserInterface": true + }, + "dataverse_json": { + "displayName": "JSON", + "mediaType": "application/json", + "isHarvestable": true, + "isVisibleInUserInterface": true + }, + "oai_ddi": { + "displayName": "DDI Codebook v2", + "mediaType": "application/xml", + "isHarvestable": true, + "isVisibleInUserInterface": false, + "XMLNameSpace": "ddi:codebook:2_5", + "XMLSchemaLocation": "https://ddialliance.org/Specification/DDI-Codebook/2.5/XMLSchema/codebook.xsd", + "XMLSchemaVersion": "2.5" + } + } +} diff --git a/src/test/resources/json/importGenericWithOtherId.json b/src/test/resources/json/importGenericWithOtherId.json new file mode 100644 index 00000000000..af9241393e9 --- /dev/null +++ b/src/test/resources/json/importGenericWithOtherId.json @@ -0,0 +1,307 @@ +{ + "UNF": "UNF", + "createTime": "2014-11-12 12:17:55 -05", + "distributionDate": "Distribution Date", + "id": 2, + "lastUpdateTime": "2014-11-12 12:20:32 -05", + "metadataBlocks": { + "astrophysics": { + "displayName": "Astronomy and Astrophysics Metadata", + "fields": [ + { + "multiple": true, + "typeClass": "controlledVocabulary", + "typeName": "astroType", + "value": [ + "Image", + "Mosaic", + "EventList" + ] + } + ] + }, + "citation": { + "displayName": "Citation Metadata", + "fields": [ + { + "multiple": false, + "typeClass": "primitive", + "typeName": "title", + "value": "My Dataset" + }, + { + "multiple": true, + "typeClass": "compound", + "typeName": "author", + "value": [ + { + "authorAffiliation": { + "multiple": false, + "typeClass": "primitive", + "typeName": "authorAffiliation", + 
"value": "Top" + }, + "authorIdentifier": { + "multiple": false, + "typeClass": "primitive", + "typeName": "authorIdentifier", + "value": "ellenid" + }, + "authorIdentifierScheme": { + "multiple": false, + "typeClass": "controlledVocabulary", + "typeName": "authorIdentifierScheme", + "value": "ORCID" + }, + "authorName": { + "multiple": false, + "typeClass": "primitive", + "typeName": "authorName", + "value": "Privileged, Pete" + } + }, + { + "authorAffiliation": { + "multiple": false, + "typeClass": "primitive", + "typeName": "authorAffiliation", + "value": "Bottom" + }, + "authorIdentifier": { + "multiple": false, + "typeClass": "primitive", + "typeName": "authorIdentifier", + "value": "audreyId" + }, + "authorIdentifierScheme": { + "multiple": false, + "typeClass": "controlledVocabulary", + "typeName": "authorIdentifierScheme", + "value": "DAISY" + }, + "authorName": { + "multiple": false, + "typeClass": "primitive", + "typeName": "authorName", + "value": "Awesome, Audrey" + } + } + ] + }, + { + "multiple": true, + "typeClass": "primitive", + "typeName": "datasetContact", + "value": [ + "pete@malinator.com" + ] + }, + { + "multiple": false, + "typeClass": "primitive", + "typeName": "dsDescription", + "value": "Here is my description" + }, + { + "multiple": true, + "typeClass": "controlledVocabulary", + "typeName": "subject", + "value": [ + "Arts and Humanities", + "Astronomy and Astrophysics", + "Business and Management" + ] + }, + { + "multiple": true, + "typeClass": "primitive", + "typeName": "keyword", + "value": [ + "keyword1", + "keyword2" + ] + }, + { + "multiple": true, + "typeClass": "compound", + "typeName": "otherId", + "value": [ + { + "otherIdAgency": { + "multiple": false, + "typeClass": "primitive", + "typeName": "otherIdAgency", + "value": "my agency" + }, + "otherIdValue": { + "multiple": false, + "typeClass": "primitive", + "typeName": "otherIdValue", + "value": "otherId" + } + }, + { + "otherIdAgency": { + "multiple": false, + "typeClass": 
"primitive", + "typeName": "otherIdAgency", + "value": "another agency" + }, + "otherIdValue": { + "multiple": false, + "typeClass": "primitive", + "typeName": "otherIdValue", + "value": "otherId2" + } + }, + { + "otherIdAgency": { + "multiple": false, + "typeClass": "primitive", + "typeName": "otherIdAgency", + "value": "another agency" + }, + "otherIdValue": { + "multiple": false, + "typeClass": "primitive", + "typeName": "otherIdValue", + "value": "doi:10.7910/DVN/TJCLKP" + } + } + ] + }, + { + "multiple": false, + "typeClass": "primitive", + "typeName": "depositor", + "value": "Ellen K" + }, + { + "multiple": false, + "typeClass": "primitive", + "typeName": "dateOfDeposit", + "value": "2014-11-12" + } + ] + }, + "geospatial": { + "displayName": "Geospatial Metadata", + "fields": [ + { + "multiple": true, + "typeClass": "compound", + "typeName": "geographicCoverage", + "value": [ + { + "city": { + "multiple": false, + "typeClass": "primitive", + "typeName": "city", + "value": "Arlington" + }, + "country": { + "multiple": false, + "typeClass": "controlledVocabulary", + "typeName": "country", + "value": "United States" + }, + "state": { + "multiple": false, + "typeClass": "primitive", + "typeName": "state", + "value": "MA" + } + }, + { + "city": { + "multiple": false, + "typeClass": "primitive", + "typeName": "city", + "value": "beachcity" + }, + "country": { + "multiple": false, + "typeClass": "controlledVocabulary", + "typeName": "country", + "value": "Aruba" + }, + "state": { + "multiple": false, + "typeClass": "primitive", + "typeName": "state", + "value": "beach" + } + } + ] + }, + { + "multiple": false, + "typeClass": "compound", + "typeName": "geographicBoundingBox", + "value": + { + "eastLongitude": { + "multiple": false, + "typeClass": "primitive", + "typeName": "eastLongitude", + "value": "23" + }, + "northLatitude": { + "multiple": false, + "typeClass": "primitive", + "typeName": "northLatitude", + "value": "786" + }, + "southLatitude": { + "multiple": 
false, + "typeClass": "primitive", + "typeName": "southLatitude", + "value": "34" + }, + "westLongitude": { + "multiple": false, + "typeClass": "primitive", + "typeName": "westLongitude", + "value": "45" + } + } + + } + ] + }, + "socialscience": { + "displayName": "Social Science and Humanities Metadata", + "fields": [ + { + "multiple": true, + "typeClass": "compound", + "typeName": "software", + "value": [ + { + "softwareName": { + "multiple": false, + "typeClass": "primitive", + "typeName": "softwareName", + "value": "softwareName" + }, + "softwareVersion": { + "multiple": false, + "typeClass": "primitive", + "typeName": "softwareVersion", + "value": "software version" + } + } + ] + }, + { + "multiple": false, + "typeClass": "primitive", + "typeName": "unitOfAnalysis", + "value": "unit of analysis" + } + ] + } + }, + "productionDate": "Production Date", + "versionState": "DRAFT" + } diff --git a/src/test/resources/json/importGenericWithoutOtherId.json b/src/test/resources/json/importGenericWithoutOtherId.json new file mode 100644 index 00000000000..ceb2263c2cf --- /dev/null +++ b/src/test/resources/json/importGenericWithoutOtherId.json @@ -0,0 +1,258 @@ +{ + "UNF": "UNF", + "createTime": "2014-11-12 12:17:55 -05", + "distributionDate": "Distribution Date", + "id": 2, + "lastUpdateTime": "2014-11-12 12:20:32 -05", + "metadataBlocks": { + "astrophysics": { + "displayName": "Astronomy and Astrophysics Metadata", + "fields": [ + { + "multiple": true, + "typeClass": "controlledVocabulary", + "typeName": "astroType", + "value": [ + "Image", + "Mosaic", + "EventList" + ] + } + ] + }, + "citation": { + "displayName": "Citation Metadata", + "fields": [ + { + "multiple": false, + "typeClass": "primitive", + "typeName": "title", + "value": "My Dataset" + }, + { + "multiple": true, + "typeClass": "compound", + "typeName": "author", + "value": [ + { + "authorAffiliation": { + "multiple": false, + "typeClass": "primitive", + "typeName": "authorAffiliation", + "value": "Top" + 
}, + "authorIdentifier": { + "multiple": false, + "typeClass": "primitive", + "typeName": "authorIdentifier", + "value": "ellenid" + }, + "authorIdentifierScheme": { + "multiple": false, + "typeClass": "controlledVocabulary", + "typeName": "authorIdentifierScheme", + "value": "ORCID" + }, + "authorName": { + "multiple": false, + "typeClass": "primitive", + "typeName": "authorName", + "value": "Privileged, Pete" + } + }, + { + "authorAffiliation": { + "multiple": false, + "typeClass": "primitive", + "typeName": "authorAffiliation", + "value": "Bottom" + }, + "authorIdentifier": { + "multiple": false, + "typeClass": "primitive", + "typeName": "authorIdentifier", + "value": "audreyId" + }, + "authorIdentifierScheme": { + "multiple": false, + "typeClass": "controlledVocabulary", + "typeName": "authorIdentifierScheme", + "value": "DAISY" + }, + "authorName": { + "multiple": false, + "typeClass": "primitive", + "typeName": "authorName", + "value": "Awesome, Audrey" + } + } + ] + }, + { + "multiple": true, + "typeClass": "primitive", + "typeName": "datasetContact", + "value": [ + "pete@malinator.com" + ] + }, + { + "multiple": false, + "typeClass": "primitive", + "typeName": "dsDescription", + "value": "Here is my description" + }, + { + "multiple": true, + "typeClass": "controlledVocabulary", + "typeName": "subject", + "value": [ + "Arts and Humanities", + "Astronomy and Astrophysics", + "Business and Management" + ] + }, + { + "multiple": true, + "typeClass": "primitive", + "typeName": "keyword", + "value": [ + "keyword1", + "keyword2" + ] + }, + { + "multiple": false, + "typeClass": "primitive", + "typeName": "depositor", + "value": "Ellen K" + }, + { + "multiple": false, + "typeClass": "primitive", + "typeName": "dateOfDeposit", + "value": "2014-11-12" + } + ] + }, + "geospatial": { + "displayName": "Geospatial Metadata", + "fields": [ + { + "multiple": true, + "typeClass": "compound", + "typeName": "geographicCoverage", + "value": [ + { + "city": { + "multiple": 
false, + "typeClass": "primitive", + "typeName": "city", + "value": "Arlington" + }, + "country": { + "multiple": false, + "typeClass": "controlledVocabulary", + "typeName": "country", + "value": "United States" + }, + "state": { + "multiple": false, + "typeClass": "primitive", + "typeName": "state", + "value": "MA" + } + }, + { + "city": { + "multiple": false, + "typeClass": "primitive", + "typeName": "city", + "value": "beachcity" + }, + "country": { + "multiple": false, + "typeClass": "controlledVocabulary", + "typeName": "country", + "value": "Aruba" + }, + "state": { + "multiple": false, + "typeClass": "primitive", + "typeName": "state", + "value": "beach" + } + } + ] + }, + { + "multiple": false, + "typeClass": "compound", + "typeName": "geographicBoundingBox", + "value": + { + "eastLongitude": { + "multiple": false, + "typeClass": "primitive", + "typeName": "eastLongitude", + "value": "23" + }, + "northLatitude": { + "multiple": false, + "typeClass": "primitive", + "typeName": "northLatitude", + "value": "786" + }, + "southLatitude": { + "multiple": false, + "typeClass": "primitive", + "typeName": "southLatitude", + "value": "34" + }, + "westLongitude": { + "multiple": false, + "typeClass": "primitive", + "typeName": "westLongitude", + "value": "45" + } + } + + } + ] + }, + "socialscience": { + "displayName": "Social Science and Humanities Metadata", + "fields": [ + { + "multiple": true, + "typeClass": "compound", + "typeName": "software", + "value": [ + { + "softwareName": { + "multiple": false, + "typeClass": "primitive", + "typeName": "softwareName", + "value": "softwareName" + }, + "softwareVersion": { + "multiple": false, + "typeClass": "primitive", + "typeName": "softwareVersion", + "value": "software version" + } + } + ] + }, + { + "multiple": false, + "typeClass": "primitive", + "typeName": "unitOfAnalysis", + "value": "unit of analysis" + } + ] + } + }, + "productionDate": "Production Date", + "versionState": "DRAFT" + } diff --git 
a/src/test/resources/tsv/whitespace-test.tsv b/src/test/resources/tsv/whitespace-test.tsv new file mode 100644 index 00000000000..5485c948825 --- /dev/null +++ b/src/test/resources/tsv/whitespace-test.tsv @@ -0,0 +1,10 @@ +#metadataBlock name dataverseAlias displayName + whitespaceDemo Whitespace Demo +#datasetField name title description watermark fieldType displayOrder displayFormat advancedSearchField allowControlledVocabulary allowmultiples facetable displayoncreate required parent metadatablock_id + whitespaceDemoOne One Trailing Space text 0 TRUE TRUE TRUE FALSE TRUE FALSE whitespaceDemo + whitespaceDemoTwo Two Leading Space text 1 TRUE TRUE TRUE FALSE TRUE FALSE whitespaceDemo + whitespaceDemoThree Three CV with errors text 2 TRUE TRUE TRUE FALSE TRUE FALSE whitespaceDemo +#controlledVocabulary DatasetField Value identifier displayOrder + whitespaceDemoThree CV1 0 + whitespaceDemoThree CV2 1 + whitespaceDemoThree CV3 2