diff --git a/.github/workflows/maven_cache_management.yml b/.github/workflows/maven_cache_management.yml new file mode 100644 index 00000000000..fedf63b7c54 --- /dev/null +++ b/.github/workflows/maven_cache_management.yml @@ -0,0 +1,101 @@ +name: Maven Cache Management + +on: + # Every push to develop should trigger cache rejuvenation (dependencies might have changed) + push: + branches: + - develop + # According to https://docs.github.com/en/actions/using-workflows/caching-dependencies-to-speed-up-workflows#usage-limits-and-eviction-policy + # all caches are deleted after 7 days of no access. Make sure we rejuvenate every 7 days to keep it available. + schedule: + - cron: '23 2 * * 0' # Run for 'develop' every Sunday at 02:23 UTC (3:23 CET, 21:23 ET) + # Enable manual cache management + workflow_dispatch: + # Delete branch caches once a PR is merged + pull_request: + types: + - closed + +env: + COMMON_CACHE_KEY: "dataverse-maven-cache" + COMMON_CACHE_PATH: "~/.m2/repository" + +jobs: + seed: + name: Drop and Re-Seed Local Repository + runs-on: ubuntu-latest + if: ${{ github.event_name != 'pull_request' }} + permissions: + # Write permission needed to delete caches + # See also: https://docs.github.com/en/rest/actions/cache?apiVersion=2022-11-28#delete-a-github-actions-cache-for-a-repository-using-a-cache-id + actions: write + contents: read + steps: + - name: Checkout repository + uses: actions/checkout@v4 + - name: Determine Java version from Parent POM + run: echo "JAVA_VERSION=$(grep '<target.java.version>' modules/dataverse-parent/pom.xml | cut -f2 -d'>' | cut -f1 -d'<')" >> ${GITHUB_ENV} + - name: Set up JDK ${{ env.JAVA_VERSION }} + uses: actions/setup-java@v4 + with: + java-version: ${{ env.JAVA_VERSION }} + distribution: temurin + - name: Seed common cache + run: | + mvn -B -f modules/dataverse-parent dependency:go-offline dependency:resolve-plugins + # This non-obvious order is because the Maven download above takes a very long time (7-8 min). + # Jobs should not be left without a cache. Deleting and saving in one go leaves only a small chance for a cache miss. + - name: Drop common cache + run: | + gh extension install actions/gh-actions-cache + echo "🛒 Fetching list of cache keys" + cacheKeys=$(gh actions-cache list -R ${{ github.repository }} -B develop | cut -f 1 ) + + ## Setting this to not fail the workflow while deleting cache keys. + set +e + echo "🗑️ Deleting caches..."
+ for cacheKey in $cacheKeys + do + gh actions-cache delete $cacheKey -R ${{ github.repository }} -B develop --confirm + done + echo "✅ Done" + env: + GH_TOKEN: ${{ secrets.GITHUB_TOKEN }} + - name: Save the common cache + uses: actions/cache@v4 + with: + path: ${{ env.COMMON_CACHE_PATH }} + key: ${{ env.COMMON_CACHE_KEY }} + enableCrossOsArchive: true + + # Let's delete feature branch caches once their PR is merged - we only have 10 GB of space before eviction kicks in + deplete: + name: Deplete feature branch caches + runs-on: ubuntu-latest + if: ${{ github.event_name == 'pull_request' }} + permissions: + # `actions:write` permission is required to delete caches + # See also: https://docs.github.com/en/rest/actions/cache?apiVersion=2022-11-28#delete-a-github-actions-cache-for-a-repository-using-a-cache-id + actions: write + contents: read + steps: + - name: Checkout repository + uses: actions/checkout@v4 + - name: Cleanup caches + run: | + gh extension install actions/gh-actions-cache + + BRANCH=refs/pull/${{ github.event.pull_request.number }}/merge + echo "🛒 Fetching list of cache keys" + cacheKeysForPR=$(gh actions-cache list -R ${{ github.repository }} -B $BRANCH | cut -f 1 ) + + ## Setting this to not fail the workflow while deleting cache keys. + set +e + echo "🗑️ Deleting caches..." + for cacheKey in $cacheKeysForPR + do + gh actions-cache delete $cacheKey -R ${{ github.repository }} -B $BRANCH --confirm + done + echo "✅ Done" + env: + GH_TOKEN: ${{ secrets.GITHUB_TOKEN }} diff --git a/.gitignore b/.gitignore index a9733538f7c..514f82116de 100644 --- a/.gitignore +++ b/.gitignore @@ -34,6 +34,7 @@ oauth-credentials.md /src/main/webapp/oauth2/newAccount.html scripts/api/setup-all.sh* scripts/api/setup-all.*.log +src/main/resources/edu/harvard/iq/dataverse/openapi/ # ctags generated tag file tags diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index 1430ba951a6..4fa6e955b70 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -1,67 +1,7 @@ # Contributing to Dataverse -Thank you for your interest in contributing to Dataverse! We are open to contributions from everyone. You don't need permission to participate. Just jump in. If you have questions, please reach out using one or more of the channels described below. +Thank you for your interest in contributing to Dataverse! We are open to contributions from everyone. -We aren't just looking for developers. There are many ways to contribute to Dataverse. We welcome contributions of ideas, bug reports, usability research/feedback, documentation, code, and more! +Please see our [Contributor Guide][] for how you can help! -## Ideas/Feature Requests - -Your idea or feature request might already be captured in the Dataverse [issue tracker] on GitHub but if not, the best way to bring it to the community's attention is by posting on the [dataverse-community Google Group][] or bringing it up on a [Community Call][]. You're also welcome to make some noise in [chat.dataverse.org][] or cram your idea into 280 characters and mention [@dataverseorg][] on Twitter. To discuss your idea privately, please email it to support@dataverse.org - -There's a chance your idea is already on our roadmap, which is available at https://www.iq.harvard.edu/roadmap-dataverse-project - -[chat.dataverse.org]: http://chat.dataverse.org -[issue tracker]: https://github.com/IQSS/dataverse/issues -[@dataverseorg]: https://twitter.com/dataverseorg - -## Usability testing - -Please email us at support@dataverse.org if you are interested in participating in usability testing. 
- -## Bug Reports/Issues - -An issue is a bug (a feature is no longer behaving the way it should) or a feature (something new to Dataverse that helps users complete tasks). You can browse the Dataverse [issue tracker] on GitHub by open or closed issues or by milestones. - -Before submitting an issue, please search the existing issues by using the search bar at the top of the page. If there is an existing open issue that matches the issue you want to report, please add a comment to it. - -If there is no pre-existing issue or it has been closed, please click on the "New Issue" button, log in, and write in what the issue is (unless it is a security issue which should be reported privately to security@dataverse.org). - -If you do not receive a reply to your new issue or comment in a timely manner, please email support@dataverse.org with a link to the issue. - -### Writing an Issue - -For the subject of an issue, please start it by writing the feature or functionality it relates to, i.e. "Create Account:..." or "Dataset Page:...". In the body of the issue, please outline the issue you are reporting with as much detail as possible. In order for the Dataverse development team to best respond to the issue, we need as much information about the issue as you can provide. Include steps to reproduce bugs. Indicate which version you're using, which is shown at the bottom of the page. We love screenshots! - -### Issue Attachments - -You can attach certain files (images, screenshots, logs, etc.) by dragging and dropping, selecting them, or pasting from the clipboard. Files must be one of GitHub's [supported attachment formats] such as png, gif, jpg, txt, pdf, zip, etc. (Pro tip: A file ending in .log can be renamed to .txt so you can upload it.) If there's no easy way to attach your file, please include a URL that points to the file in question. - -[supported attachment formats]: https://help.github.com/articles/file-attachments-on-issues-and-pull-requests/ - -## Documentation - -The source for the documentation at http://guides.dataverse.org/en/latest/ is in the GitHub repo under the "[doc][]" folder. If you find a typo or inaccuracy or something to clarify, please send us a pull request! For more on the tools used to write docs, please see the [documentation][] section of the Developer Guide. - -[doc]: https://github.com/IQSS/dataverse/tree/develop/doc/sphinx-guides/source -[documentation]: http://guides.dataverse.org/en/latest/developers/documentation.html - -## Code/Pull Requests - -We love code contributions. Developers are not limited to the main Dataverse code in this git repo. You can help with API client libraries in your favorite language that are mentioned in the [API Guide][] or create a new library. You can help work on configuration management code that's mentioned in the [Installation Guide][]. The Installation Guide also covers a relatively new concept called "external tools" that allows developers to create their own tools that are available from within an installation of Dataverse. - -[API Guide]: http://guides.dataverse.org/en/latest/api -[Installation Guide]: http://guides.dataverse.org/en/latest/installation - -If you are interested in working on the main Dataverse code, great! Before you start coding, please reach out to us either on the [dataverse-community Google Group][], the [dataverse-dev Google Group][], [chat.dataverse.org][], or via support@dataverse.org to make sure the effort is well coordinated and we avoid merge conflicts. 
We maintain a list of [community contributors][] and [dev efforts][] the community is working on so please let us know if you'd like to be added or removed from either list. - -Please read http://guides.dataverse.org/en/latest/developers/version-control.html to understand how we use the "git flow" model of development and how we will encourage you to create a GitHub issue (if it doesn't exist already) to associate with your pull request. That page also includes tips on making a pull request. - -After making your pull request, your goal should be to help it advance through our kanban board at https://github.com/orgs/IQSS/projects/34 . If no one has moved your pull request to the code review column in a timely manner, please reach out. Note that once a pull request is created for an issue, we'll remove the issue from the board so that we only track one card (the pull request). - -Thanks for your contribution! - -[dataverse-community Google Group]: https://groups.google.com/group/dataverse-community -[Community Call]: https://dataverse.org/community-calls -[dataverse-dev Google Group]: https://groups.google.com/group/dataverse-dev -[community contributors]: https://docs.google.com/spreadsheets/d/1o9DD-MQ0WkrYaEFTD5rF_NtyL8aUISgURsAXSL7Budk/edit?usp=sharing -[dev efforts]: https://github.com/orgs/IQSS/projects/34/views/6 +[Contributor Guide]: https://guides.dataverse.org/en/latest/contributor/index.html diff --git a/Dockerfile b/Dockerfile deleted file mode 100644 index b0864a0c55f..00000000000 --- a/Dockerfile +++ /dev/null @@ -1 +0,0 @@ -# See http://guides.dataverse.org/en/latest/developers/containers.html diff --git a/README.md b/README.md index 651d0352dec..77720453d5f 100644 --- a/README.md +++ b/README.md @@ -7,7 +7,7 @@ Dataverse is an [open source][] software platform for sharing, finding, citing, We maintain a demo site at [demo.dataverse.org][] which you are welcome to use for testing and evaluating Dataverse. -To install Dataverse, please see our [Installation Guide][] which will prompt you to download our [latest release][]. +To install Dataverse, please see our [Installation Guide][] which will prompt you to download our [latest release][]. Docker users should consult the [Container Guide][]. To discuss Dataverse with the community, please join our [mailing list][], participate in a [community call][], chat with us at [chat.dataverse.org][], or attend our annual [Dataverse Community Meeting][]. 
@@ -28,6 +28,7 @@ Dataverse is a trademark of President and Fellows of Harvard College and is regi [Dataverse community]: https://dataverse.org/developers [Installation Guide]: https://guides.dataverse.org/en/latest/installation/index.html [latest release]: https://github.com/IQSS/dataverse/releases +[Container Guide]: https://guides.dataverse.org/en/latest/container/index.html [features]: https://dataverse.org/software-features [project board]: https://github.com/orgs/IQSS/projects/34 [roadmap]: https://www.iq.harvard.edu/roadmap-dataverse-project diff --git a/conf/solr/9.3.0/schema.xml b/conf/solr/schema.xml similarity index 99% rename from conf/solr/9.3.0/schema.xml rename to conf/solr/schema.xml index 90e9287d659..5dde750573d 100644 --- a/conf/solr/9.3.0/schema.xml +++ b/conf/solr/schema.xml @@ -157,7 +157,8 @@ - + + @@ -325,6 +326,7 @@ + @@ -564,6 +566,7 @@ + diff --git a/conf/solr/9.3.0/solrconfig.xml b/conf/solr/solrconfig.xml similarity index 99% rename from conf/solr/9.3.0/solrconfig.xml rename to conf/solr/solrconfig.xml index 36ed4f23390..34386375fe1 100644 --- a/conf/solr/9.3.0/solrconfig.xml +++ b/conf/solr/solrconfig.xml @@ -290,7 +290,7 @@ have some sort of hard autoCommit to limit the log size. --> - ${solr.autoCommit.maxTime:15000} + ${solr.autoCommit.maxTime:30000} false @@ -301,7 +301,7 @@ --> - ${solr.autoSoftCommit.maxTime:-1} + ${solr.autoSoftCommit.maxTime:1000} - conf/solr/9.3.0 + conf/solr solr diff --git a/modules/dataverse-parent/pom.xml b/modules/dataverse-parent/pom.xml index 612902b47a4..62efbf62317 100644 --- a/modules/dataverse-parent/pom.xml +++ b/modules/dataverse-parent/pom.xml @@ -131,7 +131,7 @@ - 6.2 + 6.3 17 UTF-8 @@ -148,14 +148,13 @@ -Duser.timezone=${project.timezone} -Dfile.encoding=${project.build.sourceEncoding} -Duser.language=${project.language} -Duser.region=${project.region} - 6.2023.8 + 6.2024.6 42.7.2 - 9.3.0 - 1.12.290 + 9.4.1 + 1.12.748 26.30.0 - 8.0.0 1.7.35 2.15.1 1.2 @@ -386,18 +385,6 @@ false - - - payara-patched-externals - Payara Patched Externals - https://raw.github.com/payara/Payara_PatchedProjects/master - - true - - - false - - central-repo Central Repository diff --git a/pom.xml b/pom.xml index 8f9d06b8744..76a8f61444f 100644 --- a/pom.xml +++ b/pom.xml @@ -32,6 +32,12 @@ 5.2.1 2.4.1 5.5.3 + + Dataverse API + ${project.version} + Open source research data repository software. 
+ + ${project.build.outputDirectory}/META-INF - ${payara.version} @@ -311,7 +319,7 @@ org.apache.solr solr-solrj - 9.3.0 + 9.4.1 colt @@ -542,6 +550,11 @@ opennlp-tools 1.9.1 + + org.xmlunit + xmlunit-core + 2.9.1 + com.google.cloud google-cloud-storage @@ -560,6 +573,12 @@ java-json-canonicalization 1.1 + + + io.gdcc + sitemapgen4j + 2.1.2 + edu.ucar cdm-core @@ -924,6 +943,30 @@ true + + io.smallrye + smallrye-open-api-maven-plugin + 3.10.0 + + + + generate-schema + + + process-classes + + ${openapi.outputDirectory} + openapi + ${openapi.infoTitle} + ${openapi.infoVersion} + ${openapi.infoDescription} + CLASS_METHOD + edu.harvard.iq.dataverse + true + + + + @@ -1081,4 +1124,4 @@ - + \ No newline at end of file diff --git a/scripts/api/data/dataset-create-new-all-default-fields.json b/scripts/api/data/dataset-create-new-all-default-fields.json index 3bcf134bc76..cc856c6372f 100644 --- a/scripts/api/data/dataset-create-new-all-default-fields.json +++ b/scripts/api/data/dataset-create-new-all-default-fields.json @@ -231,6 +231,12 @@ "typeClass": "primitive", "value": "KeywordTerm1" }, + "keywordTermURI": { + "typeName": "keywordTermURI", + "multiple": false, + "typeClass": "primitive", + "value": "http://keywordTermURI1.org" + }, "keywordVocabulary": { "typeName": "keywordVocabulary", "multiple": false, @@ -251,6 +257,12 @@ "typeClass": "primitive", "value": "KeywordTerm2" }, + "keywordTermURI": { + "typeName": "keywordTermURI", + "multiple": false, + "typeClass": "primitive", + "value": "http://keywordTermURI2.org" + }, "keywordVocabulary": { "typeName": "keywordVocabulary", "multiple": false, diff --git a/scripts/api/data/licenses/licenseApache-2.0.json b/scripts/api/data/licenses/licenseApache-2.0.json new file mode 100644 index 00000000000..5b7c3cf5c95 --- /dev/null +++ b/scripts/api/data/licenses/licenseApache-2.0.json @@ -0,0 +1,8 @@ +{ + "name": "Apache-2.0", + "uri": "http://www.apache.org/licenses/LICENSE-2.0", + "shortDescription": "Apache License 2.0", + "active": true, + "sortOrder": 9 + } + \ No newline at end of file diff --git a/scripts/api/data/licenses/licenseMIT.json b/scripts/api/data/licenses/licenseMIT.json new file mode 100644 index 00000000000..a879e8a5595 --- /dev/null +++ b/scripts/api/data/licenses/licenseMIT.json @@ -0,0 +1,7 @@ +{ + "name": "MIT", + "uri": "https://opensource.org/licenses/MIT", + "shortDescription": "MIT License", + "active": true, + "sortOrder": 8 +} diff --git a/scripts/api/data/metadatablocks/biomedical.tsv b/scripts/api/data/metadatablocks/biomedical.tsv index d70f754336a..06f1ebec1b4 100644 --- a/scripts/api/data/metadatablocks/biomedical.tsv +++ b/scripts/api/data/metadatablocks/biomedical.tsv @@ -45,6 +45,7 @@ studyFactorType Treatment Compound EFO_0000369 17 studyFactorType Treatment Type EFO_0000727 18 studyFactorType Other OTHER_FACTOR 19 + studyAssayMeasurementType cell counting ERO_0001899 0 studyAssayMeasurementType cell sorting CHMO_0001085 1 studyAssayMeasurementType clinical chemistry analysis OBI_0000520 2 studyAssayMeasurementType copy number variation profiling OBI_0000537 3 diff --git a/scripts/api/data/metadatablocks/citation.tsv b/scripts/api/data/metadatablocks/citation.tsv index c5af05927dc..18354f2b1f7 100644 --- a/scripts/api/data/metadatablocks/citation.tsv +++ b/scripts/api/data/metadatablocks/citation.tsv @@ -23,62 +23,63 @@ subject Subject The area of study relevant to the Dataset text 19 TRUE TRUE TRUE TRUE TRUE TRUE citation http://purl.org/dc/terms/subject keyword Keyword A key term that describes an important 
aspect of the Dataset and information about any controlled vocabulary used none 20 FALSE FALSE TRUE FALSE TRUE FALSE citation keywordValue Term A key term that describes important aspects of the Dataset text 21 #VALUE TRUE FALSE FALSE TRUE TRUE FALSE keyword citation - keywordVocabulary Controlled Vocabulary Name The controlled vocabulary used for the keyword term (e.g. LCSH, MeSH) text 22 (#VALUE) FALSE FALSE FALSE FALSE TRUE FALSE keyword citation - keywordVocabularyURI Controlled Vocabulary URL The URL where one can access information about the term's controlled vocabulary https:// url 23 #VALUE FALSE FALSE FALSE FALSE TRUE FALSE keyword citation - topicClassification Topic Classification Indicates a broad, important topic or subject that the Dataset covers and information about any controlled vocabulary used none 24 FALSE FALSE TRUE FALSE FALSE FALSE citation - topicClassValue Term A topic or subject term text 25 #VALUE TRUE FALSE FALSE TRUE FALSE FALSE topicClassification citation - topicClassVocab Controlled Vocabulary Name The controlled vocabulary used for the keyword term (e.g. LCSH, MeSH) text 26 (#VALUE) FALSE FALSE FALSE FALSE FALSE FALSE topicClassification citation - topicClassVocabURI Controlled Vocabulary URL The URL where one can access information about the term's controlled vocabulary https:// url 27 #VALUE FALSE FALSE FALSE FALSE FALSE FALSE topicClassification citation - publication Related Publication The article or report that uses the data in the Dataset. The full list of related publications will be displayed on the metadata tab none 28 FALSE FALSE TRUE FALSE TRUE FALSE citation http://purl.org/dc/terms/isReferencedBy - publicationCitation Citation The full bibliographic citation for the related publication textbox 29 #VALUE TRUE FALSE FALSE FALSE TRUE FALSE publication citation http://purl.org/dc/terms/bibliographicCitation - publicationIDType Identifier Type The type of identifier that uniquely identifies a related publication text 30 #VALUE: TRUE TRUE FALSE FALSE TRUE FALSE publication citation http://purl.org/spar/datacite/ResourceIdentifierScheme - publicationIDNumber Identifier The identifier for a related publication text 31 #VALUE TRUE FALSE FALSE FALSE TRUE FALSE publication citation http://purl.org/spar/datacite/ResourceIdentifier - publicationURL URL The URL form of the identifier entered in the Identifier field, e.g. the DOI URL if a DOI was entered in the Identifier field. Used to display what was entered in the ID Type and ID Number fields as a link. If what was entered in the Identifier field has no URL form, the URL of the publication webpage is used, e.g. a journal article webpage https:// url 32 #VALUE FALSE FALSE FALSE FALSE FALSE FALSE publication citation https://schema.org/distribution - notesText Notes Additional information about the Dataset textbox 33 FALSE FALSE FALSE FALSE TRUE FALSE citation - language Language A language that the Dataset's files is written in text 34 TRUE TRUE TRUE TRUE FALSE FALSE citation http://purl.org/dc/terms/language - producer Producer The entity, such a person or organization, managing the finances or other administrative processes involved in the creation of the Dataset none 35 FALSE FALSE TRUE FALSE FALSE FALSE citation - producerName Name The name of the entity, e.g. 
the person's name or the name of an organization 1) FamilyName, GivenName or 2) Organization text 36 #VALUE TRUE FALSE FALSE TRUE FALSE TRUE producer citation - producerAffiliation Affiliation The name of the entity affiliated with the producer, e.g. an organization's name Organization XYZ text 37 (#VALUE) FALSE FALSE FALSE FALSE FALSE FALSE producer citation - producerAbbreviation Abbreviated Name The producer's abbreviated name (e.g. IQSS, ICPSR) text 38 (#VALUE) FALSE FALSE FALSE FALSE FALSE FALSE producer citation - producerURL URL The URL of the producer's website https:// url 39 #VALUE FALSE FALSE FALSE FALSE FALSE FALSE producer citation - producerLogoURL Logo URL The URL of the producer's logo https:// url 40
FALSE FALSE FALSE FALSE FALSE FALSE producer citation - productionDate Production Date The date when the data were produced (not distributed, published, or archived) YYYY-MM-DD date 41 TRUE FALSE FALSE TRUE FALSE FALSE citation - productionPlace Production Location The location where the data and any related materials were produced or collected text 42 TRUE FALSE TRUE TRUE FALSE FALSE citation - contributor Contributor The entity, such as a person or organization, responsible for collecting, managing, or otherwise contributing to the development of the Dataset none 43 : FALSE FALSE TRUE FALSE FALSE FALSE citation http://purl.org/dc/terms/contributor - contributorType Type Indicates the type of contribution made to the dataset text 44 #VALUE TRUE TRUE FALSE TRUE FALSE FALSE contributor citation - contributorName Name The name of the contributor, e.g. the person's name or the name of an organization 1) FamilyName, GivenName or 2) Organization text 45 #VALUE TRUE FALSE FALSE TRUE FALSE FALSE contributor citation - grantNumber Funding Information Information about the Dataset's financial support none 46 : FALSE FALSE TRUE FALSE FALSE FALSE citation https://schema.org/sponsor - grantNumberAgency Agency The agency that provided financial support for the Dataset Organization XYZ text 47 #VALUE TRUE FALSE FALSE TRUE FALSE FALSE grantNumber citation - grantNumberValue Identifier The grant identifier or contract identifier of the agency that provided financial support for the Dataset text 48 #VALUE TRUE FALSE FALSE TRUE FALSE FALSE grantNumber citation - distributor Distributor The entity, such as a person or organization, designated to generate copies of the Dataset, including any editions or revisions none 49 FALSE FALSE TRUE FALSE FALSE FALSE citation - distributorName Name The name of the entity, e.g. the person's name or the name of an organization 1) FamilyName, GivenName or 2) Organization text 50 #VALUE TRUE FALSE FALSE TRUE FALSE FALSE distributor citation - distributorAffiliation Affiliation The name of the entity affiliated with the distributor, e.g. an organization's name Organization XYZ text 51 (#VALUE) FALSE FALSE FALSE FALSE FALSE FALSE distributor citation - distributorAbbreviation Abbreviated Name The distributor's abbreviated name (e.g. IQSS, ICPSR) text 52 (#VALUE) FALSE FALSE FALSE FALSE FALSE FALSE distributor citation - distributorURL URL The URL of the distributor's webpage https:// url 53 #VALUE FALSE FALSE FALSE FALSE FALSE FALSE distributor citation - distributorLogoURL Logo URL The URL of the distributor's logo image, used to show the image on the Dataset's page https:// url 54
FALSE FALSE FALSE FALSE FALSE FALSE distributor citation - distributionDate Distribution Date The date when the Dataset was made available for distribution/presentation YYYY-MM-DD date 55 TRUE FALSE FALSE TRUE FALSE FALSE citation - depositor Depositor The entity, such as a person or organization, that deposited the Dataset in the repository 1) FamilyName, GivenName or 2) Organization text 56 FALSE FALSE FALSE FALSE FALSE FALSE citation - dateOfDeposit Deposit Date The date when the Dataset was deposited into the repository YYYY-MM-DD date 57 FALSE FALSE FALSE TRUE FALSE FALSE citation http://purl.org/dc/terms/dateSubmitted - timePeriodCovered Time Period The time period that the data refer to. Also known as span. This is the time period covered by the data, not the dates of coding, collecting data, or making documents machine-readable none 58 ; FALSE FALSE TRUE FALSE FALSE FALSE citation https://schema.org/temporalCoverage - timePeriodCoveredStart Start Date The start date of the time period that the data refer to YYYY-MM-DD date 59 #NAME: #VALUE TRUE FALSE FALSE TRUE FALSE FALSE timePeriodCovered citation - timePeriodCoveredEnd End Date The end date of the time period that the data refer to YYYY-MM-DD date 60 #NAME: #VALUE TRUE FALSE FALSE TRUE FALSE FALSE timePeriodCovered citation - dateOfCollection Date of Collection The dates when the data were collected or generated none 61 ; FALSE FALSE TRUE FALSE FALSE FALSE citation - dateOfCollectionStart Start Date The date when the data collection started YYYY-MM-DD date 62 #NAME: #VALUE FALSE FALSE FALSE FALSE FALSE FALSE dateOfCollection citation - dateOfCollectionEnd End Date The date when the data collection ended YYYY-MM-DD date 63 #NAME: #VALUE FALSE FALSE FALSE FALSE FALSE FALSE dateOfCollection citation - kindOfData Data Type The type of data included in the files (e.g. survey data, clinical data, or machine-readable text) text 64 TRUE FALSE TRUE TRUE FALSE FALSE citation http://rdf-vocabulary.ddialliance.org/discovery#kindOfData - series Series Information about the dataset series to which the Dataset belong none 65 : FALSE FALSE TRUE FALSE FALSE FALSE citation - seriesName Name The name of the dataset series text 66 #VALUE TRUE FALSE FALSE TRUE FALSE FALSE series citation - seriesInformation Information Can include 1) a history of the series and 2) a summary of features that apply to the series textbox 67 #VALUE FALSE FALSE FALSE FALSE FALSE FALSE series citation - software Software Information about the software used to generate the Dataset none 68 , FALSE FALSE TRUE FALSE FALSE FALSE citation https://www.w3.org/TR/prov-o/#wasGeneratedBy - softwareName Name The name of software used to generate the Dataset text 69 #VALUE FALSE FALSE FALSE FALSE FALSE FALSE software citation - softwareVersion Version The version of the software used to generate the Dataset, e.g. 
4.11 text 70 #NAME: #VALUE FALSE FALSE FALSE FALSE FALSE FALSE software citation - relatedMaterial Related Material Information, such as a persistent ID or citation, about the material related to the Dataset, such as appendices or sampling information available outside of the Dataset textbox 71 FALSE FALSE TRUE FALSE FALSE FALSE citation - relatedDatasets Related Dataset Information, such as a persistent ID or citation, about a related dataset, such as previous research on the Dataset's subject textbox 72 FALSE FALSE TRUE FALSE FALSE FALSE citation http://purl.org/dc/terms/relation - otherReferences Other Reference Information, such as a persistent ID or citation, about another type of resource that provides background or supporting material to the Dataset text 73 FALSE FALSE TRUE FALSE FALSE FALSE citation http://purl.org/dc/terms/references - dataSources Data Source Information, such as a persistent ID or citation, about sources of the Dataset (e.g. a book, article, serial, or machine-readable data file) textbox 74 FALSE FALSE TRUE FALSE FALSE FALSE citation https://www.w3.org/TR/prov-o/#wasDerivedFrom - originOfSources Origin of Historical Sources For historical sources, the origin and any rules followed in establishing them as sources textbox 75 FALSE FALSE FALSE FALSE FALSE FALSE citation - characteristicOfSources Characteristic of Sources Characteristics not already noted elsewhere textbox 76 FALSE FALSE FALSE FALSE FALSE FALSE citation - accessToSources Documentation and Access to Sources 1) Methods or procedures for accessing data sources and 2) any special permissions needed for access textbox 77 FALSE FALSE FALSE FALSE FALSE FALSE citation + keywordTermURI Term URI A URI that points to the web presence of the Keyword Term https:// url 22 #VALUE FALSE FALSE FALSE FALSE TRUE FALSE keyword citation + keywordVocabulary Controlled Vocabulary Name The controlled vocabulary used for the keyword term (e.g. LCSH, MeSH) text 23 (#VALUE) FALSE FALSE FALSE FALSE TRUE FALSE keyword citation + keywordVocabularyURI Controlled Vocabulary URL The URL where one can access information about the term's controlled vocabulary https:// url 24 #VALUE FALSE FALSE FALSE FALSE TRUE FALSE keyword citation + topicClassification Topic Classification Indicates a broad, important topic or subject that the Dataset covers and information about any controlled vocabulary used none 25 FALSE FALSE TRUE FALSE FALSE FALSE citation + topicClassValue Term A topic or subject term text 26 #VALUE TRUE FALSE FALSE TRUE FALSE FALSE topicClassification citation + topicClassVocab Controlled Vocabulary Name The controlled vocabulary used for the keyword term (e.g. LCSH, MeSH) text 27 (#VALUE) FALSE FALSE FALSE FALSE FALSE FALSE topicClassification citation + topicClassVocabURI Controlled Vocabulary URL The URL where one can access information about the term's controlled vocabulary https:// url 28 #VALUE FALSE FALSE FALSE FALSE FALSE FALSE topicClassification citation + publication Related Publication The article or report that uses the data in the Dataset. 
The full list of related publications will be displayed on the metadata tab none 29 FALSE FALSE TRUE FALSE TRUE FALSE citation http://purl.org/dc/terms/isReferencedBy + publicationCitation Citation The full bibliographic citation for the related publication textbox 30 #VALUE TRUE FALSE FALSE FALSE TRUE FALSE publication citation http://purl.org/dc/terms/bibliographicCitation + publicationIDType Identifier Type The type of identifier that uniquely identifies a related publication text 31 #VALUE: TRUE TRUE FALSE FALSE TRUE FALSE publication citation http://purl.org/spar/datacite/ResourceIdentifierScheme + publicationIDNumber Identifier The identifier for a related publication text 32 #VALUE TRUE FALSE FALSE FALSE TRUE FALSE publication citation http://purl.org/spar/datacite/ResourceIdentifier + publicationURL URL The URL form of the identifier entered in the Identifier field, e.g. the DOI URL if a DOI was entered in the Identifier field. Used to display what was entered in the ID Type and ID Number fields as a link. If what was entered in the Identifier field has no URL form, the URL of the publication webpage is used, e.g. a journal article webpage https:// url 33 #VALUE FALSE FALSE FALSE FALSE TRUE FALSE publication citation https://schema.org/distribution + notesText Notes Additional information about the Dataset textbox 34 FALSE FALSE FALSE FALSE TRUE FALSE citation + language Language A language that the Dataset's files is written in text 35 TRUE TRUE TRUE TRUE FALSE FALSE citation http://purl.org/dc/terms/language + producer Producer The entity, such a person or organization, managing the finances or other administrative processes involved in the creation of the Dataset none 36 FALSE FALSE TRUE FALSE FALSE FALSE citation + producerName Name The name of the entity, e.g. the person's name or the name of an organization 1) FamilyName, GivenName or 2) Organization text 37 #VALUE TRUE FALSE FALSE TRUE FALSE TRUE producer citation + producerAffiliation Affiliation The name of the entity affiliated with the producer, e.g. an organization's name Organization XYZ text 38 (#VALUE) FALSE FALSE FALSE FALSE FALSE FALSE producer citation + producerAbbreviation Abbreviated Name The producer's abbreviated name (e.g. IQSS, ICPSR) text 39 (#VALUE) FALSE FALSE FALSE FALSE FALSE FALSE producer citation + producerURL URL The URL of the producer's website https:// url 40 #VALUE FALSE FALSE FALSE FALSE FALSE FALSE producer citation + producerLogoURL Logo URL The URL of the producer's logo https:// url 41
FALSE FALSE FALSE FALSE FALSE FALSE producer citation + productionDate Production Date The date when the data were produced (not distributed, published, or archived) YYYY-MM-DD date 42 TRUE FALSE FALSE TRUE FALSE FALSE citation + productionPlace Production Location The location where the data and any related materials were produced or collected text 43 TRUE FALSE TRUE TRUE FALSE FALSE citation + contributor Contributor The entity, such as a person or organization, responsible for collecting, managing, or otherwise contributing to the development of the Dataset none 44 : FALSE FALSE TRUE FALSE FALSE FALSE citation http://purl.org/dc/terms/contributor + contributorType Type Indicates the type of contribution made to the dataset text 45 #VALUE TRUE TRUE FALSE TRUE FALSE FALSE contributor citation + contributorName Name The name of the contributor, e.g. the person's name or the name of an organization 1) FamilyName, GivenName or 2) Organization text 46 #VALUE TRUE FALSE FALSE TRUE FALSE FALSE contributor citation + grantNumber Funding Information Information about the Dataset's financial support none 47 : FALSE FALSE TRUE FALSE FALSE FALSE citation https://schema.org/sponsor + grantNumberAgency Agency The agency that provided financial support for the Dataset Organization XYZ text 48 #VALUE TRUE FALSE FALSE TRUE FALSE FALSE grantNumber citation + grantNumberValue Identifier The grant identifier or contract identifier of the agency that provided financial support for the Dataset text 49 #VALUE TRUE FALSE FALSE TRUE FALSE FALSE grantNumber citation + distributor Distributor The entity, such as a person or organization, designated to generate copies of the Dataset, including any editions or revisions none 50 FALSE FALSE TRUE FALSE FALSE FALSE citation + distributorName Name The name of the entity, e.g. the person's name or the name of an organization 1) FamilyName, GivenName or 2) Organization text 51 #VALUE TRUE FALSE FALSE TRUE FALSE FALSE distributor citation + distributorAffiliation Affiliation The name of the entity affiliated with the distributor, e.g. an organization's name Organization XYZ text 52 (#VALUE) FALSE FALSE FALSE FALSE FALSE FALSE distributor citation + distributorAbbreviation Abbreviated Name The distributor's abbreviated name (e.g. IQSS, ICPSR) text 53 (#VALUE) FALSE FALSE FALSE FALSE FALSE FALSE distributor citation + distributorURL URL The URL of the distributor's webpage https:// url 54 #VALUE FALSE FALSE FALSE FALSE FALSE FALSE distributor citation + distributorLogoURL Logo URL The URL of the distributor's logo image, used to show the image on the Dataset's page https:// url 55
FALSE FALSE FALSE FALSE FALSE FALSE distributor citation + distributionDate Distribution Date The date when the Dataset was made available for distribution/presentation YYYY-MM-DD date 56 TRUE FALSE FALSE TRUE FALSE FALSE citation + depositor Depositor The entity, such as a person or organization, that deposited the Dataset in the repository 1) FamilyName, GivenName or 2) Organization text 57 FALSE FALSE FALSE FALSE FALSE FALSE citation + dateOfDeposit Deposit Date The date when the Dataset was deposited into the repository YYYY-MM-DD date 58 FALSE FALSE FALSE TRUE FALSE FALSE citation http://purl.org/dc/terms/dateSubmitted + timePeriodCovered Time Period The time period that the data refer to. Also known as span. This is the time period covered by the data, not the dates of coding, collecting data, or making documents machine-readable none 59 ; FALSE FALSE TRUE FALSE FALSE FALSE citation https://schema.org/temporalCoverage + timePeriodCoveredStart Start Date The start date of the time period that the data refer to YYYY-MM-DD date 60 #NAME: #VALUE TRUE FALSE FALSE TRUE FALSE FALSE timePeriodCovered citation + timePeriodCoveredEnd End Date The end date of the time period that the data refer to YYYY-MM-DD date 61 #NAME: #VALUE TRUE FALSE FALSE TRUE FALSE FALSE timePeriodCovered citation + dateOfCollection Date of Collection The dates when the data were collected or generated none 62 ; FALSE FALSE TRUE FALSE FALSE FALSE citation + dateOfCollectionStart Start Date The date when the data collection started YYYY-MM-DD date 63 #NAME: #VALUE FALSE FALSE FALSE FALSE FALSE FALSE dateOfCollection citation + dateOfCollectionEnd End Date The date when the data collection ended YYYY-MM-DD date 64 #NAME: #VALUE FALSE FALSE FALSE FALSE FALSE FALSE dateOfCollection citation + kindOfData Data Type The type of data included in the files (e.g. survey data, clinical data, or machine-readable text) text 65 TRUE FALSE TRUE TRUE FALSE FALSE citation http://rdf-vocabulary.ddialliance.org/discovery#kindOfData + series Series Information about the dataset series to which the Dataset belong none 66 : FALSE FALSE TRUE FALSE FALSE FALSE citation + seriesName Name The name of the dataset series text 67 #VALUE TRUE FALSE FALSE TRUE FALSE FALSE series citation + seriesInformation Information Can include 1) a history of the series and 2) a summary of features that apply to the series textbox 68 #VALUE FALSE FALSE FALSE FALSE FALSE FALSE series citation + software Software Information about the software used to generate the Dataset none 69 , FALSE FALSE TRUE FALSE FALSE FALSE citation https://www.w3.org/TR/prov-o/#wasGeneratedBy + softwareName Name The name of software used to generate the Dataset text 70 #VALUE FALSE FALSE FALSE FALSE FALSE FALSE software citation + softwareVersion Version The version of the software used to generate the Dataset, e.g. 
4.11 text 71 #NAME: #VALUE FALSE FALSE FALSE FALSE FALSE FALSE software citation + relatedMaterial Related Material Information, such as a persistent ID or citation, about the material related to the Dataset, such as appendices or sampling information available outside of the Dataset textbox 72 FALSE FALSE TRUE FALSE FALSE FALSE citation + relatedDatasets Related Dataset Information, such as a persistent ID or citation, about a related dataset, such as previous research on the Dataset's subject textbox 73 FALSE FALSE TRUE FALSE FALSE FALSE citation http://purl.org/dc/terms/relation + otherReferences Other Reference Information, such as a persistent ID or citation, about another type of resource that provides background or supporting material to the Dataset text 74 FALSE FALSE TRUE FALSE FALSE FALSE citation http://purl.org/dc/terms/references + dataSources Data Source Information, such as a persistent ID or citation, about sources of the Dataset (e.g. a book, article, serial, or machine-readable data file) textbox 75 FALSE FALSE TRUE FALSE FALSE FALSE citation https://www.w3.org/TR/prov-o/#wasDerivedFrom + originOfSources Origin of Historical Sources For historical sources, the origin and any rules followed in establishing them as sources textbox 76 FALSE FALSE FALSE FALSE FALSE FALSE citation + characteristicOfSources Characteristic of Sources Characteristics not already noted elsewhere textbox 77 FALSE FALSE FALSE FALSE FALSE FALSE citation + accessToSources Documentation and Access to Sources 1) Methods or procedures for accessing data sources and 2) any special permissions needed for access textbox 78 FALSE FALSE FALSE FALSE FALSE FALSE citation #controlledVocabulary DatasetField Value identifier displayOrder subject Agricultural Sciences D01 0 subject Arts and Humanities D0 1 @@ -138,189 +139,189 @@ authorIdentifierScheme DAI 5 authorIdentifierScheme ResearcherID 6 authorIdentifierScheme ScopusID 7 - language Abkhaz 0 - language Afar 1 aar aa - language Afrikaans 2 afr af - language Akan 3 aka ak - language Albanian 4 sqi alb sq - language Amharic 5 amh am - language Arabic 6 ara ar - language Aragonese 7 arg an - language Armenian 8 hye arm hy - language Assamese 9 asm as - language Avaric 10 ava av - language Avestan 11 ave ae - language Aymara 12 aym ay - language Azerbaijani 13 aze az - language Bambara 14 bam bm - language Bashkir 15 bak ba - language Basque 16 eus baq eu - language Belarusian 17 bel be - language Bengali, Bangla 18 ben bn - language Bihari 19 bih bh - language Bislama 20 bis bi - language Bosnian 21 bos bs - language Breton 22 bre br - language Bulgarian 23 bul bg - language Burmese 24 mya bur my - language Catalan,Valencian 25 cat ca - language Chamorro 26 cha ch - language Chechen 27 che ce - language Chichewa, Chewa, Nyanja 28 nya ny - language Chinese 29 zho chi zh - language Chuvash 30 chv cv - language Cornish 31 cor kw - language Corsican 32 cos co - language Cree 33 cre cr - language Croatian 34 hrv src hr - language Czech 35 ces cze cs - language Danish 36 dan da - language Divehi, Dhivehi, Maldivian 37 div dv - language Dutch 38 nld dut nl - language Dzongkha 39 dzo dz - language English 40 eng en - language Esperanto 41 epo eo - language Estonian 42 est et - language Ewe 43 ewe ee - language Faroese 44 fao fo - language Fijian 45 fij fj - language Finnish 46 fin fi - language French 47 fra fre fr - language Fula, Fulah, Pulaar, Pular 48 ful ff - language Galician 49 glg gl - language Georgian 50 kat geo ka - language German 51 deu ger de - language 
Greek (modern) 52 gre ell el - language Guaraní 53 grn gn - language Gujarati 54 guj gu - language Haitian, Haitian Creole 55 hat ht - language Hausa 56 hau ha - language Hebrew (modern) 57 heb he - language Herero 58 her hz - language Hindi 59 hin hi - language Hiri Motu 60 hmo ho - language Hungarian 61 hun hu - language Interlingua 62 ina ia - language Indonesian 63 ind id - language Interlingue 64 ile ie - language Irish 65 gle ga - language Igbo 66 ibo ig - language Inupiaq 67 ipk ik - language Ido 68 ido io - language Icelandic 69 isl ice is - language Italian 70 ita it - language Inuktitut 71 iku iu - language Japanese 72 jpn ja - language Javanese 73 jav jv - language Kalaallisut, Greenlandic 74 kal kl - language Kannada 75 kan kn - language Kanuri 76 kau kr - language Kashmiri 77 kas ks - language Kazakh 78 kaz kk - language Khmer 79 khm km - language Kikuyu, Gikuyu 80 kik ki - language Kinyarwanda 81 kin rw - language Kyrgyz 82 - language Komi 83 kom kv - language Kongo 84 kon kg - language Korean 85 kor ko - language Kurdish 86 kur ku - language Kwanyama, Kuanyama 87 kua kj - language Latin 88 lat la - language Luxembourgish, Letzeburgesch 89 ltz lb - language Ganda 90 lug lg - language Limburgish, Limburgan, Limburger 91 lim li - language Lingala 92 lin ln - language Lao 93 lao lo - language Lithuanian 94 lit lt - language Luba-Katanga 95 lub lu - language Latvian 96 lav lv - language Manx 97 glv gv - language Macedonian 98 mkd mac mk - language Malagasy 99 mlg mg - language Malay 100 may msa ms - language Malayalam 101 mal ml - language Maltese 102 mlt mt - language Māori 103 mao mri mi - language Marathi (Marāṭhī) 104 mar mr - language Marshallese 105 mah mh - language Mixtepec Mixtec 106 mix - language Mongolian 107 mon mn - language Nauru 108 nau na - language Navajo, Navaho 109 nav nv - language Northern Ndebele 110 nde nd - language Nepali 111 nep ne - language Ndonga 112 ndo ng - language Norwegian Bokmål 113 nob nb - language Norwegian Nynorsk 114 nno nn - language Norwegian 115 nor no - language Nuosu 116 - language Southern Ndebele 117 nbl nr - language Occitan 118 oci oc - language Ojibwe, Ojibwa 119 oji oj - language Old Church Slavonic,Church Slavonic,Old Bulgarian 120 chu cu - language Oromo 121 orm om - language Oriya 122 ori or - language Ossetian, Ossetic 123 oss os - language Panjabi, Punjabi 124 pan pa - language Pāli 125 pli pi - language Persian (Farsi) 126 per fas fa - language Polish 127 pol pl - language Pashto, Pushto 128 pus ps - language Portuguese 129 por pt - language Quechua 130 que qu - language Romansh 131 roh rm - language Kirundi 132 run rn - language Romanian 133 ron rum ro - language Russian 134 rus ru - language Sanskrit (Saṁskṛta) 135 san sa - language Sardinian 136 srd sc - language Sindhi 137 snd sd - language Northern Sami 138 sme se - language Samoan 139 smo sm - language Sango 140 sag sg - language Serbian 141 srp scc sr - language Scottish Gaelic, Gaelic 142 gla gd - language Shona 143 sna sn - language Sinhala, Sinhalese 144 sin si - language Slovak 145 slk slo sk - language Slovene 146 slv sl - language Somali 147 som so - language Southern Sotho 148 sot st - language Spanish, Castilian 149 spa es - language Sundanese 150 sun su - language Swahili 151 swa sw - language Swati 152 ssw ss - language Swedish 153 swe sv - language Tamil 154 tam ta - language Telugu 155 tel te - language Tajik 156 tgk tg - language Thai 157 tha th - language Tigrinya 158 tir ti - language Tibetan Standard, Tibetan, Central 159 tib bod bo - language 
Turkmen 160 tuk tk - language Tagalog 161 tgl tl - language Tswana 162 tsn tn - language Tonga (Tonga Islands) 163 ton to - language Turkish 164 tur tr - language Tsonga 165 tso ts - language Tatar 166 tat tt - language Twi 167 twi tw - language Tahitian 168 tah ty - language Uyghur, Uighur 169 uig ug - language Ukrainian 170 ukr uk - language Urdu 171 urd ur - language Uzbek 172 uzb uz - language Venda 173 ven ve - language Vietnamese 174 vie vi - language Volapük 175 vol vo - language Walloon 176 wln wa - language Welsh 177 cym wel cy - language Wolof 178 wol wo - language Western Frisian 179 fry fy - language Xhosa 180 xho xh - language Yiddish 181 yid yi - language Yoruba 182 yor yo - language Zhuang, Chuang 183 zha za - language Zulu 184 zul zu + language Abkhaz abk 0 abk ab + language Afar aar 1 aar aa + language Afrikaans afr 2 afr af + language Akan aka 3 aka ak + language Albanian sqi 4 sqi alb sq + language Amharic amh 5 amh am + language Arabic ara 6 ara ar + language Aragonese arg 7 arg an + language Armenian hye 8 hye arm hy + language Assamese asm 9 asm as + language Avaric ava 10 ava av + language Avestan ave 11 ave ae + language Aymara aym 12 aym ay + language Azerbaijani aze 13 aze az + language Bambara bam 14 bam bm + language Bashkir bak 15 bak ba + language Basque eus 16 eus baq eu + language Belarusian bel 17 bel be + language Bengali, Bangla ben 18 ben bn Bengali Bangla + language Bihari bih 19 bih bh + language Bislama bis 20 bis bi + language Bosnian bos 21 bos bs + language Breton bre 22 bre br + language Bulgarian bul 23 bul bg + language Burmese mya 24 mya bur my + language Catalan,Valencian cat 25 cat ca Catalan Valencian + language Chamorro cha 26 cha ch + language Chechen che 27 che ce + language Chichewa, Chewa, Nyanja nya 28 nya ny Chichewa Chewa Nyanja + language Chinese zho 29 zho chi zh + language Chuvash chv 30 chv cv + language Cornish cor 31 cor kw + language Corsican cos 32 cos co + language Cree cre 33 cre cr + language Croatian hrv 34 hrv src hr + language Czech ces 35 ces cze cs + language Danish dan 36 dan da + language Divehi, Dhivehi, Maldivian div 37 div dv Divehi Dhivehi Maldivian + language Dutch nld 38 nld dut nl + language Dzongkha dzo 39 dzo dz + language English eng 40 eng en + language Esperanto epo 41 epo eo + language Estonian est 42 est et + language Ewe ewe 43 ewe ee + language Faroese fao 44 fao fo + language Fijian fij 45 fij fj + language Finnish fin 46 fin fi + language French fra 47 fra fre fr + language Fula, Fulah, Pulaar, Pular ful 48 ful ff Fula Fulah Pulaar Pular + language Galician glg 49 glg gl + language Georgian kat 50 kat geo ka + language German deu 51 deu ger de + language Greek (modern) ell 52 ell gre el Greek + language Guaraní grn 53 grn gn + language Gujarati guj 54 guj gu + language Haitian, Haitian Creole hat 55 hat ht Haitian Haitian Creole + language Hausa hau 56 hau ha + language Hebrew (modern) heb 57 heb he + language Herero her 58 her hz + language Hindi hin 59 hin hi + language Hiri Motu hmo 60 hmo ho + language Hungarian hun 61 hun hu + language Interlingua ina 62 ina ia + language Indonesian ind 63 ind id + language Interlingue ile 64 ile ie + language Irish gle 65 gle ga + language Igbo ibo 66 ibo ig + language Inupiaq ipk 67 ipk ik + language Ido ido 68 ido io + language Icelandic isl 69 isl ice is + language Italian ita 70 ita it + language Inuktitut iku 71 iku iu + language Japanese jpn 72 jpn ja + language Javanese jav 73 jav jv + language Kalaallisut, Greenlandic kal 74 kal kl Kalaallisut 
Greenlandic + language Kannada kan 75 kan kn + language Kanuri kau 76 kau kr + language Kashmiri kas 77 kas ks + language Kazakh kaz 78 kaz kk + language Khmer khm 79 khm km + language Kikuyu, Gikuyu kik 80 kik ki Kikuyu Gikuyu + language Kinyarwanda kin 81 kin rw + language Kyrgyz kir 82 kir ky Kirghiz + language Komi kom 83 kom kv + language Kongo kon 84 kon kg + language Korean kor 85 kor ko + language Kurdish kur 86 kur ku + language Kwanyama, Kuanyama kua 87 kua kj Kwanyama Kuanyama + language Latin lat 88 lat la + language Luxembourgish, Letzeburgesch ltz 89 ltz lb Luxembourgish Letzeburgesch + language Ganda lug 90 lug lg + language Limburgish, Limburgan, Limburger lim 91 lim li Limburgish Limburgan Limburger + language Lingala lin 92 lin ln + language Lao lao 93 lao lo + language Lithuanian lit 94 lit lt + language Luba-Katanga lub 95 lub lu + language Latvian lav 96 lav lv + language Manx glv 97 glv gv + language Macedonian mkd 98 mkd mac mk + language Malagasy mlg 99 mlg mg + language Malay msa 100 msa may ms + language Malayalam mal 101 mal ml + language Maltese mlt 102 mlt mt + language Māori mri 103 mri mao mi Maori + language Marathi (Marāṭhī) mar 104 mar mr + language Marshallese mah 105 mah mh + language Mixtepec Mixtec mix 106 mix + language Mongolian mon 107 mon mn + language Nauru nau 108 nau na + language Navajo, Navaho nav 109 nav nv Navajo Navaho + language Northern Ndebele nde 110 nde nd + language Nepali nep 111 nep ne + language Ndonga ndo 112 ndo ng + language Norwegian Bokmål nob 113 nob nb + language Norwegian Nynorsk nno 114 nno nn + language Norwegian nor 115 nor no + language Nuosu iii 116 iii ii Sichuan Yi + language Southern Ndebele nbl 117 nbl nr + language Occitan oci 118 oci oc + language Ojibwe, Ojibwa oji 119 oji oj Ojibwe Ojibwa + language Old Church Slavonic,Church Slavonic,Old Bulgarian chu 120 chu cu + language Oromo orm 121 orm om + language Oriya ori 122 ori or + language Ossetian, Ossetic oss 123 oss os Ossetian Ossetic + language Panjabi, Punjabi pan 124 pan pa Panjabi Punjabi + language Pāli pli 125 pli pi + language Persian (Farsi) fas 126 fas per fa + language Polish pol 127 pol pl + language Pashto, Pushto pus 128 pus ps Pashto Pushto + language Portuguese por 129 por pt + language Quechua que 130 que qu + language Romansh roh 131 roh rm + language Kirundi run 132 run rn + language Romanian ron 133 ron rum ro + language Russian rus 134 rus ru + language Sanskrit (Saṁskṛta) san 135 san sa + language Sardinian srd 136 srd sc + language Sindhi snd 137 snd sd + language Northern Sami sme 138 sme se + language Samoan smo 139 smo sm + language Sango sag 140 sag sg + language Serbian srp 141 srp scc sr + language Scottish Gaelic, Gaelic gla 142 gla gd Scottish Gaelic Gaelic + language Shona sna 143 sna sn + language Sinhala, Sinhalese sin 144 sin si Sinhala Sinhalese + language Slovak slk 145 slk slo sk + language Slovene slv 146 slv sl Slovenian + language Somali som 147 som so + language Southern Sotho sot 148 sot st + language Spanish, Castilian spa 149 spa es Spanish Castilian + language Sundanese sun 150 sun su + language Swahili swa 151 swa sw + language Swati ssw 152 ssw ss + language Swedish swe 153 swe sv + language Tamil tam 154 tam ta + language Telugu tel 155 tel te + language Tajik tgk 156 tgk tg + language Thai tha 157 tha th + language Tigrinya tir 158 tir ti + language Tibetan Standard, Tibetan, Central bod 159 bod tib bo Tibetan Standard Tibetan Central + language Turkmen tuk 160 tuk tk + language Tagalog tgl 161 tgl tl + language 
Tswana tsn 162 tsn tn + language Tonga (Tonga Islands) ton 163 ton to Tonga + language Turkish tur 164 tur tr + language Tsonga tso 165 tso ts + language Tatar tat 166 tat tt + language Twi twi 167 twi tw + language Tahitian tah 168 tah ty + language Uyghur, Uighur uig 169 uig ug Uyghur Uighur + language Ukrainian ukr 170 ukr uk + language Urdu urd 171 urd ur + language Uzbek uzb 172 uzb uz + language Venda ven 173 ven ve + language Vietnamese vie 174 vie vi + language Volapük vol 175 vol vo + language Walloon wln 176 wln wa + language Welsh cym 177 cym wel cy + language Wolof wol 178 wol wo + language Western Frisian fry 179 fry fy + language Xhosa xho 180 xho xh + language Yiddish yid 181 yid yi + language Yoruba yor 182 yor yo + language Zhuang, Chuang zha 183 zha za Zhuang Chuang + language Zulu zul 184 zul zu language Not applicable 185 diff --git a/scripts/api/data/metadatablocks/computational_workflow.tsv b/scripts/api/data/metadatablocks/computational_workflow.tsv index 51b69cfdb80..3cd0c26a464 100644 --- a/scripts/api/data/metadatablocks/computational_workflow.tsv +++ b/scripts/api/data/metadatablocks/computational_workflow.tsv @@ -2,7 +2,7 @@ computationalworkflow Computational Workflow Metadata #datasetField name title description watermark fieldType displayOrder displayFormat advancedSearchField allowControlledVocabulary allowmultiples facetable displayoncreate required parent metadatablock_id termURI workflowType Computational Workflow Type The kind of Computational Workflow, which is designed to compose and execute a series of computational or data manipulation steps in a scientific application text 0 TRUE TRUE TRUE TRUE TRUE FALSE computationalworkflow - workflowCodeRepository External Code Repository URL A link to the repository where the un-compiled, human readable code and related code is located (e.g. GitHub, GitLab, SVN) https://... url 1 FALSE FALSE TRUE FALSE TRUE FALSE computationalworkflow + workflowCodeRepository External Code Repository URL A link to the repository where the un-compiled, human readable code and related code is located (e.g. GitHub, GitLab, SVN) https://... 
url 1 #VALUE FALSE FALSE TRUE FALSE TRUE FALSE computationalworkflow workflowDocumentation Documentation A link (URL) to the documentation or text describing the Computational Workflow and its use textbox 2 FALSE FALSE TRUE FALSE TRUE FALSE computationalworkflow #controlledVocabulary DatasetField Value identifier displayOrder workflowType Common Workflow Language (CWL) workflowtype_cwl 1 diff --git a/scripts/api/data/storageSites/add-storage-site.json b/scripts/api/data/storageSites/add-storage-site.json deleted file mode 100644 index d13ec2f165d..00000000000 --- a/scripts/api/data/storageSites/add-storage-site.json +++ /dev/null @@ -1,6 +0,0 @@ -{ - "hostname": "dataverse.librascholar.edu", - "name": "LibraScholar, USA", - "primaryStorage": true, - "transferProtocols": "rsync,posix,globus" -} diff --git a/scripts/api/setup-all.sh b/scripts/api/setup-all.sh index 5ddd9a35fdc..b7f962209e4 100755 --- a/scripts/api/setup-all.sh +++ b/scripts/api/setup-all.sh @@ -65,7 +65,7 @@ echo echo "Setting up the admin user (and as superuser)" adminResp=$(curl -s -H "Content-type:application/json" -X POST -d @"$SCRIPT_PATH"/data/user-admin.json "${DATAVERSE_URL}/api/builtin-users?password=$DV_SU_PASSWORD&key=burrito") echo "$adminResp" -curl -X POST "${DATAVERSE_URL}/api/admin/superuser/dataverseAdmin" +curl -X PUT "${DATAVERSE_URL}/api/admin/superuser/dataverseAdmin" -d "true" echo echo "Setting up the root dataverse" diff --git a/scripts/installer/Makefile b/scripts/installer/Makefile index 399bc65168a..8ea95534986 100644 --- a/scripts/installer/Makefile +++ b/scripts/installer/Makefile @@ -55,13 +55,13 @@ ${JHOVE_SCHEMA}: ../../conf/jhove/jhoveConfig.xsd ${INSTALLER_ZIP_DIR} @echo copying jhove schema file /bin/cp ../../conf/jhove/jhoveConfig.xsd ${INSTALLER_ZIP_DIR} -${SOLR_SCHEMA}: ../../conf/solr/9.3.0/schema.xml ../../conf/solr/9.3.0/update-fields.sh ${INSTALLER_ZIP_DIR} +${SOLR_SCHEMA}: ../../conf/solr/schema.xml ../../conf/solr/update-fields.sh ${INSTALLER_ZIP_DIR} @echo copying Solr schema file - /bin/cp ../../conf/solr/9.3.0/schema.xml ../../conf/solr/9.3.0/update-fields.sh ${INSTALLER_ZIP_DIR} + /bin/cp ../../conf/solr/schema.xml ../../conf/solr/update-fields.sh ${INSTALLER_ZIP_DIR} -${SOLR_CONFIG}: ../../conf/solr/9.3.0/solrconfig.xml ${INSTALLER_ZIP_DIR} +${SOLR_CONFIG}: ../../conf/solr/solrconfig.xml ${INSTALLER_ZIP_DIR} @echo copying Solr config file - /bin/cp ../../conf/solr/9.3.0/solrconfig.xml ${INSTALLER_ZIP_DIR} + /bin/cp ../../conf/solr/solrconfig.xml ${INSTALLER_ZIP_DIR} ${PYTHON_FILES}: README_python.txt install.py installConfig.py installAppServer.py installUtils.py requirements.txt default.config interactive.config ${INSTALLER_ZIP_DIR} @echo copying Python installer files diff --git a/scripts/installer/as-setup.sh b/scripts/installer/as-setup.sh index 34deddf51a3..e87122ba77c 100755 --- a/scripts/installer/as-setup.sh +++ b/scripts/installer/as-setup.sh @@ -111,15 +111,13 @@ function preliminary_setup() ./asadmin $ASADMIN_OPTS create-jvm-options "\-Ddataverse.pid.fake.label=Fake DOI Provider" ./asadmin $ASADMIN_OPTS create-jvm-options "\-Ddataverse.pid.fake.authority=10.5072" ./asadmin $ASADMIN_OPTS create-jvm-options "\-Ddataverse.pid.fake.shoulder=FK2/" + ./asadmin $ASADMIN_OPTS create-jvm-options "\-Ddataverse.pid.default-provider=fake" # jvm-options use colons as separators, escape as literal #DOI_DATACITERESTAPIURL_ESC=`echo $DOI_DATACITERESTAPIURL | sed -e 's/:/\\\:/'` #./asadmin $ASADMIN_OPTS create-jvm-options 
"\-Ddataverse.pid.testDC.datacite.rest-api-url=$DOI_DATACITERESTAPIURL_ESC" ./asadmin $ASADMIN_OPTS create-jvm-options "-Ddataverse.timerServer=true" - # Workaround for FISH-7722: Failed to deploy war with @Stateless https://github.com/payara/Payara/issues/6337 - ./asadmin $ASADMIN_OPTS create-jvm-options --add-opens=java.base/java.io=ALL-UNNAMED - # enable comet support ./asadmin $ASADMIN_OPTS set server-config.network-config.protocols.protocol.http-listener-1.http.comet-support-enabled="true" diff --git a/src/main/java/edu/harvard/iq/dataverse/BannerMessage.java b/src/main/java/edu/harvard/iq/dataverse/BannerMessage.java index 214e26965fa..003d1057972 100644 --- a/src/main/java/edu/harvard/iq/dataverse/BannerMessage.java +++ b/src/main/java/edu/harvard/iq/dataverse/BannerMessage.java @@ -46,7 +46,7 @@ public void setBannerMessageTexts(Collection bannerMessageTex public String getDisplayValue(){ - String retVal = ""; + String retVal = null; for (BannerMessageText msgTxt : this.getBannerMessageTexts()) { if (msgTxt.getLang().equals(BundleUtil.getCurrentLocale().getLanguage())) { retVal = msgTxt.getMessage(); diff --git a/src/main/java/edu/harvard/iq/dataverse/BannerMessageServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/BannerMessageServiceBean.java index 0e757998d58..3961bd064db 100644 --- a/src/main/java/edu/harvard/iq/dataverse/BannerMessageServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/BannerMessageServiceBean.java @@ -46,8 +46,10 @@ public List findAllBannerMessages() { public void save( BannerMessage message ) { em.persist(message); + em.flush(); } + public void deleteBannerMessage(Object pk) { BannerMessage message = em.find(BannerMessage.class, pk); diff --git a/src/main/java/edu/harvard/iq/dataverse/DataFile.java b/src/main/java/edu/harvard/iq/dataverse/DataFile.java index 53cdff31cc2..29a4a14c021 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DataFile.java +++ b/src/main/java/edu/harvard/iq/dataverse/DataFile.java @@ -242,6 +242,18 @@ public void setEmbargo(Embargo embargo) { this.embargo = embargo; } + @ManyToOne + @JoinColumn(name="retention_id") + private Retention retention; + + public Retention getRetention() { + return retention; + } + + public void setRetention(Retention retention) { + this.retention = retention; + } + public DataFile() { this.fileMetadatas = new ArrayList<>(); initFileReplaceAttributes(); diff --git a/src/main/java/edu/harvard/iq/dataverse/DataFileServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/DataFileServiceBean.java index 8ceb529a5d4..21f925f8981 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DataFileServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/DataFileServiceBean.java @@ -1,5 +1,6 @@ package edu.harvard.iq.dataverse; +import edu.harvard.iq.dataverse.DatasetVersion.VersionState; import edu.harvard.iq.dataverse.authorization.users.AuthenticatedUser; import edu.harvard.iq.dataverse.dataaccess.DataAccess; import edu.harvard.iq.dataverse.dataaccess.ImageThumbConverter; @@ -383,7 +384,8 @@ public FileMetadata findMostRecentVersionFileIsIn(DataFile file) { if (fileMetadatas == null || fileMetadatas.isEmpty()) { return null; } else { - return fileMetadatas.get(0); + // This assumes the order of filemetadatas is from first to most recent, which is true as of v6.3 + return fileMetadatas.get(fileMetadatas.size() - 1); } } @@ -759,6 +761,13 @@ public List findAll() { return em.createQuery("select object(o) from DataFile as o order by o.id", DataFile.class).getResultList(); } + public List 
findVersionStates(Long fileId) { + Query query = em.createQuery( + "select distinct dv.versionState from DatasetVersion dv where dv.id in (select fm.datasetVersion.id from FileMetadata fm where fm.dataFile.id=:fileId)"); + query.setParameter("fileId", fileId); + return query.getResultList(); + } + public DataFile save(DataFile dataFile) { if (dataFile.isMergeable()) { @@ -959,6 +968,7 @@ public boolean isThumbnailAvailable (DataFile file) { return true; } file.setPreviewImageFail(true); + file.setPreviewImageAvailable(false); this.save(file); return false; } @@ -1365,7 +1375,10 @@ public Embargo findEmbargo(Long id) { DataFile d = find(id); return d.getEmbargo(); } - + + public boolean isRetentionExpired(FileMetadata fm) { + return FileUtil.isRetentionExpired(fm); + } /** * Checks if the supplied DvObjectContainer (Dataset or Collection; although * only collection-level storage quotas are officially supported as of now) diff --git a/src/main/java/edu/harvard/iq/dataverse/DatasetFieldConstant.java b/src/main/java/edu/harvard/iq/dataverse/DatasetFieldConstant.java index 22bad42df96..d91aa101eb5 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DatasetFieldConstant.java +++ b/src/main/java/edu/harvard/iq/dataverse/DatasetFieldConstant.java @@ -91,8 +91,9 @@ public class DatasetFieldConstant implements java.io.Serializable { public final static String datasetVersionValue="datasetVersionValue"; public final static String versionDate="versionDate"; public final static String keywordValue="keywordValue"; - public final static String keywordVocab="keywordVocabulary"; //SEK 6/10/2016 to match what is in the db - public final static String keywordVocabURI="keywordVocabularyURI"; //SEK 6/10/2016 to match what is in the db + public final static String keywordTermURI="keywordTermURI"; + public final static String keywordVocab="keywordVocabulary"; + public final static String keywordVocabURI="keywordVocabularyURI"; public final static String topicClassValue="topicClassValue"; public final static String topicClassVocab="topicClassVocab"; public final static String topicClassVocabURI="topicClassVocabURI"; diff --git a/src/main/java/edu/harvard/iq/dataverse/DatasetFieldServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/DatasetFieldServiceBean.java index 6223cd83773..34595728fa7 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DatasetFieldServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/DatasetFieldServiceBean.java @@ -4,7 +4,9 @@ import java.io.StringReader; import java.net.URI; import java.net.URISyntaxException; +import java.net.URLEncoder; import java.nio.charset.StandardCharsets; +import java.security.InvalidParameterException; import java.sql.Timestamp; import java.text.MessageFormat; import java.time.Instant; @@ -19,8 +21,6 @@ import jakarta.ejb.EJB; import jakarta.ejb.Stateless; -import jakarta.ejb.TransactionAttribute; -import jakarta.ejb.TransactionAttributeType; import jakarta.inject.Named; import jakarta.json.Json; import jakarta.json.JsonArray; @@ -41,6 +41,7 @@ import org.apache.commons.codec.digest.DigestUtils; import org.apache.commons.httpclient.HttpException; +import org.apache.commons.lang3.StringUtils; import org.apache.http.HttpResponse; import org.apache.http.HttpResponseInterceptor; import org.apache.http.client.methods.HttpGet; @@ -321,14 +322,15 @@ public Map getCVocConf(boolean byTermUriField){ + jo.getString("term-uri-field")); } } - if (jo.containsKey("child-fields")) { - JsonArray childFields = jo.getJsonArray("child-fields"); - for (JsonString elm : 
childFields.getValuesAs(JsonString.class)) { - dft = findByNameOpt(elm.getString()); - logger.info("Found: " + dft.getName()); + if (jo.containsKey("managed-fields")) { + JsonObject managedFields = jo.getJsonObject("managed-fields"); + for (String s : managedFields.keySet()) { + dft = findByNameOpt(managedFields.getString(s)); if (dft == null) { logger.warning("Ignoring External Vocabulary setting for non-existent child field: " - + elm.getString()); + + managedFields.getString(s)); + } else { + logger.fine("Found: " + dft.getName()); } } } @@ -345,12 +347,16 @@ public Map getCVocConf(boolean byTermUriField){ * @param df - the primitive/parent compound field containing a newly saved value */ public void registerExternalVocabValues(DatasetField df) { - DatasetFieldType dft =df.getDatasetFieldType(); + DatasetFieldType dft = df.getDatasetFieldType(); logger.fine("Registering for field: " + dft.getName()); JsonObject cvocEntry = getCVocConf(true).get(dft.getId()); if (dft.isPrimitive()) { + List siblingsDatasetFields = new ArrayList<>(); + if(dft.getParentDatasetFieldType()!=null) { + siblingsDatasetFields = df.getParentDatasetFieldCompoundValue().getChildDatasetFields(); + } for (DatasetFieldValue dfv : df.getDatasetFieldValues()) { - registerExternalTerm(cvocEntry, dfv.getValue()); + registerExternalTerm(cvocEntry, dfv.getValue(), siblingsDatasetFields); } } else { if (df.getDatasetFieldType().isCompound()) { @@ -359,45 +365,55 @@ public void registerExternalVocabValues(DatasetField df) { for (DatasetField cdf : cv.getChildDatasetFields()) { logger.fine("Found term uri field type id: " + cdf.getDatasetFieldType().getId()); if (cdf.getDatasetFieldType().equals(termdft)) { - registerExternalTerm(cvocEntry, cdf.getValue()); + registerExternalTerm(cvocEntry, cdf.getValue(), cv.getChildDatasetFields()); } } } } } } - + /** - * Retrieves indexable strings from a cached externalvocabularyvalue entry. - * - * This method assumes externalvocabularyvalue entries have been filtered and - * the externalvocabularyvalue entry contain a single JsonObject whose "personName" or "termName" values - * are either Strings or an array of objects with "lang" and ("value" or "content") keys. The - * string, or the "value/content"s for each language are added to the set. - * + * Retrieves indexable strings from a cached externalvocabularyvalue entry filtered through retrieval-filtering configuration. + *

+ * This method assumes externalvocabularyvalue entries have been filtered and that they contain a single JsonObject. + * Cases handled: a String, an Array of Strings, an Array of Objects with "value" or "content" keys, and an Object whose entries have String values or Array values containing a set of Strings. + * The string(s), or the "value"/"content" entries for each language, are added to the set. + * Retrieved string values are indexed in the term-uri-field (a parameter defined in the CVOC configuration) by default, or in the field named by an optional "indexIn" parameter in the retrieval-filtering section of the CVOC configuration. + *
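+ * For example (key names and values here are illustrative only): a filtered entry such as
+ * {"termName": [{"lang": "en", "value": "non-apis bee"}, {"lang": "fr", "value": "abeille non apis"}]}
+ * contributes both "non-apis bee" and "abeille non apis" to the set, while an entry such as
+ * {"altLabel": {"en": ["neighbourhood societies"]}} contributes "neighbourhood societies".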

* Any parsing error results in no entries (there can be unfiltered entries with * unknown structure - getting some strings from such an entry could give fairly * random info that would be bad to add for searches, etc.) - * - * @param termUri + * + * @param termUri unique identifier to search for in the database + * @param cvocEntry the related CVOC configuration + * @param indexingField name of the Solr field that will be filled with the strings this method returns while indexing * @return - a set of indexable strings */ - public Set getStringsFor(String termUri) { - Set strings = new HashSet(); + public Set getIndexableStringsByTermUri(String termUri, JsonObject cvocEntry, String indexingField) { + Set strings = new HashSet<>(); JsonObject jo = getExternalVocabularyValue(termUri); + JsonObject filtering = cvocEntry.getJsonObject("retrieval-filtering"); + String termUriField = cvocEntry.getJsonString("term-uri-field").getString(); if (jo != null) { try { for (String key : jo.keySet()) { - if (key.equals("termName") || key.equals("personName")) { + String indexIn = filtering.getJsonObject(key).getString("indexIn", null); + // Either we are in mapping mode, so indexingField (the Solr field) equals indexIn (from the CVOC config), + // or we are in default mode: indexingField is termUriField, indexIn is not defined, and only the termName and personName keys are used + if (indexingField.equals(indexIn) || + (indexIn == null && termUriField.equals(indexingField) && (key.equals("termName") || key.equals("personName")))) { JsonValue jv = jo.get(key); if (jv.getValueType().equals(JsonValue.ValueType.STRING)) { logger.fine("adding " + jo.getString(key) + " for " + termUri); strings.add(jo.getString(key)); - } else { - if (jv.getValueType().equals(JsonValue.ValueType.ARRAY)) { - JsonArray jarr = jv.asJsonArray(); - for (int i = 0; i < jarr.size(); i++) { + } else if (jv.getValueType().equals(JsonValue.ValueType.ARRAY)) { + JsonArray jarr = jv.asJsonArray(); + for (int i = 0; i < jarr.size(); i++) { + if (jarr.get(i).getValueType().equals(JsonValue.ValueType.STRING)) { + strings.add(jarr.getString(i)); + } else if (jarr.get(i).getValueType().equals(ValueType.OBJECT)) { // This condition handles SKOSMOS format like [{"lang": "en","value": "non-apis bee"},{"lang": "fr","value": "abeille non apis"}] JsonObject entry = jarr.getJsonObject(i); if (entry.containsKey("value")) { logger.fine("adding " + entry.getString("value") + " for " + termUri); @@ -409,6 +425,22 @@ public Set getStringsFor(String termUri) { } } } + } else if (jv.getValueType().equals(JsonValue.ValueType.OBJECT)) { + JsonObject joo = jv.asJsonObject(); + for (Map.Entry entry : joo.entrySet()) { + if (entry.getValue().getValueType().equals(JsonValue.ValueType.STRING)) { // This condition handles format like { "fr": "association de quartier", "en": "neighborhood associations"} + logger.fine("adding " + joo.getString(entry.getKey()) + " for " + termUri); + strings.add(joo.getString(entry.getKey())); + } else if (entry.getValue().getValueType().equals(ValueType.ARRAY)) { // This condition handles format like {"en": ["neighbourhood societies"]} + JsonArray jarr = entry.getValue().asJsonArray(); + for (int i = 0; i < jarr.size(); i++) { + if (jarr.get(i).getValueType().equals(JsonValue.ValueType.STRING)) { + logger.fine("adding " + jarr.getString(i) + " for " + termUri); + strings.add(jarr.getString(i)); + } + } + } + } } } } @@ -420,7 +452,7 @@ public Set getStringsFor(String termUri) { } logger.fine("Returning " + String.join(",", strings) + " for " + termUri); return strings; - } + } /** *
Perform a query to retrieve a cached value from the externalvocabularyvalue table @@ -447,17 +479,20 @@ public JsonObject getExternalVocabularyValue(String termUri) { /** * Perform a call to the external service to retrieve information about the term URI - * @param cvocEntry - the configuration for the DatasetFieldType associated with this term - * @param term - the term uri as a string + * + * @param cvocEntry - the configuration for the DatasetFieldType associated with this term + * @param term - the term uri as a string + * @param relatedDatasetFields - siblings or children of the term */ - @TransactionAttribute(TransactionAttributeType.REQUIRES_NEW) - public void registerExternalTerm(JsonObject cvocEntry, String term) { + public void registerExternalTerm(JsonObject cvocEntry, String term, List relatedDatasetFields) { String retrievalUri = cvocEntry.getString("retrieval-uri"); + String termUriFieldName = cvocEntry.getString("term-uri-field"); String prefix = cvocEntry.getString("prefix", null); - if(term.isBlank()) { - logger.fine("Ingoring blank term"); + if(StringUtils.isBlank(term)) { + logger.fine("Ignoring blank term"); return; } + boolean isExternal = false; JsonObject vocabs = cvocEntry.getJsonObject("vocabs"); for (String key: vocabs.keySet()) { @@ -486,7 +521,22 @@ public void registerExternalTerm(JsonObject cvocEntry, String term) { } if (evv.getValue() == null) { String adjustedTerm = (prefix==null)? term: term.replace(prefix, ""); - retrievalUri = retrievalUri.replace("{0}", adjustedTerm); + + try { + retrievalUri = tryToReplaceRetrievalUriParam(retrievalUri, "0", adjustedTerm); + retrievalUri = tryToReplaceRetrievalUriParam(retrievalUri, termUriFieldName, adjustedTerm); + for (DatasetField f : relatedDatasetFields) { + retrievalUri = tryToReplaceRetrievalUriParam(retrievalUri, f.getDatasetFieldType().getName(), f.getValue()); + } + } catch (InvalidParameterException e) { + logger.warning("InvalidParameterException in tryToReplaceRetrievalUriParam: " + e.getMessage()); + return; + } + if (retrievalUri.contains("{")) { + logger.severe("Retrieval URI still contains an unreplaced parameter: " + retrievalUri); + return; + } + logger.fine("Didn't find " + term + ", calling " + retrievalUri); try (CloseableHttpClient httpClient = HttpClients.custom() .addInterceptorLast(new HttpResponseInterceptor() { @@ -505,14 +555,21 @@ public void process(HttpResponse response, HttpContext context) throws HttpExcep HttpGet httpGet = new HttpGet(retrievalUri); //application/json+ld is for backward compatibility httpGet.addHeader("Accept", "application/ld+json, application/json+ld, application/json"); - + //Add any other custom HTTP request headers if they exist + final JsonObject headers = cvocEntry.getJsonObject("headers"); + if (headers != null) { + final Set headerKeys = headers.keySet(); + for (final String hKey: headerKeys) { + httpGet.addHeader(hKey, headers.getString(hKey)); + } + } HttpResponse response = httpClient.execute(httpGet); String data = EntityUtils.toString(response.getEntity(), StandardCharsets.UTF_8); int statusCode = response.getStatusLine().getStatusCode(); if (statusCode == 200) { logger.fine("Returned data: " + data); try (JsonReader jsonReader = Json.createReader(new StringReader(data))) { - String dataObj =filterResponse(cvocEntry, jsonReader.readObject(), term).toString(); + String dataObj = filterResponse(cvocEntry, jsonReader.readObject(), term).toString(); evv.setValue(dataObj); evv.setLastUpdateDate(Timestamp.from(Instant.now())); logger.fine("JsonObject: " + dataObj); @@
-531,19 +588,42 @@ public void process(HttpResponse response, HttpContext context) throws HttpExcep } catch (IOException ioe) { logger.severe("IOException when retrieving url: " + retrievalUri + " : " + ioe.getMessage()); } - } } catch (URISyntaxException e) { logger.fine("Term is not a URI: " + term); } + } + + private String tryToReplaceRetrievalUriParam(String retrievalUri, String paramName, String value) throws InvalidParameterException { + + if(StringUtils.isBlank(paramName)) { + throw new InvalidParameterException("Empty or null paramName is not allowed while replacing retrieval uri parameter"); + } + + if(retrievalUri.contains(paramName)) { + logger.fine("Parameter {" + paramName + "} found in retrievalUri"); + + if(StringUtils.isBlank(value)) { + throw new InvalidParameterException("Empty or null value is not allowed while replacing retrieval uri parameter"); + } + + if(retrievalUri.contains("encodeUrl:" + paramName)) { + retrievalUri = retrievalUri.replace("{encodeUrl:"+paramName+"}", URLEncoder.encode(value, StandardCharsets.UTF_8)); + } else { + retrievalUri = retrievalUri.replace("{"+paramName+"}", value); + } + } else { + logger.fine("Parameter {" + paramName + "} not found in retrievalUri"); + } + return retrievalUri; } /** * Parse the raw value returned by an external service for a given term uri and * filter it according to the 'retrieval-filtering' configuration for this * DatasetFieldType, creating a Json value with the specified structure - * + * @param cvocEntry - the config for this DatasetFieldType * @param readObject - the raw response from the service * @param termUri - the term uri @@ -602,6 +682,8 @@ private JsonObject filterResponse(JsonObject cvocEntry, JsonObject readObject, S if (pattern.equals("{0}")) { if (vals.get(0) instanceof JsonArray) { job.add(filterKey, (JsonArray) vals.get(0)); + } else if (vals.get(0) instanceof JsonObject) { + job.add(filterKey, (JsonObject) vals.get(0)); } else { job.add(filterKey, (String) vals.get(0)); } @@ -639,7 +721,7 @@ Object processPathSegment(int index, String[] pathParts, JsonValue curPath, Stri String[] keyVal = pathParts[index].split("="); logger.fine("Looking for object where " + keyVal[0] + " is " + keyVal[1]); String expected = keyVal[1]; - + if (!expected.equals("*")) { if (expected.equals("@id")) { expected = termUri; @@ -668,7 +750,7 @@ Object processPathSegment(int index, String[] pathParts, JsonValue curPath, Stri } return parts.build(); } - + } else { curPath = ((JsonObject) curPath).get(pathParts[index]); logger.fine("Found next Path object " + curPath.toString()); diff --git a/src/main/java/edu/harvard/iq/dataverse/DatasetKeyword.java b/src/main/java/edu/harvard/iq/dataverse/DatasetKeyword.java deleted file mode 100644 index 747e3c068f1..00000000000 --- a/src/main/java/edu/harvard/iq/dataverse/DatasetKeyword.java +++ /dev/null @@ -1,68 +0,0 @@ -package edu.harvard.iq.dataverse; - -/** - * - * @author skraffmiller - */ - -public class DatasetKeyword { - - private int displayOrder; - public int getDisplayOrder() { - return this.displayOrder; - } - public void setDisplayOrder(int displayOrder) { - this.displayOrder = displayOrder; - } - - private DatasetField value; - public DatasetField getValue() { - return this.value; - } - public void setValue(DatasetField value) { - this.value = value; - } - - private DatasetVersion datasetVersion; - public DatasetVersion getDatasetVersion() { - return datasetVersion; - } - public void setDatasetVersion(DatasetVersion metadata) { - this.datasetVersion = metadata; - } - /*
- @Version - private Long version; - public Long getVersion() { - return this.version; - } - public void setVersion(Long version) { - this.version = version; - } */ - - private DatasetField vocab; - public DatasetField getVocab() { - return this.vocab; - } - public void setVocab(DatasetField vocab) { - this.vocab = vocab; - } - - private DatasetField vocabURI; - public DatasetField getVocabURI() { - return this.vocabURI; - } - public void setVocabURI(DatasetField vocabURI) { - this.vocabURI = vocabURI; - } - - - public boolean isEmpty() { - /*return ((value==null || value.getValue().trim().equals("")) - && (vocab==null || vocab.getValue().trim().equals("")) - && (vocabURI==null || vocabURI.getValue().trim().equals("")));*/ - return false; - } - - -} diff --git a/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java b/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java index 4c436715f0d..eae4a9f2977 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java +++ b/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java @@ -8,6 +8,7 @@ import edu.harvard.iq.dataverse.authorization.users.ApiToken; import edu.harvard.iq.dataverse.authorization.users.AuthenticatedUser; import edu.harvard.iq.dataverse.authorization.users.PrivateUrlUser; +import edu.harvard.iq.dataverse.authorization.users.GuestUser; import edu.harvard.iq.dataverse.authorization.users.User; import edu.harvard.iq.dataverse.branding.BrandingUtil; import edu.harvard.iq.dataverse.dataaccess.StorageIO; @@ -77,6 +78,7 @@ import java.lang.reflect.Method; import java.sql.Timestamp; import java.text.SimpleDateFormat; +import java.time.LocalDate; import java.util.ArrayList; import java.util.Date; import java.util.HashMap; @@ -137,6 +139,7 @@ import jakarta.faces.event.AjaxBehaviorEvent; import jakarta.servlet.ServletOutputStream; import jakarta.servlet.http.HttpServletResponse; +import jakarta.servlet.http.HttpServletRequest; import org.apache.commons.text.StringEscapeUtils; import org.apache.commons.lang3.mutable.MutableBoolean; @@ -269,6 +272,8 @@ public enum DisplayMode { @Inject EmbargoServiceBean embargoService; @Inject + RetentionServiceBean retentionService; + @Inject LicenseServiceBean licenseServiceBean; @Inject DataFileCategoryServiceBean dataFileCategoryService; @@ -784,6 +789,25 @@ public boolean isIndexedVersion() { return isIndexedVersion = false; } + // plus we have mechanisms for disabling the facets selectively, just for + // the guests, or anonymous users: + if (session.getUser() instanceof GuestUser) { + if (settingsWrapper.isTrueForKey(SettingsServiceBean.Key.DisableSolrFacetsForGuestUsers, false)) { + return isIndexedVersion = false; + } + + // An even lower grade of user than Guest is a truly anonymous user - + // a guest user who came without the session cookie: + Map cookies = FacesContext.getCurrentInstance().getExternalContext().getRequestCookieMap(); + if (!(cookies != null && cookies.containsKey("JSESSIONID"))) { + if (settingsWrapper.isTrueForKey(SettingsServiceBean.Key.DisableSolrFacetsWithoutJsession, false)) { + return isIndexedVersion = false; + } + } + + } + + // The version is SUPPOSED to be indexed if it's the latest published version, or a // draft. So if none of the above is true, we can return false right away. 
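+ // (For reference, regarding the guest/anonymous checks above: both keys are plain
+ // database settings; on a typical installation - the hostname below is hypothetical -
+ // they can presumably be enabled via the admin settings API, e.g.
+ // curl -X PUT -d true http://localhost:8080/api/admin/settings/:DisableSolrFacetsForGuestUsers )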
if (!(workingVersion.isDraft() || isThisLatestReleasedVersion())) { @@ -1234,8 +1258,17 @@ public boolean canDownloadFiles() { canDownloadFiles = false; for (FileMetadata fmd : workingVersion.getFileMetadatas()) { if (fileDownloadHelper.canDownloadFile(fmd)) { - canDownloadFiles = true; - break; + if (isVersionHasGlobus()) { + String driverId = DataAccess + .getStorageDriverFromIdentifier(fmd.getDataFile().getStorageIdentifier()); + if (StorageIO.isDataverseAccessible(driverId)) { + canDownloadFiles = true; + break; + } + } else { + canDownloadFiles = true; + break; + } } } } @@ -2203,6 +2236,11 @@ private String init(boolean initFull) { } } + LocalDate minRetentiondate = settingsWrapper.getMinRetentionDate(); + if (minRetentiondate != null){ + selectionRetention.setDateUnavailable(minRetentiondate.plusDays(1L)); + } + displayLockInfo(dataset); displayPublishMessage(); @@ -2279,13 +2317,11 @@ private void displayPublishMessage(){ public boolean isValid() { if (valid == null) { - DatasetVersion version = dataset.getLatestVersion(); - if (!version.isDraft()) { + if (workingVersion.isDraft() || (canUpdateDataset() && JvmSettings.UI_SHOW_VALIDITY_LABEL_WHEN_PUBLISHED.lookupOptional(Boolean.class).orElse(true))) { + valid = workingVersion.isValid(); + } else { valid = true; } - DatasetVersion newVersion = version.cloneDatasetVersion(); - newVersion.setDatasetFields(newVersion.initDatasetFields()); - valid = newVersion.isValid(); } return valid; } @@ -3260,7 +3296,7 @@ public void startDownloadSelectedOriginal() { private void startDownload(boolean downloadOriginal){ boolean guestbookRequired = isDownloadPopupRequired(); - boolean validate = validateFilesForDownload(downloadOriginal); + boolean validate = validateFilesForDownload(downloadOriginal, false); if (validate) { updateGuestbookResponse(guestbookRequired, downloadOriginal, false); if(!guestbookRequired && !getValidateFilesOutcome().equals("Mixed")){ @@ -3283,7 +3319,7 @@ public void setValidateFilesOutcome(String validateFilesOutcome) { this.validateFilesOutcome = validateFilesOutcome; } - public boolean validateFilesForDownload(boolean downloadOriginal){ + public boolean validateFilesForDownload(boolean downloadOriginal, boolean isGlobusTransfer){ if (this.selectedFiles.isEmpty()) { PrimeFaces.current().executeScript("PF('selectFilesForDownload').show()"); return false; @@ -3300,33 +3336,39 @@ public boolean validateFilesForDownload(boolean downloadOriginal){ return false; } - for (FileMetadata fmd : getSelectedDownloadableFiles()) { - DataFile dataFile = fmd.getDataFile(); - if (downloadOriginal && dataFile.isTabularData()) { - bytes += dataFile.getOriginalFileSize() == null ? 0 : dataFile.getOriginalFileSize(); - } else { - bytes += dataFile.getFilesize(); + if (!isGlobusTransfer) { + for (FileMetadata fmd : getSelectedDownloadableFiles()) { + DataFile dataFile = fmd.getDataFile(); + if (downloadOriginal && dataFile.isTabularData()) { + bytes += dataFile.getOriginalFileSize() == null ? 
0 : dataFile.getOriginalFileSize(); + } else { + bytes += dataFile.getFilesize(); + } } - } - //if there are two or more files, with a total size - //over the zip limit, post a "too large" popup - if (bytes > settingsWrapper.getZipDownloadLimit() && selectedDownloadableFiles.size() > 1) { - setValidateFilesOutcome("FailSize"); - return false; + // if there are two or more files, with a total size + // over the zip limit, post a "too large" popup + if (bytes > settingsWrapper.getZipDownloadLimit() && selectedDownloadableFiles.size() > 1) { + setValidateFilesOutcome("FailSize"); + return false; + } } - + // If some of the files were restricted and we had to drop them off the // list, and NONE of the files are left on the downloadable list - // - we show them a "you're out of luck" popup: - if (getSelectedDownloadableFiles().isEmpty() && getSelectedGlobusTransferableFiles().isEmpty() && !getSelectedNonDownloadableFiles().isEmpty()) { + // - we show them a "you're out of luck" popup + // Same for globus transfer + if ((!isGlobusTransfer + && (getSelectedDownloadableFiles().isEmpty() && !getSelectedNonDownloadableFiles().isEmpty())) + || (isGlobusTransfer && (getSelectedGlobusTransferableFiles().isEmpty() + && !getSelectedNonGlobusTransferableFiles().isEmpty()))) { setValidateFilesOutcome("FailRestricted"); return false; } - //Some are selected and there are non-downloadable ones or there are both downloadable and globus transferable files - if ((!(getSelectedDownloadableFiles().isEmpty() && getSelectedGlobusTransferableFiles().isEmpty()) - && (!getSelectedNonDownloadableFiles().isEmpty()) || (!getSelectedDownloadableFiles().isEmpty() && !getSelectedGlobusTransferableFiles().isEmpty()))) { + //For download or transfer, there are some that can be downloaded/transferred and some that can't + if ((!isGlobusTransfer && (!getSelectedNonDownloadableFiles().isEmpty() && !getSelectedDownloadableFiles().isEmpty())) || + (isGlobusTransfer && (!getSelectedNonGlobusTransferableFiles().isEmpty() && !getSelectedGlobusTransferableFiles().isEmpty()))) { setValidateFilesOutcome("Mixed"); return true; } @@ -3684,6 +3726,25 @@ public String deleteFiles() throws CommandException{ } } + //Remove retentions that are no longer referenced + //Identify which ones are involved here + List orphanedRetentions = new ArrayList(); + if (selectedFiles != null && selectedFiles.size() > 0) { + for (FileMetadata fmd : workingVersion.getFileMetadatas()) { + for (FileMetadata fm : selectedFiles) { + if (fm.getDataFile().equals(fmd.getDataFile()) && !fmd.getDataFile().isReleased()) { + Retention ret = fmd.getDataFile().getRetention(); + if (ret != null) { + ret.getDataFiles().remove(fmd.getDataFile()); + if (ret.getDataFiles().isEmpty()) { + orphanedRetentions.add(ret); + } + } + } + } + } + } + deleteFiles(filesToDelete); String retVal; @@ -3693,12 +3754,14 @@ public String deleteFiles() throws CommandException{ } else { retVal = save(); } - - - //And delete them only after the dataset is updated + + // And delete them only after the dataset is updated for(Embargo emb: orphanedEmbargoes) { embargoService.deleteById(emb.getId(), ((AuthenticatedUser)session.getUser()).getUserIdentifier()); } + for(Retention ret: orphanedRetentions) { + retentionService.delete(ret, ((AuthenticatedUser)session.getUser()).getUserIdentifier()); + } return retVal; } @@ -3888,12 +3951,6 @@ public String save() { ((UpdateDatasetVersionCommand) cmd).setValidateLenient(true); } dataset = commandEngine.submit(cmd); - for (DatasetField df : 
dataset.getLatestVersion().getFlatDatasetFields()) { - logger.fine("Found id: " + df.getDatasetFieldType().getId()); - if (fieldService.getCVocConf(true).containsKey(df.getDatasetFieldType().getId())) { - fieldService.registerExternalVocabValues(df); - } - } if (editMode == EditMode.CREATE) { if (session.getUser() instanceof AuthenticatedUser) { userNotificationService.sendNotification((AuthenticatedUser) session.getUser(), dataset.getCreateDate(), UserNotification.Type.CREATEDS, dataset.getLatestVersion().getId()); @@ -5365,7 +5422,7 @@ public boolean isFileAccessRequestMultiSignUpButtonEnabled(){ return false; } for (FileMetadata fmd : this.selectedRestrictedFiles){ - if (!this.fileDownloadHelper.canDownloadFile(fmd)&& !FileUtil.isActivelyEmbargoed(fmd)){ + if (!this.fileDownloadHelper.canDownloadFile(fmd) && !FileUtil.isActivelyEmbargoed(fmd)){ return true; } } @@ -5726,7 +5783,10 @@ public boolean isShowPreviewButton(Long fileId) { public boolean isShowQueryButton(Long fileId) { DataFile dataFile = datafileService.find(fileId); - if(dataFile.isRestricted() || !dataFile.isReleased() || FileUtil.isActivelyEmbargoed(dataFile)){ + if(dataFile.isRestricted() + || !dataFile.isReleased() + || FileUtil.isActivelyEmbargoed(dataFile) + || FileUtil.isRetentionExpired(dataFile)){ return false; } @@ -6284,12 +6344,18 @@ public void clearSelectionEmbargo() { PrimeFaces.current().resetInputs("datasetForm:embargoInputs"); } - public boolean isCantDownloadDueToEmbargo() { + public boolean isCantDownloadDueToEmbargoOrDVAccess() { if (getSelectedNonDownloadableFiles() != null) { for (FileMetadata fmd : getSelectedNonDownloadableFiles()) { if (FileUtil.isActivelyEmbargoed(fmd)) { return true; } + if (isVersionHasGlobus()) { + if (StorageIO.isDataverseAccessible( + DataAccess.getStorageDriverFromIdentifier(fmd.getDataFile().getStorageIdentifier()))) { + return true; + } + } } } return false; @@ -6315,6 +6381,195 @@ private boolean containsOnlyActivelyEmbargoedFiles(List selectedFi return true; } + public Retention getSelectionRetention() { + return selectionRetention; + } + + public void setSelectionRetention(Retention selectionRetention) { + this.selectionRetention = selectionRetention; + } + + + private Retention selectionRetention = new Retention(); + + public boolean isValidRetentionSelection() { + //If fileMetadataForAction is set, someone is using the kebab/single file menu + if (fileMetadataForAction != null) { + if (!fileMetadataForAction.getDataFile().isReleased()) { + return true; + } else { + return false; + } + } + //Otherwise we check the selected files + for (FileMetadata fmd : selectedFiles) { + if (!fmd.getDataFile().isReleased()) { + return true; + } + } + return false; + } + + /* + * This method checks to see if the selected file/files have a retention that could be removed. It doesn't return true if a released file has a retention.
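+ * For example: a selection containing one released file with a retention and one unreleased file
+ * without one yields false, since only unreleased files are considered.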
+ */ + public boolean isExistingRetention() { + if (fileMetadataForAction != null) { + if (!fileMetadataForAction.getDataFile().isReleased() + && (fileMetadataForAction.getDataFile().getRetention() != null)) { + return true; + } else { + return false; + } + } + for (FileMetadata fmd : selectedFiles) { + if (!fmd.getDataFile().isReleased() && (fmd.getDataFile().getRetention() != null)) { + return true; + } + } + + return false; + } + + public boolean isRetentionExpired(List fmdList) { + return FileUtil.isRetentionExpired(fmdList); + } + + public boolean isRetentionForWholeSelection() { + for (FileMetadata fmd : selectedFiles) { + if (fmd.getDataFile().isReleased()) { + return false; + } + } + return true; + } + + private boolean removeRetention=false; + + public boolean isRemoveRetention() { + return removeRetention; + } + + public void setRemoveRetention(boolean removeRetention) { + boolean existing = this.removeRetention; + this.removeRetention = removeRetention; + //If we flipped the state, update the selectedRetention. Otherwise (e.g. when save is hit) don't make changes + if(existing != this.removeRetention) { + logger.fine("State flip"); + selectionRetention= new Retention(); + if(removeRetention) { + logger.fine("Setting empty retention"); + selectionRetention= new Retention(null, null); + } + PrimeFaces.current().resetInputs("datasetForm:retentionInputs"); + } + } + + public String saveRetention() { + if (workingVersion.isReleased()) { + refreshSelectedFiles(selectedFiles); + } + + if(isRemoveRetention() || (selectionRetention.getDateUnavailable()==null && selectionRetention.getReason()==null)) { + selectionRetention=null; + } + + if(!(selectionRetention==null || (selectionRetention!=null && settingsWrapper.isValidRetentionDate(selectionRetention)))) { + logger.fine("Validation error: " + selectionRetention.getFormattedDateUnavailable()); + FacesContext.getCurrentInstance().validationFailed(); + return ""; + } + List orphanedRetentions = new ArrayList(); + List retentionFMs = null; + if (fileMetadataForAction != null) { + retentionFMs = new ArrayList(); + retentionFMs.add(fileMetadataForAction); + } else if (selectedFiles != null && selectedFiles.size() > 0) { + retentionFMs = selectedFiles; + } + + if(retentionFMs!=null && !retentionFMs.isEmpty()) { + if(selectionRetention!=null) { + selectionRetention = retentionService.merge(selectionRetention); + } + for (FileMetadata fmd : workingVersion.getFileMetadatas()) { + for (FileMetadata fm : retentionFMs) { + if (fm.getDataFile().equals(fmd.getDataFile()) && (isSuperUser()||!fmd.getDataFile().isReleased())) { + Retention ret = fmd.getDataFile().getRetention(); + if (ret != null) { + logger.fine("Before: " + ret.getDataFiles().size()); + ret.getDataFiles().remove(fmd.getDataFile()); + if (ret.getDataFiles().isEmpty()) { + orphanedRetentions.add(ret); + } + logger.fine("After: " + ret.getDataFiles().size()); + } + fmd.getDataFile().setRetention(selectionRetention); + } + } + } + } + if (selectionRetention != null) { + retentionService.save(selectionRetention, ((AuthenticatedUser) session.getUser()).getIdentifier()); + } + // success message: + String successMessage = BundleUtil.getStringFromBundle("file.assignedRetention.success"); + logger.fine(successMessage); + successMessage = successMessage.replace("{0}", "Selected Files"); + JsfHelper.addFlashMessage(successMessage); + selectionRetention = new Retention(); + + save(); + for(Retention ret: orphanedRetentions) { + retentionService.delete(ret, 
((AuthenticatedUser)session.getUser()).getUserIdentifier()); + } + return returnToDraftVersion(); + } + + public void clearRetentionPopup() { + logger.fine("clearRetentionPopup called"); + selectionRetention= new Retention(); + setRemoveRetention(false); + PrimeFaces.current().resetInputs("datasetForm:retentionInputs"); + } + + public void clearSelectionRetention() { + logger.fine("clearSelectionRetention called"); + selectionRetention= new Retention(); + PrimeFaces.current().resetInputs("datasetForm:retentionInputs"); + } + + public boolean isCantDownloadDueToRetention() { + if (getSelectedNonDownloadableFiles() != null) { + for (FileMetadata fmd : getSelectedNonDownloadableFiles()) { + if (FileUtil.isRetentionExpired(fmd)) { + return true; + } + } + } + return false; + } + + public boolean isCantRequestDueToRetention() { + if (fileDownloadHelper.getFilesForRequestAccess() != null) { + for (DataFile df : fileDownloadHelper.getFilesForRequestAccess()) { + if (FileUtil.isRetentionExpired(df)) { + return true; + } + } + } + return false; + } + + private boolean containsOnlyRetentionExpiredFiles(List selectedFiles) { + for (FileMetadata fmd : selectedFiles) { + if (!FileUtil.isRetentionExpired(fmd)) { + return false; + } + } + return true; + } + public String getIngestMessage() { return BundleUtil.getStringFromBundle("file.ingestFailed.message", Arrays.asList(settingsWrapper.getGuidesBaseUrl(), settingsWrapper.getGuidesVersion())); } @@ -6381,7 +6636,8 @@ public void startGlobusTransfer(boolean transferAll, boolean popupShown) { } boolean guestbookRequired = isDownloadPopupRequired(); - boolean validated = validateFilesForDownload(true); + boolean validated = validateFilesForDownload(true, true); + if (validated) { globusTransferRequested = true; boolean mixed = "Mixed".equals(getValidateFilesOutcome()); diff --git a/src/main/java/edu/harvard/iq/dataverse/DatasetServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/DatasetServiceBean.java index 9c182164d37..dab0ff43fcf 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DatasetServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/DatasetServiceBean.java @@ -19,8 +19,6 @@ import edu.harvard.iq.dataverse.export.ExportService; import edu.harvard.iq.dataverse.globus.GlobusServiceBean; import edu.harvard.iq.dataverse.harvest.server.OAIRecordServiceBean; -import edu.harvard.iq.dataverse.pidproviders.PidProvider; -import edu.harvard.iq.dataverse.pidproviders.PidUtil; import edu.harvard.iq.dataverse.search.IndexServiceBean; import edu.harvard.iq.dataverse.settings.SettingsServiceBean; import edu.harvard.iq.dataverse.util.BundleUtil; @@ -41,11 +39,10 @@ import jakarta.ejb.TransactionAttributeType; import jakarta.inject.Named; import jakarta.persistence.EntityManager; -import jakarta.persistence.LockModeType; import jakarta.persistence.NoResultException; +import jakarta.persistence.NonUniqueResultException; import jakarta.persistence.PersistenceContext; import jakarta.persistence.Query; -import jakarta.persistence.StoredProcedureQuery; import jakarta.persistence.TypedQuery; import org.apache.commons.lang3.StringUtils; @@ -115,26 +112,32 @@ public Dataset find(Object pk) { * @return a dataset with pre-fetched file objects */ public Dataset findDeep(Object pk) { - return (Dataset) em.createNamedQuery("Dataset.findById") - .setParameter("id", pk) - // Optimization hints: retrieve all data in one query; this prevents point queries when iterating over the files - .setHint("eclipselink.left-join-fetch", "o.files.ingestRequest") - 
.setHint("eclipselink.left-join-fetch", "o.files.thumbnailForDataset") - .setHint("eclipselink.left-join-fetch", "o.files.dataTables") - .setHint("eclipselink.left-join-fetch", "o.files.auxiliaryFiles") - .setHint("eclipselink.left-join-fetch", "o.files.ingestReports") - .setHint("eclipselink.left-join-fetch", "o.files.dataFileTags") - .setHint("eclipselink.left-join-fetch", "o.files.fileMetadatas") - .setHint("eclipselink.left-join-fetch", "o.files.fileMetadatas.fileCategories") - //.setHint("eclipselink.left-join-fetch", "o.files.guestbookResponses") - .setHint("eclipselink.left-join-fetch", "o.files.embargo") - .setHint("eclipselink.left-join-fetch", "o.files.fileAccessRequests") - .setHint("eclipselink.left-join-fetch", "o.files.owner") - .setHint("eclipselink.left-join-fetch", "o.files.releaseUser") - .setHint("eclipselink.left-join-fetch", "o.files.creator") - .setHint("eclipselink.left-join-fetch", "o.files.alternativePersistentIndentifiers") - .setHint("eclipselink.left-join-fetch", "o.files.roleAssignments") - .getSingleResult(); + try { + return (Dataset) em.createNamedQuery("Dataset.findById") + .setParameter("id", pk) + // Optimization hints: retrieve all data in one query; this prevents point queries when iterating over the files + .setHint("eclipselink.left-join-fetch", "o.files.ingestRequest") + .setHint("eclipselink.left-join-fetch", "o.files.thumbnailForDataset") + .setHint("eclipselink.left-join-fetch", "o.files.dataTables") + .setHint("eclipselink.left-join-fetch", "o.files.auxiliaryFiles") + .setHint("eclipselink.left-join-fetch", "o.files.ingestReports") + .setHint("eclipselink.left-join-fetch", "o.files.dataFileTags") + .setHint("eclipselink.left-join-fetch", "o.files.fileMetadatas") + .setHint("eclipselink.left-join-fetch", "o.files.fileMetadatas.fileCategories") + .setHint("eclipselink.left-join-fetch", "o.files.fileMetadatas.varGroups") + //.setHint("eclipselink.left-join-fetch", "o.files.guestbookResponses + .setHint("eclipselink.left-join-fetch", "o.files.embargo") + .setHint("eclipselink.left-join-fetch", "o.files.retention") + .setHint("eclipselink.left-join-fetch", "o.files.fileAccessRequests") + .setHint("eclipselink.left-join-fetch", "o.files.owner") + .setHint("eclipselink.left-join-fetch", "o.files.releaseUser") + .setHint("eclipselink.left-join-fetch", "o.files.creator") + .setHint("eclipselink.left-join-fetch", "o.files.alternativePersistentIndentifiers") + .setHint("eclipselink.left-join-fetch", "o.files.roleAssignments") + .getSingleResult(); + } catch (NoResultException | NonUniqueResultException ex) { + return null; + } } public List findByOwnerId(Long ownerId) { @@ -860,18 +863,33 @@ public Dataset setDatasetFileAsThumbnail(Dataset dataset, DataFile datasetFileTh logger.fine("In setDatasetFileAsThumbnail but dataset is null! Returning null."); return null; } + // Just in case the previously designated thumbnail for the dataset was + // a "custom" kind, i.e. an uploaded "dataset_logo" file, the following method + // will try to delete it, and all the associated caches here (because there + // are no other uses for the file). This method is apparently called in all + // cases, without trying to check if the dataset was in fact using a custom + // logo; probably under the assumption that it can't hurt. 
DatasetUtil.deleteDatasetLogo(dataset); dataset.setThumbnailFile(datasetFileThumbnailToSwitchTo); dataset.setUseGenericThumbnail(false); return merge(dataset); } - public Dataset removeDatasetThumbnail(Dataset dataset) { + public Dataset clearDatasetLevelThumbnail(Dataset dataset) { if (dataset == null) { - logger.fine("In removeDatasetThumbnail but dataset is null! Returning null."); + logger.fine("In clearDatasetLevelThumbnail but dataset is null! Returning null."); return null; } + + // Just in case the thumbnail that was designated for the dataset was + // a "custom logo" kind, i.e. an uploaded "dataset_logo" file, the following method + // will try to delete it, and all the associated caches here (because there + // are no other uses for the file). This method is apparently called in all + // cases, without trying to check if the dataset was in fact using a custom + // logo; probably under the assumption that it can't hurt. DatasetUtil.deleteDatasetLogo(dataset); + + // Clear any designated thumbnails for the dataset: dataset.setThumbnailFile(null); dataset.setUseGenericThumbnail(true); return merge(dataset); diff --git a/src/main/java/edu/harvard/iq/dataverse/DatasetVersion.java b/src/main/java/edu/harvard/iq/dataverse/DatasetVersion.java index 5fd963f3931..943693355a3 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DatasetVersion.java +++ b/src/main/java/edu/harvard/iq/dataverse/DatasetVersion.java @@ -1728,7 +1728,36 @@ public List> validateRequired() { } public boolean isValid() { - return validate().isEmpty(); + // first clone to leave the original untouched + final DatasetVersion newVersion = this.cloneDatasetVersion(); + // initDatasetFields + newVersion.setDatasetFields(newVersion.initDatasetFields()); + // remove special "N/A" values and empty values + newVersion.removeEmptyValues(); + // check validity of present fields and detect missing mandatory fields + return newVersion.validate().isEmpty(); + } + + private void removeEmptyValues() { + if (this.getDatasetFields() != null) { + for (DatasetField dsf : this.getDatasetFields()) { + removeEmptyValues(dsf); + } + } + } + + private void removeEmptyValues(DatasetField dsf) { + if (dsf.getDatasetFieldType().isPrimitive()) { // primitive + final Iterator i = dsf.getDatasetFieldValues().iterator(); + while (i.hasNext()) { + final String v = i.next().getValue(); + if (StringUtils.isBlank(v) || DatasetField.NA_VALUE.equals(v)) { + i.remove(); + } + } + } else { + dsf.getDatasetFieldCompoundValues().forEach(cv -> cv.getChildDatasetFields().forEach(v -> removeEmptyValues(v))); + } } public Set validate() { diff --git a/src/main/java/edu/harvard/iq/dataverse/DatasetVersionFilesServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/DatasetVersionFilesServiceBean.java index 99c3c65e3b8..afcfafe976c 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DatasetVersionFilesServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/DatasetVersionFilesServiceBean.java @@ -42,6 +42,16 @@ public enum FileDownloadSizeMode { All, Original, Archival } + /** + * Given a DatasetVersion, returns its total file metadata count + * + * @param datasetVersion the DatasetVersion to access + * @return long value of total file metadata count + */ + public long getFileMetadataCount(DatasetVersion datasetVersion) { + return getFileMetadataCount(datasetVersion, new FileSearchCriteria(null, null, null, null, null)); + } + /** * Given a DatasetVersion, returns its total file metadata count * @@ -189,6 +199,32 @@ public long 
getFilesDownloadSize(DatasetVersion datasetVersion, FileSearchCriter }; } + /** + * Determines whether or not a DataFile is present in a DatasetVersion + * + * @param datasetVersion the DatasetVersion to check + * @param dataFile the DataFile to check + * @return boolean value + */ + public boolean isDataFilePresentInDatasetVersion(DatasetVersion datasetVersion, DataFile dataFile) { + CriteriaBuilder criteriaBuilder = em.getCriteriaBuilder(); + CriteriaQuery criteriaQuery = criteriaBuilder.createQuery(Long.class); + Root dataFileRoot = criteriaQuery.from(DataFile.class); + Root fileMetadataRoot = criteriaQuery.from(FileMetadata.class); + Root datasetVersionRoot = criteriaQuery.from(DatasetVersion.class); + criteriaQuery + .select(criteriaBuilder.count(dataFileRoot)) + .where(criteriaBuilder.and( + criteriaBuilder.equal(dataFileRoot.get("id"), dataFile.getId()), + criteriaBuilder.equal(datasetVersionRoot.get("id"), datasetVersion.getId()), + fileMetadataRoot.in(dataFileRoot.get("fileMetadatas")), + fileMetadataRoot.in(datasetVersionRoot.get("fileMetadatas")) + ) + ); + Long count = em.createQuery(criteriaQuery).getSingleResult(); + return count != null && count > 0; + } + private void addAccessStatusCountToTotal(DatasetVersion datasetVersion, Map totalCounts, FileAccessStatus dataFileAccessStatus, FileSearchCriteria searchCriteria) { long fileMetadataCount = getFileMetadataCountByAccessStatus(datasetVersion, dataFileAccessStatus, searchCriteria); if (fileMetadataCount > 0) { @@ -210,6 +246,8 @@ private long getFileMetadataCountByAccessStatus(DatasetVersion datasetVersion, F private Predicate createSearchCriteriaAccessStatusPredicate(FileAccessStatus accessStatus, CriteriaBuilder criteriaBuilder, Root fileMetadataRoot) { Path dataFile = fileMetadataRoot.get("dataFile"); + Path retention = dataFile.get("retention"); + Predicate retentionExpiredPredicate = criteriaBuilder.lessThan(retention.get("dateUnavailable"), criteriaBuilder.currentDate()); Path embargo = dataFile.get("embargo"); Predicate activelyEmbargoedPredicate = criteriaBuilder.greaterThanOrEqualTo(embargo.get("dateAvailable"), criteriaBuilder.currentDate()); Predicate inactivelyEmbargoedPredicate = criteriaBuilder.isNull(embargo); @@ -217,6 +255,7 @@ private Predicate createSearchCriteriaAccessStatusPredicate(FileAccessStatus acc Predicate isRestrictedPredicate = criteriaBuilder.isTrue(isRestricted); Predicate isUnrestrictedPredicate = criteriaBuilder.isFalse(isRestricted); return switch (accessStatus) { + case RetentionPeriodExpired -> criteriaBuilder.and(retentionExpiredPredicate); case EmbargoedThenRestricted -> criteriaBuilder.and(activelyEmbargoedPredicate, isRestrictedPredicate); case EmbargoedThenPublic -> criteriaBuilder.and(activelyEmbargoedPredicate, isUnrestrictedPredicate); case Restricted -> criteriaBuilder.and(inactivelyEmbargoedPredicate, isRestrictedPredicate); diff --git a/src/main/java/edu/harvard/iq/dataverse/DatasetVersionServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/DatasetVersionServiceBean.java index 1ee517c9831..ab23fa779d5 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DatasetVersionServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/DatasetVersionServiceBean.java @@ -163,6 +163,7 @@ public DatasetVersion findDeep(Object pk) { .setHint("eclipselink.left-join-fetch", "o.fileMetadatas.dataFile.dataTables") .setHint("eclipselink.left-join-fetch", "o.fileMetadatas.fileCategories") .setHint("eclipselink.left-join-fetch", "o.fileMetadatas.dataFile.embargo") + 
.setHint("eclipselink.left-join-fetch", "o.fileMetadatas.dataFile.retention") .setHint("eclipselink.left-join-fetch", "o.fileMetadatas.datasetVersion") .setHint("eclipselink.left-join-fetch", "o.fileMetadatas.dataFile.releaseUser") .setHint("eclipselink.left-join-fetch", "o.fileMetadatas.dataFile.creator") @@ -802,6 +803,7 @@ public Long getThumbnailByVersionId(Long versionId) { + "AND fm.datafile_id = df.id " + "AND df.restricted = false " + "AND df.embargo_id is null " + + "AND df.retention_id is null " + "AND o.previewImageAvailable = true " + "ORDER BY df.id LIMIT 1;").getSingleResult(); } catch (Exception ex) { @@ -828,6 +830,7 @@ public Long getThumbnailByVersionId(Long versionId) { + "AND o.previewimagefail = false " + "AND df.restricted = false " + "AND df.embargo_id is null " + + "AND df.retention_id is null " + "AND df.contenttype LIKE 'image/%' " + "AND NOT df.contenttype = 'image/fits' " + "AND df.filesize < " + imageThumbnailSizeLimit + " " @@ -862,6 +865,7 @@ public Long getThumbnailByVersionId(Long versionId) { + "AND o.previewimagefail = false " + "AND df.restricted = false " + "AND df.embargo_id is null " + + "AND df.retention_id is null " + "AND df.contenttype = 'application/pdf' " + "AND df.filesize < " + imageThumbnailSizeLimit + " " + "ORDER BY df.filesize ASC LIMIT 1;").getSingleResult(); diff --git a/src/main/java/edu/harvard/iq/dataverse/Dataverse.java b/src/main/java/edu/harvard/iq/dataverse/Dataverse.java index c1de9d63410..978c716e058 100644 --- a/src/main/java/edu/harvard/iq/dataverse/Dataverse.java +++ b/src/main/java/edu/harvard/iq/dataverse/Dataverse.java @@ -411,6 +411,20 @@ public List getDataverseFieldTypeInputLevels() { return dataverseFieldTypeInputLevels; } + public boolean isDatasetFieldTypeRequiredAsInputLevel(Long datasetFieldTypeId) { + return dataverseFieldTypeInputLevels.stream() + .anyMatch(inputLevel -> inputLevel.getDatasetFieldType().getId().equals(datasetFieldTypeId) && inputLevel.isRequired()); + } + + public boolean isDatasetFieldTypeIncludedAsInputLevel(Long datasetFieldTypeId) { + return dataverseFieldTypeInputLevels.stream() + .anyMatch(inputLevel -> inputLevel.getDatasetFieldType().getId().equals(datasetFieldTypeId) && inputLevel.isInclude()); + } + + public boolean isDatasetFieldTypeInInputLevels(Long datasetFieldTypeId) { + return dataverseFieldTypeInputLevels.stream() + .anyMatch(inputLevel -> inputLevel.getDatasetFieldType().getId().equals(datasetFieldTypeId)); + } public Template getDefaultTemplate() { return defaultTemplate; @@ -466,9 +480,6 @@ public void setTemplateRoot(boolean templateRoot) { this.templateRoot = templateRoot; } - - - public List getMetadataBlocks() { return getMetadataBlocks(false); } diff --git a/src/main/java/edu/harvard/iq/dataverse/EjbDataverseEngine.java b/src/main/java/edu/harvard/iq/dataverse/EjbDataverseEngine.java index bb3fa475847..c8537f2a424 100644 --- a/src/main/java/edu/harvard/iq/dataverse/EjbDataverseEngine.java +++ b/src/main/java/edu/harvard/iq/dataverse/EjbDataverseEngine.java @@ -124,6 +124,9 @@ public class EjbDataverseEngine { @EJB GuestbookResponseServiceBean responses; + + @EJB + MetadataBlockServiceBean metadataBlockService; @EJB DataverseLinkingServiceBean dvLinking; @@ -131,6 +134,9 @@ public class EjbDataverseEngine { @EJB DatasetLinkingServiceBean dsLinking; + @EJB + DatasetFieldServiceBean dsField; + @EJB ExplicitGroupServiceBean explicitGroups; @@ -506,7 +512,12 @@ public DataverseLinkingServiceBean dvLinking() { public DatasetLinkingServiceBean dsLinking() { return dsLinking; } - 
+ + @Override + public DatasetFieldServiceBean dsField() { + return dsField; + } + @Override public StorageUseServiceBean storageUse() { return storageUseService; @@ -587,6 +598,11 @@ public ActionLogServiceBean actionLog() { return logSvc; } + @Override + public MetadataBlockServiceBean metadataBlocks() { + return metadataBlockService; + } + @Override public void beginCommandSequence() { this.commandsCalled = new Stack(); diff --git a/src/main/java/edu/harvard/iq/dataverse/FileDownloadHelper.java b/src/main/java/edu/harvard/iq/dataverse/FileDownloadHelper.java index 33e708e7467..80cf3db8d53 100644 --- a/src/main/java/edu/harvard/iq/dataverse/FileDownloadHelper.java +++ b/src/main/java/edu/harvard/iq/dataverse/FileDownloadHelper.java @@ -223,7 +223,10 @@ public boolean canDownloadFile(FileMetadata fileMetadata){ // Always allow download for PrivateUrlUser return true; } - + + // Retention expired files are always made unavailable, because they might be destroyed + if (FileUtil.isRetentionExpired(fileMetadata)) return false; + Long fid = fileMetadata.getId(); //logger.info("calling candownloadfile on filemetadata "+fid); // Note that `isRestricted` at the FileMetadata level is for expressing intent by version. Enforcement is done with `isRestricted` at the DataFile level. @@ -246,7 +249,9 @@ public boolean canDownloadFile(FileMetadata fileMetadata){ } } - if (!isRestrictedFile && !FileUtil.isActivelyEmbargoed(fileMetadata)){ + if (!isRestrictedFile + && !FileUtil.isActivelyEmbargoed(fileMetadata) + && !FileUtil.isRetentionExpired(fileMetadata)) { // Yes, save answer and return true this.fileDownloadPermissionMap.put(fid, true); return true; diff --git a/src/main/java/edu/harvard/iq/dataverse/FileDownloadServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/FileDownloadServiceBean.java index de3f4d2ab56..5370e9ac564 100644 --- a/src/main/java/edu/harvard/iq/dataverse/FileDownloadServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/FileDownloadServiceBean.java @@ -354,7 +354,8 @@ public void explore(GuestbookResponse guestbookResponse, FileMetadata fmd, Exter ApiToken apiToken = null; User user = session.getUser(); DatasetVersion version = fmd.getDatasetVersion(); - if (version.isDraft() || fmd.getDatasetVersion().isDeaccessioned() || (fmd.getDataFile().isRestricted()) || (FileUtil.isActivelyEmbargoed(fmd))) { + if (version.isDraft() || fmd.getDatasetVersion().isDeaccessioned() || (fmd.getDataFile().isRestricted()) + || (FileUtil.isActivelyEmbargoed(fmd)) || (FileUtil.isRetentionExpired(fmd))) { apiToken = authService.getValidApiTokenForUser(user); } DataFile dataFile = null; @@ -382,28 +383,26 @@ public void explore(GuestbookResponse guestbookResponse, FileMetadata fmd, Exter } } - public void downloadDatasetCitationXML(Dataset dataset) { - downloadCitationXML(null, dataset, false); + public void downloadDatasetCitationXML(DatasetVersion version) { + // DatasetVersion-level citation: + DataCitation citation=null; + citation = new DataCitation(version); + String fileNameString; + fileNameString = "attachment;filename=" + getFileNameFromPid(citation.getPersistentId()) + ".xml"; + downloadXML(citation, fileNameString); } public void downloadDatafileCitationXML(FileMetadata fileMetadata) { - downloadCitationXML(fileMetadata, null, false); + downloadCitationXML(fileMetadata, false); } public void downloadDirectDatafileCitationXML(FileMetadata fileMetadata) { - downloadCitationXML(fileMetadata, null, true); + downloadCitationXML(fileMetadata, true); } - public void 
downloadCitationXML(FileMetadata fileMetadata, Dataset dataset, boolean direct) { - DataCitation citation=null; - if (dataset != null){ - citation = new DataCitation(dataset.getLatestVersion()); - } else { - citation= new DataCitation(fileMetadata, direct); - } - FacesContext ctx = FacesContext.getCurrentInstance(); - HttpServletResponse response = (HttpServletResponse) ctx.getExternalContext().getResponse(); - response.setContentType("text/xml"); + public void downloadCitationXML(FileMetadata fileMetadata, boolean direct) { + DataCitation citation=null; + citation= new DataCitation(fileMetadata, direct); String fileNameString; if (fileMetadata == null || fileMetadata.getLabel() == null) { // Dataset-level citation: @@ -412,43 +411,46 @@ public void downloadCitationXML(FileMetadata fileMetadata, Dataset dataset, bool // Datafile-level citation: fileNameString = "attachment;filename=" + getFileNameFromPid(citation.getPersistentId()) + "-" + FileUtil.getCiteDataFileFilename(citation.getFileTitle(), FileUtil.FileCitationExtension.ENDNOTE); } + downloadXML(citation, fileNameString); + } + + public void downloadXML(DataCitation citation, String fileNameString) { + FacesContext ctx = FacesContext.getCurrentInstance(); + HttpServletResponse response = (HttpServletResponse) ctx.getExternalContext().getResponse(); + response.setContentType("text/xml"); response.setHeader("Content-Disposition", fileNameString); + try { ServletOutputStream out = response.getOutputStream(); citation.writeAsEndNoteCitation(out); out.flush(); ctx.responseComplete(); } catch (IOException e) { - } } - - public void downloadDatasetCitationRIS(Dataset dataset) { - downloadCitationRIS(null, dataset, false); + public void downloadDatasetCitationRIS(DatasetVersion version) { + // DatasetVersion-level citation: + DataCitation citation=null; + citation = new DataCitation(version); + String fileNameString; + fileNameString = "attachment;filename=" + getFileNameFromPid(citation.getPersistentId()) + ".ris"; + downloadRIS(citation, fileNameString); } public void downloadDatafileCitationRIS(FileMetadata fileMetadata) { - downloadCitationRIS(fileMetadata, null, false); + downloadCitationRIS(fileMetadata, false); } public void downloadDirectDatafileCitationRIS(FileMetadata fileMetadata) { - downloadCitationRIS(fileMetadata, null, true); + downloadCitationRIS(fileMetadata, true); } - public void downloadCitationRIS(FileMetadata fileMetadata, Dataset dataset, boolean direct) { - DataCitation citation=null; - if (dataset != null){ - citation = new DataCitation(dataset.getLatestVersion()); - } else { - citation= new DataCitation(fileMetadata, direct); - } - - FacesContext ctx = FacesContext.getCurrentInstance(); - HttpServletResponse response = (HttpServletResponse) ctx.getExternalContext().getResponse(); - response.setContentType("application/download"); - + public void downloadCitationRIS(FileMetadata fileMetadata, boolean direct) { + DataCitation citation=null; + citation= new DataCitation(fileMetadata, direct); + String fileNameString; if (fileMetadata == null || fileMetadata.getLabel() == null) { // Dataset-level citation: @@ -457,6 +459,14 @@ public void downloadCitationRIS(FileMetadata fileMetadata, Dataset dataset, bool // Datafile-level citation: fileNameString = "attachment;filename=" + getFileNameFromPid(citation.getPersistentId()) + "-" + FileUtil.getCiteDataFileFilename(citation.getFileTitle(), FileUtil.FileCitationExtension.RIS); } + downloadRIS(citation, fileNameString); + } + + public void downloadRIS(DataCitation citation, 
String fileNameString) {
+        // Served with an "application/download" content type so the browser saves the citation file.
+        FacesContext ctx = FacesContext.getCurrentInstance();
+        HttpServletResponse response = (HttpServletResponse) ctx.getExternalContext().getResponse();
+        response.setContentType("application/download");
 
         response.setHeader("Content-Disposition", fileNameString);
 
         try {
@@ -468,38 +478,33 @@ public void downloadCitationRIS(FileMetadata fileMetadata, Dataset dataset, bool
         }
     }
-
+
     private String getFileNameFromPid(GlobalId id) {
         return id.asString();
     }
 
-    public void downloadDatasetCitationBibtex(Dataset dataset) {
-        downloadCitationBibtex(null, dataset, false);
+    public void downloadDatasetCitationBibtex(DatasetVersion version) {
+        // DatasetVersion-level citation:
+        DataCitation citation=null;
+        citation = new DataCitation(version);
+        String fileNameString;
+        fileNameString = "inline;filename=" + getFileNameFromPid(citation.getPersistentId()) + ".bib";
+        downloadBibtex(citation, fileNameString);
     }
 
     public void downloadDatafileCitationBibtex(FileMetadata fileMetadata) {
-        downloadCitationBibtex(fileMetadata, null, false);
+        downloadCitationBibtex(fileMetadata, false);
     }
 
     public void downloadDirectDatafileCitationBibtex(FileMetadata fileMetadata) {
-        downloadCitationBibtex(fileMetadata, null, true);
+        downloadCitationBibtex(fileMetadata, true);
     }
 
-    public void downloadCitationBibtex(FileMetadata fileMetadata, Dataset dataset, boolean direct) {
-        DataCitation citation=null;
-        if (dataset != null){
-            citation = new DataCitation(dataset.getLatestVersion());
-        } else {
-            citation= new DataCitation(fileMetadata, direct);
-        }
-        //SEK 12/3/2018 changing this to open the json in a new tab.
-        FacesContext ctx = FacesContext.getCurrentInstance();
-        HttpServletResponse response = (HttpServletResponse) ctx.getExternalContext().getResponse();
-
-        //Fix for 6029 FireFox was failing to parse it when content type was set to json
-        response.setContentType("text/plain");
+    public void downloadCitationBibtex(FileMetadata fileMetadata, boolean direct) {
+        DataCitation citation=null;
+        citation= new DataCitation(fileMetadata, direct);
         String fileNameString;
 
         if (fileMetadata == null || fileMetadata.getLabel() == null) {
@@ -509,6 +514,16 @@ public void downloadCitationBibtex(FileMetadata fileMetadata, Dataset dataset, b
             // Datafile-level citation:
             fileNameString = "inline;filename=" + getFileNameFromPid(citation.getPersistentId()) + "-" + FileUtil.getCiteDataFileFilename(citation.getFileTitle(), FileUtil.FileCitationExtension.BIBTEX);
         }
+        downloadBibtex(citation, fileNameString);
+    }
+
+    public void downloadBibtex(DataCitation citation, String fileNameString) {
+        //SEK 12/3/2018 changing this to open the citation in a new tab.
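+        // Callers build an "inline" Content-Disposition (e.g. "inline;filename=<pid>.bib" above),
+        // so the browser displays the BibTeX citation in the tab instead of saving it to disk.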
+ FacesContext ctx = FacesContext.getCurrentInstance(); + HttpServletResponse response = (HttpServletResponse) ctx.getExternalContext().getResponse(); + + //Fix for 6029 FireFox was failing to parse it when content type was set to json + response.setContentType("text/plain"); response.setHeader("Content-Disposition", fileNameString); try { @@ -558,7 +573,7 @@ public boolean requestAccess(DataFile dataFile, GuestbookResponse gbr){ public void sendRequestFileAccessNotification(Dataset dataset, Long fileId, AuthenticatedUser requestor) { Timestamp ts = new Timestamp(new Date().getTime()); - permissionService.getUsersWithPermissionOn(Permission.ManageDatasetPermissions, dataset).stream().forEach((au) -> { + permissionService.getUsersWithPermissionOn(Permission.ManageFilePermissions, dataset).stream().forEach((au) -> { userNotificationService.sendNotification(au, ts, UserNotification.Type.REQUESTFILEACCESS, fileId, null, requestor, true); }); //send the user that requested access a notification that they requested the access diff --git a/src/main/java/edu/harvard/iq/dataverse/FilePage.java b/src/main/java/edu/harvard/iq/dataverse/FilePage.java index ca225dccb1c..afede00f3eb 100644 --- a/src/main/java/edu/harvard/iq/dataverse/FilePage.java +++ b/src/main/java/edu/harvard/iq/dataverse/FilePage.java @@ -34,6 +34,7 @@ import edu.harvard.iq.dataverse.makedatacount.MakeDataCountLoggingServiceBean; import edu.harvard.iq.dataverse.makedatacount.MakeDataCountLoggingServiceBean.MakeDataCountEntry; import edu.harvard.iq.dataverse.privateurl.PrivateUrlServiceBean; +import edu.harvard.iq.dataverse.settings.JvmSettings; import edu.harvard.iq.dataverse.settings.SettingsServiceBean; import edu.harvard.iq.dataverse.util.BundleUtil; import edu.harvard.iq.dataverse.util.FileUtil; @@ -44,6 +45,7 @@ import edu.harvard.iq.dataverse.util.SystemConfig; import java.io.IOException; +import java.time.LocalDate; import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; @@ -152,6 +154,9 @@ public class FilePage implements java.io.Serializable { @Inject EmbargoServiceBean embargoService; + @Inject + RetentionServiceBean retentionService; + private static final Logger logger = Logger.getLogger(FilePage.class.getCanonicalName()); private boolean fileDeleteInProgress = false; @@ -277,7 +282,12 @@ public String init() { if(!hasValidTermsOfAccess && canUpdateDataset() ){ JsfHelper.addWarningMessage(BundleUtil.getStringFromBundle("dataset.message.editMetadata.invalid.TOUA.message")); } - + + LocalDate minRetentiondate = settingsWrapper.getMinRetentionDate(); + if (minRetentiondate != null){ + selectionRetention.setDateUnavailable(minRetentiondate.plusDays(1L)); + } + displayPublishMessage(); return null; } @@ -305,13 +315,18 @@ private void displayPublishMessage(){ } } + Boolean valid = null; + public boolean isValid() { - if (!fileMetadata.getDatasetVersion().isDraft()) { - return true; + if (valid == null) { + final DatasetVersion workingVersion = fileMetadata.getDatasetVersion(); + if (workingVersion.isDraft() || (canUpdateDataset() && JvmSettings.UI_SHOW_VALIDITY_LABEL_WHEN_PUBLISHED.lookupOptional(Boolean.class).orElse(true))) { + valid = workingVersion.isValid(); + } else { + valid = true; + } } - DatasetVersion newVersion = fileMetadata.getDatasetVersion().cloneDatasetVersion(); - newVersion.setDatasetFields(newVersion.initDatasetFields()); - return newVersion.isValid(); + return valid; } private boolean canViewUnpublishedDataset() { @@ -507,10 +522,9 @@ public String ingestFile() throws 
CommandException{ return null; } - DataFile dataFile = fileMetadata.getDataFile(); - editDataset = dataFile.getOwner(); + editDataset = file.getOwner(); - if (dataFile.isTabularData()) { + if (file.isTabularData()) { JH.addMessage(FacesMessage.SEVERITY_WARN, BundleUtil.getStringFromBundle("file.ingest.alreadyIngestedWarning")); return null; } @@ -522,25 +536,25 @@ public String ingestFile() throws CommandException{ return null; } - if (!FileUtil.canIngestAsTabular(dataFile)) { + if (!FileUtil.canIngestAsTabular(file)) { JH.addMessage(FacesMessage.SEVERITY_WARN, BundleUtil.getStringFromBundle("file.ingest.cantIngestFileWarning")); return null; } - dataFile.SetIngestScheduled(); + file.SetIngestScheduled(); - if (dataFile.getIngestRequest() == null) { - dataFile.setIngestRequest(new IngestRequest(dataFile)); + if (file.getIngestRequest() == null) { + file.setIngestRequest(new IngestRequest(file)); } - dataFile.getIngestRequest().setForceTypeCheck(true); + file.getIngestRequest().setForceTypeCheck(true); // update the datafile, to save the newIngest request in the database: datafileService.save(file); // queue the data ingest job for asynchronous execution: - String status = ingestService.startIngestJobs(editDataset.getId(), new ArrayList<>(Arrays.asList(dataFile)), (AuthenticatedUser) session.getUser()); + String status = ingestService.startIngestJobs(editDataset.getId(), new ArrayList<>(Arrays.asList(file)), (AuthenticatedUser) session.getUser()); if (!StringUtil.isEmpty(status)) { // This most likely indicates some sort of a problem (for example, @@ -550,9 +564,9 @@ public String ingestFile() throws CommandException{ // successfully gone through the process of trying to schedule the // ingest job... - logger.warning("Ingest Status for file: " + dataFile.getId() + " : " + status); + logger.warning("Ingest Status for file: " + file.getId() + " : " + status); } - logger.fine("File: " + dataFile.getId() + " ingest queued"); + logger.fine("File: " + file.getId() + " ingest queued"); init(); JsfHelper.addInfoMessage(BundleUtil.getStringFromBundle("file.ingest.ingestQueued")); @@ -1389,7 +1403,129 @@ public String getEmbargoPhrase() { return BundleUtil.getStringFromBundle("embargoed.willbeuntil"); } } - + + public boolean isValidRetentionSelection() { + if (!fileMetadata.getDataFile().isReleased()) { + return true; + } + return false; + } + + public boolean isExistingRetention() { + if (!fileMetadata.getDataFile().isReleased() && (fileMetadata.getDataFile().getRetention() != null)) { + return true; + } + return false; + } + + public boolean isRetentionForWholeSelection() { + return isValidRetentionSelection(); + } + + public Retention getSelectionRetention() { + return selectionRetention; + } + + public void setSelectionRetention(Retention selectionRetention) { + this.selectionRetention = selectionRetention; + } + + private Retention selectionRetention = new Retention(); + + private boolean removeRetention=false; + + public boolean isRemoveRetention() { + return removeRetention; + } + + public void setRemoveRetention(boolean removeRetention) { + boolean existing = this.removeRetention; + this.removeRetention = removeRetention; + if (existing != this.removeRetention) { + logger.info("State flip"); + selectionRetention = new Retention(); + if (removeRetention) { + selectionRetention = new Retention(null, null); + } + } + PrimeFaces.current().resetInputs("fileForm:retentionInputs"); + } + + public String saveRetention() { + + if(isRemoveRetention() || (selectionRetention.getDateUnavailable()==null 
&& selectionRetention.getReason()==null)) { + selectionRetention=null; + } + + Retention ret = null; + // Note: this.fileMetadata.getDataFile() is not the same object as this.file. + // (Not sure there's a good reason for this other than that's the way it is.) + // So changes to this.fileMetadata.getDataFile() will not be saved with + // editDataset = this.file.getOwner() set as it is below. + if (!file.isReleased()) { + ret = file.getRetention(); + if (ret != null) { + logger.fine("Before: " + ret.getDataFiles().size()); + ret.getDataFiles().remove(fileMetadata.getDataFile()); + logger.fine("After: " + ret.getDataFiles().size()); + } + if (selectionRetention != null) { + retentionService.merge(selectionRetention); + } + file.setRetention(selectionRetention); + if (ret != null && !ret.getDataFiles().isEmpty()) { + ret = null; + } + } + if(selectionRetention!=null) { + retentionService.save(selectionRetention, ((AuthenticatedUser)session.getUser()).getIdentifier()); + } + // success message: + String successMessage = BundleUtil.getStringFromBundle("file.assignedRetention.success"); + logger.fine(successMessage); + successMessage = successMessage.replace("{0}", "Selected Files"); + JsfHelper.addFlashMessage(successMessage); + selectionRetention = new Retention(); + + //Caller has to set editDataset before calling save() + editDataset = this.file.getOwner(); + + save(); + init(); + if(ret!=null) { + retentionService.delete(ret,((AuthenticatedUser)session.getUser()).getIdentifier()); + } + return returnToDraftVersion(); + } + + public void clearRetentionPopup() { + setRemoveRetention(false); + selectionRetention = new Retention(); + PrimeFaces.current().resetInputs("fileForm:retentionInputs"); + } + + public void clearSelectionRetention() { + selectionRetention = new Retention(); + PrimeFaces.current().resetInputs("fileForm:retentionInputs"); + } + + public boolean isCantRequestDueToRetention() { + return FileUtil.isRetentionExpired(fileMetadata); + } + + public String getRetentionPhrase() { + //Should only be getting called when there is a retention + if(file.isReleased()) { + if(FileUtil.isRetentionExpired(file)) { + return BundleUtil.getStringFromBundle("retention.after"); + } else { + return BundleUtil.getStringFromBundle("retention.isfrom"); + } + } else { + return BundleUtil.getStringFromBundle("retention.willbeafter"); + } + } + public String getToolTabTitle(){ if (getAllAvailableTools().size() > 1) { return BundleUtil.getStringFromBundle("file.toolTab.header"); diff --git a/src/main/java/edu/harvard/iq/dataverse/FileSearchCriteria.java b/src/main/java/edu/harvard/iq/dataverse/FileSearchCriteria.java index 62f10c18bdf..e3ed507a9c2 100644 --- a/src/main/java/edu/harvard/iq/dataverse/FileSearchCriteria.java +++ b/src/main/java/edu/harvard/iq/dataverse/FileSearchCriteria.java @@ -12,7 +12,7 @@ public class FileSearchCriteria { * Status of the particular DataFile based on active embargoes and restriction state */ public enum FileAccessStatus { - Public, Restricted, EmbargoedThenRestricted, EmbargoedThenPublic + Public, Restricted, EmbargoedThenRestricted, EmbargoedThenPublic, RetentionPeriodExpired } public FileSearchCriteria(String contentType, FileAccessStatus accessStatus, String categoryName, String tabularTagName, String searchText) { diff --git a/src/main/java/edu/harvard/iq/dataverse/GuestbookResponse.java b/src/main/java/edu/harvard/iq/dataverse/GuestbookResponse.java index 9041ccf887c..1ea7d02791d 100644 --- a/src/main/java/edu/harvard/iq/dataverse/GuestbookResponse.java +++ 
b/src/main/java/edu/harvard/iq/dataverse/GuestbookResponse.java
@@ -17,6 +17,8 @@
 import java.util.List;
 import jakarta.persistence.*;
 import jakarta.validation.constraints.Size;
+import java.util.Collections;
+import java.util.Comparator;
 
 /**
  *
  *
@@ -178,7 +180,7 @@ public GuestbookResponse(GuestbookResponse source){
         this.setSessionId(source.getSessionId());
         List<CustomQuestionResponse> customQuestionResponses = new ArrayList<>();
         if (!source.getCustomQuestionResponses().isEmpty()){
-            for (CustomQuestionResponse customQuestionResponse : source.getCustomQuestionResponses() ){
+            for (CustomQuestionResponse customQuestionResponse : source.getCustomQuestionResponsesSorted() ){
                 CustomQuestionResponse customQuestionResponseAdd = new CustomQuestionResponse();
                 customQuestionResponseAdd.setResponse(customQuestionResponse.getResponse());
                 customQuestionResponseAdd.setCustomQuestion(customQuestionResponse.getCustomQuestion());
@@ -254,6 +256,18 @@ public String getResponseDate() {
     public List<CustomQuestionResponse> getCustomQuestionResponses() {
         return customQuestionResponses;
     }
+
+    public List<CustomQuestionResponse> getCustomQuestionResponsesSorted(){
+
+        Collections.sort(customQuestionResponses, (CustomQuestionResponse cqr1, CustomQuestionResponse cqr2) -> {
+            int a = cqr1.getCustomQuestion().getDisplayOrder();
+            int b = cqr2.getCustomQuestion().getDisplayOrder();
+            return Integer.valueOf(a).compareTo(b);
+        });
+
+
+        return customQuestionResponses;
+    }
 
     public void setCustomQuestionResponses(List<CustomQuestionResponse> customQuestionResponses) {
         this.customQuestionResponses = customQuestionResponses;
@@ -317,7 +331,11 @@ public void setSessionId(String sessionId) {
         this.sessionId= sessionId;
     }
 
-    public String toHtmlFormattedResponse() {
+    public String toHtmlFormattedResponse(){
+        return toHtmlFormattedResponse(null);
+    }
+
+    public String toHtmlFormattedResponse(AuthenticatedUser requestor) {
 
         StringBuilder sb = new StringBuilder();
 
@@ -326,17 +344,25 @@ public String toHtmlFormattedResponse() {
         sb.append(BundleUtil.getStringFromBundle("dataset.guestbookResponse.respondent") + "<ul>\n<li>"
                 + BundleUtil.getStringFromBundle("name") + ": " + getName() + "</li>\n<li>");
         sb.append(" " + BundleUtil.getStringFromBundle("email") + ": " + getEmail() + "</li>\n<li>");
-        sb.append(
-                " " + BundleUtil.getStringFromBundle("institution") + ": " + wrapNullAnswer(getInstitution()) + "</li>\n<li>");
-        sb.append(" " + BundleUtil.getStringFromBundle("position") + ": " + wrapNullAnswer(getPosition()) + "</li></ul>\n");
+        sb.append(" " + BundleUtil.getStringFromBundle("institution") + ": " + wrapNullAnswer(getInstitution()) + "</li>\n<li>");
+        sb.append(" " + BundleUtil.getStringFromBundle("position") + ": " + wrapNullAnswer(getPosition()) + "</li>");
+
+        //Add requestor information to response to help dataset admin with request processing
+        if (requestor != null){
+            sb.append("\n<li>" + BundleUtil.getStringFromBundle("dataset.guestbookResponse.requestor.id") + ": " + requestor.getId()+ "</li>");
+            sb.append("\n<li>" + BundleUtil.getStringFromBundle("dataset.guestbookResponse.requestor.identifier") + ": " + requestor.getIdentifier()+ "</li>\n</ul>");
+        } else {
+            sb.append("\n</ul>");
+        }
+
         sb.append(BundleUtil.getStringFromBundle("dataset.guestbookResponse.guestbook.additionalQuestions") + ":<ul>\n");
 
-        for (CustomQuestionResponse cqr : getCustomQuestionResponses()) {
+        for (CustomQuestionResponse cqr : getCustomQuestionResponsesSorted()) {
             sb.append("<li>" + BundleUtil.getStringFromBundle("dataset.guestbookResponse.question") + ": "
                     + cqr.getCustomQuestion().getQuestionString() + "<br>"
                     + BundleUtil.getStringFromBundle("dataset.guestbookResponse.answer") + ": "
-                    + wrapNullAnswer(cqr.getResponse()) + "</li>\n");
+                    + wrapNullAnswer(cqr.getResponse()) + "</li>\n<br>");
         }
         sb.append("</ul>
    "); return sb.toString(); diff --git a/src/main/java/edu/harvard/iq/dataverse/MailServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/MailServiceBean.java index 1eee9c65501..7359ef8eb33 100644 --- a/src/main/java/edu/harvard/iq/dataverse/MailServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/MailServiceBean.java @@ -456,7 +456,7 @@ public String getMessageTextBasedOnNotification(UserNotification userNotificatio GuestbookResponse gbr = far.getGuestbookResponse(); if (gbr != null) { messageText += MessageFormat.format( - BundleUtil.getStringFromBundle("notification.email.requestFileAccess.guestbookResponse"), gbr.toHtmlFormattedResponse()); + BundleUtil.getStringFromBundle("notification.email.requestFileAccess.guestbookResponse"), gbr.toHtmlFormattedResponse(requestor)); } return messageText; case GRANTFILEACCESS: diff --git a/src/main/java/edu/harvard/iq/dataverse/MetadataBlockServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/MetadataBlockServiceBean.java index bb6daa264ba..1e2a34f5472 100644 --- a/src/main/java/edu/harvard/iq/dataverse/MetadataBlockServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/MetadataBlockServiceBean.java @@ -1,43 +1,82 @@ package edu.harvard.iq.dataverse; -import java.util.List; import jakarta.ejb.Stateless; import jakarta.inject.Named; import jakarta.persistence.EntityManager; import jakarta.persistence.NoResultException; import jakarta.persistence.PersistenceContext; +import jakarta.persistence.TypedQuery; +import jakarta.persistence.criteria.*; + +import java.util.List; /** - * * @author michael */ @Stateless @Named public class MetadataBlockServiceBean { - + @PersistenceContext(unitName = "VDCNet-ejbPU") private EntityManager em; - + public MetadataBlock save(MetadataBlock mdb) { - return em.merge(mdb); - } - - + return em.merge(mdb); + } + public List listMetadataBlocks() { + return listMetadataBlocks(false); + } + + public List listMetadataBlocks(boolean onlyDisplayedOnCreate) { + if (onlyDisplayedOnCreate) { + return listMetadataBlocksDisplayedOnCreate(null); + } return em.createNamedQuery("MetadataBlock.listAll", MetadataBlock.class).getResultList(); } - - public MetadataBlock findById( Long id ) { + + public MetadataBlock findById(Long id) { return em.find(MetadataBlock.class, id); } - - public MetadataBlock findByName( String name ) { + + public MetadataBlock findByName(String name) { try { return em.createNamedQuery("MetadataBlock.findByName", MetadataBlock.class) - .setParameter("name", name) - .getSingleResult(); - } catch ( NoResultException nre ) { + .setParameter("name", name) + .getSingleResult(); + } catch (NoResultException nre) { return null; } } + + public List listMetadataBlocksDisplayedOnCreate(Dataverse ownerDataverse) { + CriteriaBuilder criteriaBuilder = em.getCriteriaBuilder(); + CriteriaQuery criteriaQuery = criteriaBuilder.createQuery(MetadataBlock.class); + Root metadataBlockRoot = criteriaQuery.from(MetadataBlock.class); + Join datasetFieldTypeJoin = metadataBlockRoot.join("datasetFieldTypes"); + Predicate displayOnCreatePredicate = criteriaBuilder.isTrue(datasetFieldTypeJoin.get("displayOnCreate")); + + if (ownerDataverse != null) { + Root dataverseRoot = criteriaQuery.from(Dataverse.class); + Join datasetFieldTypeInputLevelJoin = dataverseRoot.join("dataverseFieldTypeInputLevels", JoinType.LEFT); + + Predicate requiredPredicate = criteriaBuilder.and( + datasetFieldTypeInputLevelJoin.get("datasetFieldType").in(metadataBlockRoot.get("datasetFieldTypes")), + 
criteriaBuilder.isTrue(datasetFieldTypeInputLevelJoin.get("required"))); + + Predicate unionPredicate = criteriaBuilder.or(displayOnCreatePredicate, requiredPredicate); + + criteriaQuery.where(criteriaBuilder.and( + criteriaBuilder.equal(dataverseRoot.get("id"), ownerDataverse.getId()), + metadataBlockRoot.in(dataverseRoot.get("metadataBlocks")), + unionPredicate + )); + } else { + criteriaQuery.where(displayOnCreatePredicate); + } + + criteriaQuery.select(metadataBlockRoot).distinct(true); + TypedQuery typedQuery = em.createQuery(criteriaQuery); + return typedQuery.getResultList(); + } } diff --git a/src/main/java/edu/harvard/iq/dataverse/PermissionServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/PermissionServiceBean.java index 8fb762e3e5b..a389cbc735b 100644 --- a/src/main/java/edu/harvard/iq/dataverse/PermissionServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/PermissionServiceBean.java @@ -97,6 +97,9 @@ public class PermissionServiceBean { @Inject DataverseRequestServiceBean dvRequestService; + @Inject + DatasetVersionFilesServiceBean datasetVersionFilesServiceBean; + /** * A request-level permission query (e.g includes IP ras). */ @@ -442,23 +445,14 @@ private Set getInferredPermissions(DvObject dvo) { * download permission for everybody: */ private boolean isPublicallyDownloadable(DvObject dvo) { - if (dvo instanceof DataFile) { + if (dvo instanceof DataFile df) { // unrestricted files that are part of a release dataset // automatically get download permission for everybody: // -- L.A. 4.0 beta12 - - DataFile df = (DataFile) dvo; - if (!df.isRestricted()) { - if (df.getOwner().getReleasedVersion() != null) { - List fileMetadatas = df.getOwner().getReleasedVersion().getFileMetadatas(); - if (fileMetadatas != null) { - for (FileMetadata fm : fileMetadatas) { - if (df.equals(fm.getDataFile())) { - return true; - } - } - } + DatasetVersion releasedVersion = df.getOwner().getReleasedVersion(); + if (releasedVersion != null) { + return datasetVersionFilesServiceBean.isDataFilePresentInDatasetVersion(releasedVersion, df); } } } diff --git a/src/main/java/edu/harvard/iq/dataverse/Retention.java b/src/main/java/edu/harvard/iq/dataverse/Retention.java new file mode 100644 index 00000000000..e1bd2231570 --- /dev/null +++ b/src/main/java/edu/harvard/iq/dataverse/Retention.java @@ -0,0 +1,102 @@ +package edu.harvard.iq.dataverse; + +import edu.harvard.iq.dataverse.util.BundleUtil; +import jakarta.persistence.*; + +import java.time.LocalDate; +import java.time.format.DateTimeFormatter; +import java.util.List; +import java.util.Objects; + +@NamedQueries({ + @NamedQuery( name="Retention.findAll", + query = "SELECT r FROM Retention r"), + @NamedQuery( name="Retention.findById", + query = "SELECT r FROM Retention r WHERE r.id=:id"), + @NamedQuery( name="Retention.findByDateUnavailable", + query = "SELECT r FROM Retention r WHERE r.dateUnavailable=:dateUnavailable"), + @NamedQuery( name="Retention.deleteById", + query = "DELETE FROM Retention r WHERE r.id=:id") +}) +@Entity +public class Retention { + @Id + @GeneratedValue(strategy = GenerationType.IDENTITY) + private Long id; + + @Column(nullable = false) + private LocalDate dateUnavailable; + + @Column(columnDefinition="TEXT") + private String reason; + + @OneToMany(mappedBy="retention", cascade={ CascadeType.REMOVE, CascadeType.PERSIST}) + private List dataFiles; + + public Retention(){ + dateUnavailable = LocalDate.now().plusYears(1000); // Most likely valid with respect to configuration + } + + public Retention(LocalDate 
dateUnavailable, String reason) { + this.dateUnavailable = dateUnavailable; + this.reason = reason; + } + + public Long getId() { + return id; + } + + public void setId(Long id) { + this.id = id; + } + + public LocalDate getDateUnavailable() { + return dateUnavailable; + } + + public void setDateUnavailable(LocalDate dateUnavailable) { + this.dateUnavailable = dateUnavailable; + } + + public String getFormattedDateUnavailable() { + return getDateUnavailable().format(DateTimeFormatter.ISO_LOCAL_DATE.withLocale(BundleUtil.getCurrentLocale())); + } + + public String getReason() { + return reason; + } + + public void setReason(String reason) { + this.reason = reason; + } + + public List getDataFiles() { + return dataFiles; + } + + public void setDataFiles(List dataFiles) { + this.dataFiles = dataFiles; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + Retention retention = (Retention) o; + return id.equals(retention.id) && dateUnavailable.equals(retention.dateUnavailable) && Objects.equals(reason, retention.reason); + } + + @Override + public int hashCode() { + return Objects.hash(id, dateUnavailable, reason); + } + + @Override + public String toString() { + return "Retention{" + + "id=" + id + + ", dateUnavailable=" + dateUnavailable + + ", reason='" + reason + '\'' + + '}'; + } +} diff --git a/src/main/java/edu/harvard/iq/dataverse/RetentionServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/RetentionServiceBean.java new file mode 100644 index 00000000000..1421ac61120 --- /dev/null +++ b/src/main/java/edu/harvard/iq/dataverse/RetentionServiceBean.java @@ -0,0 +1,66 @@ +package edu.harvard.iq.dataverse; + +import edu.harvard.iq.dataverse.actionlogging.ActionLogRecord; +import edu.harvard.iq.dataverse.actionlogging.ActionLogServiceBean; +import jakarta.ejb.EJB; +import jakarta.ejb.Stateless; +import jakarta.inject.Named; +import jakarta.persistence.EntityManager; +import jakarta.persistence.PersistenceContext; +import jakarta.persistence.Query; + +import java.util.List; + + +@Stateless +@Named +public class RetentionServiceBean { + + @PersistenceContext + EntityManager em; + + @EJB + ActionLogServiceBean actionLogSvc; + + public List findAllRetentions() { + return em.createNamedQuery("Retention.findAll", Retention.class).getResultList(); + } + + public Retention findByRetentionId(Long id) { + Query query = em.createNamedQuery("Retention.findById", Retention.class); + query.setParameter("id", id); + try { + return (Retention) query.getSingleResult(); + } catch (Exception ex) { + return null; + } + } + + public Retention merge(Retention r) { + return em.merge(r); + } + + public Long save(Retention retention, String userIdentifier) { + if (retention.getId() == null) { + em.persist(retention); + em.flush(); + } + //Not quite from a command, but this action can be done by anyone, so command seems better than Admin or other alternatives + actionLogSvc.log(new ActionLogRecord(ActionLogRecord.ActionType.Command, "retentionCreate") + .setInfo("id: " + retention.getId() + " date unavailable: " + retention.getDateUnavailable() + " reason: " + retention.getReason()).setUserIdentifier(userIdentifier)); + return retention.getId(); + } + + private int deleteById(long id, String userIdentifier) { + //Not quite from a command, but this action can be done by anyone, so command seems better than Admin or other alternatives + actionLogSvc.log(new ActionLogRecord(ActionLogRecord.ActionType.Command, 
"retentionDelete") + .setInfo(Long.toString(id)) + .setUserIdentifier(userIdentifier)); + return em.createNamedQuery("Retention.deleteById") + .setParameter("id", id) + .executeUpdate(); + } + public int delete(Retention retention, String userIdentifier) { + return deleteById(retention.getId(), userIdentifier); + } +} diff --git a/src/main/java/edu/harvard/iq/dataverse/SendFeedbackDialog.java b/src/main/java/edu/harvard/iq/dataverse/SendFeedbackDialog.java index 5a522eb7e45..46941c8b5b6 100644 --- a/src/main/java/edu/harvard/iq/dataverse/SendFeedbackDialog.java +++ b/src/main/java/edu/harvard/iq/dataverse/SendFeedbackDialog.java @@ -129,6 +129,10 @@ public void setUserSum(Long userSum) { } public String getMessageTo() { + if (op1 == null || op2 == null) { + // Fix for 403 error page: initUserInput method doesn't call before + initUserInput(null); + } if (feedbackTarget == null) { return BrandingUtil.getSupportTeamName(systemAddress); } else if (feedbackTarget.isInstanceofDataverse()) { diff --git a/src/main/java/edu/harvard/iq/dataverse/SettingsWrapper.java b/src/main/java/edu/harvard/iq/dataverse/SettingsWrapper.java index 91bcc508b78..48196591b19 100644 --- a/src/main/java/edu/harvard/iq/dataverse/SettingsWrapper.java +++ b/src/main/java/edu/harvard/iq/dataverse/SettingsWrapper.java @@ -78,6 +78,9 @@ public class SettingsWrapper implements java.io.Serializable { private boolean embargoDateChecked = false; private LocalDate maxEmbargoDate = null; + private boolean retentionDateChecked = false; + private LocalDate minRetentionDate = null; + private String siteUrl = null; private Dataverse rootDataverse = null; @@ -302,14 +305,16 @@ public boolean isPublicInstall(){ } return publicInstall; } - + + @Deprecated(forRemoval = true, since = "2024-07-07") public boolean isRsyncUpload() { if (rsyncUpload == null) { rsyncUpload = getUploadMethodAvailable(SystemConfig.FileUploadMethods.RSYNC.toString()); } return rsyncUpload; } - + + @Deprecated(forRemoval = true, since = "2024-07-07") public boolean isRsyncDownload() { if (rsyncDownload == null) { rsyncDownload = systemConfig.isRsyncDownload(); @@ -376,7 +381,8 @@ public boolean isWebloaderUpload() { } return webloaderUpload; } - + + @Deprecated(forRemoval = true, since = "2024-07-07") public boolean isRsyncOnly() { if (rsyncOnly == null) { String downloadMethods = getValueForKey(SettingsServiceBean.Key.DownloadMethods); @@ -395,7 +401,7 @@ public boolean isRsyncOnly() { } return rsyncOnly; } - + public boolean isHTTPUpload(){ if (httpUpload == null) { httpUpload = getUploadMethodAvailable(SystemConfig.FileUploadMethods.NATIVE.toString()); @@ -582,6 +588,89 @@ public void validateEmbargoDate(FacesContext context, UIComponent component, Obj } } + public LocalDate getMinRetentionDate() { + if (!retentionDateChecked) { + String months = getValueForKey(Key.MinRetentionDurationInMonths); + Long minMonths = null; + if (months != null) { + try { + minMonths = Long.parseLong(months); + } catch (NumberFormatException nfe) { + logger.warning("Cant interpret :MinRetentionDurationInMonths as a long"); + } + } + + if (minMonths != null && minMonths != 0) { + if (minMonths == -1) { + minMonths = 0l; // Absolute minimum is 0 + } + minRetentionDate = LocalDate.now().plusMonths(minMonths); + } + retentionDateChecked = true; + } + return minRetentionDate; + } + + public LocalDate getMaxRetentionDate() { + Long maxMonths = 12000l; // Arbitrary cutoff at 1000 years - needs to keep maxDate < year 999999999 and + // somehwere 1K> x >10K years the datepicker widget stops 
+                                 // calendar
+        return LocalDate.now().plusMonths(maxMonths);
+    }
+
+    public boolean isValidRetentionDate(Retention r) {
+
+        if (r.getDateUnavailable()==null ||
+                isRetentionAllowed() && r.getDateUnavailable().isAfter(getMinRetentionDate())) {
+            return true;
+        }
+
+        return false;
+    }
+
+    public boolean isRetentionAllowed() {
+        //Need a valid :MinRetentionDurationInMonths setting to allow retentions
+        return getMinRetentionDate()!=null;
+    }
+
+    public void validateRetentionDate(FacesContext context, UIComponent component, Object value)
+            throws ValidatorException {
+        if (isRetentionAllowed()) {
+            UIComponent cb = component.findComponent("retentionCheckbox");
+            UIInput endComponent = (UIInput) cb;
+            boolean removedState = false;
+            if (endComponent != null) {
+                try {
+                    removedState = (Boolean) endComponent.getSubmittedValue();
+                } catch (NullPointerException npe) {
+                    // Do nothing - checkbox is not being shown (and is therefore not checked)
+                }
+            }
+            if (!removedState && value == null) {
+                String msgString = BundleUtil.getStringFromBundle("retention.date.required");
+                FacesMessage msg = new FacesMessage(msgString);
+                msg.setSeverity(FacesMessage.SEVERITY_ERROR);
+                throw new ValidatorException(msg);
+            }
+            Retention newR = new Retention(((LocalDate) value), null);
+            if (!isValidRetentionDate(newR)) {
+                String minDate = getMinRetentionDate().format(DateTimeFormatter.ofPattern("yyyy-MM-dd"));
+                String maxDate = getMaxRetentionDate().format(DateTimeFormatter.ofPattern("yyyy-MM-dd"));
+                String msgString = BundleUtil.getStringFromBundle("retention.date.invalid",
+                        Arrays.asList(minDate, maxDate));
+                // If we don't throw an exception here, the datePicker will use its own
+                // validator and display a default message. The value for that can be set by
+                // adding validatorMessage="#{bundle['retention.date.invalid']}" (a version with
+                // no params) to the datepicker
+                // element in file-edit-popup-fragment.html, but it would be better to catch all
+                // problems here (so we can show a message with the min/max dates).
+                FacesMessage msg = new FacesMessage(msgString);
+                msg.setSeverity(FacesMessage.SEVERITY_ERROR);
+                throw new ValidatorException(msg);
+            }
+        }
+    }
+
     Map<String, String> languageMap = null;
 
     public Map<String, String> getBaseMetadataLanguageMap(boolean refresh) {
diff --git a/src/main/java/edu/harvard/iq/dataverse/Shib.java
index 24c0f9d7926..f9cf061e771 100644
--- a/src/main/java/edu/harvard/iq/dataverse/Shib.java
+++ b/src/main/java/edu/harvard/iq/dataverse/Shib.java
@@ -59,6 +59,8 @@ public class Shib implements java.io.Serializable {
     SettingsServiceBean settingsService;
     @EJB
     SystemConfig systemConfig;
+    @EJB
+    UserServiceBean userService;
 
     HttpServletRequest request;
 
@@ -259,6 +261,7 @@ else if (ShibAffiliationOrder.equals("firstAffiliation")) {
             state = State.REGULAR_LOGIN_INTO_EXISTING_SHIB_ACCOUNT;
             logger.fine("Found user based on " + userPersistentId + ".
Logging in."); logger.fine("Updating display info for " + au.getName()); + userService.updateLastLogin(au); authSvc.updateAuthenticatedUser(au, displayInfo); logInUserAndSetShibAttributes(au); String prettyFacesHomePageString = getPrettyFacesHomePageString(false); diff --git a/src/main/java/edu/harvard/iq/dataverse/ThumbnailServiceWrapper.java b/src/main/java/edu/harvard/iq/dataverse/ThumbnailServiceWrapper.java index b6ab23848e2..d31fdd4e380 100644 --- a/src/main/java/edu/harvard/iq/dataverse/ThumbnailServiceWrapper.java +++ b/src/main/java/edu/harvard/iq/dataverse/ThumbnailServiceWrapper.java @@ -177,7 +177,7 @@ public String getDatasetCardImageAsUrl(Dataset dataset, Long versionId, boolean StorageIO storageIO = null; try { storageIO = DataAccess.getStorageIO(dataset); - if (storageIO.isAuxObjectCached(DatasetUtil.datasetLogoFilenameFinal)) { + if (storageIO != null && storageIO.isAuxObjectCached(DatasetUtil.datasetLogoFilenameFinal)) { // If not, return null/use the default, otherwise pass the logo URL hasDatasetLogo = true; } diff --git a/src/main/java/edu/harvard/iq/dataverse/api/AbstractApiBean.java b/src/main/java/edu/harvard/iq/dataverse/api/AbstractApiBean.java index b7305a24f69..19df6d8c1c7 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/AbstractApiBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/AbstractApiBean.java @@ -23,6 +23,7 @@ import edu.harvard.iq.dataverse.engine.command.exception.RateLimitCommandException; import edu.harvard.iq.dataverse.externaltools.ExternalToolServiceBean; import edu.harvard.iq.dataverse.license.LicenseServiceBean; +import edu.harvard.iq.dataverse.pidproviders.PidUtil; import edu.harvard.iq.dataverse.locality.StorageSiteServiceBean; import edu.harvard.iq.dataverse.metrics.MetricsServiceBean; import edu.harvard.iq.dataverse.search.savedsearch.SavedSearchServiceBean; @@ -42,6 +43,7 @@ import jakarta.persistence.NoResultException; import jakarta.persistence.PersistenceContext; import jakarta.servlet.http.HttpServletRequest; +import jakarta.validation.constraints.NotNull; import jakarta.ws.rs.container.ContainerRequestContext; import jakarta.ws.rs.core.Context; import jakarta.ws.rs.core.MediaType; @@ -370,6 +372,11 @@ protected DataverseLinkingDataverse findDataverseLinkingDataverseOrDie(String da } protected Dataset findDatasetOrDie(String id) throws WrappedResponse { + return findDatasetOrDie(id, false); + } + + protected Dataset findDatasetOrDie(String id, boolean deep) throws WrappedResponse { + Long datasetId; Dataset dataset; if (id.equals(PERSISTENT_ID_KEY)) { String persistentId = getRequestParameter(PERSISTENT_ID_KEY.substring(1)); @@ -377,24 +384,38 @@ protected Dataset findDatasetOrDie(String id) throws WrappedResponse { throw new WrappedResponse( badRequest(BundleUtil.getStringFromBundle("find.dataset.error.dataset_id_is_null", Collections.singletonList(PERSISTENT_ID_KEY.substring(1))))); } - dataset = datasetSvc.findByGlobalId(persistentId); - if (dataset == null) { - throw new WrappedResponse(notFound(BundleUtil.getStringFromBundle("find.dataset.error.dataset.not.found.persistentId", Collections.singletonList(persistentId)))); + GlobalId globalId; + try { + globalId = PidUtil.parseAsGlobalID(persistentId); + } catch (IllegalArgumentException e) { + throw new WrappedResponse( + badRequest(BundleUtil.getStringFromBundle("find.dataset.error.dataset.not.found.bad.id", Collections.singletonList(persistentId)))); + } + datasetId = dvObjSvc.findIdByGlobalId(globalId, DvObject.DType.Dataset); + if (datasetId == null) { + 
datasetId = dvObjSvc.findIdByAltGlobalId(globalId, DvObject.DType.Dataset); + } + if (datasetId == null) { + throw new WrappedResponse( + notFound(BundleUtil.getStringFromBundle("find.dataset.error.dataset_id_is_null", Collections.singletonList(PERSISTENT_ID_KEY.substring(1))))); } - return dataset; - } else { try { - dataset = datasetSvc.find(Long.parseLong(id)); - if (dataset == null) { - throw new WrappedResponse(notFound(BundleUtil.getStringFromBundle("find.dataset.error.dataset.not.found.id", Collections.singletonList(id)))); - } - return dataset; + datasetId = Long.parseLong(id); } catch (NumberFormatException nfe) { throw new WrappedResponse( badRequest(BundleUtil.getStringFromBundle("find.dataset.error.dataset.not.found.bad.id", Collections.singletonList(id)))); } } + if (deep) { + dataset = datasetSvc.findDeep(datasetId); + } else { + dataset = datasetSvc.find(datasetId); + } + if (dataset == null) { + throw new WrappedResponse(notFound(BundleUtil.getStringFromBundle("find.dataset.error.dataset.not.found.id", Collections.singletonList(id)))); + } + return dataset; } protected DatasetVersion findDatasetVersionOrDie(final DataverseRequest req, String versionNumber, final Dataset ds, boolean includeDeaccessioned, boolean checkPermsWhenDeaccessioned) throws WrappedResponse { @@ -531,17 +552,21 @@ protected DvObject findDvo( Long id ) { * with that alias. If that fails, tries to get a {@link Dataset} with that global id. * @param id a value identifying the DvObject, either numeric of textual. * @return A DvObject, or {@code null} + * @throws WrappedResponse */ - protected DvObject findDvo( String id ) { - if ( isNumeric(id) ) { - return findDvo( Long.valueOf(id)) ; + @NotNull + protected DvObject findDvo(@NotNull final String id) throws WrappedResponse { + DvObject d = null; + if (isNumeric(id)) { + d = findDvo(Long.valueOf(id)); } else { - Dataverse d = dataverseSvc.findByAlias(id); - return ( d != null ) ? 
- d : datasetSvc.findByGlobalId(id); - + d = dataverseSvc.findByAlias(id); } - } + if (d == null) { + return findDatasetOrDie(id); + } + return d; + } protected T failIfNull( T t, String errorMessage ) throws WrappedResponse { if ( t != null ) return t; diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Access.java b/src/main/java/edu/harvard/iq/dataverse/api/Access.java index 297ec2d3681..00da4990996 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/Access.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/Access.java @@ -130,6 +130,14 @@ import jakarta.ws.rs.core.MediaType; import static jakarta.ws.rs.core.Response.Status.FORBIDDEN; import static jakarta.ws.rs.core.Response.Status.UNAUTHORIZED; + +import org.eclipse.microprofile.openapi.annotations.Operation; +import org.eclipse.microprofile.openapi.annotations.media.Content; +import org.eclipse.microprofile.openapi.annotations.media.Schema; +import org.eclipse.microprofile.openapi.annotations.parameters.RequestBody; +import org.eclipse.microprofile.openapi.annotations.responses.APIResponse; +import org.eclipse.microprofile.openapi.annotations.responses.APIResponses; +import org.eclipse.microprofile.openapi.annotations.tags.Tag; import org.glassfish.jersey.media.multipart.FormDataBodyPart; import org.glassfish.jersey.media.multipart.FormDataParam; @@ -466,7 +474,9 @@ public String tabularDatafileMetadataDDI(@Context ContainerRequestContext crc, @ if (!dataFile.isTabularData()) { throw new BadRequestException("tabular data required"); } - + if (FileUtil.isRetentionExpired(dataFile)) { + throw new BadRequestException("unable to download file with expired retention"); + } if (dataFile.isRestricted() || FileUtil.isActivelyEmbargoed(dataFile)) { boolean hasPermissionToDownloadFile = false; DataverseRequest dataverseRequest; @@ -921,14 +931,15 @@ public void write(OutputStream os) throws IOException, } } else { boolean embargoed = FileUtil.isActivelyEmbargoed(file); - if (file.isRestricted() || embargoed) { + boolean retentionExpired = FileUtil.isRetentionExpired(file); + if (file.isRestricted() || embargoed || retentionExpired) { if (zipper == null) { fileManifest = fileManifest + file.getFileMetadata().getLabel() + " IS " - + (embargoed ? "EMBARGOED" : "RESTRICTED") + + (embargoed ? "EMBARGOED" : retentionExpired ? "RETENTIONEXPIRED" : "RESTRICTED") + " AND CANNOT BE DOWNLOADED\r\n"; } else { zipper.addToManifest(file.getFileMetadata().getLabel() + " IS " - + (embargoed ? "EMBARGOED" : "RESTRICTED") + + (embargoed ? "EMBARGOED" : retentionExpired ? 
"RETENTIONEXPIRED" : "RESTRICTED") + " AND CANNOT BE DOWNLOADED\r\n"); } } else { @@ -1245,6 +1256,20 @@ private String getWebappImageResource(String imageName) { @AuthRequired @Path("datafile/{fileId}/auxiliary/{formatTag}/{formatVersion}") @Consumes(MediaType.MULTIPART_FORM_DATA) + @Produces("application/json") + @Operation(summary = "Save auxiliary file with version", + description = "Saves an auxiliary file") + @APIResponses(value = { + @APIResponse(responseCode = "200", + description = "File saved response"), + @APIResponse(responseCode = "403", + description = "User not authorized to edit the dataset."), + @APIResponse(responseCode = "400", + description = "File not found based on id.") + }) + @Tag(name = "saveAuxiliaryFileWithVersion", + description = "Save Auxiliary File With Version") + @RequestBody(content = @Content(mediaType = MediaType.MULTIPART_FORM_DATA)) public Response saveAuxiliaryFileWithVersion(@Context ContainerRequestContext crc, @PathParam("fileId") Long fileId, @PathParam("formatTag") String formatTag, @@ -1402,6 +1427,10 @@ public Response requestFileAccess(@Context ContainerRequestContext crc, @PathPar return error(BAD_REQUEST, BundleUtil.getStringFromBundle("access.api.requestAccess.fileNotFound", args)); } + if (FileUtil.isRetentionExpired(dataFile)) { + return error(BAD_REQUEST, BundleUtil.getStringFromBundle("access.api.requestAccess.failure.retentionExpired")); + } + if (!dataFile.getOwner().isFileAccessRequest()) { return error(BAD_REQUEST, BundleUtil.getStringFromBundle("access.api.requestAccess.requestsNotAccepted")); } @@ -1735,8 +1764,11 @@ private boolean isAccessAuthorized(User requestUser, DataFile df) { //True if there's an embargo that hasn't yet expired //In this state, we block access as though the file is restricted (even if it is not restricted) boolean embargoed = FileUtil.isActivelyEmbargoed(df); - - + // access is also blocked for retention expired files + boolean retentionExpired = FileUtil.isRetentionExpired(df); + // No access ever if retention is expired + if(retentionExpired) return false; + /* SEK 7/26/2018 for 3661 relying on the version state of the dataset versions to which this file is attached check to see if at least one is RELEASED @@ -1801,7 +1833,7 @@ private boolean isAccessAuthorized(User requestUser, DataFile df) { //The one case where we don't need to check permissions - if (!restricted && !embargoed && published) { + if (!restricted && !embargoed && !retentionExpired && published) { // If they are not published, they can still be downloaded, if the user // has the permission to view unpublished versions! 
(this case will // be handled below) diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Admin.java b/src/main/java/edu/harvard/iq/dataverse/api/Admin.java index d098c2fe16a..d60884bad2f 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/Admin.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/Admin.java @@ -17,6 +17,7 @@ import edu.harvard.iq.dataverse.DvObjectServiceBean; import edu.harvard.iq.dataverse.api.auth.AuthRequired; import edu.harvard.iq.dataverse.settings.JvmSettings; +import edu.harvard.iq.dataverse.util.StringUtil; import edu.harvard.iq.dataverse.validation.EMailValidator; import edu.harvard.iq.dataverse.EjbDataverseEngine; import edu.harvard.iq.dataverse.Template; @@ -200,7 +201,7 @@ public Response putSetting(@PathParam("name") String name, String content) { @Path("settings/{name}/lang/{lang}") @PUT - public Response putSetting(@PathParam("name") String name, @PathParam("lang") String lang, String content) { + public Response putSettingLang(@PathParam("name") String name, @PathParam("lang") String lang, String content) { Setting s = settingsSvc.set(name, lang, content); return ok("Setting " + name + " - " + lang + " - added."); } @@ -223,7 +224,7 @@ public Response deleteSetting(@PathParam("name") String name) { @Path("settings/{name}/lang/{lang}") @DELETE - public Response deleteSetting(@PathParam("name") String name, @PathParam("lang") String lang) { + public Response deleteSettingLang(@PathParam("name") String name, @PathParam("lang") String lang) { settingsSvc.delete(name, lang); return ok("Setting " + name + " - " + lang + " deleted."); } @@ -1029,29 +1030,49 @@ public Response deleteRole(@Context ContainerRequestContext crc, @PathParam("id" }, getRequestUser(crc)); } - @Path("superuser/{identifier}") - @POST - public Response toggleSuperuser(@PathParam("identifier") String identifier) { - ActionLogRecord alr = new ActionLogRecord(ActionLogRecord.ActionType.Admin, "toggleSuperuser") - .setInfo(identifier); - try { - AuthenticatedUser user = authSvc.getAuthenticatedUser(identifier); - if (user.isDeactivated()) { - return error(Status.BAD_REQUEST, "You cannot make a deactivated user a superuser."); - } + @Path("superuser/{identifier}") + @Deprecated + @POST + public Response toggleSuperuser(@PathParam("identifier") String identifier) { + ActionLogRecord alr = new ActionLogRecord(ActionLogRecord.ActionType.Admin, "toggleSuperuser") + .setInfo(identifier); + try { + final AuthenticatedUser user = authSvc.getAuthenticatedUser(identifier); + return setSuperuserStatus(user, !user.isSuperuser()); + } catch (Exception e) { + alr.setActionResult(ActionLogRecord.Result.InternalError); + alr.setInfo(alr.getInfo() + "// " + e.getMessage()); + return error(Response.Status.INTERNAL_SERVER_ERROR, e.getMessage()); + } finally { + actionLogSvc.log(alr); + } + } - user.setSuperuser(!user.isSuperuser()); + private Response setSuperuserStatus(AuthenticatedUser user, Boolean isSuperuser) { + if (user.isDeactivated()) { + return error(Status.BAD_REQUEST, "You cannot make a deactivated user a superuser."); + } + user.setSuperuser(isSuperuser); + return ok("User " + user.getIdentifier() + " " + (user.isSuperuser() ? "set" : "removed") + + " as a superuser."); + } - return ok("User " + user.getIdentifier() + " " + (user.isSuperuser() ? 
"set" : "removed") - + " as a superuser."); - } catch (Exception e) { - alr.setActionResult(ActionLogRecord.Result.InternalError); - alr.setInfo(alr.getInfo() + "// " + e.getMessage()); - return error(Response.Status.INTERNAL_SERVER_ERROR, e.getMessage()); - } finally { - actionLogSvc.log(alr); - } - } + @Path("superuser/{identifier}") + @PUT + // Using string instead of boolean so user doesn't need to add a Content-type header in their request + public Response setSuperuserStatus(@PathParam("identifier") String identifier, String isSuperuser) { + ActionLogRecord alr = new ActionLogRecord(ActionLogRecord.ActionType.Admin, "setSuperuserStatus") + .setInfo(identifier + ":" + isSuperuser); + try { + return setSuperuserStatus(authSvc.getAuthenticatedUser(identifier), StringUtil.isTrue(isSuperuser)); + } catch (Exception e) { + alr.setActionResult(ActionLogRecord.Result.InternalError); + alr.setInfo(alr.getInfo() + "// " + e.getMessage()); + return error(Response.Status.INTERNAL_SERVER_ERROR, e.getMessage()); + } finally { + actionLogSvc.log(alr); + } + } @GET @Path("validate/datasets") @@ -1332,26 +1353,24 @@ public Response convertUserFromBcryptToSha1(String json) { } - @Path("permissions/{dvo}") - @AuthRequired - @GET - public Response findPermissonsOn(@Context ContainerRequestContext crc, @PathParam("dvo") String dvo) { - try { - DvObject dvObj = findDvo(dvo); - if (dvObj == null) { - return notFound("DvObject " + dvo + " not found"); - } - User aUser = getRequestUser(crc); - JsonObjectBuilder bld = Json.createObjectBuilder(); - bld.add("user", aUser.getIdentifier()); - bld.add("permissions", json(permissionSvc.permissionsFor(createDataverseRequest(aUser), dvObj))); - return ok(bld); - - } catch (Exception e) { - logger.log(Level.SEVERE, "Error while testing permissions", e); - return error(Response.Status.INTERNAL_SERVER_ERROR, e.getMessage()); - } - } + @Path("permissions/{dvo}") + @AuthRequired + @GET + public Response findPermissonsOn(@Context final ContainerRequestContext crc, @PathParam("dvo") final String dvo) { + try { + final DvObject dvObj = findDvo(dvo); + final User aUser = getRequestUser(crc); + final JsonObjectBuilder bld = Json.createObjectBuilder(); + bld.add("user", aUser.getIdentifier()); + bld.add("permissions", json(permissionSvc.permissionsFor(createDataverseRequest(aUser), dvObj))); + return ok(bld); + } catch (WrappedResponse r) { + return r.getResponse(); + } catch (Exception e) { + logger.log(Level.SEVERE, "Error while testing permissions", e); + return error(Response.Status.INTERNAL_SERVER_ERROR, e.getMessage()); + } + } @Path("assignee/{idtf}") @GET @@ -2318,6 +2337,7 @@ public Response addBannerMessage(JsonObject jsonObject) throws WrappedResponse { BannerMessage toAdd = new BannerMessage(); try { + String dismissible = jsonObject.getString("dismissibleByUser"); boolean dismissibleByUser = false; @@ -2338,12 +2358,17 @@ public Response addBannerMessage(JsonObject jsonObject) throws WrappedResponse { messageText.setBannerMessage(toAdd); toAdd.getBannerMessageTexts().add(messageText); } - bannerMessageService.save(toAdd); - return ok("Banner Message added successfully."); + bannerMessageService.save(toAdd); + + JsonObjectBuilder jsonObjectBuilder = Json.createObjectBuilder() + .add("message", "Banner Message added successfully.") + .add("id", toAdd.getId()); + + return ok(jsonObjectBuilder); } catch (Exception e) { logger.warning("Unexpected Exception: " + e.getMessage()); - return error(Status.BAD_REQUEST, "Add Banner Message unexpected exception: " + 
e.getMessage()); + return error(Status.BAD_REQUEST, "Add Banner Message unexpected exception: invalid or missing JSON object."); } } @@ -2379,10 +2404,19 @@ public Response deactivateBannerMessage(@PathParam("id") Long id) throws Wrapped @Path("/bannerMessage") public Response getBannerMessages(@PathParam("id") Long id) throws WrappedResponse { - return ok(bannerMessageService.findAllBannerMessages().stream() - .map(m -> jsonObjectBuilder().add("id", m.getId()).add("displayValue", m.getDisplayValue())) - .collect(toJsonArray())); + List messagesList = bannerMessageService.findAllBannerMessages(); + + for (BannerMessage message : messagesList) { + if ("".equals(message.getDisplayValue())) { + return error(Response.Status.INTERNAL_SERVER_ERROR, "No banner messages found for this locale."); + } + } + JsonArrayBuilder messages = messagesList.stream() + .map(m -> jsonObjectBuilder().add("id", m.getId()).add("displayValue", m.getDisplayValue())) + .collect(toJsonArray()); + + return ok(messages); } @POST diff --git a/src/main/java/edu/harvard/iq/dataverse/api/BuiltinUsers.java b/src/main/java/edu/harvard/iq/dataverse/api/BuiltinUsers.java index 50862bc0d35..ba99cf33c5b 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/BuiltinUsers.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/BuiltinUsers.java @@ -119,7 +119,7 @@ public Response create(BuiltinUser user, @PathParam("password") String password, */ @POST @Path("{password}/{key}/{sendEmailNotification}") - public Response create(BuiltinUser user, @PathParam("password") String password, @PathParam("key") String key, @PathParam("sendEmailNotification") Boolean sendEmailNotification) { + public Response createWithNotification(BuiltinUser user, @PathParam("password") String password, @PathParam("key") String key, @PathParam("sendEmailNotification") Boolean sendEmailNotification) { return internalSave(user, password, key, sendEmailNotification); } diff --git a/src/main/java/edu/harvard/iq/dataverse/api/DatasetFieldServiceApi.java b/src/main/java/edu/harvard/iq/dataverse/api/DatasetFieldServiceApi.java index 00b7dfa6e36..01c51dc2b4c 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/DatasetFieldServiceApi.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/DatasetFieldServiceApi.java @@ -24,7 +24,6 @@ import jakarta.ejb.EJBException; import jakarta.json.Json; import jakarta.json.JsonArrayBuilder; -import jakarta.validation.ConstraintViolation; import jakarta.validation.ConstraintViolationException; import jakarta.ws.rs.Consumes; import jakarta.ws.rs.GET; @@ -488,9 +487,7 @@ private String parseControlledVocabulary(String[] values) { @Consumes("application/zip") @Path("loadpropertyfiles") public Response loadLanguagePropertyFile(File inputFile) { - try - { - ZipFile file = new ZipFile(inputFile); + try (ZipFile file = new ZipFile(inputFile)) { //Get file entries Enumeration entries = file.entries(); @@ -502,20 +499,26 @@ public Response loadLanguagePropertyFile(File inputFile) { { ZipEntry entry = entries.nextElement(); String dataverseLangFileName = dataverseLangDirectory + "/" + entry.getName(); - FileOutputStream fileOutput = new FileOutputStream(dataverseLangFileName); + File entryFile = new File(dataverseLangFileName); + String canonicalPath = entryFile.getCanonicalPath(); + if (canonicalPath.startsWith(dataverseLangDirectory + "/")) { + try (FileOutputStream fileOutput = new FileOutputStream(dataverseLangFileName)) { - InputStream is = file.getInputStream(entry); - BufferedInputStream bis = new BufferedInputStream(is); + 
InputStream is = file.getInputStream(entry); + BufferedInputStream bis = new BufferedInputStream(is); - while (bis.available() > 0) { - fileOutput.write(bis.read()); + while (bis.available() > 0) { + fileOutput.write(bis.read()); + } + } + } else { + logger.log(Level.SEVERE, "Zip Slip prevented: uploaded zip file tried to write to {0}", canonicalPath); + return Response.status(400).entity("The zip file includes an illegal file path").build(); } - fileOutput.close(); } } - catch(IOException e) - { - e.printStackTrace(); + catch(IOException e) { + logger.log(Level.SEVERE, "Reading the language property zip file failed", e); return Response.status(500).entity("Internal server error. More details available in the server logs.").build(); } diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java b/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java index 6d8fbe1808c..4b919c5ed82 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java @@ -1,6 +1,7 @@ package edu.harvard.iq.dataverse.api; import com.amazonaws.services.s3.model.PartETag; + import edu.harvard.iq.dataverse.*; import edu.harvard.iq.dataverse.DatasetLock.Reason; import edu.harvard.iq.dataverse.actionlogging.ActionLogRecord; @@ -44,6 +45,7 @@ import edu.harvard.iq.dataverse.privateurl.PrivateUrl; import edu.harvard.iq.dataverse.privateurl.PrivateUrlServiceBean; import edu.harvard.iq.dataverse.search.IndexServiceBean; +import edu.harvard.iq.dataverse.settings.FeatureFlags; import edu.harvard.iq.dataverse.settings.JvmSettings; import edu.harvard.iq.dataverse.settings.SettingsServiceBean; import edu.harvard.iq.dataverse.storageuse.UploadSessionQuotaLimit; @@ -66,6 +68,12 @@ import jakarta.ws.rs.core.*; import jakarta.ws.rs.core.Response.Status; import org.apache.commons.lang3.StringUtils; +import org.eclipse.microprofile.openapi.annotations.Operation; +import org.eclipse.microprofile.openapi.annotations.media.Content; +import org.eclipse.microprofile.openapi.annotations.media.Schema; +import org.eclipse.microprofile.openapi.annotations.parameters.RequestBody; +import org.eclipse.microprofile.openapi.annotations.responses.APIResponse; +import org.eclipse.microprofile.openapi.annotations.tags.Tag; import org.glassfish.jersey.media.multipart.FormDataBodyPart; import org.glassfish.jersey.media.multipart.FormDataContentDisposition; import org.glassfish.jersey.media.multipart.FormDataParam; @@ -80,6 +88,7 @@ import java.time.LocalDateTime; import java.time.ZoneId; import java.time.format.DateTimeFormatter; +import java.time.format.DateTimeParseException; import java.util.*; import java.util.Map.Entry; import java.util.concurrent.ExecutionException; @@ -154,6 +163,9 @@ public class Datasets extends AbstractApiBean { @EJB EmbargoServiceBean embargoService; + @EJB + RetentionServiceBean retentionService; + @Inject MakeDataCountLoggingServiceBean mdcLogService; @@ -191,7 +203,7 @@ public interface DsVersionHandler { @Path("{id}") public Response getDataset(@Context ContainerRequestContext crc, @PathParam("id") String id, @Context UriInfo uriInfo, @Context HttpHeaders headers, @Context HttpServletResponse response, @QueryParam("returnOwners") boolean returnOwners) { return response( req -> { - final Dataset retrieved = execCommand(new GetDatasetCommand(req, findDatasetOrDie(id))); + final Dataset retrieved = execCommand(new GetDatasetCommand(req, findDatasetOrDie(id, true))); final DatasetVersion latest = execCommand(new 
GetLatestAccessibleDatasetVersionCommand(req, retrieved)); final JsonObjectBuilder jsonbuilder = json(retrieved, returnOwners); //Report MDC if this is a released version (could be draft if user has access, or user may not have access at all and is not getting metadata beyond the minimum) @@ -209,7 +221,7 @@ public Response getDataset(@Context ContainerRequestContext crc, @PathParam("id" // WORKS on published datasets, which are open to the world. -- L.A. 4.5 @GET @Path("/export") - @Produces({"application/xml", "application/json", "application/html", "application/ld+json" }) + @Produces({"application/xml", "application/json", "application/html", "application/ld+json", "*/*" }) public Response exportDataset(@QueryParam("persistentId") String persistentId, @QueryParam("exporter") String exporter, @Context UriInfo uriInfo, @Context HttpHeaders headers, @Context HttpServletResponse response) { try { @@ -429,21 +441,32 @@ public Response getVersion(@Context ContainerRequestContext crc, @Context HttpHeaders headers) { return response( req -> { - //If excludeFiles is null the default is to provide the files and because of this we need to check permissions. boolean checkPerms = excludeFiles == null ? true : !excludeFiles; - - Dataset dst = findDatasetOrDie(datasetId); - DatasetVersion dsv = getDatasetVersionOrDie(req, versionId, dst, uriInfo, headers, includeDeaccessioned, checkPerms); - - if (dsv == null || dsv.getId() == null) { + + Dataset dataset = findDatasetOrDie(datasetId); + DatasetVersion requestedDatasetVersion = getDatasetVersionOrDie(req, + versionId, + dataset, + uriInfo, + headers, + includeDeaccessioned, + checkPerms); + + if (requestedDatasetVersion == null || requestedDatasetVersion.getId() == null) { return notFound("Dataset version not found"); } if (excludeFiles == null ? true : !excludeFiles) { - dsv = datasetversionService.findDeep(dsv.getId()); + requestedDatasetVersion = datasetversionService.findDeep(requestedDatasetVersion.getId()); } - return ok(json(dsv, null, excludeFiles == null ? true : !excludeFiles, returnOwners)); + + JsonObjectBuilder jsonBuilder = json(requestedDatasetVersion, + null, + excludeFiles == null ? true : !excludeFiles, + returnOwners); + return ok(jsonBuilder); + }, getRequestUser(crc)); } @@ -465,7 +488,7 @@ public Response getVersionFiles(@Context ContainerRequestContext crc, @Context UriInfo uriInfo, @Context HttpHeaders headers) { return response(req -> { - DatasetVersion datasetVersion = getDatasetVersionOrDie(req, versionId, findDatasetOrDie(datasetId), uriInfo, headers, includeDeaccessioned); + DatasetVersion datasetVersion = getDatasetVersionOrDie(req, versionId, findDatasetOrDie(datasetId, false), uriInfo, headers, includeDeaccessioned); DatasetVersionFilesServiceBean.FileOrderCriteria fileOrderCriteria; try { fileOrderCriteria = orderCriteria != null ? 
DatasetVersionFilesServiceBean.FileOrderCriteria.valueOf(orderCriteria) : DatasetVersionFilesServiceBean.FileOrderCriteria.NameAZ; @@ -671,8 +694,9 @@ public Response updateDatasetPIDMetadata(@Context ContainerRequestContext crc, @ } return response(req -> { - execCommand(new UpdateDvObjectPIDMetadataCommand(findDatasetOrDie(id), req)); - List<String> args = Arrays.asList(id); + Dataset dataset = findDatasetOrDie(id); + execCommand(new UpdateDvObjectPIDMetadataCommand(dataset, req)); + List<String> args = Arrays.asList(dataset.getIdentifier()); return ok(BundleUtil.getStringFromBundle("datasets.api.updatePIDMetadata.success.for.single.dataset", args)); }, getRequestUser(crc)); } @@ -684,7 +708,14 @@ public Response updateDatasetPIDMetadataAll(@Context ContainerRequestContext crc return response( req -> { datasetService.findAll().forEach( ds -> { try { + logger.fine("ReRegistering: " + ds.getId() + " : " + ds.getIdentifier()); + if (!ds.isReleased() || (!ds.isIdentifierRegistered() || (ds.getIdentifier() == null))) { + if (ds.isReleased()) { + logger.warning("Dataset id=" + ds.getId() + " is in an inconsistent state (publicationdate but no identifier/identifier not registered)"); + } + } else { execCommand(new UpdateDvObjectPIDMetadataCommand(findDatasetOrDie(ds.getId().toString()), req)); + } } catch (WrappedResponse ex) { Logger.getLogger(Datasets.class.getName()).log(Level.SEVERE, null, ex); } @@ -781,7 +812,7 @@ public Response getVersionJsonLDMetadata(@Context ContainerRequestContext crc, @ @AuthRequired @Path("{id}/metadata") @Produces("application/ld+json, application/json-ld") - public Response getVersionJsonLDMetadata(@Context ContainerRequestContext crc, @PathParam("id") String id, @Context UriInfo uriInfo, @Context HttpHeaders headers) { + public Response getJsonLDMetadata(@Context ContainerRequestContext crc, @PathParam("id") String id, @Context UriInfo uriInfo, @Context HttpHeaders headers) { return getVersionJsonLDMetadata(crc, id, DS_VERSION_LATEST, uriInfo, headers); } @@ -1679,6 +1710,306 @@ public Response removeFileEmbargo(@Context ContainerRequestContext crc, @PathPar } } + @POST + @AuthRequired + @Path("{id}/files/actions/:set-retention") + public Response createFileRetention(@Context ContainerRequestContext crc, @PathParam("id") String id, String jsonBody){ + + // user is authenticated + AuthenticatedUser authenticatedUser = null; + try { + authenticatedUser = getRequestAuthenticatedUserOrDie(crc); + } catch (WrappedResponse ex) { + return error(Status.UNAUTHORIZED, "Authentication is required."); + } + + Dataset dataset; + try { + dataset = findDatasetOrDie(id); + } catch (WrappedResponse ex) { + return ex.getResponse(); + } + + boolean hasValidTerms = TermsOfUseAndAccessValidator.isTOUAValid(dataset.getLatestVersion().getTermsOfUseAndAccess(), null); + + if (!hasValidTerms){ + return error(Status.CONFLICT, BundleUtil.getStringFromBundle("dataset.message.toua.invalid")); + } + + // client is superadmin or (client has EditDataset permission on these files and files are unreleased) + // check if files are unreleased (DRAFT?) 
+ if ((!authenticatedUser.isSuperuser() && (dataset.getLatestVersion().getVersionState() != DatasetVersion.VersionState.DRAFT) ) || !permissionService.userOn(authenticatedUser, dataset).has(Permission.EditDataset)) { + return error(Status.FORBIDDEN, "Either the files are released and user is not a superuser or user does not have EditDataset permissions"); + } + + // check if retentions are allowed (:MinRetentionDurationInMonths), gets the :MinRetentionDurationInMonths setting variable, if 0 or not set (null) return 400 + long minRetentionDurationInMonths = 0; + try { + minRetentionDurationInMonths = Long.parseLong(settingsService.get(SettingsServiceBean.Key.MinRetentionDurationInMonths.toString())); + } catch (NumberFormatException nfe){ + if (nfe.getMessage().contains("null")) { + return error(Status.BAD_REQUEST, "No Retention periods allowed"); + } + } + if (minRetentionDurationInMonths == 0){ + return error(Status.BAD_REQUEST, "No Retention periods allowed"); + } + + JsonObject json; + try { + json = JsonUtil.getJsonObject(jsonBody); + } catch (JsonException ex) { + return error(Status.BAD_REQUEST, "Invalid JSON; error message: " + ex.getMessage()); + } + + Retention retention = new Retention(); + + + LocalDate currentDateTime = LocalDate.now(); + + // Extract the dateUnavailable - check if specified and valid + String dateUnavailableStr = ""; + LocalDate dateUnavailable; + try { + dateUnavailableStr = json.getString("dateUnavailable"); + dateUnavailable = LocalDate.parse(dateUnavailableStr); + } catch (NullPointerException npex) { + return error(Status.BAD_REQUEST, "Invalid retention period; no dateUnavailable specified"); + } catch (ClassCastException ccex) { + return error(Status.BAD_REQUEST, "Invalid retention period; dateUnavailable must be a string"); + } catch (DateTimeParseException dtpex) { + return error(Status.BAD_REQUEST, "Invalid date format for dateUnavailable: " + dateUnavailableStr); + } + + // check :MinRetentionDurationInMonths if -1 + LocalDate minRetentionDateTime = minRetentionDurationInMonths != -1 ? 
LocalDate.now().plusMonths(minRetentionDurationInMonths) : null; + // dateUnavailable is not in the past + if (dateUnavailable.isAfter(currentDateTime)){ + retention.setDateUnavailable(dateUnavailable); + } else { + return error(Status.BAD_REQUEST, "Date unavailable cannot be in the past"); + } + + // dateUnavailable is within limits + if (minRetentionDateTime != null){ + if (dateUnavailable.isBefore(minRetentionDateTime)){ + return error(Status.BAD_REQUEST, "Date unavailable cannot be earlier than MinRetentionDurationInMonths: "+minRetentionDurationInMonths + " from now"); + } + } + + try { + String reason = json.getString("reason"); + retention.setReason(reason); + } catch (NullPointerException npex) { + // ignoring; no reason specified is OK, it is optional + } catch (ClassCastException ccex) { + return error(Status.BAD_REQUEST, "Invalid retention period; reason must be a string"); + } + + + List<DataFile> datasetFiles = dataset.getFiles(); + List<DataFile> filesToRetention = new LinkedList<>(); + + // extract fileIds from json, find datafiles and add to list + if (json.containsKey("fileIds")){ + try { + JsonArray fileIds = json.getJsonArray("fileIds"); + for (JsonValue jsv : fileIds) { + try { + DataFile dataFile = findDataFileOrDie(jsv.toString()); + filesToRetention.add(dataFile); + } catch (WrappedResponse ex) { + return ex.getResponse(); + } + } + } catch (ClassCastException ccex) { + return error(Status.BAD_REQUEST, "Invalid retention period; fileIds must be an array of id strings"); + } catch (NullPointerException npex) { + return error(Status.BAD_REQUEST, "Invalid retention period; no fileIds specified"); + } + } else { + return error(Status.BAD_REQUEST, "No fileIds specified"); + } + + List<Retention> orphanedRetentions = new ArrayList<>(); + // check if files belong to dataset + if (datasetFiles.containsAll(filesToRetention)) { + JsonArrayBuilder restrictedFiles = Json.createArrayBuilder(); + boolean badFiles = false; + for (DataFile datafile : filesToRetention) { + // superuser can overrule an existing retention, even on released files + if (datafile.isReleased() && !authenticatedUser.isSuperuser()) { + restrictedFiles.add(datafile.getId()); + badFiles = true; + } + } + if (badFiles) { + return Response.status(Status.FORBIDDEN) + .entity(NullSafeJsonBuilder.jsonObjectBuilder().add("status", ApiConstants.STATUS_ERROR) + .add("message", "You do not have permission to set a retention period for the following files") + .add("files", restrictedFiles).build()) + .type(MediaType.APPLICATION_JSON_TYPE).build(); + } + retention = retentionService.merge(retention); + // Good request, so add the retention. Track any existing retentions so we can + // delete them if there are no files left that reference them. 
+ for (DataFile datafile : filesToRetention) { + Retention ret = datafile.getRetention(); + if (ret != null) { + ret.getDataFiles().remove(datafile); + if (ret.getDataFiles().isEmpty()) { + orphanedRetentions.add(ret); + } + } + // Save merges the datafile with a retention into the context + datafile.setRetention(retention); + fileService.save(datafile); + } + //Call service to get action logged + long retentionId = retentionService.save(retention, authenticatedUser.getIdentifier()); + if (orphanedRetentions.size() > 0) { + for (Retention ret : orphanedRetentions) { + retentionService.delete(ret, authenticatedUser.getIdentifier()); + } + } + //If superuser, report changes to any released files + if (authenticatedUser.isSuperuser()) { + String releasedFiles = filesToRetention.stream().filter(d -> d.isReleased()) + .map(d -> d.getId().toString()).collect(Collectors.joining(",")); + if (!releasedFiles.isBlank()) { + actionLogSvc + .log(new ActionLogRecord(ActionLogRecord.ActionType.Admin, "retentionAddedTo") + .setInfo("Retention id: " + retention.getId() + " added for released file(s), id(s) " + + releasedFiles + ".") + .setUserIdentifier(authenticatedUser.getIdentifier())); + } + } + return ok(Json.createObjectBuilder().add("message", "File(s) retention period has been set or updated")); + } else { + return error(BAD_REQUEST, "Not all files belong to dataset"); + } + } + + @POST + @AuthRequired + @Path("{id}/files/actions/:unset-retention") + public Response removeFileRetention(@Context ContainerRequestContext crc, @PathParam("id") String id, String jsonBody){ + + // user is authenticated + AuthenticatedUser authenticatedUser = null; + try { + authenticatedUser = getRequestAuthenticatedUserOrDie(crc); + } catch (WrappedResponse ex) { + return error(Status.UNAUTHORIZED, "Authentication is required."); + } + + Dataset dataset; + try { + dataset = findDatasetOrDie(id); + } catch (WrappedResponse ex) { + return ex.getResponse(); + } + + // client is superadmin or (client has EditDataset permission on these files and files are unreleased) + // check if files are unreleased (DRAFT?) 
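A minimal sketch of how a client could exercise the new :set-retention endpoint added above, using java.net.http from the JDK. The base URL, API token, dataset id, and file ids are illustrative assumptions; the path and the dateUnavailable/reason/fileIds body keys come from the handler itself, and X-Dataverse-key is the usual Dataverse API token header:

import java.net.URI;
import java.net.http.HttpClient;
import java.net.http.HttpRequest;
import java.net.http.HttpResponse;

public class SetRetentionExample {
    public static void main(String[] args) throws Exception {
        String baseUrl = "http://localhost:8080";                  // assumed installation
        String apiToken = "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx";  // assumed API token
        long datasetId = 42;                                       // assumed dataset database id

        // dateUnavailable must lie in the future and be no earlier than
        // :MinRetentionDurationInMonths from now, per the checks in the handler.
        String body = "{\"dateUnavailable\":\"2030-12-31\","
                + "\"reason\":\"Contractual retention period\","
                + "\"fileIds\":[301,302]}";                        // assumed file ids

        HttpRequest request = HttpRequest.newBuilder()
                .uri(URI.create(baseUrl + "/api/datasets/" + datasetId + "/files/actions/:set-retention"))
                .header("X-Dataverse-key", apiToken)
                .POST(HttpRequest.BodyPublishers.ofString(body))
                .build();

        HttpResponse<String> response = HttpClient.newHttpClient()
                .send(request, HttpResponse.BodyHandlers.ofString());
        System.out.println(response.statusCode() + " " + response.body());
    }
}

On success the handler returns 200 with the "File(s) retention period has been set or updated" message; past dates, dates inside the minimum window, or file ids outside the dataset produce the 400 and 403 responses visible in the hunk above.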
+ //ToDo - here and below - check the release status of files and not the dataset state (draft dataset version still can have released files) + if ((!authenticatedUser.isSuperuser() && (dataset.getLatestVersion().getVersionState() != DatasetVersion.VersionState.DRAFT) ) || !permissionService.userOn(authenticatedUser, dataset).has(Permission.EditDataset)) { + return error(Status.FORBIDDEN, "Either the files are released and user is not a superuser or user does not have EditDataset permissions"); + } + + // check if retentions are allowed (:MinRetentionDurationInMonths), gets the :MinRetentionDurationInMonths setting variable, if 0 or not set (null) return 400 + int minRetentionDurationInMonths = 0; + try { + minRetentionDurationInMonths = Integer.parseInt(settingsService.get(SettingsServiceBean.Key.MinRetentionDurationInMonths.toString())); + } catch (NumberFormatException nfe){ + if (nfe.getMessage().contains("null")) { + return error(Status.BAD_REQUEST, "No Retention periods allowed"); + } + } + if (minRetentionDurationInMonths == 0){ + return error(Status.BAD_REQUEST, "No Retention periods allowed"); + } + + JsonObject json; + try { + json = JsonUtil.getJsonObject(jsonBody); + } catch (JsonException ex) { + return error(Status.BAD_REQUEST, "Invalid JSON; error message: " + ex.getMessage()); + } + + List<DataFile> datasetFiles = dataset.getFiles(); + List<DataFile> retentionFilesToUnset = new LinkedList<>(); + + // extract fileIds from json, find datafiles and add to list + if (json.containsKey("fileIds")){ + try { + JsonArray fileIds = json.getJsonArray("fileIds"); + for (JsonValue jsv : fileIds) { + try { + DataFile dataFile = findDataFileOrDie(jsv.toString()); + retentionFilesToUnset.add(dataFile); + } catch (WrappedResponse ex) { + return ex.getResponse(); + } + } + } catch (ClassCastException ccex) { + return error(Status.BAD_REQUEST, "fileIds must be an array of id strings"); + } catch (NullPointerException npex) { + return error(Status.BAD_REQUEST, "No fileIds specified"); + } + } else { + return error(Status.BAD_REQUEST, "No fileIds specified"); + } + + List<Retention> orphanedRetentions = new ArrayList<>(); + // check if files belong to dataset + if (datasetFiles.containsAll(retentionFilesToUnset)) { + JsonArrayBuilder restrictedFiles = Json.createArrayBuilder(); + boolean badFiles = false; + for (DataFile datafile : retentionFilesToUnset) { + // superuser can overrule an existing retention, even on released files + if (datafile.getRetention()==null || ((datafile.isReleased() && datafile.getRetention() != null) && !authenticatedUser.isSuperuser())) { + restrictedFiles.add(datafile.getId()); + badFiles = true; + } + } + if (badFiles) { + return Response.status(Status.FORBIDDEN) + .entity(NullSafeJsonBuilder.jsonObjectBuilder().add("status", ApiConstants.STATUS_ERROR) + .add("message", "The following files do not have retention periods or you do not have permission to remove their retention periods") + .add("files", restrictedFiles).build()) + .type(MediaType.APPLICATION_JSON_TYPE).build(); + } + // Good request, so remove the retention from the files. Track any existing retentions so we can + // delete them if there are no files left that reference them. 
+ for (DataFile datafile : retentionFilesToUnset) { + Retention ret = datafile.getRetention(); + if (ret != null) { + ret.getDataFiles().remove(datafile); + if (ret.getDataFiles().isEmpty()) { + orphanedRetentions.add(ret); + } + } + // Save merges the datafile, now without a retention, into the context + datafile.setRetention(null); + fileService.save(datafile); + } + if (orphanedRetentions.size() > 0) { + for (Retention ret : orphanedRetentions) { + retentionService.delete(ret, authenticatedUser.getIdentifier()); + } + } + String releasedFiles = retentionFilesToUnset.stream().filter(d -> d.isReleased()).map(d -> d.getId().toString()).collect(Collectors.joining(",")); + if (!releasedFiles.isBlank()) { + ActionLogRecord removeRecord = new ActionLogRecord(ActionLogRecord.ActionType.Admin, "retentionRemovedFrom").setInfo("Retention removed from released file(s), id(s) " + releasedFiles + "."); + removeRecord.setUserIdentifier(authenticatedUser.getIdentifier()); + actionLogSvc.log(removeRecord); + } + return ok(Json.createObjectBuilder().add("message", "Retention periods were removed from file(s)")); + } else { + return error(BAD_REQUEST, "Not all files belong to dataset"); + } + } @PUT @AuthRequired @@ -1946,6 +2277,14 @@ public Response setDataFileAsThumbnail(@Context ContainerRequestContext crc, @Pa @AuthRequired @Path("{id}/thumbnail") @Consumes(MediaType.MULTIPART_FORM_DATA) + @Produces("application/json") + @Operation(summary = "Uploads a logo for a dataset", + description = "Uploads a logo for a dataset") + @APIResponse(responseCode = "200", + description = "Dataset logo uploaded successfully") + @Tag(name = "uploadDatasetLogo", + description = "Uploads a logo for a dataset") + @RequestBody(content = @Content(mediaType = MediaType.MULTIPART_FORM_DATA)) public Response uploadDatasetLogo(@Context ContainerRequestContext crc, @PathParam("id") String idSupplied, @FormDataParam("file") InputStream inputStream) { try { DatasetThumbnail datasetThumbnail = execCommand(new UpdateDatasetThumbnailCommand(createDataverseRequest(getRequestUser(crc)), findDatasetOrDie(idSupplied), UpdateDatasetThumbnailCommand.UserIntent.setNonDatasetFileAsThumbnail, null, inputStream)); @@ -1967,6 +2306,7 @@ public Response removeDatasetLogo(@Context ContainerRequestContext crc, @PathPar } } + @Deprecated(forRemoval = true, since = "2024-07-07") @GET @AuthRequired @Path("{identifier}/dataCaptureModule/rsync") @@ -2147,7 +2487,8 @@ public Response returnToAuthor(@Context ContainerRequestContext crc, @PathParam( Dataset dataset = findDatasetOrDie(idSupplied); String reasonForReturn = null; reasonForReturn = json.getString("reasonForReturn"); - if (reasonForReturn == null || reasonForReturn.isEmpty()) { + if ((reasonForReturn == null || reasonForReturn.isEmpty()) + && !FeatureFlags.DISABLE_RETURN_TO_AUTHOR_REASON.enabled()) { return error(Response.Status.BAD_REQUEST, BundleUtil.getStringFromBundle("dataset.reject.datasetNotInReview")); } AuthenticatedUser authenticatedUser = getRequestAuthenticatedUserOrDie(crc); @@ -2418,6 +2759,14 @@ public Response completeMPUpload(@Context ContainerRequestContext crc, String pa @AuthRequired @Path("{id}/add") @Consumes(MediaType.MULTIPART_FORM_DATA) + @Produces("application/json") + @Operation(summary = "Uploads a file for a dataset", + description = "Uploads a file for a dataset") + @APIResponse(responseCode = "200", + description = "File uploaded successfully to dataset") + @Tag(name = "addFileToDataset", + description = "Uploads a file for a dataset") + @RequestBody(content = 
@Content(mediaType = MediaType.MULTIPART_FORM_DATA)) public Response addFileToDataset(@Context ContainerRequestContext crc, @PathParam("id") String idSupplied, @FormDataParam("jsonData") String jsonData, @@ -2694,25 +3043,49 @@ public static <T> T handleVersion(String versionId, DsVersionHandler<T> hdl) * includeDeaccessioned defaults to false and checkPermsWhenDeaccessioned to false. Use it only when you are sure that you don't need to work with * a deaccessioned dataset. */ - private DatasetVersion getDatasetVersionOrDie(final DataverseRequest req, String versionNumber, final Dataset ds, UriInfo uriInfo, HttpHeaders headers) throws WrappedResponse { + private DatasetVersion getDatasetVersionOrDie(final DataverseRequest req, + String versionNumber, + final Dataset ds, + UriInfo uriInfo, + HttpHeaders headers) throws WrappedResponse { //The checkPerms was added to check the permissions ONLY when the dataset is deaccessioned. - return getDatasetVersionOrDie(req, versionNumber, ds, uriInfo, headers, false, false); + boolean checkFilePerms = false; + boolean includeDeaccessioned = false; + return getDatasetVersionOrDie(req, versionNumber, ds, uriInfo, headers, includeDeaccessioned, checkFilePerms); } /* * checkPermsWhenDeaccessioned defaults to true. Be aware that the version will only be obtainable if the user has edit permissions. */ - private DatasetVersion getDatasetVersionOrDie(final DataverseRequest req, String versionNumber, final Dataset ds, UriInfo uriInfo, HttpHeaders headers, boolean includeDeaccessioned) throws WrappedResponse{ - return getDatasetVersionOrDie(req, versionNumber, ds, uriInfo, headers, includeDeaccessioned, true); + private DatasetVersion getDatasetVersionOrDie(final DataverseRequest req, String versionNumber, final Dataset ds, + UriInfo uriInfo, HttpHeaders headers, boolean includeDeaccessioned) throws WrappedResponse { + boolean checkPermsWhenDeaccessioned = true; + boolean bypassAccessCheck = false; + return getDatasetVersionOrDie(req, versionNumber, ds, uriInfo, headers, includeDeaccessioned, checkPermsWhenDeaccessioned, bypassAccessCheck); + } + + /* + * checkPermsWhenDeaccessioned defaults to true. Be aware that the version will only be obtainable if the user has edit permissions. + */ + private DatasetVersion getDatasetVersionOrDie(final DataverseRequest req, String versionNumber, final Dataset ds, + UriInfo uriInfo, HttpHeaders headers, boolean includeDeaccessioned, boolean checkPermsWhenDeaccessioned) throws WrappedResponse { + boolean bypassAccessCheck = false; + return getDatasetVersionOrDie(req, versionNumber, ds, uriInfo, headers, includeDeaccessioned, checkPermsWhenDeaccessioned, bypassAccessCheck); } /* * Allows defining when the permissions should be checked when a deaccessioned dataset is requested. If the user doesn't have edit permissions, the request will result in an error. 
*/ - private DatasetVersion getDatasetVersionOrDie(final DataverseRequest req, String versionNumber, final Dataset ds, UriInfo uriInfo, HttpHeaders headers, boolean includeDeaccessioned, boolean checkPermsWhenDeaccessioned) throws WrappedResponse { + private DatasetVersion getDatasetVersionOrDie(final DataverseRequest req, String versionNumber, final Dataset ds, + UriInfo uriInfo, HttpHeaders headers, boolean includeDeaccessioned, boolean checkPermsWhenDeaccessioned, + boolean bypassAccessCheck) + throws WrappedResponse { + DatasetVersion dsv = findDatasetVersionOrDie(req, versionNumber, ds, includeDeaccessioned, checkPermsWhenDeaccessioned); + if (dsv == null || dsv.getId() == null) { - throw new WrappedResponse(notFound("Dataset version " + versionNumber + " of dataset " + ds.getId() + " not found")); + throw new WrappedResponse( + notFound("Dataset version " + versionNumber + " of dataset " + ds.getId() + " not found")); } if (dsv.isReleased()&& uriInfo!=null) { MakeDataCountLoggingServiceBean.MakeDataCountEntry entry = new MakeDataCountEntry(uriInfo, headers, dvRequestService, ds); @@ -2720,7 +3093,7 @@ private DatasetVersion getDatasetVersionOrDie(final DataverseRequest req, String } return dsv; } - + @GET @Path("{identifier}/locks") public Response getLocksForDataset(@PathParam("identifier") String id, @QueryParam("type") DatasetLock.Reason lockType) { @@ -3619,6 +3992,14 @@ public Response requestGlobusUpload(@Context ContainerRequestContext crc, @PathP @AuthRequired @Path("{id}/addGlobusFiles") @Consumes(MediaType.MULTIPART_FORM_DATA) + @Produces("application/json") + @Operation(summary = "Uploads a Globus file for a dataset", + description = "Uploads a Globus file for a dataset") + @APIResponse(responseCode = "200", + description = "Globus file uploaded successfully to dataset") + @Tag(name = "addGlobusFilesToDataset", + description = "Uploads a Globus file for a dataset") + @RequestBody(content = @Content(mediaType = MediaType.MULTIPART_FORM_DATA)) public Response addGlobusFilesToDataset(@Context ContainerRequestContext crc, @PathParam("id") String datasetId, @FormDataParam("jsonData") String jsonData, @@ -3721,11 +4102,11 @@ public Response getGlobusDownloadParams(@Context ContainerRequestContext crc, @P // ------------------------------------- // (1) Get the user from the ContainerRequestContext // ------------------------------------- - AuthenticatedUser authUser; + AuthenticatedUser authUser = null; try { authUser = getRequestAuthenticatedUserOrDie(crc); } catch (WrappedResponse e) { - return e.getResponse(); + logger.fine("guest user globus download"); } // ------------------------------------- // (2) Get the Dataset Id @@ -4001,6 +4382,14 @@ public Response monitorGlobusDownload(@Context ContainerRequestContext crc, @Pat @AuthRequired @Path("{id}/addFiles") @Consumes(MediaType.MULTIPART_FORM_DATA) + @Produces("application/json") + @Operation(summary = "Uploads a set of files to a dataset", + description = "Uploads a set of files to a dataset") + @APIResponse(responseCode = "200", + description = "Files uploaded successfully to dataset") + @Tag(name = "addFilesToDataset", + description = "Uploads a set of files to a dataset") + @RequestBody(content = @Content(mediaType = MediaType.MULTIPART_FORM_DATA)) public Response addFilesToDataset(@Context ContainerRequestContext crc, @PathParam("id") String idSupplied, @FormDataParam("jsonData") String jsonData) { @@ -4068,6 +4457,14 @@ public Response addFilesToDataset(@Context ContainerRequestContext crc, @PathPar @AuthRequired 
@Path("{id}/replaceFiles") @Consumes(MediaType.MULTIPART_FORM_DATA) + @Produces("application/json") + @Operation(summary = "Replace a set of files to a dataset", + description = "Replace a set of files to a dataset") + @APIResponse(responseCode = "200", + description = "Files replaced successfully to dataset") + @Tag(name = "replaceFilesInDataset", + description = "Replace a set of files to a dataset") + @RequestBody(content = @Content(mediaType = MediaType.MULTIPART_FORM_DATA)) public Response replaceFilesInDataset(@Context ContainerRequestContext crc, @PathParam("id") String idSupplied, @FormDataParam("jsonData") String jsonData) { @@ -4424,8 +4821,11 @@ public Response getDatasetVersionCitation(@Context ContainerRequestContext crc, @QueryParam("includeDeaccessioned") boolean includeDeaccessioned, @Context UriInfo uriInfo, @Context HttpHeaders headers) { + boolean checkFilePerms = false; return response(req -> ok( - getDatasetVersionOrDie(req, versionId, findDatasetOrDie(datasetId), uriInfo, headers, includeDeaccessioned, false).getCitation(true, false)), getRequestUser(crc)); + getDatasetVersionOrDie(req, versionId, findDatasetOrDie(datasetId), uriInfo, headers, + includeDeaccessioned, checkFilePerms).getCitation(true, false)), + getRequestUser(crc)); } @POST @@ -4449,7 +4849,10 @@ public Response deaccessionDataset(@Context ContainerRequestContext crc, @PathPa } } execCommand(new DeaccessionDatasetVersionCommand(req, datasetVersion, false)); - return ok("Dataset " + datasetId + " deaccessioned for version " + versionId); + + return ok("Dataset " + + (":persistentId".equals(datasetId) ? datasetVersion.getDataset().getGlobalId().asString() : datasetId) + + " deaccessioned for version " + versionId); } catch (JsonParsingException jpe) { return error(Response.Status.BAD_REQUEST, "Error parsing Json: " + jpe.getMessage()); } diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Dataverses.java b/src/main/java/edu/harvard/iq/dataverse/api/Dataverses.java index a1dbc3a1de6..02b60fdb32a 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/Dataverses.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/Dataverses.java @@ -1,24 +1,10 @@ package edu.harvard.iq.dataverse.api; -import edu.harvard.iq.dataverse.DataFile; -import edu.harvard.iq.dataverse.Dataset; -import edu.harvard.iq.dataverse.DatasetFieldType; -import edu.harvard.iq.dataverse.DatasetVersion; -import edu.harvard.iq.dataverse.Dataverse; -import edu.harvard.iq.dataverse.DataverseFacet; -import edu.harvard.iq.dataverse.DataverseContact; -import edu.harvard.iq.dataverse.DataverseMetadataBlockFacet; -import edu.harvard.iq.dataverse.DataverseServiceBean; +import edu.harvard.iq.dataverse.*; import edu.harvard.iq.dataverse.api.auth.AuthRequired; import edu.harvard.iq.dataverse.api.datadeposit.SwordServiceBean; import edu.harvard.iq.dataverse.api.dto.DataverseMetadataBlockFacetDTO; import edu.harvard.iq.dataverse.authorization.DataverseRole; -import edu.harvard.iq.dataverse.DvObject; -import edu.harvard.iq.dataverse.GlobalId; -import edu.harvard.iq.dataverse.GuestbookResponseServiceBean; -import edu.harvard.iq.dataverse.GuestbookServiceBean; -import edu.harvard.iq.dataverse.MetadataBlock; -import edu.harvard.iq.dataverse.RoleAssignment; import edu.harvard.iq.dataverse.api.dto.ExplicitGroupDTO; import edu.harvard.iq.dataverse.api.dto.RoleAssignmentDTO; @@ -34,45 +20,7 @@ import edu.harvard.iq.dataverse.authorization.users.User; import edu.harvard.iq.dataverse.dataverse.DataverseUtil; import 
edu.harvard.iq.dataverse.engine.command.DataverseRequest; -import edu.harvard.iq.dataverse.engine.command.impl.AddRoleAssigneesToExplicitGroupCommand; -import edu.harvard.iq.dataverse.engine.command.impl.AssignRoleCommand; -import edu.harvard.iq.dataverse.engine.command.impl.CreateDataverseCommand; -import edu.harvard.iq.dataverse.engine.command.impl.CreateExplicitGroupCommand; -import edu.harvard.iq.dataverse.engine.command.impl.CreateNewDatasetCommand; -import edu.harvard.iq.dataverse.engine.command.impl.CreateRoleCommand; -import edu.harvard.iq.dataverse.engine.command.impl.DeleteCollectionQuotaCommand; -import edu.harvard.iq.dataverse.engine.command.impl.DeleteDataverseCommand; -import edu.harvard.iq.dataverse.engine.command.impl.DeleteDataverseLinkingDataverseCommand; -import edu.harvard.iq.dataverse.engine.command.impl.DeleteExplicitGroupCommand; -import edu.harvard.iq.dataverse.engine.command.impl.GetDatasetSchemaCommand; -import edu.harvard.iq.dataverse.engine.command.impl.GetCollectionQuotaCommand; -import edu.harvard.iq.dataverse.engine.command.impl.GetCollectionStorageUseCommand; -import edu.harvard.iq.dataverse.engine.command.impl.UpdateMetadataBlockFacetRootCommand; -import edu.harvard.iq.dataverse.engine.command.impl.GetDataverseCommand; -import edu.harvard.iq.dataverse.engine.command.impl.GetDataverseStorageSizeCommand; -import edu.harvard.iq.dataverse.engine.command.impl.GetExplicitGroupCommand; -import edu.harvard.iq.dataverse.engine.command.impl.ImportDatasetCommand; -import edu.harvard.iq.dataverse.engine.command.impl.LinkDataverseCommand; -import edu.harvard.iq.dataverse.engine.command.impl.ListDataverseContentCommand; -import edu.harvard.iq.dataverse.engine.command.impl.ListExplicitGroupsCommand; -import edu.harvard.iq.dataverse.engine.command.impl.ListFacetsCommand; -import edu.harvard.iq.dataverse.engine.command.impl.ListMetadataBlockFacetsCommand; -import edu.harvard.iq.dataverse.engine.command.impl.ListMetadataBlocksCommand; -import edu.harvard.iq.dataverse.engine.command.impl.ListRoleAssignments; -import edu.harvard.iq.dataverse.engine.command.impl.ListRolesCommand; -import edu.harvard.iq.dataverse.engine.command.impl.PublishDatasetCommand; -import edu.harvard.iq.dataverse.engine.command.impl.PublishDatasetResult; -import edu.harvard.iq.dataverse.engine.command.impl.MoveDataverseCommand; -import edu.harvard.iq.dataverse.engine.command.impl.PublishDataverseCommand; -import edu.harvard.iq.dataverse.engine.command.impl.RemoveRoleAssigneesFromExplicitGroupCommand; -import edu.harvard.iq.dataverse.engine.command.impl.RevokeRoleCommand; -import edu.harvard.iq.dataverse.engine.command.impl.SetCollectionQuotaCommand; -import edu.harvard.iq.dataverse.engine.command.impl.UpdateDataverseCommand; -import edu.harvard.iq.dataverse.engine.command.impl.UpdateDataverseDefaultContributorRoleCommand; -import edu.harvard.iq.dataverse.engine.command.impl.UpdateDataverseMetadataBlocksCommand; -import edu.harvard.iq.dataverse.engine.command.impl.UpdateExplicitGroupCommand; -import edu.harvard.iq.dataverse.engine.command.impl.UpdateMetadataBlockFacetsCommand; -import edu.harvard.iq.dataverse.engine.command.impl.ValidateDatasetJsonCommand; +import edu.harvard.iq.dataverse.engine.command.impl.*; import edu.harvard.iq.dataverse.pidproviders.PidProvider; import edu.harvard.iq.dataverse.pidproviders.PidUtil; import edu.harvard.iq.dataverse.settings.JvmSettings; @@ -87,23 +35,14 @@ import edu.harvard.iq.dataverse.util.json.JsonPrinter; import edu.harvard.iq.dataverse.util.json.JsonUtil; 
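The MicroProfile OpenAPI annotations added to the Datasets.java endpoints above all follow one pattern; a generic sketch of that pattern on a hypothetical multipart endpoint (the class, path, and names here are illustrative, not part of the changeset):

import jakarta.ws.rs.Consumes;
import jakarta.ws.rs.POST;
import jakarta.ws.rs.Path;
import jakarta.ws.rs.Produces;
import jakarta.ws.rs.core.MediaType;
import jakarta.ws.rs.core.Response;
import org.eclipse.microprofile.openapi.annotations.Operation;
import org.eclipse.microprofile.openapi.annotations.media.Content;
import org.eclipse.microprofile.openapi.annotations.parameters.RequestBody;
import org.eclipse.microprofile.openapi.annotations.responses.APIResponse;
import org.eclipse.microprofile.openapi.annotations.tags.Tag;

@Path("examples")
public class ExampleResource {

    // Hypothetical endpoint showing the @Operation/@APIResponse/@Tag/@RequestBody
    // combination that the uploadDatasetLogo, addFileToDataset, and addGlobusFiles
    // hunks apply to their multipart endpoints.
    @POST
    @Path("upload")
    @Consumes(MediaType.MULTIPART_FORM_DATA)
    @Produces("application/json")
    @Operation(summary = "Uploads an example file",
            description = "Uploads an example file")
    @APIResponse(responseCode = "200",
            description = "File uploaded successfully")
    @Tag(name = "uploadExample",
            description = "Uploads an example file")
    @RequestBody(content = @Content(mediaType = MediaType.MULTIPART_FORM_DATA))
    public Response uploadExample() {
        return Response.ok("{\"status\":\"OK\"}").build();
    }
}

Pairing @Operation/@APIResponse/@Tag with @RequestBody(content = @Content(mediaType = ...)) lets the generated OpenAPI document describe multipart uploads that the JAX-RS signature alone leaves untyped.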
-import static edu.harvard.iq.dataverse.util.json.JsonPrinter.brief; -import java.util.Collections; -import java.util.LinkedList; -import java.util.List; -import java.util.TreeSet; +import java.io.StringReader; +import java.util.*; import java.util.logging.Level; import java.util.logging.Logger; import jakarta.ejb.EJB; import jakarta.ejb.EJBException; import jakarta.ejb.Stateless; -import jakarta.json.Json; -import jakarta.json.JsonArrayBuilder; -import jakarta.json.JsonNumber; -import jakarta.json.JsonObject; -import jakarta.json.JsonObjectBuilder; -import jakarta.json.JsonString; -import jakarta.json.JsonValue; +import jakarta.json.*; import jakarta.json.JsonValue.ValueType; import jakarta.json.stream.JsonParsingException; import jakarta.validation.ConstraintViolationException; @@ -127,10 +66,6 @@ import java.io.OutputStream; import java.text.MessageFormat; import java.text.SimpleDateFormat; -import java.util.Arrays; -import java.util.Date; -import java.util.Map; -import java.util.Optional; import java.util.stream.Collectors; import jakarta.servlet.http.HttpServletResponse; import jakarta.ws.rs.WebApplicationException; @@ -168,6 +103,12 @@ public class Dataverses extends AbstractApiBean { @EJB DataverseServiceBean dataverseService; + @EJB + DataverseLinkingServiceBean linkingService; + + @EJB + FeaturedDataverseServiceBean featuredDataverseService; + @EJB SwordServiceBean swordService; @@ -696,6 +637,43 @@ public Response updateAttribute(@Context ContainerRequestContext crc, @PathParam } } + @PUT + @AuthRequired + @Path("{identifier}/inputLevels") + public Response updateInputLevels(@Context ContainerRequestContext crc, @PathParam("identifier") String identifier, String jsonBody) { + try { + Dataverse dataverse = findDataverseOrDie(identifier); + List newInputLevels = parseInputLevels(jsonBody, dataverse); + execCommand(new UpdateDataverseInputLevelsCommand(dataverse, createDataverseRequest(getRequestUser(crc)), newInputLevels)); + return ok(BundleUtil.getStringFromBundle("dataverse.update.success"), JsonPrinter.json(dataverse)); + } catch (WrappedResponse e) { + return e.getResponse(); + } + } + + private List parseInputLevels(String jsonBody, Dataverse dataverse) throws WrappedResponse { + JsonArray inputLevelsArray = Json.createReader(new StringReader(jsonBody)).readArray(); + + List newInputLevels = new ArrayList<>(); + for (JsonValue value : inputLevelsArray) { + JsonObject inputLevel = (JsonObject) value; + String datasetFieldTypeName = inputLevel.getString("datasetFieldTypeName"); + DatasetFieldType datasetFieldType = datasetFieldSvc.findByName(datasetFieldTypeName); + + if (datasetFieldType == null) { + String errorMessage = MessageFormat.format(BundleUtil.getStringFromBundle("dataverse.updateinputlevels.error.invalidfieldtypename"), datasetFieldTypeName); + throw new WrappedResponse(badRequest(errorMessage)); + } + + boolean required = inputLevel.getBoolean("required"); + boolean include = inputLevel.getBoolean("include"); + + newInputLevels.add(new DataverseFieldTypeInputLevel(datasetFieldType, dataverse, required, include)); + } + + return newInputLevels; + } + @DELETE @AuthRequired @Path("{linkingDataverseId}/deleteLink/{linkedDataverseId}") @@ -710,14 +688,20 @@ public Response deleteDataverseLinkingDataverse(@Context ContainerRequestContext @GET @AuthRequired @Path("{identifier}/metadatablocks") - public Response listMetadataBlocks(@Context ContainerRequestContext crc, @PathParam("identifier") String dvIdtf) { + public Response listMetadataBlocks(@Context 
ContainerRequestContext crc, + @PathParam("identifier") String dvIdtf, + @QueryParam("onlyDisplayedOnCreate") boolean onlyDisplayedOnCreate, + @QueryParam("returnDatasetFieldTypes") boolean returnDatasetFieldTypes) { try { - JsonArrayBuilder arr = Json.createArrayBuilder(); - final List<MetadataBlock> blocks = execCommand(new ListMetadataBlocksCommand(createDataverseRequest(getRequestUser(crc)), findDataverseOrDie(dvIdtf))); - for (MetadataBlock mdb : blocks) { - arr.add(brief.json(mdb)); - } - return ok(arr); + Dataverse dataverse = findDataverseOrDie(dvIdtf); + final List<MetadataBlock> metadataBlocks = execCommand( + new ListMetadataBlocksCommand( + createDataverseRequest(getRequestUser(crc)), + dataverse, + onlyDisplayedOnCreate + ) + ); + return ok(json(metadataBlocks, returnDatasetFieldTypes, onlyDisplayedOnCreate, dataverse)); } catch (WrappedResponse we) { return we.getResponse(); } @@ -821,6 +805,111 @@ public Response listFacets(@Context ContainerRequestContext crc, @PathParam("ide } } + + @GET + @AuthRequired + @Path("{identifier}/featured") + /* + Allows user to get the collections that are featured by a given collection + probably more for SPA than end user + */ + public Response getFeaturedDataverses(@Context ContainerRequestContext crc, @PathParam("identifier") String dvIdtf, String dvAliases) { + + try { + User u = getRequestUser(crc); + DataverseRequest r = createDataverseRequest(u); + Dataverse dataverse = findDataverseOrDie(dvIdtf); + JsonArrayBuilder fs = Json.createArrayBuilder(); + for (Dataverse f : execCommand(new ListFeaturedCollectionsCommand(r, dataverse))) { + fs.add(f.getAlias()); + } + return ok(fs); + } catch (WrappedResponse e) { + return e.getResponse(); + } + } + + + @POST + @AuthRequired + @Path("{identifier}/featured") + /** + * Allows user to set featured dataverses - must have edit dataverse permission + * + */ + public Response setFeaturedDataverses(@Context ContainerRequestContext crc, @PathParam("identifier") String dvIdtf, String dvAliases) { + List<Dataverse> dvsFromInput = new LinkedList<>(); + + + try { + + for (JsonString dvAlias : Util.asJsonArray(dvAliases).getValuesAs(JsonString.class)) { + Dataverse dvToBeFeatured = dataverseService.findByAlias(dvAlias.getString()); + if (dvToBeFeatured == null) { + return error(Response.Status.BAD_REQUEST, "Can't find dataverse collection with alias '" + dvAlias + "'"); + } + dvsFromInput.add(dvToBeFeatured); + } + + if (dvsFromInput.isEmpty()) { + return error(Response.Status.BAD_REQUEST, "Please provide a valid Json array of dataverse collection aliases to be featured."); + } + + Dataverse dataverse = findDataverseOrDie(dvIdtf); + List<Dataverse> featuredSource = new ArrayList<>(); + List<Dataverse> featuredTarget = new ArrayList<>(); + featuredSource.addAll(dataverseService.findAllPublishedByOwnerId(dataverse.getId())); + featuredSource.addAll(linkingService.findLinkedDataverses(dataverse.getId())); + List<DataverseFeaturedDataverse> featuredList = featuredDataverseService.findByDataverseId(dataverse.getId()); + + if (featuredSource.isEmpty()) { + return error(Response.Status.BAD_REQUEST, "There are no collections available to be featured in Dataverse collection '" + dataverse.getDisplayName() + "'."); + } + + for (DataverseFeaturedDataverse dfd : featuredList) { + Dataverse fd = dfd.getFeaturedDataverse(); + featuredTarget.add(fd); + featuredSource.remove(fd); + } + + for (Dataverse test : dvsFromInput) { + if (featuredTarget.contains(test)) { + return error(Response.Status.BAD_REQUEST, "Dataverse collection '" + test.getDisplayName() + "' is already featured in Dataverse collection '" + 
dataverse.getDisplayName() + "'."); + } + + if (featuredSource.contains(test)) { + featuredTarget.add(test); + } else { + return error(Response.Status.BAD_REQUEST, "Dataverse collection '" + test.getDisplayName() + "' may not be featured in Dataverse collection '" + dataverse.getDisplayName() + "'."); + } + + } + // by passing null for Facets and DataverseFieldTypeInputLevel, those are not changed + execCommand(new UpdateDataverseCommand(dataverse, null, featuredTarget, createDataverseRequest(getRequestUser(crc)), null)); + return ok("Featured Dataverses of dataverse " + dvIdtf + " updated."); + + } catch (WrappedResponse ex) { + return ex.getResponse(); + } catch (JsonParsingException jpe){ + return error(Response.Status.BAD_REQUEST, "Please provide a valid Json array of dataverse collection aliases to be featured."); + } + + } + + @DELETE + @AuthRequired + @Path("{identifier}/featured") + public Response deleteFeaturedCollections(@Context ContainerRequestContext crc, @PathParam("identifier") String dvIdtf) throws WrappedResponse { + try { + Dataverse dataverse = findDataverseOrDie(dvIdtf); + List featuredTarget = new ArrayList<>(); + execCommand(new UpdateDataverseCommand(dataverse, null, featuredTarget, createDataverseRequest(getRequestUser(crc)), null)); + return ok(BundleUtil.getStringFromBundle("dataverses.api.delete.featured.collections.successful")); + } catch (WrappedResponse ex) { + return ex.getResponse(); + } + } + @POST @AuthRequired @Path("{identifier}/facets") diff --git a/src/main/java/edu/harvard/iq/dataverse/api/DownloadInstanceWriter.java b/src/main/java/edu/harvard/iq/dataverse/api/DownloadInstanceWriter.java index 89b22b76a7d..c815caa09eb 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/DownloadInstanceWriter.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/DownloadInstanceWriter.java @@ -283,7 +283,7 @@ public void writeTo(DownloadInstance di, Class clazz, Type type, Annotation[] storageIO = ImageThumbConverter.getImageThumbnailAsInputStream(storageIO, ImageThumbConverter.DEFAULT_THUMBNAIL_SIZE); } else { try { - int size = new Integer(di.getConversionParamValue()); + int size = Integer.parseInt(di.getConversionParamValue()); if (size > 0) { storageIO = ImageThumbConverter.getImageThumbnailAsInputStream(storageIO, size); } @@ -294,8 +294,10 @@ public void writeTo(DownloadInstance di, Class clazz, Type type, Annotation[] // and, since we now have tabular data files that can // have thumbnail previews... obviously, we don't want to // add the variable header to the image stream! 
- storageIO.setNoVarHeader(Boolean.TRUE); - storageIO.setVarHeader(null); + if (storageIO != null) { // ImageThumbConverter returns null if thumbnail conversion fails + storageIO.setNoVarHeader(Boolean.TRUE); + storageIO.setVarHeader(null); + } } } else if (dataFile.isTabularData()) { logger.fine("request for tabular data download;"); diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Files.java b/src/main/java/edu/harvard/iq/dataverse/api/Files.java index 2d48322c90e..d786aab35a8 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/Files.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/Files.java @@ -64,6 +64,13 @@ import static jakarta.ws.rs.core.Response.Status.FORBIDDEN; import jakarta.ws.rs.core.UriInfo; + +import org.eclipse.microprofile.openapi.annotations.Operation; +import org.eclipse.microprofile.openapi.annotations.media.Content; +import org.eclipse.microprofile.openapi.annotations.media.Schema; +import org.eclipse.microprofile.openapi.annotations.parameters.RequestBody; +import org.eclipse.microprofile.openapi.annotations.responses.APIResponse; +import org.eclipse.microprofile.openapi.annotations.tags.Tag; import org.glassfish.jersey.media.multipart.FormDataBodyPart; import org.glassfish.jersey.media.multipart.FormDataContentDisposition; import org.glassfish.jersey.media.multipart.FormDataParam; @@ -176,6 +183,14 @@ public Response restrictFileInDataset(@Context ContainerRequestContext crc, @Pat @AuthRequired @Path("{id}/replace") @Consumes(MediaType.MULTIPART_FORM_DATA) + @Produces("application/json") + @Operation(summary = "Replace a file on a dataset", + description = "Replace a file to a dataset") + @APIResponse(responseCode = "200", + description = "File replaced successfully on the dataset") + @Tag(name = "replaceFilesInDataset", + description = "Replace a file to a dataset") + @RequestBody(content = @Content(mediaType = MediaType.MULTIPART_FORM_DATA)) public Response replaceFileInDataset( @Context ContainerRequestContext crc, @PathParam("id") String fileIdOrPersistentId, @@ -497,7 +512,7 @@ public Response getFileData(@Context ContainerRequestContext crc, @GET @AuthRequired @Path("{id}/versions/{datasetVersionId}") - public Response getFileData(@Context ContainerRequestContext crc, + public Response getFileDataForVersion(@Context ContainerRequestContext crc, @PathParam("id") String fileIdOrPersistentId, @PathParam("datasetVersionId") String datasetVersionId, @QueryParam("includeDeaccessioned") boolean includeDeaccessioned, diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Groups.java b/src/main/java/edu/harvard/iq/dataverse/api/Groups.java index d56a787c7ff..ed996b8ecf9 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/Groups.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/Groups.java @@ -88,8 +88,8 @@ public Response postIpGroup( JsonObject dto ){ * that group from being created. 
*/ @PUT - @Path("ip/{groupName}") - public Response putIpGroups( @PathParam("groupName") String groupName, JsonObject dto ){ + @Path("ip/{group}") + public Response putIpGroups( @PathParam("group") String groupName, JsonObject dto ){ try { if ( groupName == null || groupName.trim().isEmpty() ) { return badRequest("Group name cannot be empty"); @@ -118,8 +118,8 @@ public Response listIpGroups() { } @GET - @Path("ip/{groupIdtf}") - public Response getIpGroup( @PathParam("groupIdtf") String groupIdtf ) { + @Path("ip/{group}") + public Response getIpGroup( @PathParam("group") String groupIdtf ) { IpGroup grp; if ( isNumeric(groupIdtf) ) { grp = ipGroupPrv.get( Long.parseLong(groupIdtf) ); @@ -131,8 +131,8 @@ public Response getIpGroup( @PathParam("groupIdtf") String groupIdtf ) { } @DELETE - @Path("ip/{groupIdtf}") - public Response deleteIpGroup( @PathParam("groupIdtf") String groupIdtf ) { + @Path("ip/{group}") + public Response deleteIpGroup( @PathParam("group") String groupIdtf ) { IpGroup grp; if ( isNumeric(groupIdtf) ) { grp = ipGroupPrv.get( Long.parseLong(groupIdtf) ); diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Info.java b/src/main/java/edu/harvard/iq/dataverse/api/Info.java index 40ce6cd25b7..257519677d3 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/Info.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/Info.java @@ -1,16 +1,35 @@ package edu.harvard.iq.dataverse.api; +import java.io.FileInputStream; +import java.io.InputStream; +import java.net.URL; +import java.nio.charset.StandardCharsets; +import java.util.Arrays; +import java.util.List; +import java.util.logging.Level; +import java.util.logging.Logger; + +import jakarta.ws.rs.Produces; +import org.apache.commons.io.IOUtils; + import edu.harvard.iq.dataverse.settings.JvmSettings; import edu.harvard.iq.dataverse.settings.SettingsServiceBean; +import edu.harvard.iq.dataverse.util.BundleUtil; import edu.harvard.iq.dataverse.util.SystemConfig; import jakarta.ejb.EJB; import jakarta.json.Json; import jakarta.json.JsonValue; import jakarta.ws.rs.GET; import jakarta.ws.rs.Path; +import jakarta.ws.rs.PathParam; +import jakarta.ws.rs.core.MediaType; import jakarta.ws.rs.core.Response; +import org.eclipse.microprofile.openapi.annotations.Operation; +import org.eclipse.microprofile.openapi.annotations.responses.APIResponse; +import org.eclipse.microprofile.openapi.annotations.tags.Tag; @Path("info") +@Tag(name = "info", description = "General information about the Dataverse installation.") public class Info extends AbstractApiBean { @EJB @@ -19,6 +38,8 @@ public class Info extends AbstractApiBean { @EJB SystemConfig systemConfig; + private static final Logger logger = Logger.getLogger(Info.class.getCanonicalName()); + @GET @Path("settings/:DatasetPublishPopupCustomText") public Response getDatasetPublishPopupCustomText() { @@ -33,6 +54,9 @@ public Response getMaxEmbargoDurationInMonths() { @GET @Path("version") + @Operation(summary = "Get version and build information", description = "Get version and build information") + @APIResponse(responseCode = "200", + description = "Version and build information") public Response getInfo() { String versionStr = systemConfig.getVersion(true); String[] comps = versionStr.split("build",2); diff --git a/src/main/java/edu/harvard/iq/dataverse/api/MetadataBlocks.java b/src/main/java/edu/harvard/iq/dataverse/api/MetadataBlocks.java index 448fb48e389..8861abd4803 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/MetadataBlocks.java +++ 
b/src/main/java/edu/harvard/iq/dataverse/api/MetadataBlocks.java @@ -1,34 +1,33 @@ package edu.harvard.iq.dataverse.api; import edu.harvard.iq.dataverse.MetadataBlock; -import jakarta.ws.rs.GET; -import jakarta.ws.rs.Path; -import jakarta.ws.rs.Produces; +import jakarta.ws.rs.*; import jakarta.ws.rs.core.Response; -import static edu.harvard.iq.dataverse.util.json.JsonPrinter.brief; -import jakarta.ws.rs.PathParam; + +import java.util.List; + import static edu.harvard.iq.dataverse.util.json.JsonPrinter.json; -import static edu.harvard.iq.dataverse.util.json.JsonPrinter.toJsonArray; /** * Api bean for managing metadata blocks. + * * @author michael */ @Path("metadatablocks") @Produces("application/json") public class MetadataBlocks extends AbstractApiBean { - + @GET - public Response list() { - return ok(metadataBlockSvc.listMetadataBlocks().stream().map(brief::json).collect(toJsonArray())); + public Response listMetadataBlocks(@QueryParam("onlyDisplayedOnCreate") boolean onlyDisplayedOnCreate, + @QueryParam("returnDatasetFieldTypes") boolean returnDatasetFieldTypes) { + List metadataBlocks = metadataBlockSvc.listMetadataBlocks(onlyDisplayedOnCreate); + return ok(json(metadataBlocks, returnDatasetFieldTypes, onlyDisplayedOnCreate)); } - + @Path("{identifier}") @GET - public Response getBlock( @PathParam("identifier") String idtf ) { + public Response getMetadataBlock(@PathParam("identifier") String idtf) { MetadataBlock b = findMetadataBlock(idtf); - - return (b != null ) ? ok(json(b)) : notFound("Can't find metadata block '" + idtf + "'"); + return (b != null) ? ok(json(b)) : notFound("Can't find metadata block '" + idtf + "'"); } - } diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Search.java b/src/main/java/edu/harvard/iq/dataverse/api/Search.java index 71e2865ca4d..6b9fcb38305 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/Search.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/Search.java @@ -1,10 +1,8 @@ package edu.harvard.iq.dataverse.api; -import edu.harvard.iq.dataverse.Dataverse; +import edu.harvard.iq.dataverse.*; import edu.harvard.iq.dataverse.api.auth.AuthRequired; import edu.harvard.iq.dataverse.search.SearchFields; -import edu.harvard.iq.dataverse.DataverseServiceBean; -import edu.harvard.iq.dataverse.DvObjectServiceBean; import edu.harvard.iq.dataverse.search.FacetCategory; import edu.harvard.iq.dataverse.search.FacetLabel; import edu.harvard.iq.dataverse.search.SolrSearchResult; @@ -16,7 +14,6 @@ import edu.harvard.iq.dataverse.search.SearchConstants; import edu.harvard.iq.dataverse.search.SearchException; import edu.harvard.iq.dataverse.search.SearchUtil; -import edu.harvard.iq.dataverse.search.SolrIndexServiceBean; import edu.harvard.iq.dataverse.search.SortBy; import edu.harvard.iq.dataverse.settings.SettingsServiceBean; import java.io.IOException; @@ -26,6 +23,7 @@ import java.util.Map; import java.util.logging.Logger; import jakarta.ejb.EJB; +import jakarta.inject.Inject; import jakarta.json.Json; import jakarta.json.JsonArrayBuilder; import jakarta.json.JsonObjectBuilder; @@ -51,10 +49,8 @@ public class Search extends AbstractApiBean { SearchServiceBean searchService; @EJB DataverseServiceBean dataverseService; - @EJB - DvObjectServiceBean dvObjectService; - @EJB - SolrIndexServiceBean SolrIndexService; + @Inject + DatasetVersionFilesServiceBean datasetVersionFilesServiceBean; @GET @AuthRequired @@ -179,7 +175,7 @@ public Response search( JsonArrayBuilder itemsArrayBuilder = Json.createArrayBuilder(); List solrSearchResults = 
solrQueryResponse.getSolrSearchResults(); for (SolrSearchResult solrSearchResult : solrSearchResults) { - itemsArrayBuilder.add(solrSearchResult.toJsonObject(showRelevance, showEntityIds, showApiUrls, metadataFields)); + itemsArrayBuilder.add(solrSearchResult.json(showRelevance, showEntityIds, showApiUrls, metadataFields, getDatasetFileCount(solrSearchResult))); } JsonObjectBuilder spelling_alternatives = Json.createObjectBuilder(); @@ -187,31 +183,32 @@ public Response search( spelling_alternatives.add(entry.getKey(), entry.getValue().toString()); } - JsonArrayBuilder facets = Json.createArrayBuilder(); - JsonObjectBuilder facetCategoryBuilder = Json.createObjectBuilder(); - for (FacetCategory facetCategory : solrQueryResponse.getFacetCategoryList()) { - JsonObjectBuilder facetCategoryBuilderFriendlyPlusData = Json.createObjectBuilder(); - JsonArrayBuilder facetLabelBuilderData = Json.createArrayBuilder(); - for (FacetLabel facetLabel : facetCategory.getFacetLabel()) { - JsonObjectBuilder countBuilder = Json.createObjectBuilder(); - countBuilder.add(facetLabel.getName(), facetLabel.getCount()); - facetLabelBuilderData.add(countBuilder); - } - facetCategoryBuilderFriendlyPlusData.add("friendly", facetCategory.getFriendlyName()); - facetCategoryBuilderFriendlyPlusData.add("labels", facetLabelBuilderData); - facetCategoryBuilder.add(facetCategory.getName(), facetCategoryBuilderFriendlyPlusData); - } - facets.add(facetCategoryBuilder); - JsonObjectBuilder value = Json.createObjectBuilder() .add("q", query) .add("total_count", solrQueryResponse.getNumResultsFound()) .add("start", solrQueryResponse.getResultsStart()) .add("spelling_alternatives", spelling_alternatives) .add("items", itemsArrayBuilder.build()); + if (showFacets) { + JsonArrayBuilder facets = Json.createArrayBuilder(); + JsonObjectBuilder facetCategoryBuilder = Json.createObjectBuilder(); + for (FacetCategory facetCategory : solrQueryResponse.getFacetCategoryList()) { + JsonObjectBuilder facetCategoryBuilderFriendlyPlusData = Json.createObjectBuilder(); + JsonArrayBuilder facetLabelBuilderData = Json.createArrayBuilder(); + for (FacetLabel facetLabel : facetCategory.getFacetLabel()) { + JsonObjectBuilder countBuilder = Json.createObjectBuilder(); + countBuilder.add(facetLabel.getName(), facetLabel.getCount()); + facetLabelBuilderData.add(countBuilder); + } + facetCategoryBuilderFriendlyPlusData.add("friendly", facetCategory.getFriendlyName()); + facetCategoryBuilderFriendlyPlusData.add("labels", facetLabelBuilderData); + facetCategoryBuilder.add(facetCategory.getName(), facetCategoryBuilderFriendlyPlusData); + } + facets.add(facetCategoryBuilder); value.add("facets", facets); } + value.add("count_in_response", solrSearchResults.size()); /** * @todo Returning the fq might be useful as a troubleshooting aid @@ -232,6 +229,15 @@ public Response search( } } + private Long getDatasetFileCount(SolrSearchResult solrSearchResult) { + DvObject dvObject = solrSearchResult.getEntity(); + if (dvObject.isInstanceofDataset()) { + DatasetVersion datasetVersion = ((Dataset) dvObject).getVersionFromId(solrSearchResult.getDatasetVersionId()); + return datasetVersionFilesServiceBean.getFileMetadataCount(datasetVersion); + } + return null; + } + private User getUser(ContainerRequestContext crc) throws WrappedResponse { User userToExecuteSearchAs = GuestUser.get(); try { diff --git a/src/main/java/edu/harvard/iq/dataverse/api/TestApi.java b/src/main/java/edu/harvard/iq/dataverse/api/TestApi.java index b9db44b2671..46747b50c29 100644 --- 
a/src/main/java/edu/harvard/iq/dataverse/api/TestApi.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/TestApi.java @@ -21,7 +21,7 @@ public class TestApi extends AbstractApiBean { @GET @Path("datasets/{id}/externalTools") - public Response getExternalToolsforFile(@PathParam("id") String idSupplied, @QueryParam("type") String typeSupplied) { + public Response getDatasetExternalToolsforFile(@PathParam("id") String idSupplied, @QueryParam("type") String typeSupplied) { ExternalTool.Type type; try { type = ExternalTool.Type.fromString(typeSupplied); diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Users.java b/src/main/java/edu/harvard/iq/dataverse/api/Users.java index 791fc7aa774..1f5430340c2 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/Users.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/Users.java @@ -234,7 +234,7 @@ public Response getTraces(@Context ContainerRequestContext crc, @PathParam("iden @AuthRequired @Path("{identifier}/traces/{element}") @Produces("text/csv, application/json") - public Response getTraces(@Context ContainerRequestContext crc, @Context Request req, @PathParam("identifier") String identifier, @PathParam("element") String element) { + public Response getTracesElement(@Context ContainerRequestContext crc, @Context Request req, @PathParam("identifier") String identifier, @PathParam("element") String element) { try { AuthenticatedUser userToQuery = authSvc.getAuthenticatedUser(identifier); if(!elements.contains(element)) { diff --git a/src/main/java/edu/harvard/iq/dataverse/api/WorkflowsAdmin.java b/src/main/java/edu/harvard/iq/dataverse/api/WorkflowsAdmin.java index 8d5024c1c14..15478aacff7 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/WorkflowsAdmin.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/WorkflowsAdmin.java @@ -111,9 +111,9 @@ public Response deleteDefault(@PathParam("triggerType") String triggerType) { } } - @Path("/{identifier}") + @Path("/{id}") @GET - public Response getWorkflow(@PathParam("identifier") String identifier ) { + public Response getWorkflow(@PathParam("id") String identifier ) { try { long idtf = Long.parseLong(identifier); return workflows.getWorkflow(idtf) diff --git a/src/main/java/edu/harvard/iq/dataverse/authorization/AuthenticationServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/authorization/AuthenticationServiceBean.java index 1c0f5010059..4a8fb123fd4 100644 --- a/src/main/java/edu/harvard/iq/dataverse/authorization/AuthenticationServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/authorization/AuthenticationServiceBean.java @@ -307,11 +307,9 @@ public AuthenticatedUser getUpdateAuthenticatedUser( String authenticationProvid if (user != null && !user.isDeactivated()) { user = userService.updateLastLogin(user); } - + if ( user == null ) { throw new IllegalStateException("Authenticated user does not exist. 
The functionality to support creating one at this point in authentication has been removed."); - //return createAuthenticatedUser( - // new UserRecordIdentifier(authenticationProviderId, resp.getUserId()), resp.getUserId(), resp.getUserDisplayInfo(), true ); } else { if (BuiltinAuthenticationProvider.PROVIDER_ID.equals(user.getAuthenticatedUserLookup().getAuthenticationProviderId())) { return user; diff --git a/src/main/java/edu/harvard/iq/dataverse/authorization/providers/oauth2/OAuth2LoginBackingBean.java b/src/main/java/edu/harvard/iq/dataverse/authorization/providers/oauth2/OAuth2LoginBackingBean.java index 0fd0852b4df..8f3dc07fdea 100644 --- a/src/main/java/edu/harvard/iq/dataverse/authorization/providers/oauth2/OAuth2LoginBackingBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/authorization/providers/oauth2/OAuth2LoginBackingBean.java @@ -1,6 +1,7 @@ package edu.harvard.iq.dataverse.authorization.providers.oauth2; import edu.harvard.iq.dataverse.DataverseSession; +import edu.harvard.iq.dataverse.UserServiceBean; import edu.harvard.iq.dataverse.authorization.AuthenticationProvider; import edu.harvard.iq.dataverse.authorization.AuthenticationServiceBean; import edu.harvard.iq.dataverse.authorization.UserRecordIdentifier; @@ -65,6 +66,9 @@ public class OAuth2LoginBackingBean implements Serializable { @EJB SystemConfig systemConfig; + @EJB + UserServiceBean userService; + @Inject DataverseSession session; @@ -128,6 +132,7 @@ public void exchangeCodeForToken() throws IOException { } else { // login the user and redirect to HOME of intended page (if any). // setUser checks for deactivated users. + dvUser = userService.updateLastLogin(dvUser); session.setUser(dvUser); final OAuth2TokenData tokenData = oauthUser.getTokenData(); if (tokenData != null) { diff --git a/src/main/java/edu/harvard/iq/dataverse/dataaccess/DataAccess.java b/src/main/java/edu/harvard/iq/dataverse/dataaccess/DataAccess.java index a1bcbe49327..bc4c69390cf 100644 --- a/src/main/java/edu/harvard/iq/dataverse/dataaccess/DataAccess.java +++ b/src/main/java/edu/harvard/iq/dataverse/dataaccess/DataAccess.java @@ -270,6 +270,10 @@ public static StorageIO createNewStorageIO(T dvObject, S logger.warning("Could not find storage driver for: " + storageTag); throw new IOException("createDataAccessObject: Unsupported storage method " + storageDriverId); } + if (storageIO == null) { + logger.warning("Could not find storage driver for: " + storageTag); + throw new IOException("createDataAccessObject: Unsupported storage method " + storageDriverId); + } // Note: All storageIO classes must assure that dvObject instances' storageIdentifiers are prepended with // the :// + any additional storageIO type information required (e.g. 
the bucketname for s3/swift) // This currently happens when the storageIO is opened for write access diff --git a/src/main/java/edu/harvard/iq/dataverse/dataaccess/ImageThumbConverter.java b/src/main/java/edu/harvard/iq/dataverse/dataaccess/ImageThumbConverter.java index 1be2bb79e0f..2435e3f778a 100644 --- a/src/main/java/edu/harvard/iq/dataverse/dataaccess/ImageThumbConverter.java +++ b/src/main/java/edu/harvard/iq/dataverse/dataaccess/ImageThumbConverter.java @@ -35,21 +35,21 @@ import javax.imageio.stream.ImageOutputStream; import edu.harvard.iq.dataverse.DataFile; +import edu.harvard.iq.dataverse.DataFileServiceBean; import edu.harvard.iq.dataverse.util.FileUtil; import edu.harvard.iq.dataverse.util.SystemConfig; import java.io.ByteArrayOutputStream; -import java.io.FileNotFoundException; import java.io.FileOutputStream; import java.io.OutputStream; import java.nio.channels.Channel; import java.nio.channels.Channels; -import java.nio.channels.FileChannel; import java.nio.channels.ReadableByteChannel; import java.nio.channels.WritableByteChannel; import java.nio.file.Path; import java.nio.file.Paths; -import java.util.logging.Level; import java.util.logging.Logger; + +import jakarta.enterprise.inject.spi.CDI; import org.apache.commons.io.IOUtils; //import org.primefaces.util.Base64; import java.util.Base64; @@ -110,6 +110,12 @@ private static boolean isThumbnailAvailable(StorageIO storageIO, int s return false; } + // check if thumbnail generation failed: + if (file.isPreviewImageFail()) { + logger.fine("Thumbnail failed to be generated for "+ file.getId()); + return false; + } + if (isThumbnailCached(storageIO, size)) { logger.fine("Found cached thumbnail for " + file.getId()); return true; @@ -119,22 +125,23 @@ private static boolean isThumbnailAvailable(StorageIO storageIO, int s } private static boolean generateThumbnail(DataFile file, StorageIO storageIO, int size) { - logger.log(Level.FINE, (file.isPreviewImageFail() ? "Not trying" : "Trying") + " to generate thumbnail, file id: " + file.getId()); + logger.fine((file.isPreviewImageFail() ? "Not trying" : "Trying") + " to generate thumbnail, file id: " + file.getId()); + boolean thumbnailGenerated = false; // Don't try to generate if there have been failures: if (!file.isPreviewImageFail()) { - boolean thumbnailGenerated = false; if (file.getContentType().substring(0, 6).equalsIgnoreCase("image/")) { thumbnailGenerated = generateImageThumbnail(storageIO, size); } else if (file.getContentType().equalsIgnoreCase("application/pdf")) { thumbnailGenerated = generatePDFThumbnail(storageIO, size); } if (!thumbnailGenerated) { + file.setPreviewImageFail(true); + file.setPreviewImageAvailable(false); logger.fine("No thumbnail generated for " + file.getId()); } - return thumbnailGenerated; } - return false; + return thumbnailGenerated; } // Note that this method works on ALL file types for which thumbnail @@ -165,15 +172,30 @@ public static InputStreamIO getImageThumbnailAsInputStream(StorageIO s return null; } int cachedThumbnailSize = (int) storageIO.getAuxObjectSize(THUMBNAIL_SUFFIX + size); + InputStreamIO inputStreamIO = cachedThumbnailSize > 0 ? 
new InputStreamIO(cachedThumbnailInputStream, cachedThumbnailSize) : null; - InputStreamIO inputStreamIO = new InputStreamIO(cachedThumbnailInputStream, cachedThumbnailSize); - - inputStreamIO.setMimeType(THUMBNAIL_MIME_TYPE); + if (inputStreamIO != null) { + inputStreamIO.setMimeType(THUMBNAIL_MIME_TYPE); - String fileName = storageIO.getFileName(); - if (fileName != null) { - fileName = fileName.replaceAll("\\.[^\\.]*$", THUMBNAIL_FILE_EXTENSION); - inputStreamIO.setFileName(fileName); + String fileName = storageIO.getFileName(); + if (fileName != null) { + fileName = fileName.replaceAll("\\.[^\\.]*$", THUMBNAIL_FILE_EXTENSION); + inputStreamIO.setFileName(fileName); + } + } else { + if (storageIO.getDataFile() != null && cachedThumbnailSize == 0) { + // We found an older 0 length thumbnail. Newer image uploads will not have this issue. + // Once cleaned up, this thumbnail will no longer have this issue + logger.warning("Cleaning up zero sized thumbnail ID: "+ storageIO.getDataFile().getId()); + storageIO.getDataFile().setPreviewImageFail(true); + storageIO.getDataFile().setPreviewImageAvailable(false); + DataFileServiceBean datafileService = CDI.current().select(DataFileServiceBean.class).get(); + datafileService.save(storageIO.getDataFile()); + + // Now that we have marked this File as a thumbnail failure, + // no reason not to try and delete this 0-size cache here: + storageIO.deleteAuxObject(THUMBNAIL_SUFFIX + size); + } } return inputStreamIO; } catch (Exception ioex) { @@ -307,6 +329,7 @@ private static boolean generateImageThumbnail(StorageIO storageIO, int private static boolean generateImageThumbnailFromInputStream(StorageIO storageIO, int size, InputStream inputStream) { BufferedImage fullSizeImage; + boolean thumbnailGenerated = false; try { logger.fine("attempting to read the image file with ImageIO.read(InputStream), " + storageIO.getDataFile().getStorageIdentifier()); @@ -359,23 +382,15 @@ private static boolean generateImageThumbnailFromInputStream(StorageIO try { rescaleImage(fullSizeImage, width, height, size, outputStream); - /* - // while we are at it, let's make sure other size thumbnails are - // generated too: - for (int s : (new int[]{DEFAULT_PREVIEW_SIZE, DEFAULT_THUMBNAIL_SIZE, DEFAULT_CARDIMAGE_SIZE})) { - if (size != s && !thumbnailFileExists(fileLocation, s)) { - rescaleImage(fullSizeImage, width, height, s, fileLocation); - } - } - */ if (tempFileRequired) { storageIO.savePathAsAux(Paths.get(tempFile.getAbsolutePath()), THUMBNAIL_SUFFIX + size); } + thumbnailGenerated = true; } catch (Exception ioex) { logger.warning("Failed to rescale and/or save the image: " + ioex.getMessage()); - return false; + thumbnailGenerated = false; } finally { if(tempFileRequired) { @@ -383,10 +398,19 @@ private static boolean generateImageThumbnailFromInputStream(StorageIO tempFile.delete(); } catch (Exception e) {} + } else if (!thumbnailGenerated) { + // if it was a local file - let's make sure we are not leaving + // behind a half-baked, broken image - such as a 0-size file - + // if this was a failure. 
+ try { + storageIO.deleteAuxObject(THUMBNAIL_SUFFIX + size); + } catch (IOException ioex) { + logger.fine("Failed attempt to delete the result of a failed thumbnail rescaling; this is most likely ok - for ex., because it was never created in the first place."); + } } } - return true; + return thumbnailGenerated; } @@ -544,12 +568,10 @@ private static String getImageAsBase64FromInputStream(InputStream inputStream) { public static String getImageAsBase64FromFile(File imageFile) { InputStream imageInputStream = null; try { - - int imageSize = (int) imageFile.length(); - - imageInputStream = new FileInputStream(imageFile); - - return getImageAsBase64FromInputStream(imageInputStream); //, imageSize); + if (imageFile.length() > 0) { + imageInputStream = new FileInputStream(imageFile); + return getImageAsBase64FromInputStream(imageInputStream); + } } catch (IOException ex) { // too bad - but not fatal logger.warning("getImageAsBase64FromFile: Failed to read data from thumbnail file"); @@ -609,16 +631,12 @@ public static String generateImageThumbnailFromFile(String fileLocation, int siz logger.fine("image dimensions: " + width + "x" + height); - thumbFileLocation = rescaleImage(fullSizeImage, width, height, size, fileLocation); + return rescaleImage(fullSizeImage, width, height, size, fileLocation); - if (thumbFileLocation != null) { - return thumbFileLocation; - } } catch (Exception e) { logger.warning("Failed to read in an image from " + fileLocation + ": " + e.getMessage()); } return null; - } /* @@ -657,10 +675,14 @@ public static String rescaleImage(BufferedImage fullSizeImage, int width, int he try { rescaleImage(fullSizeImage, width, height, size, outputFileStream); } catch (Exception ioex) { - logger.warning("caught Exceptiopn trying to create rescaled image " + outputLocation); - return null; + logger.warning("caught Exception trying to create rescaled image " + outputLocation); + outputLocation = null; } finally { IOUtils.closeQuietly(outputFileStream); + // delete the file if the rescaleImage failed + if (outputLocation == null) { + outputFile.delete(); + } } return outputLocation; @@ -716,13 +738,19 @@ private static void rescaleImage(BufferedImage fullSizeImage, int width, int hei if (iter.hasNext()) { writer = (ImageWriter) iter.next(); } else { - throw new IOException("Failed to locatie ImageWriter plugin for image type PNG"); + throw new IOException("Failed to locate ImageWriter plugin for image type PNG"); } - BufferedImage lowRes = new BufferedImage(thumbWidth, thumbHeight, BufferedImage.TYPE_INT_ARGB); - Graphics2D g2 = lowRes.createGraphics(); - g2.drawImage(thumbImage, 0, 0, null); - g2.dispose(); + BufferedImage lowRes = null; + try { + lowRes = new BufferedImage(thumbWidth, thumbHeight, BufferedImage.TYPE_INT_ARGB); + Graphics2D g2 = lowRes.createGraphics(); + g2.drawImage(thumbImage, 0, 0, null); + g2.dispose(); + } catch (Exception ex) { + logger.warning("Failed to create LoRes Image: " + ex.getMessage()); + throw new IOException("Caught exception trying to generate thumbnail: " + ex.getMessage()); + } try (ImageOutputStream ios = ImageIO.createImageOutputStream(outputStream);) { @@ -838,6 +866,7 @@ public static String generatePDFThumbnailFromFile(String fileLocation, int size) // generate the thumbnail for the requested size, *using the already scaled-down // 400x400 png version, above*: + // (the "exists()" check below appears to be unnecessary - we've already checked early on - ?) 
if (!((new File(thumbFileLocation)).exists())) { thumbFileLocation = runImageMagick(imageMagickExec, previewFileLocation, thumbFileLocation, size, "png"); } diff --git a/src/main/java/edu/harvard/iq/dataverse/dataaccess/S3AccessIO.java b/src/main/java/edu/harvard/iq/dataverse/dataaccess/S3AccessIO.java index c2143bd4789..d2fdec7b323 100644 --- a/src/main/java/edu/harvard/iq/dataverse/dataaccess/S3AccessIO.java +++ b/src/main/java/edu/harvard/iq/dataverse/dataaccess/S3AccessIO.java @@ -40,6 +40,7 @@ import edu.harvard.iq.dataverse.Dataverse; import edu.harvard.iq.dataverse.DvObject; import edu.harvard.iq.dataverse.datavariable.DataVariable; +import edu.harvard.iq.dataverse.settings.JvmSettings; import edu.harvard.iq.dataverse.util.FileUtil; import opennlp.tools.util.StringUtil; @@ -991,7 +992,10 @@ private String generateTemporaryS3UploadUrl(String key, Date expiration) throws GeneratePresignedUrlRequest generatePresignedUrlRequest = new GeneratePresignedUrlRequest(bucketName, key).withMethod(HttpMethod.PUT).withExpiration(expiration); //Require user to add this header to indicate a temporary file - generatePresignedUrlRequest.putCustomRequestHeader(Headers.S3_TAGGING, "dv-state=temp"); + final boolean taggingDisabled = JvmSettings.DISABLE_S3_TAGGING.lookupOptional(Boolean.class, this.driverId).orElse(false); + if (!taggingDisabled) { + generatePresignedUrlRequest.putCustomRequestHeader(Headers.S3_TAGGING, "dv-state=temp"); + } URL presignedUrl; try { @@ -1040,7 +1044,10 @@ public JsonObjectBuilder generateTemporaryS3UploadUrls(String globalId, String s } else { JsonObjectBuilder urls = Json.createObjectBuilder(); InitiateMultipartUploadRequest initiationRequest = new InitiateMultipartUploadRequest(bucketName, key); - initiationRequest.putCustomRequestHeader(Headers.S3_TAGGING, "dv-state=temp"); + final boolean taggingDisabled = JvmSettings.DISABLE_S3_TAGGING.lookupOptional(Boolean.class, this.driverId).orElse(false); + if (!taggingDisabled) { + initiationRequest.putCustomRequestHeader(Headers.S3_TAGGING, "dv-state=temp"); + } InitiateMultipartUploadResult initiationResponse = s3.initiateMultipartUpload(initiationRequest); String uploadId = initiationResponse.getUploadId(); for (int i = 1; i <= (fileSize / minPartSize) + (fileSize % minPartSize > 0 ? 
1 : 0); i++) { diff --git a/src/main/java/edu/harvard/iq/dataverse/datacapturemodule/DataCaptureModuleException.java b/src/main/java/edu/harvard/iq/dataverse/datacapturemodule/DataCaptureModuleException.java index 3329d92b7a9..474674bda73 100644 --- a/src/main/java/edu/harvard/iq/dataverse/datacapturemodule/DataCaptureModuleException.java +++ b/src/main/java/edu/harvard/iq/dataverse/datacapturemodule/DataCaptureModuleException.java @@ -1,7 +1,9 @@ package edu.harvard.iq.dataverse.datacapturemodule; +@Deprecated(forRemoval = true, since = "2024-07-07") public class DataCaptureModuleException extends Exception { + @Deprecated(forRemoval = true, since = "2024-07-07") public DataCaptureModuleException(String message, Throwable cause) { super(message, cause); } diff --git a/src/main/java/edu/harvard/iq/dataverse/datacapturemodule/DataCaptureModuleUtil.java b/src/main/java/edu/harvard/iq/dataverse/datacapturemodule/DataCaptureModuleUtil.java index 460e4727afc..094d3976133 100644 --- a/src/main/java/edu/harvard/iq/dataverse/datacapturemodule/DataCaptureModuleUtil.java +++ b/src/main/java/edu/harvard/iq/dataverse/datacapturemodule/DataCaptureModuleUtil.java @@ -12,10 +12,12 @@ import jakarta.json.JsonObject; import jakarta.json.JsonObjectBuilder; +@Deprecated(forRemoval = true, since = "2024-07-07") public class DataCaptureModuleUtil { private static final Logger logger = Logger.getLogger(DataCaptureModuleUtil.class.getCanonicalName()); + @Deprecated(forRemoval = true, since = "2024-07-07") public static boolean rsyncSupportEnabled(String uploadMethodsSettings) { logger.fine("uploadMethodsSettings: " + uploadMethodsSettings);; if (uploadMethodsSettings==null){ @@ -28,6 +30,7 @@ public static boolean rsyncSupportEnabled(String uploadMethodsSettings) { /** * generate JSON to send to DCM */ + @Deprecated(forRemoval = true, since = "2024-07-07") public static JsonObject generateJsonForUploadRequest(AuthenticatedUser user, Dataset dataset) { JsonObjectBuilder jab = Json.createObjectBuilder(); // The general rule should be to always pass the user id and dataset identifier to the DCM. 
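The DataCaptureModule hunks above and below retire the rsync upload path by tagging each entry point with Java's enhanced deprecation form. A minimal, self-contained sketch of that pattern follows; the class and method names are hypothetical stand-ins, and only the standard @Deprecated semantics are asserted here:

    // Hypothetical stand-in for the pattern applied throughout these hunks.
    // forRemoval = true makes javac emit a "[removal]" warning at every call site;
    // since = "2024-07-07" records when removal was announced.
    @Deprecated(forRemoval = true, since = "2024-07-07")
    class LegacyRsyncSupport {
        @Deprecated(forRemoval = true, since = "2024-07-07")
        static String scriptNameFor(String datasetIdentifier) {
            // mirrors the "upload-<identifier>.bash" naming used by getScriptName(...) below
            return "upload-" + datasetIdentifier.replace("/", "_") + ".bash";
        }
    }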
@@ -39,6 +42,7 @@ public static JsonObject generateJsonForUploadRequest(AuthenticatedUser user, Da /** * transfer script from DCM */ + @Deprecated(forRemoval = true, since = "2024-07-07") public static ScriptRequestResponse getScriptFromRequest(HttpResponse uploadRequest) { int status = uploadRequest.getStatus(); JsonNode body = uploadRequest.getBody(); @@ -54,6 +58,7 @@ public static ScriptRequestResponse getScriptFromRequest(HttpResponse return scriptRequestResponse; } + @Deprecated(forRemoval = true, since = "2024-07-07") static UploadRequestResponse makeUploadRequest(HttpResponse uploadRequest) { int status = uploadRequest.getStatus(); String body = uploadRequest.getBody(); @@ -61,6 +66,7 @@ static UploadRequestResponse makeUploadRequest(HttpResponse uploadReques return new UploadRequestResponse(uploadRequest.getStatus(), body); } + @Deprecated(forRemoval = true, since = "2024-07-07") public static String getMessageFromException(DataCaptureModuleException ex) { if (ex == null) { return "DataCaptureModuleException was null!"; @@ -76,6 +82,7 @@ public static String getMessageFromException(DataCaptureModuleException ex) { return message + " was caused by " + cause.getMessage(); } + @Deprecated(forRemoval = true, since = "2024-07-07") public static String getScriptName(DatasetVersion datasetVersion) { return "upload-" + datasetVersion.getDataset().getIdentifier().replace("/", "_") + ".bash"; } diff --git a/src/main/java/edu/harvard/iq/dataverse/dataset/DatasetUtil.java b/src/main/java/edu/harvard/iq/dataverse/dataset/DatasetUtil.java index 03a0044a987..98bd26b51d6 100644 --- a/src/main/java/edu/harvard/iq/dataverse/dataset/DatasetUtil.java +++ b/src/main/java/edu/harvard/iq/dataverse/dataset/DatasetUtil.java @@ -1,17 +1,14 @@ package edu.harvard.iq.dataverse.dataset; -import edu.harvard.iq.dataverse.DataFile; -import edu.harvard.iq.dataverse.Dataset; -import edu.harvard.iq.dataverse.DatasetField; -import edu.harvard.iq.dataverse.DatasetVersion; -import edu.harvard.iq.dataverse.FileMetadata; -import edu.harvard.iq.dataverse.TermsOfUseAndAccess; +import edu.harvard.iq.dataverse.*; import edu.harvard.iq.dataverse.authorization.groups.impl.ipaddress.ip.IpAddress; import edu.harvard.iq.dataverse.authorization.users.AuthenticatedUser; import edu.harvard.iq.dataverse.dataaccess.DataAccess; import static edu.harvard.iq.dataverse.api.ApiConstants.DS_VERSION_DRAFT; import static edu.harvard.iq.dataverse.dataaccess.DataAccess.getStorageIO; + +import edu.harvard.iq.dataverse.dataaccess.InputStreamIO; import edu.harvard.iq.dataverse.dataaccess.StorageIO; import edu.harvard.iq.dataverse.dataaccess.ImageThumbConverter; import edu.harvard.iq.dataverse.util.BundleUtil; @@ -31,15 +28,14 @@ import java.util.*; import java.util.logging.Logger; import javax.imageio.ImageIO; + +import jakarta.enterprise.inject.spi.CDI; import org.apache.commons.io.IOUtils; -import static edu.harvard.iq.dataverse.dataaccess.DataAccess.getStorageIO; import edu.harvard.iq.dataverse.datasetutility.FileSizeChecker; import edu.harvard.iq.dataverse.engine.command.DataverseRequest; import edu.harvard.iq.dataverse.license.License; -import edu.harvard.iq.dataverse.settings.SettingsServiceBean; import edu.harvard.iq.dataverse.util.StringUtil; import static edu.harvard.iq.dataverse.util.json.JsonPrinter.json; -import static edu.harvard.iq.dataverse.util.json.NullSafeJsonBuilder.jsonObjectBuilder; import org.apache.commons.io.FileUtils; import org.apache.commons.lang3.EnumUtils; @@ -218,7 +214,8 @@ public static boolean 
deleteDatasetLogo(Dataset dataset) { storageIO.deleteAuxObject(datasetLogoThumbnail + thumbExtension + ImageThumbConverter.DEFAULT_CARDIMAGE_SIZE); } catch (IOException ex) { - logger.info("Failed to delete dataset logo: " + ex.getMessage()); + logger.fine("Failed to delete dataset logo: " + ex.getMessage() + + " (this is most likely harmless; this method is often called without checking if the custom dataset logo was in fact present)"); return false; } return true; @@ -293,7 +290,7 @@ public static Dataset persistDatasetLogoToStorageAndCreateThumbnails(Dataset dat dataAccess = DataAccess.getStorageIO(dataset); } catch(IOException ioex){ - //TODO: Add a suitable waing message + //TODO: Add a suitable warning message logger.warning("Failed to save the file, storage id " + dataset.getStorageIdentifier() + " (" + ioex.getMessage() + ")"); } @@ -355,30 +352,44 @@ public static Dataset persistDatasetLogoToStorageAndCreateThumbnails(Dataset dat // We'll try to pre-generate the rescaled versions in both the // DEFAULT_DATASET_LOGO (currently 140) and DEFAULT_CARDIMAGE_SIZE (48) String thumbFileLocation = ImageThumbConverter.rescaleImage(fullSizeImage, width, height, ImageThumbConverter.DEFAULT_DATASETLOGO_SIZE, tmpFileForResize.toPath().toString()); - logger.fine("thumbFileLocation = " + thumbFileLocation); - logger.fine("tmpFileLocation=" + tmpFileForResize.toPath().toString()); - //now we must save the updated thumbnail - try { - dataAccess.savePathAsAux(Paths.get(thumbFileLocation), datasetLogoThumbnail+thumbExtension+ImageThumbConverter.DEFAULT_DATASETLOGO_SIZE); - } catch (IOException ex) { - logger.severe("Failed to move updated thumbnail file from " + tmpFile.getAbsolutePath() + " to its DataAccess location" + ": " + ex); + if (thumbFileLocation == null) { + logger.warning("Rescale Thumbnail Image to logo failed"); + dataset.setPreviewImageAvailable(false); + dataset.setUseGenericThumbnail(true); + } else { + logger.fine("thumbFileLocation = " + thumbFileLocation); + logger.fine("tmpFileLocation=" + tmpFileForResize.toPath().toString()); + //now we must save the updated thumbnail + try { + dataAccess.savePathAsAux(Paths.get(thumbFileLocation), datasetLogoThumbnail + thumbExtension + ImageThumbConverter.DEFAULT_DATASETLOGO_SIZE); + } catch (IOException ex) { + logger.severe("Failed to move updated thumbnail file from " + tmpFile.getAbsolutePath() + " to its DataAccess location" + ": " + ex); + } } thumbFileLocation = ImageThumbConverter.rescaleImage(fullSizeImage, width, height, ImageThumbConverter.DEFAULT_CARDIMAGE_SIZE, tmpFileForResize.toPath().toString()); - logger.fine("thumbFileLocation = " + thumbFileLocation); - logger.fine("tmpFileLocation=" + tmpFileForResize.toPath().toString()); - //now we must save the updated thumbnail - try { - dataAccess.savePathAsAux(Paths.get(thumbFileLocation), datasetLogoThumbnail+thumbExtension+ImageThumbConverter.DEFAULT_CARDIMAGE_SIZE); - } catch (IOException ex) { - logger.severe("Failed to move updated thumbnail file from " + tmpFile.getAbsolutePath() + " to its DataAccess location" + ": " + ex); + if (thumbFileLocation == null) { + logger.warning("Rescale Thumbnail Image to card failed"); + dataset.setPreviewImageAvailable(false); + dataset.setUseGenericThumbnail(true); + } else { + logger.fine("thumbFileLocation = " + thumbFileLocation); + logger.fine("tmpFileLocation=" + tmpFileForResize.toPath().toString()); + //now we must save the updated thumbnail + try { + dataAccess.savePathAsAux(Paths.get(thumbFileLocation), datasetLogoThumbnail + 
thumbExtension + ImageThumbConverter.DEFAULT_CARDIMAGE_SIZE); + } catch (IOException ex) { + logger.severe("Failed to move updated thumbnail file from " + tmpFile.getAbsolutePath() + " to its DataAccess location" + ": " + ex); + } } //This deletes the tempfiles created for rescaling and encoding boolean tmpFileWasDeleted = tmpFile.delete(); boolean originalTempFileWasDeleted = tmpFileForResize.delete(); try { - Files.delete(Paths.get(thumbFileLocation)); + if (thumbFileLocation != null) { + Files.delete(Paths.get(thumbFileLocation)); + } } catch (IOException ioex) { logger.fine("Failed to delete temporary thumbnail file"); } @@ -463,8 +474,19 @@ public static InputStream getLogoAsInputStream(Dataset dataset) { } try { - in = ImageThumbConverter.getImageThumbnailAsInputStream(thumbnailFile.getStorageIO(), - ImageThumbConverter.DEFAULT_DATASETLOGO_SIZE).getInputStream(); + + boolean origImageFailed = thumbnailFile.isPreviewImageFail(); + InputStreamIO isIO = ImageThumbConverter.getImageThumbnailAsInputStream(thumbnailFile.getStorageIO(), + ImageThumbConverter.DEFAULT_DATASETLOGO_SIZE); + if (!origImageFailed && thumbnailFile.isPreviewImageFail()) { + // We found an older 0 length thumbnail. Newer image uploads will not have this issue. + // Once cleaned up, this thumbnail will no longer have this issue + // ImageThumbConverter fixed the DataFile + // Now we need to update dataset since this is a bad logo + DatasetServiceBean datasetService = CDI.current().select(DatasetServiceBean.class).get(); + datasetService.clearDatasetLevelThumbnail(dataset); + } + in = isIO != null ? isIO.getInputStream() : null; } catch (IOException ioex) { logger.warning("getLogo(): Failed to get logo from DataFile for " + dataset.getStorageIdentifier() + " (" + ioex.getMessage() + ")"); diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/CommandContext.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/CommandContext.java index 6c4d63e3e35..96330271367 100644 --- a/src/main/java/edu/harvard/iq/dataverse/engine/command/CommandContext.java +++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/CommandContext.java @@ -1,6 +1,7 @@ package edu.harvard.iq.dataverse.engine.command; import edu.harvard.iq.dataverse.DataFileServiceBean; +import edu.harvard.iq.dataverse.DatasetFieldServiceBean; import edu.harvard.iq.dataverse.DatasetLinkingServiceBean; import edu.harvard.iq.dataverse.DatasetServiceBean; import edu.harvard.iq.dataverse.DatasetVersionServiceBean; @@ -15,6 +16,7 @@ import edu.harvard.iq.dataverse.FileDownloadServiceBean; import edu.harvard.iq.dataverse.GuestbookResponseServiceBean; import edu.harvard.iq.dataverse.GuestbookServiceBean; +import edu.harvard.iq.dataverse.MetadataBlockServiceBean; import edu.harvard.iq.dataverse.search.IndexServiceBean; import edu.harvard.iq.dataverse.PermissionServiceBean; import edu.harvard.iq.dataverse.RoleAssigneeServiceBean; @@ -133,7 +135,9 @@ public interface CommandContext { public ConfirmEmailServiceBean confirmEmail(); public ActionLogServiceBean actionLog(); - + + public MetadataBlockServiceBean metadataBlocks(); + public void beginCommandSequence(); public boolean completeCommandSequence(Command command); @@ -143,4 +147,6 @@ public interface CommandContext { public Stack getCommandsCalled(); public void addCommand(Command command); + + public DatasetFieldServiceBean dsField(); } diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/AbstractCreateDatasetCommand.java 
b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/AbstractCreateDatasetCommand.java index d8302024c14..ab78a88c9a7 100644 --- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/AbstractCreateDatasetCommand.java +++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/AbstractCreateDatasetCommand.java @@ -1,26 +1,19 @@ package edu.harvard.iq.dataverse.engine.command.impl; -import edu.harvard.iq.dataverse.*; +import edu.harvard.iq.dataverse.DataFile; +import edu.harvard.iq.dataverse.Dataset; +import edu.harvard.iq.dataverse.DatasetVersion; import edu.harvard.iq.dataverse.authorization.Permission; import edu.harvard.iq.dataverse.authorization.users.AuthenticatedUser; -import edu.harvard.iq.dataverse.batch.util.LoggingUtil; import edu.harvard.iq.dataverse.dataaccess.DataAccess; -import edu.harvard.iq.dataverse.datacapturemodule.DataCaptureModuleUtil; -import edu.harvard.iq.dataverse.datacapturemodule.ScriptRequestResponse; import edu.harvard.iq.dataverse.engine.command.CommandContext; import edu.harvard.iq.dataverse.engine.command.DataverseRequest; import edu.harvard.iq.dataverse.engine.command.RequiredPermissions; import edu.harvard.iq.dataverse.engine.command.exception.CommandException; -import edu.harvard.iq.dataverse.engine.command.exception.CommandExecutionException; import edu.harvard.iq.dataverse.pidproviders.PidProvider; -import edu.harvard.iq.dataverse.pidproviders.PidUtil; -import edu.harvard.iq.dataverse.settings.SettingsServiceBean; import static edu.harvard.iq.dataverse.util.StringUtil.isEmpty; -import java.io.IOException; import java.util.Objects; -import java.util.logging.Level; import java.util.logging.Logger; -import org.apache.solr.client.solrj.SolrServerException; /**; * An abstract base class for commands that creates {@link Dataset}s. 
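The hunks below thread external controlled-vocabulary registration through the dataset commands: AbstractCreateDatasetCommand gains a call to registerExternalVocabValuesIfAny(ctxt, dsv), and AbstractDatasetCommand gains the shared helper itself. Pieced together from those hunks, and using only members this patch shows (CommandContext.dsField(), getCVocConf(true), registerExternalVocabValues(...)), the helper's logic reads roughly as:

    // Condensed sketch of the helper added below; 'var' avoids asserting the exact
    // map type returned by getCVocConf(true) (assumed to be keyed by DatasetFieldType id).
    protected void registerExternalVocabValuesIfAny(CommandContext ctxt, DatasetVersion newVersion) {
        var cvocConf = ctxt.dsField().getCVocConf(true);
        for (DatasetField df : newVersion.getFlatDatasetFields()) {
            if (cvocConf.containsKey(df.getDatasetFieldType().getId())) {
                // this field type is wired to an external vocabulary: register its values
                ctxt.dsField().registerExternalVocabValues(df);
            }
        }
    }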
@@ -97,6 +90,8 @@ public Dataset execute(CommandContext ctxt) throws CommandException { if(!harvested) { checkSystemMetadataKeyIfNeeded(dsv, null); } + + registerExternalVocabValuesIfAny(ctxt, dsv); theDataset.setCreator((AuthenticatedUser) getRequest().getUser()); diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/AbstractDatasetCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/AbstractDatasetCommand.java index 85e417ac5f3..1a1f4f9318b 100644 --- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/AbstractDatasetCommand.java +++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/AbstractDatasetCommand.java @@ -2,10 +2,13 @@ import edu.harvard.iq.dataverse.Dataset; import edu.harvard.iq.dataverse.DatasetField; +import edu.harvard.iq.dataverse.DatasetFieldServiceBean; import edu.harvard.iq.dataverse.DatasetVersion; import edu.harvard.iq.dataverse.DatasetVersionDifference; import edu.harvard.iq.dataverse.DatasetVersionUser; import edu.harvard.iq.dataverse.Dataverse; +import edu.harvard.iq.dataverse.MetadataBlock; +import edu.harvard.iq.dataverse.TermsOfUseAndAccess; import edu.harvard.iq.dataverse.authorization.users.AuthenticatedUser; import edu.harvard.iq.dataverse.engine.command.AbstractCommand; import edu.harvard.iq.dataverse.engine.command.CommandContext; @@ -24,9 +27,8 @@ import java.util.logging.Logger; import static java.util.stream.Collectors.joining; +import jakarta.ejb.EJB; import jakarta.validation.ConstraintViolation; -import edu.harvard.iq.dataverse.MetadataBlock; -import edu.harvard.iq.dataverse.TermsOfUseAndAccess; import edu.harvard.iq.dataverse.settings.JvmSettings; /** @@ -231,4 +233,13 @@ protected void checkSystemMetadataKeyIfNeeded(DatasetVersion newVersion, Dataset } } } + + protected void registerExternalVocabValuesIfAny(CommandContext ctxt, DatasetVersion newVersion) { + for (DatasetField df : newVersion.getFlatDatasetFields()) { + logger.fine("Found id: " + df.getDatasetFieldType().getId()); + if (ctxt.dsField().getCVocConf(true).containsKey(df.getDatasetFieldType().getId())) { + ctxt.dsField().registerExternalVocabValues(df); + } + } + } } diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/AssignRoleCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/AssignRoleCommand.java index e4edb973cd9..121af765737 100644 --- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/AssignRoleCommand.java +++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/AssignRoleCommand.java @@ -3,7 +3,6 @@ */ package edu.harvard.iq.dataverse.engine.command.impl; -import edu.harvard.iq.dataverse.DataFile; import edu.harvard.iq.dataverse.Dataset; import edu.harvard.iq.dataverse.Dataverse; import edu.harvard.iq.dataverse.authorization.DataverseRole; @@ -18,6 +17,8 @@ import edu.harvard.iq.dataverse.engine.command.DataverseRequest; import edu.harvard.iq.dataverse.engine.command.exception.CommandException; import edu.harvard.iq.dataverse.engine.command.exception.IllegalCommandException; +import edu.harvard.iq.dataverse.util.BundleUtil; + import java.util.Collections; import java.util.HashSet; import java.util.Map; @@ -68,11 +69,22 @@ public RoleAssignment execute(CommandContext ctxt) throws CommandException { throw new IllegalCommandException("User " + user.getUserIdentifier() + " is deactivated and cannot be given a role.", this); } } + if(isExistingRole(ctxt)){ + throw new 
IllegalCommandException(BundleUtil.getStringFromBundle("datasets.api.grant.role.assignee.has.role.error"), this); + } // TODO make sure the role is defined on the dataverse. RoleAssignment roleAssignment = new RoleAssignment(role, grantee, defPoint, privateUrlToken, anonymizedAccess); return ctxt.roles().save(roleAssignment); } + private boolean isExistingRole(CommandContext ctxt) { + return ctxt.roles() + .directRoleAssignments(grantee, defPoint) + .stream() + .map(RoleAssignment::getRole) + .anyMatch(it -> it.equals(role)); + } + @Override public Map<String, Set<Permission>> getRequiredPermissions() { // for data file check permission on owning dataset diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CreateDatasetVersionCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CreateDatasetVersionCommand.java index bcaece55fed..6539ac27ea2 100644 --- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CreateDatasetVersionCommand.java +++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CreateDatasetVersionCommand.java @@ -59,7 +59,8 @@ public DatasetVersion execute(CommandContext ctxt) throws CommandException { //Will throw an IllegalCommandException if a system metadatablock is changed and the appropriate key is not supplied. checkSystemMetadataKeyIfNeeded(newVersion, latest); - + registerExternalVocabValuesIfAny(ctxt, newVersion); + List<FileMetadata> newVersionMetadatum = new ArrayList<>(latest.getFileMetadatas().size()); for ( FileMetadata fmd : latest.getFileMetadatas() ) { FileMetadata fmdCopy = fmd.createCopy(); diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/DestroyDatasetCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/DestroyDatasetCommand.java index 877f3b81d7e..be3e28029e4 100644 --- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/DestroyDatasetCommand.java +++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/DestroyDatasetCommand.java @@ -2,6 +2,7 @@ import edu.harvard.iq.dataverse.DataFile; import edu.harvard.iq.dataverse.Dataset; +import edu.harvard.iq.dataverse.DatasetVersion; import edu.harvard.iq.dataverse.Dataverse; import edu.harvard.iq.dataverse.GlobalId; import edu.harvard.iq.dataverse.authorization.DataverseRole; @@ -64,17 +65,17 @@ protected void executeImpl(CommandContext ctxt) throws CommandException { throw new PermissionException("Destroy can only be called by superusers.", this, Collections.singleton(Permission.DeleteDatasetDraft), doomed); } + Dataset managedDoomed = ctxt.em().merge(doomed); // If there is a dedicated thumbnail DataFile, it needs to be reset // explicitly, or we'll get a constraint violation when deleting: - doomed.setThumbnailFile(null); - final Dataset managedDoomed = ctxt.em().merge(doomed); - + managedDoomed.setThumbnailFile(null); + // files need to iterate through and remove 'by hand' to avoid // optimistic lock issues... (plus the physical files need to be // deleted too!) - - Iterator<DataFile> dfIt = doomed.getFiles().iterator(); + DatasetVersion dv = managedDoomed.getLatestVersion(); + Iterator<DataFile> dfIt = managedDoomed.getFiles().iterator(); while (dfIt.hasNext()){ DataFile df = dfIt.next(); // Gather potential Solr IDs of files. As of this writing deaccessioned files are never indexed. @@ -85,32 +86,29 @@ protected void executeImpl(CommandContext ctxt) throws CommandException { ctxt.engine().submit(new DeleteDataFileCommand(df, getRequest(), true)); dfIt.remove(); } - - //also, lets delete the uploaded thumbnails!
- if (!doomed.isHarvested()) { - deleteDatasetLogo(doomed); - } + dv.setFileMetadatas(null); // ASSIGNMENTS - for (RoleAssignment ra : ctxt.roles().directRoleAssignments(doomed)) { + for (RoleAssignment ra : ctxt.roles().directRoleAssignments(managedDoomed)) { ctxt.em().remove(ra); } // ROLES - for (DataverseRole ra : ctxt.roles().findByOwnerId(doomed.getId())) { + for (DataverseRole ra : ctxt.roles().findByOwnerId(managedDoomed.getId())) { ctxt.em().remove(ra); } - if (!doomed.isHarvested()) { - GlobalId pid = doomed.getGlobalId(); + if (!managedDoomed.isHarvested()) { + //also, lets delete the uploaded thumbnails! + deleteDatasetLogo(managedDoomed); + // and remove the PID (perhaps should be after the remove in case that causes a roll-back?) + GlobalId pid = managedDoomed.getGlobalId(); if (pid != null) { PidProvider pidProvider = PidUtil.getPidProvider(pid.getProviderId()); try { - if (pidProvider.alreadyRegistered(doomed)) { - pidProvider.deleteIdentifier(doomed); - for (DataFile df : doomed.getFiles()) { - pidProvider.deleteIdentifier(df); - } + if (pidProvider.alreadyRegistered(managedDoomed)) { + pidProvider.deleteIdentifier(managedDoomed); + //Files are handled in DeleteDataFileCommand } } catch (Exception e) { logger.log(Level.WARNING, "Identifier deletion was not successful:", e.getMessage()); @@ -120,18 +118,20 @@ protected void executeImpl(CommandContext ctxt) throws CommandException { toReIndex = managedDoomed.getOwner(); - // dataset - ctxt.em().remove(managedDoomed); - // add potential Solr IDs of datasets to list for deletion - String solrIdOfPublishedDatasetVersion = IndexServiceBean.solrDocIdentifierDataset + doomed.getId(); + String solrIdOfPublishedDatasetVersion = IndexServiceBean.solrDocIdentifierDataset + managedDoomed.getId(); datasetAndFileSolrIdsToDelete.add(solrIdOfPublishedDatasetVersion); - String solrIdOfDraftDatasetVersion = IndexServiceBean.solrDocIdentifierDataset + doomed.getId() + IndexServiceBean.draftSuffix; + String solrIdOfDraftDatasetVersion = IndexServiceBean.solrDocIdentifierDataset + managedDoomed.getId() + IndexServiceBean.draftSuffix; datasetAndFileSolrIdsToDelete.add(solrIdOfDraftDatasetVersion); String solrIdOfDraftDatasetVersionPermission = solrIdOfDraftDatasetVersion + IndexServiceBean.discoverabilityPermissionSuffix; datasetAndFileSolrIdsToDelete.add(solrIdOfDraftDatasetVersionPermission); - String solrIdOfDeaccessionedDatasetVersion = IndexServiceBean.solrDocIdentifierDataset + doomed.getId() + IndexServiceBean.deaccessionedSuffix; + String solrIdOfDeaccessionedDatasetVersion = IndexServiceBean.solrDocIdentifierDataset + managedDoomed.getId() + IndexServiceBean.deaccessionedSuffix; datasetAndFileSolrIdsToDelete.add(solrIdOfDeaccessionedDatasetVersion); + + // dataset + ctxt.em().remove(managedDoomed); + + } @Override diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/GetLatestAccessibleDatasetVersionCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/GetLatestAccessibleDatasetVersionCommand.java index 7bcc851bde2..431b3ff47c6 100644 --- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/GetLatestAccessibleDatasetVersionCommand.java +++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/GetLatestAccessibleDatasetVersionCommand.java @@ -25,24 +25,25 @@ public class GetLatestAccessibleDatasetVersionCommand extends AbstractCommand { private final Dataset ds; private final boolean includeDeaccessioned; - private boolean checkPerms; + private boolean checkPermsWhenDeaccessioned; 
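The rename just above makes the flag's scope explicit: checkPermsWhenDeaccessioned only governs permission checks when the resolved version is deaccessioned, while the draft path in execute() below always requires ViewUnpublishedDataset. A usage sketch under that reading (req and dataset are illustrative names, following the ctxt.engine().submit(...) style used elsewhere in this patch):

    // Fetch the newest version this request may access, including deaccessioned
    // versions, and enforce permission checks in the deaccessioned case:
    DatasetVersion latest = ctxt.engine().submit(
            new GetLatestAccessibleDatasetVersionCommand(req, dataset, true, true));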
public GetLatestAccessibleDatasetVersionCommand(DataverseRequest aRequest, Dataset anAffectedDataset) { - this(aRequest, anAffectedDataset, false, false); + this(aRequest, anAffectedDataset,false, false); } - public GetLatestAccessibleDatasetVersionCommand(DataverseRequest aRequest, Dataset anAffectedDataset, boolean includeDeaccessioned, boolean checkPerms) { + public GetLatestAccessibleDatasetVersionCommand(DataverseRequest aRequest, Dataset anAffectedDataset, boolean includeDeaccessioned, boolean checkPermsWhenDeaccessioned) { super(aRequest, anAffectedDataset); ds = anAffectedDataset; this.includeDeaccessioned = includeDeaccessioned; - this.checkPerms = checkPerms; + this.checkPermsWhenDeaccessioned = checkPermsWhenDeaccessioned; } @Override public DatasetVersion execute(CommandContext ctxt) throws CommandException { - if (ds.getLatestVersion().isDraft() && ctxt.permissions().requestOn(getRequest(), ds).has(Permission.ViewUnpublishedDataset)) { + if (ds.getLatestVersion().isDraft() && + ctxt.permissions().requestOn(getRequest(), ds).has(Permission.ViewUnpublishedDataset)) { return ctxt.engine().submit(new GetDraftDatasetVersionCommand(getRequest(), ds)); } - return ctxt.engine().submit(new GetLatestPublishedDatasetVersionCommand(getRequest(), ds, includeDeaccessioned, checkPerms)); + return ctxt.engine().submit(new GetLatestPublishedDatasetVersionCommand(getRequest(), ds, includeDeaccessioned, checkPermsWhenDeaccessioned)); } } diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/ListFeaturedCollectionsCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/ListFeaturedCollectionsCommand.java new file mode 100644 index 00000000000..4dca522e499 --- /dev/null +++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/ListFeaturedCollectionsCommand.java @@ -0,0 +1,50 @@ + +package edu.harvard.iq.dataverse.engine.command.impl; + +import edu.harvard.iq.dataverse.Dataverse; +import edu.harvard.iq.dataverse.DataverseFeaturedDataverse; +import edu.harvard.iq.dataverse.DvObject; +import edu.harvard.iq.dataverse.authorization.Permission; +import edu.harvard.iq.dataverse.engine.command.AbstractCommand; +import edu.harvard.iq.dataverse.engine.command.CommandContext; +import edu.harvard.iq.dataverse.engine.command.DataverseRequest; +import edu.harvard.iq.dataverse.engine.command.exception.CommandException; +import java.util.ArrayList; +import java.util.Collections; +import java.util.List; +import java.util.Map; +import java.util.Set; + +/** + * + * @author stephenkraffmiller + */ +public class ListFeaturedCollectionsCommand extends AbstractCommand<List<Dataverse>> { + + private final Dataverse dv; + + public ListFeaturedCollectionsCommand(DataverseRequest aRequest, Dataverse aDataverse) { + super(aRequest, aDataverse); + dv = aDataverse; + } + + @Override + public List<Dataverse> execute(CommandContext ctxt) throws CommandException { + List<Dataverse> featuredTarget = new ArrayList<>(); + List<DataverseFeaturedDataverse> featuredList = ctxt.featuredDataverses().findByDataverseId(dv.getId()); + for (DataverseFeaturedDataverse dfd : featuredList) { + Dataverse fd = dfd.getFeaturedDataverse(); + featuredTarget.add(fd); + } + return featuredTarget; + + } + + @Override + public Map<String, Set<Permission>> getRequiredPermissions() { + return Collections.singletonMap("", + dv.isReleased() ? Collections.emptySet() + : Collections.singleton(Permission.ViewUnpublishedDataverse)); + } + +} diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/ListMetadataBlocksCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/ListMetadataBlocksCommand.java index 912318cf155..8275533ced2 100644 --- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/ListMetadataBlocksCommand.java +++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/ListMetadataBlocksCommand.java @@ -7,6 +7,7 @@ import edu.harvard.iq.dataverse.engine.command.CommandContext; import edu.harvard.iq.dataverse.engine.command.DataverseRequest; import edu.harvard.iq.dataverse.engine.command.exception.CommandException; + import java.util.Collections; import java.util.List; import java.util.Map; @@ -14,29 +15,40 @@ /** * Lists the metadata blocks of a {@link Dataverse}. - * + * * @author michael */ // no annotations here, since permissions are dynamically decided -public class ListMetadataBlocksCommand extends AbstractCommand<List<MetadataBlock>>{ - - private final Dataverse dv; - - public ListMetadataBlocksCommand(DataverseRequest aRequest, Dataverse aDataverse) { - super(aRequest, aDataverse); - dv = aDataverse; +public class ListMetadataBlocksCommand extends AbstractCommand<List<MetadataBlock>> { + + private final Dataverse dataverse; + private final boolean onlyDisplayedOnCreate; + + public ListMetadataBlocksCommand(DataverseRequest request, Dataverse dataverse, boolean onlyDisplayedOnCreate) { + super(request, dataverse); + this.dataverse = dataverse; + this.onlyDisplayedOnCreate = onlyDisplayedOnCreate; } @Override public List<MetadataBlock> execute(CommandContext ctxt) throws CommandException { - return dv.getMetadataBlocks(); + if (onlyDisplayedOnCreate) { + return listMetadataBlocksDisplayedOnCreate(ctxt, dataverse); + } + return dataverse.getMetadataBlocks(); } - + + private List<MetadataBlock> listMetadataBlocksDisplayedOnCreate(CommandContext ctxt, Dataverse dataverse) { + if (dataverse.isMetadataBlockRoot() || dataverse.getOwner() == null) { + return ctxt.metadataBlocks().listMetadataBlocksDisplayedOnCreate(dataverse); + } + return listMetadataBlocksDisplayedOnCreate(ctxt, dataverse.getOwner()); + } + @Override public Map<String, Set<Permission>> getRequiredPermissions() { return Collections.singletonMap("", - dv.isReleased() ? Collections.emptySet() - : Collections.singleton(Permission.ViewUnpublishedDataverse)); - } - + dataverse.isReleased() ?
Collections.emptySet() + : Collections.singleton(Permission.ViewUnpublishedDataverse)); + } } diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/MergeInAccountCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/MergeInAccountCommand.java index 1ec51764d73..03f4dceef88 100644 --- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/MergeInAccountCommand.java +++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/MergeInAccountCommand.java @@ -14,7 +14,6 @@ import edu.harvard.iq.dataverse.UserNotification; import edu.harvard.iq.dataverse.authorization.AuthenticatedUserLookup; import edu.harvard.iq.dataverse.authorization.providers.builtin.BuiltinUser; -import edu.harvard.iq.dataverse.authorization.providers.oauth2.OAuth2TokenData; import edu.harvard.iq.dataverse.authorization.users.ApiToken; import edu.harvard.iq.dataverse.authorization.users.AuthenticatedUser; import edu.harvard.iq.dataverse.batch.util.LoggingUtil; @@ -25,7 +24,6 @@ import edu.harvard.iq.dataverse.engine.command.RequiredPermissions; import edu.harvard.iq.dataverse.engine.command.exception.CommandException; import edu.harvard.iq.dataverse.engine.command.exception.IllegalCommandException; -import edu.harvard.iq.dataverse.passwordreset.PasswordResetData; import edu.harvard.iq.dataverse.search.IndexResponse; import edu.harvard.iq.dataverse.search.savedsearch.SavedSearch; import edu.harvard.iq.dataverse.workflows.WorkflowComment; @@ -177,6 +175,7 @@ protected void executeImpl(CommandContext ctxt) throws CommandException { ctxt.em().createNativeQuery("Delete from OAuth2TokenData where user_id ="+consumedAU.getId()).executeUpdate(); + ctxt.em().createNativeQuery("DELETE FROM explicitgroup_authenticateduser consumed USING explicitgroup_authenticateduser ongoing WHERE consumed.containedauthenticatedusers_id="+ongoingAU.getId()+" AND ongoing.containedauthenticatedusers_id="+consumedAU.getId()).executeUpdate(); ctxt.em().createNativeQuery("UPDATE explicitgroup_authenticateduser SET containedauthenticatedusers_id="+ongoingAU.getId()+" WHERE containedauthenticatedusers_id="+consumedAU.getId()).executeUpdate(); ctxt.actionLog().changeUserIdentifierInHistory(consumedAU.getIdentifier(), ongoingAU.getIdentifier()); diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/RequestRsyncScriptCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/RequestRsyncScriptCommand.java index a29e7fdd59c..6b7baa7d01b 100644 --- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/RequestRsyncScriptCommand.java +++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/RequestRsyncScriptCommand.java @@ -27,6 +27,7 @@ * "actiontype" in the actionlogrecord rather than "InternalError" if you throw * a CommandExecutionException. 
*/ +@Deprecated(forRemoval = true, since = "2024-07-07") @RequiredPermissions(Permission.EditDataset) public class RequestRsyncScriptCommand extends AbstractCommand { diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/ReturnDatasetToAuthorCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/ReturnDatasetToAuthorCommand.java index f3b33f82524..8d8fddeda6b 100644 --- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/ReturnDatasetToAuthorCommand.java +++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/ReturnDatasetToAuthorCommand.java @@ -11,6 +11,7 @@ import edu.harvard.iq.dataverse.engine.command.RequiredPermissions; import edu.harvard.iq.dataverse.engine.command.exception.CommandException; import edu.harvard.iq.dataverse.engine.command.exception.IllegalCommandException; +import edu.harvard.iq.dataverse.settings.FeatureFlags; import edu.harvard.iq.dataverse.util.BundleUtil; import edu.harvard.iq.dataverse.workflows.WorkflowComment; import java.io.IOException; @@ -26,7 +27,7 @@ public class ReturnDatasetToAuthorCommand extends AbstractDatasetCommand uploadLogoSizeLimit) { - throw new IllegalCommandException("File is larger than maximum size: " + uploadLogoSizeLimit + ".", this); + throw new IllegalCommandException(BundleUtil.getStringFromBundle("datasets.api.thumbnail.fileToLarge", List.of(String.valueOf(uploadLogoSizeLimit))), this); } FileInputStream fileAsStream = null; try { @@ -107,23 +111,25 @@ public DatasetThumbnail execute(CommandContext ctxt) throws CommandException { Logger.getLogger(UpdateDatasetThumbnailCommand.class.getName()).log(Level.SEVERE, null, ex); } Dataset datasetWithNewThumbnail = ctxt.datasets().setNonDatasetFileAsThumbnail(dataset, fileAsStream); - IOUtils.closeQuietly(fileAsStream); + IOUtils.closeQuietly(fileAsStream); if (datasetWithNewThumbnail != null) { - return datasetWithNewThumbnail.getDatasetThumbnail(ImageThumbConverter.DEFAULT_CARDIMAGE_SIZE); - } else { - return null; + DatasetThumbnail thumbnail = datasetWithNewThumbnail.getDatasetThumbnail(ImageThumbConverter.DEFAULT_CARDIMAGE_SIZE); + if (thumbnail != null) { + return thumbnail; + } } + throw new IllegalCommandException(BundleUtil.getStringFromBundle("datasets.api.thumbnail.nonDatasetFailed"), this); case removeThumbnail: - Dataset ds2 = ctxt.datasets().removeDatasetThumbnail(dataset); + Dataset ds2 = ctxt.datasets().clearDatasetLevelThumbnail(dataset); DatasetThumbnail datasetThumbnail2 = ds2.getDatasetThumbnail(ImageThumbConverter.DEFAULT_CARDIMAGE_SIZE); if (datasetThumbnail2 == null) { return null; } else { - throw new CommandException("User wanted to remove the thumbnail it still has one!", this); + throw new CommandException(BundleUtil.getStringFromBundle("datasets.api.thumbnail.notDeleted"), this); } default: - throw new IllegalCommandException("Whatever you are trying to do to the dataset thumbnail is not supported.", this); + throw new IllegalCommandException(BundleUtil.getStringFromBundle("datasets.api.thumbnail.actionNotSupported"), this); } } diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/UpdateDatasetVersionCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/UpdateDatasetVersionCommand.java index 7591bebe796..994f4c7dfb6 100644 --- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/UpdateDatasetVersionCommand.java +++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/UpdateDatasetVersionCommand.java @@ -1,6 +1,12 @@ package 
edu.harvard.iq.dataverse.engine.command.impl; -import edu.harvard.iq.dataverse.*; +import edu.harvard.iq.dataverse.DataFile; +import edu.harvard.iq.dataverse.DataFileCategory; +import edu.harvard.iq.dataverse.Dataset; +import edu.harvard.iq.dataverse.DatasetLock; +import edu.harvard.iq.dataverse.DatasetVersion; +import edu.harvard.iq.dataverse.DatasetVersionDifference; +import edu.harvard.iq.dataverse.FileMetadata; import edu.harvard.iq.dataverse.authorization.Permission; import edu.harvard.iq.dataverse.authorization.users.AuthenticatedUser; import edu.harvard.iq.dataverse.engine.command.CommandContext; @@ -8,7 +14,6 @@ import edu.harvard.iq.dataverse.engine.command.RequiredPermissions; import edu.harvard.iq.dataverse.engine.command.exception.CommandException; import edu.harvard.iq.dataverse.engine.command.exception.IllegalCommandException; -import edu.harvard.iq.dataverse.settings.SettingsServiceBean; import edu.harvard.iq.dataverse.util.DatasetFieldUtil; import edu.harvard.iq.dataverse.util.FileMetadataUtil; @@ -115,8 +120,11 @@ public Dataset execute(CommandContext ctxt) throws CommandException { //Will throw an IllegalCommandException if a system metadatablock is changed and the appropriate key is not supplied. checkSystemMetadataKeyIfNeeded(getDataset().getOrCreateEditVersion(fmVarMet), persistedVersion); - - + + getDataset().getOrCreateEditVersion().setLastUpdateTime(getTimestamp()); + + registerExternalVocabValuesIfAny(ctxt, getDataset().getOrCreateEditVersion(fmVarMet)); + try { // Invariant: Dataset has no locks preventing the update String lockInfoMessage = "saving current edits"; @@ -256,7 +264,6 @@ public Dataset execute(CommandContext ctxt) throws CommandException { ctxt.ingest().recalculateDatasetVersionUNF(theDataset.getOrCreateEditVersion()); } - theDataset.getOrCreateEditVersion().setLastUpdateTime(getTimestamp()); theDataset.setModificationTime(getTimestamp()); savedDataset = ctxt.em().merge(theDataset); diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/UpdateDataverseCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/UpdateDataverseCommand.java index fe9415f39f9..bdb69dc918f 100644 --- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/UpdateDataverseCommand.java +++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/UpdateDataverseCommand.java @@ -18,7 +18,6 @@ import java.util.ArrayList; import java.util.List; import java.util.logging.Logger; -import jakarta.persistence.TypedQuery; /** * Update an existing dataverse. 
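Alongside the UpdateDataverseCommand cleanup below, this patch adds UpdateDataverseInputLevelsCommand (next file): it rejects an empty input-level list, pulls in any metadata blocks the new levels require, marks the collection as a metadata-block root, and delegates to UpdateDataverseCommand. A usage sketch (request and inputLevels are illustrative names):

    // Replace a collection's field-type input levels in a single command:
    Dataverse updated = ctxt.engine().submit(
            new UpdateDataverseInputLevelsCommand(dataverse, request, inputLevels));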
@@ -30,10 +29,10 @@ public class UpdateDataverseCommand extends AbstractCommand { private final Dataverse editedDv; private final List facetList; - private final List featuredDataverseList; - private final List inputLevelList; - - private boolean datasetsReindexRequired = false; + private final List featuredDataverseList; + private final List inputLevelList; + + private boolean datasetsReindexRequired = false; public UpdateDataverseCommand(Dataverse editedDv, List facetList, List featuredDataverseList, DataverseRequest aRequest, List inputLevelList ) { diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/UpdateDataverseInputLevelsCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/UpdateDataverseInputLevelsCommand.java new file mode 100644 index 00000000000..cf7b4a6f69c --- /dev/null +++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/UpdateDataverseInputLevelsCommand.java @@ -0,0 +1,51 @@ +package edu.harvard.iq.dataverse.engine.command.impl; + +import edu.harvard.iq.dataverse.Dataverse; +import edu.harvard.iq.dataverse.DataverseFieldTypeInputLevel; +import edu.harvard.iq.dataverse.MetadataBlock; +import edu.harvard.iq.dataverse.authorization.Permission; +import edu.harvard.iq.dataverse.engine.command.AbstractCommand; +import edu.harvard.iq.dataverse.engine.command.CommandContext; +import edu.harvard.iq.dataverse.engine.command.DataverseRequest; +import edu.harvard.iq.dataverse.engine.command.RequiredPermissions; +import edu.harvard.iq.dataverse.engine.command.exception.CommandException; + +import java.util.ArrayList; +import java.util.List; + +@RequiredPermissions(Permission.EditDataverse) +public class UpdateDataverseInputLevelsCommand extends AbstractCommand { + private final Dataverse dataverse; + private final List inputLevelList; + + public UpdateDataverseInputLevelsCommand(Dataverse dataverse, DataverseRequest request, List inputLevelList) { + super(request, dataverse); + this.dataverse = dataverse; + this.inputLevelList = new ArrayList<>(inputLevelList); + } + + @Override + public Dataverse execute(CommandContext ctxt) throws CommandException { + if (inputLevelList == null || inputLevelList.isEmpty()) { + throw new CommandException("Error while updating dataverse input levels: Input level list cannot be null or empty", this); + } + addInputLevelMetadataBlocks(); + dataverse.setMetadataBlockRoot(true); + return ctxt.engine().submit(new UpdateDataverseCommand(dataverse, null, null, getRequest(), inputLevelList)); + } + + private void addInputLevelMetadataBlocks() { + List dataverseMetadataBlocks = dataverse.getMetadataBlocks(); + for (DataverseFieldTypeInputLevel inputLevel : inputLevelList) { + MetadataBlock inputLevelMetadataBlock = inputLevel.getDatasetFieldType().getMetadataBlock(); + if (!dataverseHasMetadataBlock(dataverseMetadataBlocks, inputLevelMetadataBlock)) { + dataverseMetadataBlocks.add(inputLevelMetadataBlock); + } + } + dataverse.setMetadataBlocks(dataverseMetadataBlocks); + } + + private boolean dataverseHasMetadataBlock(List dataverseMetadataBlocks, MetadataBlock metadataBlock) { + return dataverseMetadataBlocks.stream().anyMatch(block -> block.getId().equals(metadataBlock.getId())); + } +} diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/UpdateDvObjectPIDMetadataCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/UpdateDvObjectPIDMetadataCommand.java index 0c463cddec1..5bf54ac1ec1 100644 --- 
a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/UpdateDvObjectPIDMetadataCommand.java +++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/UpdateDvObjectPIDMetadataCommand.java @@ -50,7 +50,7 @@ protected void executeImpl(CommandContext ctxt) throws CommandException { PidProvider pidProvider = PidUtil.getPidProvider(target.getGlobalId().getProviderId()); try { - Boolean doiRetString = pidProvider.publicizeIdentifier(target); + Boolean doiRetString = pidProvider.updateIdentifier(target); if (doiRetString) { target.setGlobalIdCreateTime(new Timestamp(new Date().getTime())); ctxt.em().merge(target); @@ -71,7 +71,7 @@ protected void executeImpl(CommandContext ctxt) throws CommandException { (!(df.getIdentifier() == null || df.getIdentifier().isEmpty()) || // identifier exists, or canCreatePidsForFiles) // we can create PIDs for files ) { - doiRetString = pidProvider.publicizeIdentifier(df); + doiRetString = pidProvider.updateIdentifier(df); if (doiRetString) { df.setGlobalIdCreateTime(new Timestamp(new Date().getTime())); ctxt.em().merge(df); diff --git a/src/main/java/edu/harvard/iq/dataverse/export/openaire/OpenAireExportUtil.java b/src/main/java/edu/harvard/iq/dataverse/export/openaire/OpenAireExportUtil.java index 49ceabc5900..4b8822e8b66 100644 --- a/src/main/java/edu/harvard/iq/dataverse/export/openaire/OpenAireExportUtil.java +++ b/src/main/java/edu/harvard/iq/dataverse/export/openaire/OpenAireExportUtil.java @@ -437,7 +437,7 @@ public static void writeSubjectsElement(XMLStreamWriter xmlw, DatasetVersionDTO for (String subject : fieldDTO.getMultipleVocab()) { if (StringUtils.isNotBlank(subject)) { subject_check = writeOpenTag(xmlw, "subjects", subject_check); - writeSubjectElement(xmlw, null, null, subject, language); + writeSubjectElement(xmlw, null, null, null, subject, language); } } } @@ -446,7 +446,8 @@ public static void writeSubjectsElement(XMLStreamWriter xmlw, DatasetVersionDTO for (HashSet fieldDTOs : fieldDTO.getMultipleCompound()) { String subject = null; String subjectScheme = null; - String schemeURI = null; + String keywordTermURI = null; + String keywordVocabURI = null; for (Iterator iterator = fieldDTOs.iterator(); iterator.hasNext();) { FieldDTO next = iterator.next(); @@ -454,18 +455,22 @@ public static void writeSubjectsElement(XMLStreamWriter xmlw, DatasetVersionDTO subject = next.getSinglePrimitive(); } + if (DatasetFieldConstant.keywordTermURI.equals(next.getTypeName())) { + keywordTermURI = next.getSinglePrimitive(); + } + if (DatasetFieldConstant.keywordVocab.equals(next.getTypeName())) { subjectScheme = next.getSinglePrimitive(); } - + if (DatasetFieldConstant.keywordVocabURI.equals(next.getTypeName())) { - schemeURI = next.getSinglePrimitive(); + keywordVocabURI = next.getSinglePrimitive(); } } if (StringUtils.isNotBlank(subject)) { subject_check = writeOpenTag(xmlw, "subjects", subject_check); - writeSubjectElement(xmlw, subjectScheme, schemeURI, subject, language); + writeSubjectElement(xmlw, subjectScheme, keywordTermURI, keywordVocabURI, subject, language); } } } @@ -493,7 +498,7 @@ public static void writeSubjectsElement(XMLStreamWriter xmlw, DatasetVersionDTO if (StringUtils.isNotBlank(subject)) { subject_check = writeOpenTag(xmlw, "subjects", subject_check); - writeSubjectElement(xmlw, subjectScheme, schemeURI, subject, language); + writeSubjectElement(xmlw, subjectScheme, null, schemeURI, subject, language); } } } @@ -513,7 +518,7 @@ public static void writeSubjectsElement(XMLStreamWriter xmlw, DatasetVersionDTO * @param 
language * @throws XMLStreamException */ - private static void writeSubjectElement(XMLStreamWriter xmlw, String subjectScheme, String schemeURI, String value, String language) throws XMLStreamException { + private static void writeSubjectElement(XMLStreamWriter xmlw, String subjectScheme, String valueURI, String schemeURI, String value, String language) throws XMLStreamException { // write a subject Map subject_map = new HashMap(); @@ -524,6 +529,9 @@ private static void writeSubjectElement(XMLStreamWriter xmlw, String subjectSche if (StringUtils.isNotBlank(subjectScheme)) { subject_map.put("subjectScheme", subjectScheme); } + if (StringUtils.isNotBlank(valueURI)) { + subject_map.put("valueURI", valueURI); + } if (StringUtils.isNotBlank(schemeURI)) { subject_map.put("schemeURI", schemeURI); } diff --git a/src/main/java/edu/harvard/iq/dataverse/globus/GlobusServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/globus/GlobusServiceBean.java index 9fda7a0f7f1..fb50214c259 100644 --- a/src/main/java/edu/harvard/iq/dataverse/globus/GlobusServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/globus/GlobusServiceBean.java @@ -985,10 +985,14 @@ public void globusDownload(String jsonData, Dataset dataset, User authUser) thro if (taskStatus.startsWith("FAILED") || taskStatus.startsWith("INACTIVE")) { String comment = "Reason : " + taskStatus.split("#")[1] + "
    Short Description : " + taskStatus.split("#")[2]; - userNotificationService.sendNotification((AuthenticatedUser) authUser, new Timestamp(new Date().getTime()), - UserNotification.Type.GLOBUSDOWNLOADCOMPLETEDWITHERRORS, dataset.getId(), comment, true); - globusLogger.info("Globus task failed during download process"); - } else { + if (authUser != null && authUser instanceof AuthenticatedUser) { + userNotificationService.sendNotification((AuthenticatedUser) authUser, new Timestamp(new Date().getTime()), + UserNotification.Type.GLOBUSDOWNLOADCOMPLETEDWITHERRORS, dataset.getId(), comment, true); + } + + globusLogger.info("Globus task failed during download process: "+comment); + } else if (authUser != null && authUser instanceof AuthenticatedUser) { + boolean taskSkippedFiles = (task.getSkip_source_errors() == null) ? false : task.getSkip_source_errors(); if (!taskSkippedFiles) { userNotificationService.sendNotification((AuthenticatedUser) authUser, @@ -1257,11 +1261,11 @@ public void writeGuestbookAndStartTransfer(GuestbookResponse guestbookResponse, Long fileId = Long.parseLong(idAsString); // If we need to create a GuestBookResponse record, we have to // look up the DataFile object for this file: + df = dataFileService.findCheapAndEasy(fileId); + selectedFiles.add(df); if (!doNotSaveGuestbookResponse) { - df = dataFileService.findCheapAndEasy(fileId); guestbookResponse.setDataFile(df); fileDownloadService.writeGuestbookResponseRecord(guestbookResponse); - selectedFiles.add(df); } } catch (NumberFormatException nfe) { logger.warning( diff --git a/src/main/java/edu/harvard/iq/dataverse/mydata/DataRetrieverAPI.java b/src/main/java/edu/harvard/iq/dataverse/mydata/DataRetrieverAPI.java index 0a64f42d840..6c99155d8a4 100644 --- a/src/main/java/edu/harvard/iq/dataverse/mydata/DataRetrieverAPI.java +++ b/src/main/java/edu/harvard/iq/dataverse/mydata/DataRetrieverAPI.java @@ -3,6 +3,7 @@ */ package edu.harvard.iq.dataverse.mydata; +import edu.harvard.iq.dataverse.DatasetServiceBean; import edu.harvard.iq.dataverse.DataverseRoleServiceBean; import edu.harvard.iq.dataverse.DataverseServiceBean; import edu.harvard.iq.dataverse.DataverseSession; @@ -26,10 +27,10 @@ import edu.harvard.iq.dataverse.search.SearchFields; import edu.harvard.iq.dataverse.search.SortBy; +import java.util.Arrays; import java.util.List; import java.util.Map; import java.util.logging.Logger; -import java.util.Locale; import jakarta.ejb.EJB; import jakarta.inject.Inject; import jakarta.json.Json; @@ -63,7 +64,7 @@ public class DataRetrieverAPI extends AbstractApiBean { private static final String retrieveDataPartialAPIPath = "retrieve"; @Inject - DataverseSession session; + DataverseSession session; @EJB DataverseRoleServiceBean dataverseRoleService; @@ -81,6 +82,8 @@ public class DataRetrieverAPI extends AbstractApiBean { //MyDataQueryHelperServiceBean myDataQueryHelperServiceBean; @EJB GroupServiceBean groupService; + @EJB + DatasetServiceBean datasetService; private List roleList; private DataverseRolePermissionHelper rolePermissionHelper; @@ -274,9 +277,7 @@ public String retrieveMyDataAsJsonString( @QueryParam("dataset_valid") List datasetValidities) { boolean OTHER_USER = false; - String localeCode = session.getLocaleCode(); - String noMsgResultsFound = BundleUtil.getStringFromPropertyFile("dataretrieverAPI.noMsgResultsFound", - "Bundle", new Locale(localeCode)); + String noMsgResultsFound = BundleUtil.getStringFromBundle("dataretrieverAPI.noMsgResultsFound"); if ((session.getUser() != null) && 
(session.getUser().isAuthenticated())) { authUser = (AuthenticatedUser) session.getUser(); @@ -284,7 +285,10 @@ public String retrieveMyDataAsJsonString( try { authUser = getRequestAuthenticatedUserOrDie(crc); } catch (WrappedResponse e) { - return this.getJSONErrorString("Requires authentication. Please login.", "retrieveMyDataAsJsonString. User not found! Shouldn't be using this anyway"); + return this.getJSONErrorString( + BundleUtil.getStringFromBundle("dataretrieverAPI.authentication.required"), + BundleUtil.getStringFromBundle("dataretrieverAPI.authentication.required.opt") + ); } } @@ -297,7 +301,9 @@ public String retrieveMyDataAsJsonString( authUser = searchUser; OTHER_USER = true; } else { - return this.getJSONErrorString("No user found for: \"" + userIdentifier + "\"", null); + return this.getJSONErrorString( + BundleUtil.getStringFromBundle("dataretrieverAPI.user.not.found", Arrays.asList(userIdentifier)), + null); } } @@ -337,8 +343,7 @@ public String retrieveMyDataAsJsonString( myDataFinder = new MyDataFinder(rolePermissionHelper, roleAssigneeService, dvObjectServiceBean, - groupService, - noMsgResultsFound); + groupService); this.myDataFinder.runFindDataSteps(filterParams); if (myDataFinder.hasError()){ return this.getJSONErrorString(myDataFinder.getErrorMessage(), myDataFinder.getErrorMessage()); @@ -393,11 +398,14 @@ public String retrieveMyDataAsJsonString( } catch (SearchException ex) { solrQueryResponse = null; - this.logger.severe("Solr SearchException: " + ex.getMessage()); + logger.severe("Solr SearchException: " + ex.getMessage()); } - if (solrQueryResponse==null){ - return this.getJSONErrorString("Sorry! There was an error with the search service.", "Sorry! There was a SOLR Error"); + if (solrQueryResponse == null) { + return this.getJSONErrorString( + BundleUtil.getStringFromBundle("dataretrieverAPI.solr.error"), + BundleUtil.getStringFromBundle("dataretrieverAPI.solr.error.opt") + ); } // --------------------------------- @@ -491,9 +499,10 @@ private JsonArrayBuilder formatSolrDocs(SolrQueryResponse solrResponse, RoleTagR // ------------------------------------------- // (a) Get core card data from solr // ------------------------------------------- - myDataCardInfo = doc.getJsonForMyData(); - if (!doc.getEntity().isInstanceofDataFile()){ + myDataCardInfo = doc.getJsonForMyData(isValid(doc)); + + if (doc.getEntity() != null && !doc.getEntity().isInstanceofDataFile()){ String parentAlias = dataverseService.getParentAliasString(doc); myDataCardInfo.add("parent_alias",parentAlias); } @@ -514,4 +523,8 @@ private JsonArrayBuilder formatSolrDocs(SolrQueryResponse solrResponse, RoleTagR return jsonSolrDocsArrayBuilder; } + + private boolean isValid(SolrSearchResult result) { + return result.isValid(x -> true); + } } \ No newline at end of file diff --git a/src/main/java/edu/harvard/iq/dataverse/mydata/MyDataFilterParams.java b/src/main/java/edu/harvard/iq/dataverse/mydata/MyDataFilterParams.java index 2ab248fcc0b..2acb93d37f5 100644 --- a/src/main/java/edu/harvard/iq/dataverse/mydata/MyDataFilterParams.java +++ b/src/main/java/edu/harvard/iq/dataverse/mydata/MyDataFilterParams.java @@ -12,6 +12,7 @@ import edu.harvard.iq.dataverse.engine.command.DataverseRequest; import edu.harvard.iq.dataverse.search.SearchConstants; import edu.harvard.iq.dataverse.search.SearchFields; +import edu.harvard.iq.dataverse.util.BundleUtil; import java.util.ArrayList; import java.util.Arrays; import java.util.HashMap; @@ -178,26 +179,25 @@ public List getRoleIds(){ } - - private void 
checkParams(){ - - if ((this.userIdentifier == null)||(this.userIdentifier.isEmpty())){ - this.addError("Sorry! No user was found!"); + private void checkParams() { + if ((this.userIdentifier == null) || (this.userIdentifier.isEmpty())) { + this.addError(BundleUtil.getStringFromBundle("myDataFilterParams.error.no.user")); return; } - if ((this.roleIds == null)||(this.roleIds.isEmpty())){ - this.addError("No results. Please select at least one Role."); + if ((this.roleIds == null) || (this.roleIds.isEmpty())) { + this.addError(BundleUtil.getStringFromBundle("myDataFilterParams.error.result.no.role")); return; } - if ((this.dvObjectTypes == null)||(this.dvObjectTypes.isEmpty())){ - this.addError("No results. Please select one of Dataverses, Datasets, Files."); + if ((this.dvObjectTypes == null) || (this.dvObjectTypes.isEmpty())) { + this.addError(BundleUtil.getStringFromBundle("myDataFilterParams.error.result.no.dvobject")); return; } - - if ((this.publicationStatuses == null)||(this.publicationStatuses.isEmpty())){ - this.addError("No results. Please select one of " + StringUtils.join(MyDataFilterParams.defaultPublishedStates, ", ") + "."); + + if ((this.publicationStatuses == null) || (this.publicationStatuses.isEmpty())) { + this.addError(BundleUtil.getStringFromBundle("dataretrieverAPI.user.not.found", + Arrays.asList(StringUtils.join(MyDataFilterParams.defaultPublishedStates, ", ")))); return; } } @@ -292,7 +292,7 @@ public String getSolrFragmentForPublicationStatus(){ } public String getSolrFragmentForDatasetValidity(){ - if ((this.datasetValidities == null) || (this.datasetValidities.isEmpty())){ + if ((this.datasetValidities == null) || (this.datasetValidities.isEmpty()) || (this.datasetValidities.size() > 1)){ return ""; } diff --git a/src/main/java/edu/harvard/iq/dataverse/mydata/MyDataFinder.java b/src/main/java/edu/harvard/iq/dataverse/mydata/MyDataFinder.java index 4bd9ce2e00d..5626a442762 100644 --- a/src/main/java/edu/harvard/iq/dataverse/mydata/MyDataFinder.java +++ b/src/main/java/edu/harvard/iq/dataverse/mydata/MyDataFinder.java @@ -11,7 +11,9 @@ import edu.harvard.iq.dataverse.authorization.DataverseRolePermissionHelper; import edu.harvard.iq.dataverse.authorization.groups.GroupServiceBean; import edu.harvard.iq.dataverse.search.SearchFields; +import edu.harvard.iq.dataverse.util.BundleUtil; import java.util.ArrayList; +import java.util.Arrays; import java.util.HashMap; import java.util.HashSet; import java.util.List; @@ -47,7 +49,6 @@ public class MyDataFinder { private RoleAssigneeServiceBean roleAssigneeService; private DvObjectServiceBean dvObjectServiceBean; private GroupServiceBean groupService; - private String noMsgResultsFound; //private RoleAssigneeServiceBean roleService = new RoleAssigneeServiceBean(); //private MyDataQueryHelperServiceBean myDataQueryHelperService; // -------------------- @@ -86,12 +87,11 @@ public class MyDataFinder { private List fileGrandparentFileIds = new ArrayList<>(); // dataverse has file permissions - public MyDataFinder(DataverseRolePermissionHelper rolePermissionHelper, RoleAssigneeServiceBean roleAssigneeService, DvObjectServiceBean dvObjectServiceBean, GroupServiceBean groupService, String _noMsgResultsFound) { + public MyDataFinder(DataverseRolePermissionHelper rolePermissionHelper, RoleAssigneeServiceBean roleAssigneeService, DvObjectServiceBean dvObjectServiceBean, GroupServiceBean groupService) { this.rolePermissionHelper = rolePermissionHelper; this.roleAssigneeService = roleAssigneeService; this.dvObjectServiceBean = 
dvObjectServiceBean; this.groupService = groupService; - this.noMsgResultsFound = _noMsgResultsFound; this.loadHarvestedDataverseIds(); } @@ -213,7 +213,7 @@ private List getSolrFilterQueries(boolean totalCountsOnly){ // ----------------------------------------------------------------- String dvObjectFQ = this.getSolrDvObjectFilterQuery(); if (dvObjectFQ ==null){ - this.addErrorMessage(noMsgResultsFound); + this.addErrorMessage(BundleUtil.getStringFromBundle("myDataFinder.error.result.empty")); return null; } filterQueries.add(dvObjectFQ); @@ -286,7 +286,7 @@ public String getSolrDvObjectFilterQuery(){ if ((distinctEntityIds.isEmpty()) && (distinctParentIds.isEmpty())) { - this.addErrorMessage(noMsgResultsFound); + this.addErrorMessage(BundleUtil.getStringFromBundle("myDataFinder.error.result.empty")); return null; } @@ -430,24 +430,25 @@ public JsonArrayBuilder getListofSelectedRoles(){ } - private boolean runStep1RoleAssignments(){ + private boolean runStep1RoleAssignments() { List results = this.roleAssigneeService.getAssigneeAndRoleIdListFor(filterParams); //logger.info("runStep1RoleAssignments results: " + results.toString()); - if (results == null){ - this.addErrorMessage("Sorry, the EntityManager isn't working (still)."); + if (results == null) { + this.addErrorMessage(BundleUtil.getStringFromBundle("myDataFinder.error.result.null")); return false; - }else if (results.isEmpty()){ + } else if (results.isEmpty()) { List roleNames = this.rolePermissionHelper.getRoleNamesByIdList(this.filterParams.getRoleIds()); - if ((roleNames == null)||(roleNames.isEmpty())){ - this.addErrorMessage("Sorry, you have no assigned roles."); - }else{ - if (roleNames.size()==1){ - this.addErrorMessage("Sorry, nothing was found for this role: " + StringUtils.join(roleNames, ", ")); - }else{ - this.addErrorMessage("Sorry, nothing was found for these roles: " + StringUtils.join(roleNames, ", ")); + if ((roleNames == null) || (roleNames.isEmpty())) { + this.addErrorMessage(BundleUtil.getStringFromBundle("myDataFinder.error.result.no.role")); + } else { + final List args = Arrays.asList(StringUtils.join(roleNames, ", ")); + if (roleNames.size() == 1) { + this.addErrorMessage(BundleUtil.getStringFromBundle("myDataFinder.error.result.role.empty", args)); + } else { + this.addErrorMessage(BundleUtil.getStringFromBundle("myDataFinder.error.result.roles.empty", args)); } } return false; @@ -497,7 +498,7 @@ private boolean runStep2DirectAssignments(){ List results = this.dvObjectServiceBean.getDvObjectInfoForMyData(directDvObjectIds); //List results = this.roleAssigneeService.getAssignmentsFor(this.userIdentifier); if (results.isEmpty()){ - this.addErrorMessage("Sorry, you have no assigned Dataverses, Datasets, or Files."); + this.addErrorMessage(BundleUtil.getStringFromBundle("myDataFinder.error.result.no.dvobject")); return false; } diff --git a/src/main/java/edu/harvard/iq/dataverse/openapi/OpenApi.java b/src/main/java/edu/harvard/iq/dataverse/openapi/OpenApi.java new file mode 100644 index 00000000000..6bd54916e0d --- /dev/null +++ b/src/main/java/edu/harvard/iq/dataverse/openapi/OpenApi.java @@ -0,0 +1,101 @@ +package edu.harvard.iq.dataverse.openapi; + +import java.io.*; +import java.net.URL; +import java.nio.charset.StandardCharsets; +import java.util.*; +import java.util.logging.*; + +import jakarta.json.Json; +import jakarta.json.JsonObject; +import jakarta.servlet.ServletException; +import jakarta.servlet.annotation.WebServlet; +import jakarta.servlet.http.*; +import jakarta.ws.rs.core.*; +import 
org.apache.commons.io.IOUtils; +import edu.harvard.iq.dataverse.api.Info; +import edu.harvard.iq.dataverse.util.BundleUtil; + +@WebServlet("/openapi") +public class OpenApi extends HttpServlet { + + private static final Logger logger = Logger.getLogger(Info.class.getCanonicalName()); + + private static final String YAML_FORMAT = "yaml"; + private static final String JSON_FORMAT = "json"; + + + @Override + protected void doGet(HttpServletRequest req, HttpServletResponse resp) throws ServletException, IOException { + + + String format = req.getParameter("format"); + String accept = req.getHeader("Accept"); + + /* + * Check the Accept header first: if the request accepts application/json, + * any format parameter must also be json; otherwise we + * return BAD_REQUEST (400) + */ + if (MediaType.APPLICATION_JSON.equals(accept)){ + if (format != null && !JSON_FORMAT.equals(format)){ + List<String> args = Arrays.asList(accept, format); + String bundleResponse = BundleUtil.getStringFromBundle("openapi.exception.unaligned", args); + resp.sendError(Response.Status.BAD_REQUEST.getStatusCode(), + bundleResponse); + return; + } else { + format = JSON_FORMAT; + } + } + + /* + * We currently support only JSON and YAML, with YAML as the default when + * no format is specified; if any other format is specified we return + * UNSUPPORTED_MEDIA_TYPE (415) to indicate that the format is not supported + */ + + format = format == null ? YAML_FORMAT : format.toLowerCase(); + + if (JSON_FORMAT.equals(format)) { + resp.setContentType(MediaType.APPLICATION_JSON_TYPE.toString()); + } else if (YAML_FORMAT.equals(format)){ + resp.setContentType(MediaType.TEXT_PLAIN_TYPE.toString()); + } else { + + List<String> args = Arrays.asList(format); + String bundleResponse = BundleUtil.getStringFromBundle("openapi.exception.invalid.format", args); + + JsonObject errorResponse = Json.createObjectBuilder() + .add("status", "ERROR") + .add("code", HttpServletResponse.SC_UNSUPPORTED_MEDIA_TYPE) + .add("message", bundleResponse) + .build(); + + resp.setContentType(MediaType.APPLICATION_JSON_TYPE.toString()); + resp.setStatus(HttpServletResponse.SC_UNSUPPORTED_MEDIA_TYPE); + + PrintWriter responseWriter = resp.getWriter(); + responseWriter.println(errorResponse.toString()); + responseWriter.flush(); + return; + } + + try { + String baseFileName = "/META-INF/openapi."
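    // The servlet above serves the pre-built OpenAPI definition bundled on the
    // classpath, in the negotiated format. Illustrative requests, assuming an
    // installation at http://localhost:8080 (host and port are assumptions):
    //   curl http://localhost:8080/openapi                                             -> YAML (default)
    //   curl 'http://localhost:8080/openapi?format=json'                               -> JSON
    //   curl -H 'Accept: application/json' http://localhost:8080/openapi               -> JSON
    //   curl -H 'Accept: application/json' 'http://localhost:8080/openapi?format=yaml' -> 400
    // (the baseFileName string concatenation continues on the next line)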
+ format; + ClassLoader classLoader = this.getClass().getClassLoader(); + URL aliasesResource = classLoader.getResource(baseFileName); + InputStream openapiDefinitionStream = aliasesResource.openStream(); + String content = IOUtils.toString(openapiDefinitionStream, StandardCharsets.UTF_8); + resp.getWriter().write(content); + } catch (Exception e) { + logger.log(Level.SEVERE, "OpenAPI Definition format not found " + format + ":" + e.getMessage(), e); + String bundleResponse = BundleUtil.getStringFromBundle("openapi.exception"); + resp.sendError(Response.Status.INTERNAL_SERVER_ERROR.getStatusCode(), + bundleResponse); + } + + + } + +} diff --git a/src/main/java/edu/harvard/iq/dataverse/pidproviders/AbstractPidProvider.java b/src/main/java/edu/harvard/iq/dataverse/pidproviders/AbstractPidProvider.java index a3dcf6cbb3b..f6d142aac96 100644 --- a/src/main/java/edu/harvard/iq/dataverse/pidproviders/AbstractPidProvider.java +++ b/src/main/java/edu/harvard/iq/dataverse/pidproviders/AbstractPidProvider.java @@ -547,4 +547,10 @@ public JsonObject getProviderSpecification() { providerSpecification.add("excludedSet", Strings.join(",", excludedSet.toArray())); return providerSpecification.build(); } + + @Override + public boolean updateIdentifier(DvObject dvObject) { + //By default, these are the same + return publicizeIdentifier(dvObject); + } } diff --git a/src/main/java/edu/harvard/iq/dataverse/pidproviders/PidProvider.java b/src/main/java/edu/harvard/iq/dataverse/pidproviders/PidProvider.java index ea3a243f25c..194a51eeae0 100644 --- a/src/main/java/edu/harvard/iq/dataverse/pidproviders/PidProvider.java +++ b/src/main/java/edu/harvard/iq/dataverse/pidproviders/PidProvider.java @@ -54,6 +54,8 @@ public interface PidProvider { boolean publicizeIdentifier(DvObject studyIn); + boolean updateIdentifier(DvObject dvObject); + boolean isGlobalIdUnique(GlobalId globalId); String getUrlPrefix(); diff --git a/src/main/java/edu/harvard/iq/dataverse/pidproviders/doi/datacite/DOIDataCiteRegisterService.java b/src/main/java/edu/harvard/iq/dataverse/pidproviders/doi/datacite/DOIDataCiteRegisterService.java index 0e322eace05..cda70cbc506 100644 --- a/src/main/java/edu/harvard/iq/dataverse/pidproviders/doi/datacite/DOIDataCiteRegisterService.java +++ b/src/main/java/edu/harvard/iq/dataverse/pidproviders/doi/datacite/DOIDataCiteRegisterService.java @@ -23,6 +23,12 @@ import edu.harvard.iq.dataverse.pidproviders.AbstractPidProvider; import edu.harvard.iq.dataverse.pidproviders.doi.XmlMetadataTemplate; +import org.xmlunit.builder.DiffBuilder; +import org.xmlunit.builder.Input; +import org.xmlunit.builder.Input.Builder; +import org.xmlunit.diff.Diff; +import org.xmlunit.diff.Difference; + /** * * @author luopc @@ -69,6 +75,35 @@ public String registerIdentifier(String identifier, Map metadata return retString; } + + + public String reRegisterIdentifier(String identifier, Map metadata, DvObject dvObject) throws IOException { + String retString = ""; + String numericIdentifier = identifier.substring(identifier.indexOf(":") + 1); + String xmlMetadata = getMetadataFromDvObject(identifier, metadata, dvObject); + String target = metadata.get("_target"); + String currentMetadata = client.getMetadata(numericIdentifier); + Diff myDiff = DiffBuilder.compare(xmlMetadata) + .withTest(currentMetadata).ignoreWhitespace().checkForSimilar() + .build(); + + if (myDiff.hasDifferences()) { + for(Difference d : myDiff.getDifferences()) { + + logger.fine(d.toString()); + } + retString = "metadata:\\r" + client.postMetadata(xmlMetadata) + 
"\\r"; + } + if (!target.equals(client.getUrl(numericIdentifier))) { + logger.info("Updating target URL to " + target); + client.postUrl(numericIdentifier, target); + retString = retString + "url:\\r" + target; + + } + + return retString; + } + public String deactivateIdentifier(String identifier, Map metadata, DvObject dvObject) throws IOException { String retString = ""; diff --git a/src/main/java/edu/harvard/iq/dataverse/pidproviders/doi/datacite/DataCiteDOIProvider.java b/src/main/java/edu/harvard/iq/dataverse/pidproviders/doi/datacite/DataCiteDOIProvider.java index 23078e2719b..cd765933796 100644 --- a/src/main/java/edu/harvard/iq/dataverse/pidproviders/doi/datacite/DataCiteDOIProvider.java +++ b/src/main/java/edu/harvard/iq/dataverse/pidproviders/doi/datacite/DataCiteDOIProvider.java @@ -313,7 +313,31 @@ String getPidStatus(DvObject dvObject) { return status; } - + @Override + public boolean updateIdentifier(DvObject dvObject) { + logger.log(Level.FINE,"updateIdentifierStatus"); + if(dvObject.getIdentifier() == null || dvObject.getIdentifier().isEmpty() ){ + dvObject = generatePid(dvObject); + } + String identifier = getIdentifier(dvObject); + Map metadata = getUpdateMetadata(dvObject); + metadata.put("_status", "public"); + metadata.put("datacite.publicationyear", generateYear(dvObject)); + metadata.put("_target", getTargetUrl(dvObject)); + try { + String updated = doiDataCiteRegisterService.reRegisterIdentifier(identifier, metadata, dvObject); + if(updated.length()!=0) { + logger.info(identifier + "updated: " + updated ); + return true; + } else { + logger.info("No updated needed for " + identifier); + return false; //No update needed + } + } catch (Exception e) { + logger.log(Level.WARNING, "updateIdentifier failed: " + e.getMessage(), e); + return false; + } + } } diff --git a/src/main/java/edu/harvard/iq/dataverse/repositorystorageabstractionlayer/RepositoryStorageAbstractionLayerPage.java b/src/main/java/edu/harvard/iq/dataverse/repositorystorageabstractionlayer/RepositoryStorageAbstractionLayerPage.java index c252d2e3330..9edb536eda2 100644 --- a/src/main/java/edu/harvard/iq/dataverse/repositorystorageabstractionlayer/RepositoryStorageAbstractionLayerPage.java +++ b/src/main/java/edu/harvard/iq/dataverse/repositorystorageabstractionlayer/RepositoryStorageAbstractionLayerPage.java @@ -11,6 +11,7 @@ import jakarta.inject.Named; import jakarta.json.JsonArray; +@Deprecated(forRemoval = true, since = "2024-07-07") @Stateless @Named public class RepositoryStorageAbstractionLayerPage { @@ -22,17 +23,20 @@ public class RepositoryStorageAbstractionLayerPage { @EJB StorageSiteServiceBean storageSiteServiceBean; + @Deprecated(forRemoval = true, since = "2024-07-07") public String getLocalDataAccessDirectory(DatasetVersion datasetVersion) { String localDataAccessParentDir = settingsService.getValueForKey(SettingsServiceBean.Key.LocalDataAccessPath); return RepositoryStorageAbstractionLayerUtil.getLocalDataAccessDirectory(localDataAccessParentDir, datasetVersion.getDataset()); } + @Deprecated(forRemoval = true, since = "2024-07-07") public List getRsyncSites(DatasetVersion datasetVersion) { List storageSites = storageSiteServiceBean.findAll(); JsonArray storageSitesAsJson = RepositoryStorageAbstractionLayerUtil.getStorageSitesAsJson(storageSites); return RepositoryStorageAbstractionLayerUtil.getRsyncSites(datasetVersion.getDataset(), storageSitesAsJson); } + @Deprecated(forRemoval = true, since = "2024-07-07") public String getVerifyDataCommand(DatasetVersion datasetVersion) { return 
RepositoryStorageAbstractionLayerUtil.getVerifyDataCommand(datasetVersion.getDataset()); } diff --git a/src/main/java/edu/harvard/iq/dataverse/repositorystorageabstractionlayer/RepositoryStorageAbstractionLayerUtil.java b/src/main/java/edu/harvard/iq/dataverse/repositorystorageabstractionlayer/RepositoryStorageAbstractionLayerUtil.java index 8501fba3ce0..0d547402676 100644 --- a/src/main/java/edu/harvard/iq/dataverse/repositorystorageabstractionlayer/RepositoryStorageAbstractionLayerUtil.java +++ b/src/main/java/edu/harvard/iq/dataverse/repositorystorageabstractionlayer/RepositoryStorageAbstractionLayerUtil.java @@ -13,10 +13,12 @@ import jakarta.json.JsonArrayBuilder; import jakarta.json.JsonObject; +@Deprecated(forRemoval = true, since = "2024-07-07") public class RepositoryStorageAbstractionLayerUtil { private static final Logger logger = Logger.getLogger(RepositoryStorageAbstractionLayerUtil.class.getCanonicalName()); + @Deprecated(forRemoval = true, since = "2024-07-07") public static List getRsyncSites(Dataset dataset, JsonArray rsalSitesAsJson) { List rsalSites = new ArrayList<>(); boolean leafDirectoryOnly = false; @@ -30,6 +32,7 @@ public static List getRsyncSites(Dataset dataset, JsonArray rsalSites return rsalSites; } + @Deprecated(forRemoval = true, since = "2024-07-07") static String getLocalDataAccessDirectory(String localDataAccessParentDir, Dataset dataset) { if (localDataAccessParentDir == null) { localDataAccessParentDir = File.separator + "UNCONFIGURED ( " + SettingsServiceBean.Key.LocalDataAccessPath + " )"; @@ -38,6 +41,7 @@ static String getLocalDataAccessDirectory(String localDataAccessParentDir, Datas return localDataAccessParentDir + File.separator + getDirectoryContainingTheData(dataset, leafDirectoryOnly); } + @Deprecated(forRemoval = true, since = "2024-07-07") static String getVerifyDataCommand(Dataset dataset) { boolean leafDirectoryOnly = true; // TODO: if "files.sha" is defined somewhere, use it. @@ -51,6 +55,7 @@ static String getVerifyDataCommand(Dataset dataset) { * leafDirectoryOnly. See also * http://www.gnu.org/software/coreutils/manual/html_node/basename-invocation.html */ + @Deprecated(forRemoval = true, since = "2024-07-07") public static String getDirectoryContainingTheData(Dataset dataset, boolean leafDirectoryOnly) { /** * FIXME: What if there is more than one package in the dataset? @@ -81,6 +86,7 @@ public static String getDirectoryContainingTheData(Dataset dataset, boolean leaf * RSAL or some other "big data" component live for a list of remotes sites * to which a particular dataset is replicated to. 
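Stepping back to the PID changes above: updateIdentifier(DvObject) is a new entry point on the PidProvider interface, and AbstractPidProvider defaults it to publicizeIdentifier(...), so only providers that can compare and patch an existing record (like the DataCite provider above, which diffs the registered XML with XMLUnit and re-posts only on change) need to override it. A minimal sketch of calling it through the interface, assuming a DvObject with a global id already in hand:

    // Resolve the provider for this object's PID and push current metadata/target URL.
    PidProvider pidProvider = PidUtil.getPidProvider(dvObject.getGlobalId().getProviderId());
    boolean updated = pidProvider.updateIdentifier(dvObject); // true if anything was re-sent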
*/ + @Deprecated(forRemoval = true, since = "2024-07-07") static JsonArray getStorageSitesAsJson(List storageSites) { JsonArrayBuilder arraybuilder = Json.createArrayBuilder(); if (storageSites == null || storageSites.isEmpty()) { diff --git a/src/main/java/edu/harvard/iq/dataverse/search/IndexServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/search/IndexServiceBean.java index d6b3fd8c339..26b42734d19 100644 --- a/src/main/java/edu/harvard/iq/dataverse/search/IndexServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/search/IndexServiceBean.java @@ -1,6 +1,7 @@ package edu.harvard.iq.dataverse.search; import edu.harvard.iq.dataverse.*; +import edu.harvard.iq.dataverse.DatasetVersion.VersionState; import edu.harvard.iq.dataverse.authorization.AuthenticationServiceBean; import edu.harvard.iq.dataverse.authorization.providers.builtin.BuiltinUserServiceBean; import edu.harvard.iq.dataverse.batch.util.LoggingUtil; @@ -12,6 +13,8 @@ import edu.harvard.iq.dataverse.datavariable.VariableMetadataUtil; import edu.harvard.iq.dataverse.datavariable.VariableServiceBean; import edu.harvard.iq.dataverse.harvest.client.HarvestingClient; +import edu.harvard.iq.dataverse.search.IndexableDataset.DatasetState; +import edu.harvard.iq.dataverse.settings.FeatureFlags; import edu.harvard.iq.dataverse.settings.JvmSettings; import edu.harvard.iq.dataverse.settings.SettingsServiceBean; import edu.harvard.iq.dataverse.util.FileUtil; @@ -37,6 +40,7 @@ import java.util.concurrent.Future; import java.util.concurrent.Semaphore; import java.util.function.Function; +import java.util.logging.Level; import java.util.logging.Logger; import java.util.stream.Collectors; import jakarta.annotation.PostConstruct; @@ -214,6 +218,9 @@ public Future indexDataverse(Dataverse dataverse, boolean processPaths) solrInputDocument.addField(SearchFields.DATAVERSE_CATEGORY, dataverse.getIndexableCategoryName()); if (dataverse.isReleased()) { solrInputDocument.addField(SearchFields.PUBLICATION_STATUS, PUBLISHED_STRING); + if (FeatureFlags.ADD_PUBLICOBJECT_SOLR_FIELD.enabled()) { + solrInputDocument.addField(SearchFields.PUBLIC_OBJECT, true); + } solrInputDocument.addField(SearchFields.RELEASE_OR_CREATE_DATE, dataverse.getPublicationDate()); } else { solrInputDocument.addField(SearchFields.PUBLICATION_STATUS, UNPUBLISHED_STRING); @@ -317,14 +324,6 @@ public Future indexDataverse(Dataverse dataverse, boolean processPaths) logger.info(status); return new AsyncResult<>(status); } - try { - solrClientService.getSolrClient().commit(); - } catch (SolrServerException | IOException ex) { - status = ex.toString(); - logger.info(status); - return new AsyncResult<>(status); - } - dvObjectService.updateContentIndexTime(dataverse); IndexResponse indexResponse = solrIndexService.indexPermissionsForOneDvObject(dataverse); String msg = "indexed dataverse " + dataverse.getId() + ":" + dataverse.getAlias() + ". Response from permission indexing: " + indexResponse.getMessage(); @@ -477,94 +476,160 @@ private void doIndexDataset(Dataset dataset, boolean doNormalSolrDocCleanUp) thr * @todo should we use solrDocIdentifierDataset or * IndexableObject.IndexableTypes.DATASET.getName() + "_" ? 
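The ADD_PUBLICOBJECT_SOLR_FIELD check above, like the REDUCE_SOLR_DELETES flag used in the doIndexDataset() refactor below, is a runtime feature flag. Assuming the usual Dataverse convention that each FeatureFlags constant maps to a kebab-case MicroProfile setting under dataverse.feature.* (an assumption; the flag wiring is not shown in this diff), they would be enabled with JVM options along these lines:

    // Illustrative JVM options; flag names assumed from the enum constants:
    //   -Ddataverse.feature.add-publicobject-solr-field=true
    //   -Ddataverse.feature.reduce-solr-deletes=true
    // With a flag unset, FeatureFlags.REDUCE_SOLR_DELETES.enabled() returns false
    // and indexing keeps its previous delete-heavy behavior.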
*/ - // String solrIdPublished = solrDocIdentifierDataset + dataset.getId(); String solrIdPublished = determinePublishedDatasetSolrDocId(dataset); String solrIdDraftDataset = IndexableObject.IndexableTypes.DATASET.getName() + "_" + dataset.getId() + IndexableDataset.DatasetState.WORKING_COPY.getSuffix(); - // String solrIdDeaccessioned = IndexableObject.IndexableTypes.DATASET.getName() - // + "_" + dataset.getId() + - // IndexableDataset.DatasetState.DEACCESSIONED.getSuffix(); String solrIdDeaccessioned = determineDeaccessionedDatasetId(dataset); StringBuilder debug = new StringBuilder(); debug.append("\ndebug:\n"); - int numPublishedVersions = 0; - List versions = dataset.getVersions(); - List solrIdsOfFilesToDelete = new ArrayList<>(); - for (DatasetVersion datasetVersion : versions) { - Long versionDatabaseId = datasetVersion.getId(); - String versionTitle = datasetVersion.getTitle(); - String semanticVersion = datasetVersion.getSemanticVersion(); - DatasetVersion.VersionState versionState = datasetVersion.getVersionState(); - if (versionState.equals(DatasetVersion.VersionState.RELEASED)) { - numPublishedVersions += 1; - } - debug.append("version found with database id " + versionDatabaseId + "\n"); - debug.append("- title: " + versionTitle + "\n"); - debug.append("- semanticVersion-VersionState: " + semanticVersion + "-" + versionState + "\n"); - List fileMetadatas = datasetVersion.getFileMetadatas(); - List fileInfo = new ArrayList<>(); - for (FileMetadata fileMetadata : fileMetadatas) { - String solrIdOfPublishedFile = solrDocIdentifierFile + fileMetadata.getDataFile().getId(); - /** - * It sounds weird but the first thing we'll do is preemptively - * delete the Solr documents of all published files. Don't - * worry, published files will be re-indexed later along with - * the dataset. We do this so users can delete files from - * published versions of datasets and then re-publish a new - * version without fear that their old published files (now - * deleted from the latest published version) will be - * searchable. See also - * https://github.com/IQSS/dataverse/issues/762 - */ - solrIdsOfFilesToDelete.add(solrIdOfPublishedFile); - fileInfo.add(fileMetadata.getDataFile().getId() + ":" + fileMetadata.getLabel()); - } - try { - /** - * Preemptively delete *all* Solr documents for files associated - * with the dataset based on a Solr query. - * - * We must query Solr for this information because the file has - * been deleted from the database ( perhaps when Solr was down, - * as reported in https://github.com/IQSS/dataverse/issues/2086 - * ) so the database doesn't even know about the file. It's an - * orphan. - * - * @todo This Solr query should make the iteration above based - * on the database unnecessary because it the Solr query should - * find all files for the dataset. We can probably remove the - * iteration above after an "index all" has been performed. - * Without an "index all" we won't be able to find files based - * on parentId because that field wasn't searchable in 4.0. - * - * @todo We should also delete the corresponding Solr - * "permission" documents for the files. 
- */ - List<String> allFilesForDataset = findFilesOfParentDataset(dataset.getId()); - solrIdsOfFilesToDelete.addAll(allFilesForDataset); - } catch (SearchException | NullPointerException ex) { - logger.fine("could not run search of files to delete: " + ex); + boolean reduceSolrDeletes = FeatureFlags.REDUCE_SOLR_DELETES.enabled(); + if (!reduceSolrDeletes) { + int numPublishedVersions = 0; + List<DatasetVersion> versions = dataset.getVersions(); + List<String> solrIdsOfFilesToDelete = new ArrayList<>(); + for (DatasetVersion datasetVersion : versions) { + Long versionDatabaseId = datasetVersion.getId(); + String versionTitle = datasetVersion.getTitle(); + String semanticVersion = datasetVersion.getSemanticVersion(); + DatasetVersion.VersionState versionState = datasetVersion.getVersionState(); + if (versionState.equals(DatasetVersion.VersionState.RELEASED)) { + numPublishedVersions += 1; + } + debug.append("version found with database id " + versionDatabaseId + "\n"); + debug.append("- title: " + versionTitle + "\n"); + debug.append("- semanticVersion-VersionState: " + semanticVersion + "-" + versionState + "\n"); + List<FileMetadata> fileMetadatas = datasetVersion.getFileMetadatas(); + List<String> fileInfo = new ArrayList<>(); + for (FileMetadata fileMetadata : fileMetadatas) { + String solrIdOfPublishedFile = solrDocIdentifierFile + fileMetadata.getDataFile().getId(); + /** + * It sounds weird but the first thing we'll do is preemptively + * delete the Solr documents of all published files. Don't + * worry, published files will be re-indexed later along with + * the dataset. We do this so users can delete files from + * published versions of datasets and then re-publish a new + * version without fear that their old published files (now + * deleted from the latest published version) will be + * searchable. See also + * https://github.com/IQSS/dataverse/issues/762 + */ + solrIdsOfFilesToDelete.add(solrIdOfPublishedFile); + fileInfo.add(fileMetadata.getDataFile().getId() + ":" + fileMetadata.getLabel()); + } + try { + /** + * Preemptively delete *all* Solr documents for files associated + * with the dataset based on a Solr query. + * + * We must query Solr for this information because the file has + * been deleted from the database ( perhaps when Solr was down, + * as reported in https://github.com/IQSS/dataverse/issues/2086 + * ) so the database doesn't even know about the file. It's an + * orphan. + * + * @todo This Solr query should make the iteration above based + * on the database unnecessary because the Solr query should + * find all files for the dataset. We can probably remove the + * iteration above after an "index all" has been performed. + * Without an "index all" we won't be able to find files based + * on parentId because that field wasn't searchable in 4.0. + * + * @todo We should also delete the corresponding Solr + * "permission" documents for the files.
+ */ + List<String> allFilesForDataset = findFilesOfParentDataset(dataset.getId()); + solrIdsOfFilesToDelete.addAll(allFilesForDataset); + } catch (SearchException | NullPointerException ex) { + logger.fine("could not run search of files to delete: " + ex); + } + int numFiles = 0; + if (fileMetadatas != null) { + numFiles = fileMetadatas.size(); + } + debug.append("- files: " + numFiles + " " + fileInfo.toString() + "\n"); + } + debug.append("numPublishedVersions: " + numPublishedVersions + "\n"); + if (doNormalSolrDocCleanUp) { + IndexResponse resultOfAttemptToPremptivelyDeletePublishedFiles = solrIndexService.deleteMultipleSolrIds(solrIdsOfFilesToDelete); + debug.append("result of attempt to preemptively delete published files before reindexing: " + resultOfAttemptToPremptivelyDeletePublishedFiles + "\n"); + } } DatasetVersion latestVersion = dataset.getLatestVersion(); - String latestVersionStateString = latestVersion.getVersionState().name(); DatasetVersion.VersionState latestVersionState = latestVersion.getVersionState(); + String latestVersionStateString = latestVersionState.name(); DatasetVersion releasedVersion = dataset.getReleasedVersion(); boolean atLeastOnePublishedVersion = false; if (releasedVersion != null) { atLeastOnePublishedVersion = true; - } else { - atLeastOnePublishedVersion = false; } + if (reduceSolrDeletes) { + List<String> solrIdsOfDocsToDelete = null; + if (logger.isLoggable(Level.FINE)) { + writeDebugInfo(debug, dataset); + } + if (doNormalSolrDocCleanUp) { + try { + solrIdsOfDocsToDelete = findFilesOfParentDataset(dataset.getId()); + logger.fine("Existing file docs: " + String.join(", ", solrIdsOfDocsToDelete)); + if (!solrIdsOfDocsToDelete.isEmpty()) { + // We keep the latest version's docs unless it is deaccessioned and there is no + // published/released version + // So skip the loop removing those docs from the delete list except in that case + if ((!latestVersion.isDeaccessioned() || atLeastOnePublishedVersion)) { + List<FileMetadata> latestFileMetadatas = latestVersion.getFileMetadatas(); + String suffix = (new IndexableDataset(latestVersion)).getDatasetState().getSuffix(); + for (FileMetadata fileMetadata : latestFileMetadatas) { + String solrIdOfPublishedFile = solrDocIdentifierFile + + fileMetadata.getDataFile().getId() + suffix; + solrIdsOfDocsToDelete.remove(solrIdOfPublishedFile); + } + } + if (releasedVersion != null && !releasedVersion.equals(latestVersion)) { + List<FileMetadata> releasedFileMetadatas = releasedVersion.getFileMetadatas(); + for (FileMetadata fileMetadata : releasedFileMetadatas) { + String solrIdOfPublishedFile = solrDocIdentifierFile + + fileMetadata.getDataFile().getId(); + solrIdsOfDocsToDelete.remove(solrIdOfPublishedFile); + } + } + } + // Clear any unused dataset docs + if (!latestVersion.isDraft()) { + // The latest version is released, so should delete any draft docs for the + // dataset + solrIdsOfDocsToDelete.add(solrIdDraftDataset); + } + if (!atLeastOnePublishedVersion) { + // There's no released version, so should
delete any normal state docs for the + // dataset + solrIdsOfDocsToDelete.add(solrIdPublished); + } + if (atLeastOnePublishedVersion || !latestVersion.isDeaccessioned()) { + // There's a released version or a draft, so should delete any deaccessioned + // state docs for the dataset + solrIdsOfDocsToDelete.add(solrIdDeaccessioned); + } + } catch (SearchException | NullPointerException ex) { + logger.fine("could not run search of files to delete: " + ex); + } + logger.fine("Solr docs to delete: " + String.join(", ", solrIdsOfDocsToDelete)); + + if (!solrIdsOfDocsToDelete.isEmpty()) { + List<String> solrIdsOfPermissionDocsToDelete = new ArrayList<>(); + for (String file : solrIdsOfDocsToDelete) { + // Also remove associated permission docs + solrIdsOfPermissionDocsToDelete.add(file + discoverabilityPermissionSuffix); + } + solrIdsOfDocsToDelete.addAll(solrIdsOfPermissionDocsToDelete); + logger.fine("Solr docs and perm docs to delete: " + String.join(", ", solrIdsOfDocsToDelete)); + + IndexResponse resultOfAttemptToPremptivelyDeletePublishedFiles = solrIndexService + .deleteMultipleSolrIds(solrIdsOfDocsToDelete); + debug.append("result of attempt to preemptively delete published files before reindexing: " + + resultOfAttemptToPremptivelyDeletePublishedFiles + "\n"); + } + } + } + Map<DatasetVersion.VersionState, Boolean> desiredCards = new LinkedHashMap<>(); /** * @todo refactor all of this below and have a single method that takes @@ -587,7 +652,7 @@ private void doIndexDataset(Dataset dataset, boolean doNormalSolrDocCleanUp) thr .append(indexDraftResult).append("\n"); desiredCards.put(DatasetVersion.VersionState.DEACCESSIONED, false); - if (doNormalSolrDocCleanUp) { + if (!reduceSolrDeletes && doNormalSolrDocCleanUp) { String deleteDeaccessionedResult = removeDeaccessioned(dataset); results.append("Draft exists, no need for deaccessioned version. Deletion attempted for ") .append(solrIdDeaccessioned).append(" (and files). Result: ") .append(deleteDeaccessionedResult).append("\n"); } desiredCards.put(DatasetVersion.VersionState.RELEASED, false); - if (doNormalSolrDocCleanUp) { + if (!reduceSolrDeletes && doNormalSolrDocCleanUp) { String deletePublishedResults = removePublished(dataset); results.append("No published version. Attempting to delete traces of published version from index.
Result: ").append(deletePublishedResults).append("\n"); } desiredCards.put(DatasetVersion.VersionState.DRAFT, false); - if (doNormalSolrDocCleanUp) { + if (!reduceSolrDeletes && doNormalSolrDocCleanUp) { List solrDocIdsForDraftFilesToDelete = findSolrDocIdsForDraftFilesToDelete(dataset); String deleteDraftDatasetVersionResult = removeSolrDocFromIndex(solrIdDraftDataset); String deleteDraftFilesResults = deleteDraftFiles(solrDocIdsForDraftFilesToDelete); @@ -692,7 +757,7 @@ private void doIndexDataset(Dataset dataset, boolean doNormalSolrDocCleanUp) thr results.append("Attempted to index " + solrIdPublished).append(". Result: ").append(indexReleasedVersionResult).append("\n"); desiredCards.put(DatasetVersion.VersionState.DRAFT, false); - if (doNormalSolrDocCleanUp) { + if (!reduceSolrDeletes && doNormalSolrDocCleanUp) { List solrDocIdsForDraftFilesToDelete = findSolrDocIdsForDraftFilesToDelete(dataset); String deleteDraftDatasetVersionResult = removeSolrDocFromIndex(solrIdDraftDataset); String deleteDraftFilesResults = deleteDraftFiles(solrDocIdsForDraftFilesToDelete); @@ -701,7 +766,7 @@ private void doIndexDataset(Dataset dataset, boolean doNormalSolrDocCleanUp) thr } desiredCards.put(DatasetVersion.VersionState.DEACCESSIONED, false); - if (doNormalSolrDocCleanUp) { + if (!reduceSolrDeletes && doNormalSolrDocCleanUp) { String deleteDeaccessionedResult = removeDeaccessioned(dataset); results.append("No need for deaccessioned version. Deletion attempted for ") .append(solrIdDeaccessioned).append(". Result: ").append(deleteDeaccessionedResult); @@ -752,7 +817,7 @@ private void doIndexDataset(Dataset dataset, boolean doNormalSolrDocCleanUp) thr .append(solrIdDraftDataset).append(" (limited visibility). Result: ").append(indexDraftResult).append("\n"); desiredCards.put(DatasetVersion.VersionState.DEACCESSIONED, false); - if (doNormalSolrDocCleanUp) { + if (!reduceSolrDeletes && doNormalSolrDocCleanUp) { String deleteDeaccessionedResult = removeDeaccessioned(dataset); results.append("No need for deaccessioned version. Deletion attempted for ") .append(solrIdDeaccessioned).append(". 
Result: ").append(deleteDeaccessionedResult); @@ -794,11 +859,42 @@ private void doIndexDataset(Dataset dataset, boolean doNormalSolrDocCleanUp) thr } } - private String deleteDraftFiles(List solrDocIdsForDraftFilesToDelete) { - String deleteDraftFilesResults = ""; - IndexResponse indexResponse = solrIndexService.deleteMultipleSolrIds(solrDocIdsForDraftFilesToDelete); - deleteDraftFilesResults = indexResponse.toString(); - return deleteDraftFilesResults; + private void writeDebugInfo(StringBuilder debug, Dataset dataset) { + List versions = dataset.getVersions(); + int numPublishedVersions = 0; + for (DatasetVersion datasetVersion : versions) { + Long versionDatabaseId = datasetVersion.getId(); + String versionTitle = datasetVersion.getTitle(); + String semanticVersion = datasetVersion.getSemanticVersion(); + DatasetVersion.VersionState versionState = datasetVersion.getVersionState(); + if (versionState.equals(DatasetVersion.VersionState.RELEASED)) { + numPublishedVersions += 1; + } + debug.append("version found with database id " + versionDatabaseId + "\n"); + debug.append("- title: " + versionTitle + "\n"); + debug.append("- semanticVersion-VersionState: " + semanticVersion + "-" + versionState + "\n"); + List fileInfo = new ArrayList<>(); + List fileMetadatas = datasetVersion.getFileMetadatas(); + + for (FileMetadata fileMetadata : fileMetadatas) { + /** + * It sounds weird but the first thing we'll do is preemptively delete the Solr + * documents of all published files. Don't worry, published files will be + * re-indexed later along with the dataset. We do this so users can delete files + * from published versions of datasets and then re-publish a new version without + * fear that their old published files (now deleted from the latest published + * version) will be searchable. 
See also + * https://github.com/IQSS/dataverse/issues/762 + */ + fileInfo.add(fileMetadata.getDataFile().getId() + ":" + fileMetadata.getLabel()); + } + int numFiles = 0; + if (fileMetadatas != null) { + numFiles = fileMetadatas.size(); + } + debug.append("- files: " + numFiles + " " + fileInfo.toString() + "\n"); + } + debug.append("numPublishedVersions: " + numPublishedVersions + "\n"); } private IndexResponse indexDatasetPermissions(Dataset dataset) { @@ -835,16 +931,7 @@ public SolrInputDocuments toSolrDocs(IndexableDataset indexableDataset, Set langs = settingsService.getConfiguredLanguages(); Map cvocMap = datasetFieldService.getCVocConf(true); + Map> cvocManagedFieldMap = new HashMap<>(); + for (Map.Entry cvocEntry : cvocMap.entrySet()) { + if(cvocEntry.getValue().containsKey("managed-fields")) { + JsonObject managedFields = cvocEntry.getValue().getJsonObject("managed-fields"); + Set managedFieldValues = new HashSet<>(); + for (String s : managedFields.keySet()) { + managedFieldValues.add(managedFields.getString(s)); + } + cvocManagedFieldMap.put(cvocEntry.getKey(), managedFieldValues); + } + } + + + Set metadataBlocksWithValue = new HashSet<>(); for (DatasetField dsf : datasetVersion.getFlatDatasetFields()) { @@ -996,19 +1107,39 @@ public SolrInputDocuments toSolrDocs(IndexableDataset indexableDataset, Set vals = dsf.getValues_nondisplay(); - Set searchStrings = new HashSet(); + Set searchStrings = new HashSet<>(); for (String val: vals) { searchStrings.add(val); - searchStrings.addAll(datasetFieldService.getStringsFor(val)); + // Try to get string values from externalvocabularyvalue using val as termUri + searchStrings.addAll(datasetFieldService.getIndexableStringsByTermUri(val, cvocMap.get(dsfType.getId()), dsfType.getName())); + + if(dsfType.getParentDatasetFieldType()!=null) { + List childDatasetFields = dsf.getParentDatasetFieldCompoundValue().getChildDatasetFields(); + for (DatasetField df : childDatasetFields) { + if(cvocManagedFieldMap.get(dsfType.getId()).contains(df.getDatasetFieldType().getName())) { + String solrManagedFieldSearchable = df.getDatasetFieldType().getSolrField().getNameSearchable(); + // Try to get string values from externalvocabularyvalue but for a managed fields of the CVOCConf + Set stringsForManagedField = datasetFieldService.getIndexableStringsByTermUri(val, cvocMap.get(dsfType.getId()), df.getDatasetFieldType().getName()); + logger.fine(solrManagedFieldSearchable + " filled with externalvocabularyvalue : " + stringsForManagedField); + //.addField works as addition of value not a replace of value + // it allows to add mapped values by CVOCConf before or after indexing real DatasetField value(s) of solrManagedFieldSearchable + solrInputDocument.addField(solrManagedFieldSearchable, stringsForManagedField); + } + } + } } + logger.fine(solrFieldSearchable + " filled with externalvocabularyvalue : " + searchStrings); solrInputDocument.addField(solrFieldSearchable, searchStrings); if (dsfType.getSolrField().isFacetable()) { + logger.fine(solrFieldFacetable + " gets " + vals); solrInputDocument.addField(solrFieldFacetable, vals); } } + if (dsfType.isControlledVocabulary()) { /** If the cvv list is empty but the dfv list is not then it is assumed this was harvested * from an installation that had controlled vocabulary entries that don't exist in our this db @@ -1151,7 +1282,7 @@ public SolrInputDocuments toSolrDocs(IndexableDataset indexableDataset, Set accessObject = null; InputStream instream = null; ContentHandler textHandler = null; @@ -1334,11 +1476,13 
@@ public SolrInputDocuments toSolrDocs(IndexableDataset indexableDataset, Set d try { solrClientService.getSolrClient().add(docs.getDocuments()); - solrClientService.getSolrClient().commit(); } catch (SolrServerException | IOException ex) { if (ex.getCause() instanceof SolrServerException) { throw new SolrServerException(ex); @@ -1771,7 +1925,6 @@ private void updatePathForExistingSolrDocs(DvObject object) throws SolrServerExc sid.removeField(SearchFields.SUBTREE); sid.addField(SearchFields.SUBTREE, paths); UpdateResponse addResponse = solrClientService.getSolrClient().add(sid); - UpdateResponse commitResponse = solrClientService.getSolrClient().commit(); if (object.isInstanceofDataset()) { for (DataFile df : dataset.getFiles()) { solrQuery.setQuery(SearchUtil.constructQuery(SearchFields.ENTITY_ID, df.getId().toString())); @@ -1785,7 +1938,6 @@ private void updatePathForExistingSolrDocs(DvObject object) throws SolrServerExc sid.removeField(SearchFields.SUBTREE); sid.addField(SearchFields.SUBTREE, paths); addResponse = solrClientService.getSolrClient().add(sid); - commitResponse = solrClientService.getSolrClient().commit(); } } } @@ -1831,11 +1983,6 @@ public String delete(Dataverse doomed) { } catch (SolrServerException | IOException ex) { return ex.toString(); } - try { - solrClientService.getSolrClient().commit(); - } catch (SolrServerException | IOException ex) { - return ex.toString(); - } String response = "Successfully deleted dataverse " + doomed.getId() + " from Solr index. updateReponse was: " + updateResponse.toString(); logger.fine(response); return response; @@ -1856,11 +2003,6 @@ public String removeSolrDocFromIndex(String doomed) { } catch (SolrServerException | IOException ex) { return ex.toString(); } - try { - solrClientService.getSolrClient().commit(); - } catch (SolrServerException | IOException ex) { - return ex.toString(); - } String response = "Attempted to delete " + doomed + " from Solr index. 
updateReponse was: " + updateResponse.toString();
         logger.fine(response);
         return response;
@@ -1902,6 +2044,7 @@ private String determineDeaccessionedDatasetId(Dataset dataset) {
         return IndexableObject.IndexableTypes.DATASET.getName() + "_" + dataset.getId() + IndexableDataset.DatasetState.DEACCESSIONED.getSuffix();
     }

+    // Only used when FeatureFlags.REDUCE_SOLR_DELETES is disabled
     private String removeDeaccessioned(Dataset dataset) {
         StringBuilder result = new StringBuilder();
         String deleteDeaccessionedResult = removeSolrDocFromIndex(determineDeaccessionedDatasetId(dataset));
@@ -1912,6 +2055,7 @@ private String removeDeaccessioned(Dataset dataset) {
         return result.toString();
     }

+    // Only used when FeatureFlags.REDUCE_SOLR_DELETES is disabled
     private String removePublished(Dataset dataset) {
         StringBuilder result = new StringBuilder();
         String deletePublishedResult = removeSolrDocFromIndex(determinePublishedDatasetSolrDocId(dataset));
@@ -1921,6 +2065,14 @@ private String removePublished(Dataset dataset) {
         result.append(deleteFilesResult);
         return result.toString();
     }
+
+    // Only used when FeatureFlags.REDUCE_SOLR_DELETES is disabled
+    private String deleteDraftFiles(List<String> solrDocIdsForDraftFilesToDelete) {
+        String deleteDraftFilesResults = "";
+        IndexResponse indexResponse = solrIndexService.deleteMultipleSolrIds(solrDocIdsForDraftFilesToDelete);
+        deleteDraftFilesResults = indexResponse.toString();
+        return deleteDraftFilesResults;
+    }

     private Dataverse findRootDataverseCached() {
         if (true) {
@@ -2054,8 +2206,50 @@ public List<String> findPermissionsInSolrOnly() throws SearchException {
             SolrDocumentList list = rsp.getResults();
             for (SolrDocument doc : list) {
                 long id = Long.parseLong((String) doc.getFieldValue(SearchFields.DEFINITION_POINT_DVOBJECT_ID));
+                String docId = (String) doc.getFieldValue(SearchFields.ID);
                 if (!dvObjectService.checkExists(id)) {
-                    permissionInSolrOnly.add((String) doc.getFieldValue(SearchFields.ID));
+                    permissionInSolrOnly.add(docId);
+                } else {
+                    DvObject obj = dvObjectService.findDvObject(id);
+                    if (obj instanceof Dataset d) {
+                        DatasetVersion dv = d.getLatestVersion();
+                        if (docId.endsWith("draft_permission")) {
+                            if (!dv.isDraft()) {
+                                permissionInSolrOnly.add(docId);
+                            }
+                        } else if (docId.endsWith("deaccessioned_permission")) {
+                            if (!dv.isDeaccessioned()) {
+                                permissionInSolrOnly.add(docId);
+                            }
+                        } else {
+                            if (d.getReleasedVersion() == null) {
+                                permissionInSolrOnly.add(docId);
+                            }
+                        }
+                    } else if (obj instanceof DataFile f) {
+                        List<VersionState> states = dataFileService.findVersionStates(f.getId());
+                        Set<String> strings = states.stream().map(VersionState::toString).collect(Collectors.toSet());
+                        logger.fine("States for " + docId + ": " + String.join(", ", strings));
+                        if (docId.endsWith("draft_permission")) {
+                            if (!states.contains(VersionState.DRAFT)) {
+                                permissionInSolrOnly.add(docId);
+                            }
+                        } else if (docId.endsWith("deaccessioned_permission")) {
+                            if (!states.contains(VersionState.DEACCESSIONED) && states.size() == 1) {
+                                permissionInSolrOnly.add(docId);
+                            }
+                        } else {
+                            if (!states.contains(VersionState.RELEASED)) {
+                                permissionInSolrOnly.add(docId);
+                            } else {
+                                if (dataFileService.findFileMetadataByDatasetVersionIdAndDataFileId(f.getOwner().getReleasedVersion().getId(), f.getId()) == null) {
+                                    logger.fine("Adding doc " + docId + " to list of permissions in Solr only");
+                                    permissionInSolrOnly.add(docId);
+                                }
+                            }
+
+                        }
+                    }
                 }
             }
             if (cursorMark.equals(nextCursorMark)) {
diff --git a/src/main/java/edu/harvard/iq/dataverse/search/SearchConstants.java
b/src/main/java/edu/harvard/iq/dataverse/search/SearchConstants.java index 73b39332013..2d6632760fb 100644 --- a/src/main/java/edu/harvard/iq/dataverse/search/SearchConstants.java +++ b/src/main/java/edu/harvard/iq/dataverse/search/SearchConstants.java @@ -40,5 +40,5 @@ public class SearchConstants { public static final String RESTRICTED = "Restricted"; public static final String EMBARGOEDTHENRESTRICTED = "EmbargoedThenRestricted"; public static final String EMBARGOEDTHENPUBLIC = "EmbargoedThenPublic"; - + public static final String RETENTIONEXPIRED = "RetentionPeriodExpired"; } diff --git a/src/main/java/edu/harvard/iq/dataverse/search/SearchFields.java b/src/main/java/edu/harvard/iq/dataverse/search/SearchFields.java index 399ca0340e7..02649cec68c 100644 --- a/src/main/java/edu/harvard/iq/dataverse/search/SearchFields.java +++ b/src/main/java/edu/harvard/iq/dataverse/search/SearchFields.java @@ -217,6 +217,15 @@ public class SearchFields { public static final String DEFINITION_POINT_DVOBJECT_ID = "definitionPointDvObjectId"; public static final String DISCOVERABLE_BY = "discoverableBy"; + /** + * publicObject_b is an experimental field tied to the + * avoid-expensive-solr-join feature flag. Rather than discoverableBy which + * is a field on permission documents, publicObject_b is a field on content + * documents (dvObjects). By indexing publicObject_b=true, we can let guests + * search on it, avoiding an expensive join for those (common) users. + */ + public static final String PUBLIC_OBJECT = "publicObject_b"; + /** * i.e. "Unpublished", "Draft" (multivalued) */ @@ -267,7 +276,7 @@ more targeted results for just datasets. The format is YYYY (i.e. public static final String FULL_TEXT = "_text_"; public static final String EMBARGO_END_DATE = "embargoEndDate"; - + public static final String RETENTION_END_DATE = "retentionEndDate"; // SpatialRecursivePrefixTreeFieldType: https://solr.apache.org/guide/8_11/spatial-search.html#rpt public static final String GEOLOCATION = "geolocation"; diff --git a/src/main/java/edu/harvard/iq/dataverse/search/SearchIncludeFragment.java b/src/main/java/edu/harvard/iq/dataverse/search/SearchIncludeFragment.java index 520aa36a12c..4f3f6e46e48 100644 --- a/src/main/java/edu/harvard/iq/dataverse/search/SearchIncludeFragment.java +++ b/src/main/java/edu/harvard/iq/dataverse/search/SearchIncludeFragment.java @@ -21,6 +21,7 @@ import edu.harvard.iq.dataverse.SettingsWrapper; import edu.harvard.iq.dataverse.ThumbnailServiceWrapper; import edu.harvard.iq.dataverse.WidgetWrapper; +import edu.harvard.iq.dataverse.authorization.users.GuestUser; import edu.harvard.iq.dataverse.engine.command.DataverseRequest; import edu.harvard.iq.dataverse.settings.SettingsServiceBean; import edu.harvard.iq.dataverse.util.BundleUtil; @@ -355,8 +356,7 @@ The real issue here (https://github.com/IQSS/dataverse/issues/7304) is caused * https://github.com/IQSS/dataverse/issues/84 */ int numRows = 10; - HttpServletRequest httpServletRequest = (HttpServletRequest) FacesContext.getCurrentInstance().getExternalContext().getRequest(); - DataverseRequest dataverseRequest = new DataverseRequest(session.getUser(), httpServletRequest); + DataverseRequest dataverseRequest = getDataverseRequest(); List dataverses = new ArrayList<>(); dataverses.add(dataverse); solrQueryResponse = searchService.search(dataverseRequest, dataverses, queryToPassToSolr, filterQueriesFinal, sortField, sortOrder.toString(), paginationStart, onlyDataRelatedToMe, numRows, false, null, null, !isFacetsDisabled(), true); @@ -396,7 
+396,7 @@ The real issue here (https://github.com/IQSS/dataverse/issues/7304) is caused } } - if (!wasSolrErrorEncountered() && selectedTypesList.size() < 3 && !isSolrTemporarilyUnavailable() && !isFacetsDisabled()) { + if (!wasSolrErrorEncountered() && selectedTypesList.size() < 3 && !isSolrTemporarilyUnavailable() && !isFacetsDisabled() && !isUncheckedTypesFacetDisabled()) { // If some types are NOT currently selected, we will need to // run a second search to obtain the numbers of the unselected types: @@ -1087,20 +1087,59 @@ public void setSolrTemporarilyUnavailable(boolean solrIsTemporarilyUnavailable) this.solrIsTemporarilyUnavailable = solrIsTemporarilyUnavailable; } + Boolean solrFacetsDisabled = null; /** * Indicates that the fragment should not be requesting facets in Solr * searches and rendering them on the page. * @return true if disabled; false by default */ public boolean isFacetsDisabled() { - // The method is used in rendered="..." logic. So we are using - // SettingsWrapper to make sure we are not looking it up repeatedly - // (settings are not expensive to look up, but - // still). + if (this.solrFacetsDisabled != null) { + return this.solrFacetsDisabled; + } + + if (settingsWrapper.isTrueForKey(SettingsServiceBean.Key.DisableSolrFacets, false)) { + return this.solrFacetsDisabled = true; + } + + // We also have mechanisms for disabling the facets selectively, just for + // the guests, or anonymous users: + if (session.getUser() instanceof GuestUser) { + if (settingsWrapper.isTrueForKey(SettingsServiceBean.Key.DisableSolrFacetsForGuestUsers, false)) { + return this.solrFacetsDisabled = true; + } + + // An even lower grade of user than Guest is a truly anonymous user - + // a guest user who came without the session cookie: + Map cookies = FacesContext.getCurrentInstance().getExternalContext().getRequestCookieMap(); + if (!(cookies != null && cookies.containsKey("JSESSIONID"))) { + if (settingsWrapper.isTrueForKey(SettingsServiceBean.Key.DisableSolrFacetsWithoutJsession, false)) { + return this.solrFacetsDisabled = true; + } + } + } - return settingsWrapper.isTrueForKey(SettingsServiceBean.Key.DisableSolrFacets, false); + return this.solrFacetsDisabled = false; } + Boolean disableSecondPassSearch = null; + + /** + * Indicates that we do not need to run the second search query to populate + * the counts for *unchecked* type facets. 
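+     * (For example, with only "Datasets" checked on the search page, it is this
+     * second pass that fills in the counts shown next to the unchecked
+     * "Collections" and "Files" type facets; disabling it trades those counts
+     * for one less Solr query per page view.)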
+     * @return true if disabled; false by default
+     */
+    public boolean isUncheckedTypesFacetDisabled() {
+        if (this.disableSecondPassSearch != null) {
+            return this.disableSecondPassSearch;
+        }
+        if (settingsWrapper.isTrueForKey(SettingsServiceBean.Key.DisableUncheckedTypesFacet, false)) {
+            return this.disableSecondPassSearch = true;
+        }
+        return this.disableSecondPassSearch = false;
+    }
+
+
     public boolean isRootDv() {
         return rootDv;
     }
@@ -1480,9 +1519,31 @@ public boolean isActivelyEmbargoed(SolrSearchResult result) {
             return false;
         }
     }
+
+    public boolean isRetentionExpired(SolrSearchResult result) {
+        Long retentionEndDate = result.getRetentionEndDate();
+        if (retentionEndDate != null) {
+            return LocalDate.now().toEpochDay() > retentionEndDate;
+        } else {
+            return false;
+        }
+    }
+
+    private DataverseRequest getDataverseRequest() {
+        final HttpServletRequest httpServletRequest = (HttpServletRequest) FacesContext.getCurrentInstance().getExternalContext().getRequest();
+        return new DataverseRequest(session.getUser(), httpServletRequest);
+    }
+
     public boolean isValid(SolrSearchResult result) {
-        return result.isValid();
+        return result.isValid(x -> {
+            Long id = x.getEntityId();
+            DvObject obj = dvObjectService.findDvObject(id);
+            if (obj != null && obj instanceof Dataset) {
+                return permissionsWrapper.canUpdateDataset(getDataverseRequest(), (Dataset) obj);
+            }
+            logger.fine("isValid called for a dvObject that is null (or not a dataset), id: " + id
+                    + ". This can occur if a dataset is deleted while a search is in progress");
+            return true;
+        });
     }

     public enum SortOrder {
diff --git a/src/main/java/edu/harvard/iq/dataverse/search/SearchServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/search/SearchServiceBean.java
index c6f08151050..1d25dbcdaba 100644
--- a/src/main/java/edu/harvard/iq/dataverse/search/SearchServiceBean.java
+++ b/src/main/java/edu/harvard/iq/dataverse/search/SearchServiceBean.java
@@ -16,6 +16,7 @@
 import edu.harvard.iq.dataverse.authorization.users.PrivateUrlUser;
 import edu.harvard.iq.dataverse.authorization.users.User;
 import edu.harvard.iq.dataverse.engine.command.DataverseRequest;
+import edu.harvard.iq.dataverse.settings.FeatureFlags;
 import edu.harvard.iq.dataverse.util.BundleUtil;
 import edu.harvard.iq.dataverse.util.SystemConfig;
 import java.io.IOException;
@@ -499,6 +500,8 @@ public SolrQueryResponse search(
                 String identifierOfDataverse = (String) solrDocument.getFieldValue(SearchFields.IDENTIFIER_OF_DATAVERSE);
                 String nameOfDataverse = (String) solrDocument.getFieldValue(SearchFields.DATAVERSE_NAME);
                 Long embargoEndDate = (Long) solrDocument.getFieldValue(SearchFields.EMBARGO_END_DATE);
+                Long retentionEndDate = (Long) solrDocument.getFieldValue(SearchFields.RETENTION_END_DATE);
+                // Boolean datasetValid = (Boolean) solrDocument.getFieldValue(SearchFields.DATASET_VALID);

                 List<String> matchedFields = new ArrayList<>();
@@ -574,13 +577,13 @@ public SolrQueryResponse search(
                 solrSearchResult.setDvTree(dvTree);
                 solrSearchResult.setDatasetValid(datasetValid);

-                String originSource = (String) solrDocument.getFieldValue(SearchFields.METADATA_SOURCE);
-                if (IndexServiceBean.HARVESTED.equals(originSource)) {
+                if (Boolean.TRUE.equals((Boolean) solrDocument.getFieldValue(SearchFields.IS_HARVESTED))) {
                     solrSearchResult.setHarvested(true);
                 }

                 solrSearchResult.setEmbargoEndDate(embargoEndDate);
-
+                solrSearchResult.setRetentionEndDate(retentionEndDate);
+
                 /**
                  * @todo start using SearchConstants class here
                  */
@@ -998,14 +1001,132 @@ private String getPermissionFilterQuery(DataverseRequest dataverseRequest, SolrQ
             user = GuestUser.get();
         }

+        AuthenticatedUser au = null;
+        Set<Group> groups;
+
+        if (user instanceof GuestUser) {
+            // Yes, GuestUser may be part of one or more groups, such as IP Groups.
+            groups = groupService.collectAncestors(groupService.groupsFor(dataverseRequest));
+        } else {
+            if (!(user instanceof AuthenticatedUser)) {
+                logger.severe("Should never reach here. A User must be an AuthenticatedUser or a Guest");
+                throw new IllegalStateException("A User must be an AuthenticatedUser or a Guest");
+            }
+
+            au = (AuthenticatedUser) user;
+
+            // ----------------------------------------------------
+            // (3) Is this a Super User?
+            // If so, they can see everything
+            // ----------------------------------------------------
+            if (au.isSuperuser()) {
+                // Somewhat dangerous because this user (a superuser) will be able
+                // to see everything in Solr with no regard to permissions. But it's
+                // been this way since Dataverse 4.0. So relax. :)
+
+                return dangerZoneNoSolrJoin;
+            }
+
+            // ----------------------------------------------------
+            // (4) User is logged in AND onlyDatatRelatedToMe == true
+            // Yes, give back everything -> the settings will be in
+            // the filterqueries given to search
+            // ----------------------------------------------------
+            if (onlyDatatRelatedToMe == true) {
+                if (systemConfig.myDataDoesNotUsePermissionDocs()) {
+                    logger.fine("old 4.2 behavior: MyData is not using Solr permission docs");
+                    return dangerZoneNoSolrJoin;
+                } else {
+                    // fall-through
+                    logger.fine("new post-4.2 behavior: MyData is using Solr permission docs");
+                }
+            }
+
+            // ----------------------------------------------------
+            // (5) Work with Authenticated User who is not a Superuser
+            // ----------------------------------------------------
+
+            groups = groupService.collectAncestors(groupService.groupsFor(dataverseRequest));
+        }
+
+        if (FeatureFlags.AVOID_EXPENSIVE_SOLR_JOIN.enabled()) {
+            /**
+             * Instead of doing a super expensive join, we will rely on the
+             * new boolean field publicObject_b:true for public objects. This field
+             * is indexed on the content document itself, rather than a permission
+             * document. An additional join will be added only for any extra,
+             * more restricted groups that the user may be part of.
+             * **Note the experimental nature of this optimization**.
+             */
+            StringBuilder sb = new StringBuilder();
+            StringBuilder sbgroups = new StringBuilder();
+
+            // All users, guests and authenticated, should see all the
+            // documents marked as publicObject_b:true, at least:
+            sb.append(SearchFields.PUBLIC_OBJECT + ":" + true);
+
+            // One or more groups *may* also be available for this user. Once again,
+            // do note that Guest users may be part of some groups, such as
+            // IP groups.
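+
+            // A worked example of the strings this branch builds (illustrative only,
+            // not part of the patch): a guest who belongs to one IP group with, say,
+            // the alias "ipGroup3" ends up with the filter query
+            //
+            //     publicObject_b:true OR {!join from=<DEFINITION_POINT> to=id v=$q1}
+            //
+            // with the sub-query parameter set to
+            //
+            //     q1 = discoverableBy:<groupPrefix>ipGroup3
+            //
+            // (<DEFINITION_POINT> and <groupPrefix> stand in for the values of
+            // SearchFields.DEFINITION_POINT and IndexServiceBean.getGroupPrefix()).
+            // A guest with no groups at all gets just publicObject_b:true and no
+            // join; parentheses are added around the q1 terms only when more than
+            // one group is ORed together, as the code below shows.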
+ + int groupCounter = 0; + + // An AuthenticatedUser should also be able to see all the content + // on which they have direct permissions: + if (au != null) { + groupCounter++; + sbgroups.append(IndexServiceBean.getGroupPerUserPrefix() + au.getId()); + } + + // In addition to the user referenced directly, we will also + // add joins on all the non-public groups that may exist for the + // user: + for (Group group : groups) { + String groupAlias = group.getAlias(); + if (groupAlias != null && !groupAlias.isEmpty() && !groupAlias.startsWith("builtIn")) { + groupCounter++; + if (groupCounter > 1) { + sbgroups.append(" OR "); + } + sbgroups.append(IndexServiceBean.getGroupPrefix() + groupAlias); + } + } + + if (groupCounter > 1) { + // If there is more than one group, the parentheses must be added: + sbgroups.insert(0, "("); + sbgroups.append(")"); + } + + if (groupCounter > 0) { + // If there are any groups for this user, an extra join must be + // added to the query, and the extra sub-query must be added to + // the combined Solr query: + sb.append(" OR {!join from=" + SearchFields.DEFINITION_POINT + " to=id v=$q1}"); + // Add the subquery to the combined Solr query: + solrQuery.setParam("q1", SearchFields.DISCOVERABLE_BY + ":" + sbgroups.toString()); + logger.info("The sub-query q1 set to " + SearchFields.DISCOVERABLE_BY + ":" + sbgroups.toString()); + } + + String ret = sb.toString(); + logger.fine("Returning experimental query: " + ret); + return ret; + } + + // END OF EXPERIMENTAL OPTIMIZATION + + // Old, un-optimized way of handling permissions. + // Largely left intact, minus the lookups that have already been performed + // above. + // ---------------------------------------------------- // (1) Is this a GuestUser? - // Yes, see if GuestUser is part of any groups such as IP Groups. // ---------------------------------------------------- if (user instanceof GuestUser) { - String groupsFromProviders = ""; - Set groups = groupService.collectAncestors(groupService.groupsFor(dataverseRequest)); + StringBuilder sb = new StringBuilder(); + + String groupsFromProviders = ""; for (Group group : groups) { logger.fine("found group " + group.getIdentifier() + " with alias " + group.getAlias()); String groupAlias = group.getAlias(); @@ -1022,51 +1143,11 @@ private String getPermissionFilterQuery(DataverseRequest dataverseRequest, SolrQ return guestWithGroups; } - // ---------------------------------------------------- - // (2) Retrieve Authenticated User - // ---------------------------------------------------- - if (!(user instanceof AuthenticatedUser)) { - logger.severe("Should never reach here. A User must be an AuthenticatedUser or a Guest"); - throw new IllegalStateException("A User must be an AuthenticatedUser or a Guest"); - } - - AuthenticatedUser au = (AuthenticatedUser) user; - - // if (addFacets) { - // // Logged in user, has publication status facet - // // - // solrQuery.addFacetField(SearchFields.PUBLICATION_STATUS); - // } - - // ---------------------------------------------------- - // (3) Is this a Super User? - // Yes, give back everything - // ---------------------------------------------------- - if (au.isSuperuser()) { - // Somewhat dangerous because this user (a superuser) will be able - // to see everything in Solr with no regard to permissions. But it's - // been this way since Dataverse 4.0. So relax. 
:)
-
-            return dangerZoneNoSolrJoin;
-        }
-
-        // ----------------------------------------------------
-        // (4) User is logged in AND onlyDatatRelatedToMe == true
-        // Yes, give back everything -> the settings will be in
-        // the filterqueries given to search
-        // ----------------------------------------------------
-        if (onlyDatatRelatedToMe == true) {
-            if (systemConfig.myDataDoesNotUsePermissionDocs()) {
-                logger.fine("old 4.2 behavior: MyData is not using Solr permission docs");
-                return dangerZoneNoSolrJoin;
-            } else {
-                logger.fine("new post-4.2 behavior: MyData is using Solr permission docs");
-            }
-        }
-
         // ----------------------------------------------------
         // (5) Work with Authenticated User who is not a Superuser
-        // ----------------------------------------------------
+        // ----------------------------------------------------
+        // It was already confirmed above that, if the user is not a GuestUser,
+        // then au is a non-null AuthenticatedUser.
         /**
          * @todo all this code needs cleanup and clarification.
          */
@@ -1097,7 +1178,6 @@ private String getPermissionFilterQuery(DataverseRequest dataverseRequest, SolrQ
          * a given "content document" (dataset version, etc) in Solr.
          */
         String groupsFromProviders = "";
-        Set<Group> groups = groupService.collectAncestors(groupService.groupsFor(dataverseRequest));
         StringBuilder sb = new StringBuilder();
         for (Group group : groups) {
             logger.fine("found group " + group.getIdentifier() + " with alias " + group.getAlias());
diff --git a/src/main/java/edu/harvard/iq/dataverse/search/SolrIndexServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/search/SolrIndexServiceBean.java
index 04021eb75b6..cfe29ea08c7 100644
--- a/src/main/java/edu/harvard/iq/dataverse/search/SolrIndexServiceBean.java
+++ b/src/main/java/edu/harvard/iq/dataverse/search/SolrIndexServiceBean.java
@@ -357,7 +357,6 @@ private void persistToSolr(Collection<SolrInputDocument> docs) throws SolrServer
      * @todo Do something with these responses from Solr.
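      *
      * (A note on a pattern visible throughout this diff: the explicit
      * solrClient.commit() calls are being removed. The assumption is that
      * the Solr server itself is configured with autoCommit/autoSoftCommit,
      * so added and deleted documents still become searchable without any
      * client-side commit.)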
*/ UpdateResponse addResponse = solrClientService.getSolrClient().add(docs); - UpdateResponse commitResponse = solrClientService.getSolrClient().commit(); } public IndexResponse indexPermissionsOnSelfAndChildren(long definitionPointId) { @@ -504,11 +503,6 @@ public IndexResponse deleteMultipleSolrIds(List solrIdsToDelete) { */ return new IndexResponse("problem deleting the following documents from Solr: " + solrIdsToDelete); } - try { - solrClientService.getSolrClient().commit(); - } catch (SolrServerException | IOException ex) { - return new IndexResponse("problem committing deletion of the following documents from Solr: " + solrIdsToDelete); - } return new IndexResponse("no known problem deleting the following documents from Solr:" + solrIdsToDelete); } @@ -516,7 +510,6 @@ public JsonObjectBuilder deleteAllFromSolrAndResetIndexTimes() throws SolrServer JsonObjectBuilder response = Json.createObjectBuilder(); logger.info("attempting to delete all Solr documents before a complete re-index"); solrClientService.getSolrClient().deleteByQuery("*:*"); - solrClientService.getSolrClient().commit(); int numRowsAffected = dvObjectService.clearAllIndexTimes(); response.add(numRowsClearedByClearAllIndexTimes, numRowsAffected); response.add(messageString, "Solr index and database index timestamps cleared."); diff --git a/src/main/java/edu/harvard/iq/dataverse/search/SolrSearchResult.java b/src/main/java/edu/harvard/iq/dataverse/search/SolrSearchResult.java index 6ad7f9dbbf6..e84c8f133da 100644 --- a/src/main/java/edu/harvard/iq/dataverse/search/SolrSearchResult.java +++ b/src/main/java/edu/harvard/iq/dataverse/search/SolrSearchResult.java @@ -7,8 +7,10 @@ import java.util.HashMap; import java.util.List; import java.util.Map; +import java.util.function.Predicate; import java.util.logging.Logger; +import edu.harvard.iq.dataverse.*; import jakarta.json.Json; import jakarta.json.JsonArrayBuilder; import jakarta.json.JsonObject; @@ -16,22 +18,14 @@ import org.apache.commons.collections4.CollectionUtils; -import edu.harvard.iq.dataverse.DataFile; -import edu.harvard.iq.dataverse.Dataset; -import edu.harvard.iq.dataverse.DatasetField; -import edu.harvard.iq.dataverse.DatasetRelPublication; -import edu.harvard.iq.dataverse.DatasetVersion; -import edu.harvard.iq.dataverse.DvObject; -import edu.harvard.iq.dataverse.GlobalId; -import edu.harvard.iq.dataverse.MetadataBlock; import edu.harvard.iq.dataverse.api.Util; import edu.harvard.iq.dataverse.dataset.DatasetThumbnail; +import edu.harvard.iq.dataverse.settings.JvmSettings; import edu.harvard.iq.dataverse.util.DateUtil; import edu.harvard.iq.dataverse.util.json.JsonPrinter; import edu.harvard.iq.dataverse.util.json.NullSafeJsonBuilder; public class SolrSearchResult { - private static final Logger logger = Logger.getLogger(SolrSearchResult.class.getCanonicalName()); private String id; @@ -123,6 +117,8 @@ public class SolrSearchResult { private Long embargoEndDate; + private Long retentionEndDate; + private boolean datasetValid; public String getDvTree() { @@ -403,21 +399,12 @@ public JsonArrayBuilder getRelevance() { return matchedFieldsArray; } - public JsonObject toJsonObject(boolean showRelevance, boolean showEntityIds, boolean showApiUrls) { - return toJsonObject(showRelevance, showEntityIds, showApiUrls, null); - } - - public JsonObject toJsonObject(boolean showRelevance, boolean showEntityIds, boolean showApiUrls, - List metadataFields) { - return json(showRelevance, showEntityIds, showApiUrls, metadataFields).build(); - } - /** * Add additional fields for 
the MyData page * * @return */ - public JsonObjectBuilder getJsonForMyData() { + public JsonObjectBuilder getJsonForMyData(boolean isValid) { JsonObjectBuilder myDataJson = json(true, true, true);// boolean showRelevance, boolean showEntityIds, boolean showApiUrls) @@ -425,7 +412,7 @@ public JsonObjectBuilder getJsonForMyData() { .add("is_draft_state", this.isDraftState()).add("is_in_review_state", this.isInReviewState()) .add("is_unpublished_state", this.isUnpublishedState()).add("is_published", this.isPublishedState()) .add("is_deaccesioned", this.isDeaccessionedState()) - .add("is_valid", this.isValid()) + .add("is_valid", isValid) .add("date_to_display_on_card", getDateToDisplayOnCard()); // Add is_deaccessioned attribute, even though MyData currently screens any deaccessioned info out @@ -436,7 +423,7 @@ public JsonObjectBuilder getJsonForMyData() { if ((this.getParent() != null) && (!this.getParent().isEmpty())) { // System.out.println("keys:" + parent.keySet().toString()); - if (this.entity.isInstanceofDataFile()) { + if (this.entity != null && this.entity.isInstanceofDataFile()) { myDataJson.add("parentIdentifier", this.getParent().get(SolrSearchResult.PARENT_IDENTIFIER)) .add("parentName", this.getParent().get("name")); @@ -450,12 +437,10 @@ public JsonObjectBuilder getJsonForMyData() { } // getJsonForMydata public JsonObjectBuilder json(boolean showRelevance, boolean showEntityIds, boolean showApiUrls) { - return json(showRelevance, showEntityIds, showApiUrls, null); + return json(showRelevance, showEntityIds, showApiUrls, null, null); } - public JsonObjectBuilder json(boolean showRelevance, boolean showEntityIds, boolean showApiUrls, - List metadataFields) { - + public JsonObjectBuilder json(boolean showRelevance, boolean showEntityIds, boolean showApiUrls, List metadataFields, Long datasetFileCount) { if (this.type == null) { return jsonObjectBuilder(); } @@ -571,7 +556,7 @@ public JsonObjectBuilder json(boolean showRelevance, boolean showEntityIds, bool subjects.add(subject); } nullSafeJsonBuilder.add("subjects", subjects); - nullSafeJsonBuilder.add("fileCount", dv.getFileMetadatas().size()); + nullSafeJsonBuilder.add("fileCount", datasetFileCount); nullSafeJsonBuilder.add("versionId", dv.getId()); nullSafeJsonBuilder.add("versionState", dv.getVersionState().toString()); if (this.isPublishedState()) { @@ -1261,11 +1246,31 @@ public void setEmbargoEndDate(Long embargoEndDate) { this.embargoEndDate = embargoEndDate; } + public Long getRetentionEndDate() { + return retentionEndDate; + } + + public void setRetentionEndDate(Long retentionEndDate) { + this.retentionEndDate = retentionEndDate; + } + public void setDatasetValid(Boolean datasetValid) { this.datasetValid = datasetValid == null || Boolean.valueOf(datasetValid); } - public boolean isValid() { - return datasetValid; + public boolean isValid(Predicate canUpdateDataset) { + if (this.datasetValid) { + return true; + } + if (!this.getType().equals("datasets")) { + return true; + } + if (this.isDraftState()) { + return false; + } + if (!JvmSettings.UI_SHOW_VALIDITY_LABEL_WHEN_PUBLISHED.lookupOptional(Boolean.class).orElse(true)) { + return true; + } + return !canUpdateDataset.test(this); } } diff --git a/src/main/java/edu/harvard/iq/dataverse/settings/ConfigCheckService.java b/src/main/java/edu/harvard/iq/dataverse/settings/ConfigCheckService.java index 29a9d8956a3..96222f40daf 100644 --- a/src/main/java/edu/harvard/iq/dataverse/settings/ConfigCheckService.java +++ 
b/src/main/java/edu/harvard/iq/dataverse/settings/ConfigCheckService.java @@ -1,6 +1,7 @@ package edu.harvard.iq.dataverse.settings; import edu.harvard.iq.dataverse.MailServiceBean; +import edu.harvard.iq.dataverse.pidproviders.PidProviderFactoryBean; import edu.harvard.iq.dataverse.pidproviders.PidUtil; import edu.harvard.iq.dataverse.settings.SettingsServiceBean.Key; import edu.harvard.iq.dataverse.util.FileUtil; @@ -32,6 +33,8 @@ public class ConfigCheckService { MailSessionProducer mailSessionProducer; @Inject MailServiceBean mailService; + @Inject + PidProviderFactoryBean pidProviderFactoryBean; public static class ConfigurationError extends RuntimeException { public ConfigurationError(String message) { @@ -132,6 +135,16 @@ public void checkSystemMailSetup() { * @return True if all checks successful, false otherwise. */ private boolean checkPidProviders() { - return PidUtil.getManagedProviderIds().size() > 0; + // Check if at least one PidProvider capable of editing/minting PIDs is configured. + boolean valid=true; + if(!(PidUtil.getManagedProviderIds().size() > 0)) { + valid = false; + logger.warning("No PID providers configured"); + } + if (pidProviderFactoryBean.getDefaultPidGenerator()==null){ + valid=false; + logger.warning("No default PID provider configured"); + } + return valid; } } diff --git a/src/main/java/edu/harvard/iq/dataverse/settings/FeatureFlags.java b/src/main/java/edu/harvard/iq/dataverse/settings/FeatureFlags.java index afa5a1c986a..021977ff8c6 100644 --- a/src/main/java/edu/harvard/iq/dataverse/settings/FeatureFlags.java +++ b/src/main/java/edu/harvard/iq/dataverse/settings/FeatureFlags.java @@ -36,6 +36,61 @@ public enum FeatureFlags { * @since Dataverse @TODO: */ API_BEARER_AUTH("api-bearer-auth"), + /** + * For published (public) objects, don't use a join when searching Solr. + * Experimental! Requires a reindex with the following feature flag enabled, + * in order to add the boolean publicObject_b:true field to all the public + * Solr documents. + * + * @apiNote Raise flag by setting + * "dataverse.feature.avoid-expensive-solr-join" + * @since Dataverse 6.3 + */ + AVOID_EXPENSIVE_SOLR_JOIN("avoid-expensive-solr-join"), + /** + * With this flag enabled, the boolean field publicObject_b:true will be + * added to all the indexed Solr documents for publicly-available collections, + * datasets and files. This flag makes it possible to rely on it in searches, + * instead of the very expensive join (the feature flag above). + * + * @apiNote Raise flag by setting + * "dataverse.feature.add-publicobject-solr-field" + * @since Dataverse 6.3 + */ + ADD_PUBLICOBJECT_SOLR_FIELD("add-publicobject-solr-field"), + /** + * With this flag set, Dataverse will index the actual origin of harvested + * metadata records, instead of the "Harvested" string in all cases. + * + * @apiNote Raise flag by setting + * "dataverse.feature.index-harvested-metadata-source" + * @since Dataverse 6.3 + */ + INDEX_HARVESTED_METADATA_SOURCE("index-harvested-metadata-source"), + + /** + * Dataverse normally deletes all solr documents related to a dataset's files + * when the dataset is reindexed. With this flag enabled, additional logic is + * added to the reindex process to delete only the solr documents that are no + * longer needed. (Required docs will be updated rather than deleted and + * replaced.) Enabling this feature flag should make the reindex process + * faster without impacting the search results. 
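+     * (Like the other flags in this enum, this is read via MicroProfile Config;
+     * assuming the usual environment-variable mapping, one way to raise it is
+     * to set DATAVERSE_FEATURE_REDUCE_SOLR_DELETES=1 on the application server.)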
+ * + * @apiNote Raise flag by setting + * "dataverse.feature.reduce-solr-deletes" + * @since Dataverse 6.3 + */ + REDUCE_SOLR_DELETES("reduce-solr-deletes"), + /** + * With this flag enabled, the Return To Author pop-up will not have a required + * "Reason" field, and a reason will not be required in the + * /api/datasets/{id}/returnToAuthor api call. + * + * @apiNote Raise flag by setting + * "dataverse.feature.disable-return-to-author-reason" + * @since Dataverse 6.3 + */ + DISABLE_RETURN_TO_AUTHOR_REASON("disable-return-to-author-reason"), ; final String flag; diff --git a/src/main/java/edu/harvard/iq/dataverse/settings/JvmSettings.java b/src/main/java/edu/harvard/iq/dataverse/settings/JvmSettings.java index 8f749ba64cd..9d13be005c9 100644 --- a/src/main/java/edu/harvard/iq/dataverse/settings/JvmSettings.java +++ b/src/main/java/edu/harvard/iq/dataverse/settings/JvmSettings.java @@ -51,6 +51,10 @@ public enum JvmSettings { DOCROOT_DIRECTORY(SCOPE_FILES, "docroot"), GUESTBOOK_AT_REQUEST(SCOPE_FILES, "guestbook-at-request"), GLOBUS_CACHE_MAXAGE(SCOPE_FILES, "globus-cache-maxage"), + + //STORAGE DRIVER SETTINGS + SCOPE_DRIVER(SCOPE_FILES), + DISABLE_S3_TAGGING(SCOPE_DRIVER, "disable-tagging"), // SOLR INDEX SETTINGS SCOPE_SOLR(PREFIX, "solr"), @@ -226,6 +230,7 @@ public enum JvmSettings { SCOPE_UI(PREFIX, "ui"), UI_ALLOW_REVIEW_INCOMPLETE(SCOPE_UI, "allow-review-for-incomplete"), UI_SHOW_VALIDITY_FILTER(SCOPE_UI, "show-validity-filter"), + UI_SHOW_VALIDITY_LABEL_WHEN_PUBLISHED(SCOPE_UI, "show-validity-label-when-published"), // NetCDF SETTINGS SCOPE_NETCDF(PREFIX, "netcdf"), diff --git a/src/main/java/edu/harvard/iq/dataverse/settings/SettingsServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/settings/SettingsServiceBean.java index 9888db84696..8ed96690e84 100644 --- a/src/main/java/edu/harvard/iq/dataverse/settings/SettingsServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/settings/SettingsServiceBean.java @@ -55,6 +55,10 @@ public enum Key { CustomDatasetSummaryFields, /** * Defines a public installation -- all datafiles are unrestricted + * + * This was added along with CloudEnvironmentName and ComputeBaseUrl. + * See https://github.com/IQSS/dataverse/issues/3776 and + * https://github.com/IQSS/dataverse/pull/3967 */ PublicInstall, /** @@ -75,9 +79,12 @@ public enum Key { /** * For example, https://datacapture.example.org */ + @Deprecated(forRemoval = true, since = "2024-07-07") DataCaptureModuleUrl, + @Deprecated(forRemoval = true, since = "2024-07-07") RepositoryStorageAbstractionLayerUrl, UploadMethods, + @Deprecated(forRemoval = true, since = "2024-07-07") DownloadMethods, /** * If the data replicated around the world using RSAL (Repository @@ -87,6 +94,7 @@ public enum Key { * TODO: Think about if it makes sense to make this a column in the * StorageSite database table. */ + @Deprecated(forRemoval = true, since = "2024-07-07") LocalDataAccessPath, /** * The algorithm used to generate PIDs, randomString (default) or @@ -586,6 +594,12 @@ Whether Harvesting (OAI) service is enabled * n: embargo enabled with n months the maximum allowed duration */ MaxEmbargoDurationInMonths, + /** This setting enables Retention capabilities in Dataverse and sets the minimum Retention duration allowed. + * 0 or not set: new retentions disabled + * -1: retention enabled, no time limit + * n: retention enabled with n months the minimum allowed duration + */ + MinRetentionDurationInMonths, /* * Include "Custom Terms" as an item in the license drop-down or not. 
*/ @@ -657,6 +671,9 @@ Whether Harvesting (OAI) service is enabled * and dataset pages instantly */ DisableSolrFacets, + DisableSolrFacetsForGuestUsers, + DisableSolrFacetsWithoutJsession, + DisableUncheckedTypesFacet, /** * When ingesting tabular data files, store the generated tab-delimited * files *with* the variable names line up top. diff --git a/src/main/java/edu/harvard/iq/dataverse/sitemap/SiteMapUtil.java b/src/main/java/edu/harvard/iq/dataverse/sitemap/SiteMapUtil.java index 86ae697f771..8408e7d91f2 100644 --- a/src/main/java/edu/harvard/iq/dataverse/sitemap/SiteMapUtil.java +++ b/src/main/java/edu/harvard/iq/dataverse/sitemap/SiteMapUtil.java @@ -1,194 +1,140 @@ package edu.harvard.iq.dataverse.sitemap; -import edu.harvard.iq.dataverse.Dataset; -import edu.harvard.iq.dataverse.Dataverse; -import edu.harvard.iq.dataverse.DvObjectContainer; -import edu.harvard.iq.dataverse.settings.ConfigCheckService; -import edu.harvard.iq.dataverse.settings.JvmSettings; -import edu.harvard.iq.dataverse.util.SystemConfig; -import edu.harvard.iq.dataverse.util.xml.XmlValidator; import java.io.File; import java.io.IOException; import java.net.MalformedURLException; -import java.net.URL; import java.nio.file.Files; import java.nio.file.Path; import java.nio.file.Paths; -import java.nio.file.StandardCopyOption; -import java.text.SimpleDateFormat; +import java.text.ParseException; +import java.time.format.DateTimeFormatter; import java.util.List; import java.util.logging.Logger; -import javax.xml.parsers.DocumentBuilder; -import javax.xml.parsers.DocumentBuilderFactory; -import javax.xml.parsers.ParserConfigurationException; -import javax.xml.transform.OutputKeys; -import javax.xml.transform.Transformer; -import javax.xml.transform.TransformerConfigurationException; -import javax.xml.transform.TransformerException; -import javax.xml.transform.TransformerFactory; -import javax.xml.transform.dom.DOMSource; -import javax.xml.transform.stream.StreamResult; -import org.w3c.dom.Document; -import org.w3c.dom.Element; -import org.xml.sax.SAXException; + +import com.redfin.sitemapgenerator.W3CDateFormat; +import com.redfin.sitemapgenerator.W3CDateFormat.Pattern; +import com.redfin.sitemapgenerator.WebSitemapGenerator; +import com.redfin.sitemapgenerator.WebSitemapUrl; + +import edu.harvard.iq.dataverse.Dataset; +import edu.harvard.iq.dataverse.Dataverse; +import edu.harvard.iq.dataverse.DvObjectContainer; +import edu.harvard.iq.dataverse.settings.ConfigCheckService; +import edu.harvard.iq.dataverse.settings.JvmSettings; +import edu.harvard.iq.dataverse.util.SystemConfig; public class SiteMapUtil { + static final String DATE_PATTERN = "yyyy-MM-dd"; + static final String SITEMAP_FILENAME_STAGED = "sitemap.xml.staged"; + /** @see https://www.sitemaps.org/protocol.html#index */ + static final int SITEMAP_LIMIT = 50000; + private static final Logger logger = Logger.getLogger(SiteMapUtil.class.getCanonicalName()); + private static DateTimeFormatter formatter = DateTimeFormatter.ofPattern(DATE_PATTERN); - static final String SITEMAP_FILENAME_FINAL = "sitemap.xml"; - static final String SITEMAP_FILENAME_STAGED = "sitemap.xml.staged"; - /** - * TODO: Handle more than 50,000 entries in the sitemap. - * - * (As of this writing Harvard Dataverse only has ~3000 dataverses and - * ~30,000 datasets.) - * - * "each Sitemap file that you provide must have no more than 50,000 URLs" - * https://www.sitemaps.org/protocol.html - * - * Consider using a third party library: "One sitemap can contain a maximum - * of 50,000 URLs. 
(Some sitemaps, like Google News sitemaps, can contain
-     * only 1,000 URLs.) If you need to put more URLs than that in a sitemap,
-     * you'll have to use a sitemap index file. Fortunately, WebSitemapGenerator
-     * can manage the whole thing for you."
-     * https://github.com/dfabulich/sitemapgen4j
-     */
     public static void updateSiteMap(List<Dataverse> dataverses, List<Dataset> datasets) {
         logger.info("BEGIN updateSiteMap");

-        String sitemapPathString = getSitemapPathString();
-        String stagedSitemapPathAndFileString = sitemapPathString + File.separator + SITEMAP_FILENAME_STAGED;
-        String finalSitemapPathAndFileString = sitemapPathString + File.separator + SITEMAP_FILENAME_FINAL;
-
-        Path stagedPath = Paths.get(stagedSitemapPathAndFileString);
-        if (Files.exists(stagedPath)) {
-            logger.warning("Unable to update sitemap! The staged file from a previous run already existed. Delete " + stagedSitemapPathAndFileString + " and try again.");
+        final String dataverseSiteUrl = SystemConfig.getDataverseSiteUrlStatic();
+        final String msgErrorFormat = "Problem with %s: %s. The exception is %s";
+        final String msgErrorW3CFormat = "%s isn't a valid W3C date time for %s. The exception is %s";
+        final String sitemapPathString = getSitemapPathString();
+        final String stagedSitemapPathAndFileString = sitemapPathString + File.separator + SITEMAP_FILENAME_STAGED;
+        final Path stagedSitemapPath = Paths.get(stagedSitemapPathAndFileString);
+
+        if (Files.exists(stagedSitemapPath)) {
+            logger.warning(String.format(
+                    "Unable to update sitemap! The staged file from a previous run already existed. Delete %s and try again.",
+                    stagedSitemapPathAndFileString));
             return;
         }

-        DocumentBuilderFactory documentBuilderFactory = DocumentBuilderFactory.newInstance();
-        DocumentBuilder documentBuilder = null;
+        final File directory = new File(sitemapPathString);
+        if (!directory.exists()) {
+            directory.mkdir();
+        }
+
+        // Use the DAY pattern (YYYY-MM-DD), local machine timezone
+        final W3CDateFormat dateFormat = new W3CDateFormat(Pattern.DAY);
+        WebSitemapGenerator wsg = null;
         try {
-            documentBuilder = documentBuilderFactory.newDocumentBuilder();
-        } catch (ParserConfigurationException ex) {
-            logger.warning("Unable to update sitemap! ParserConfigurationException: " + ex.getLocalizedMessage());
+            // All sitemap files are in the "sitemap" folder, see the "getSitemapPathString" method.
+            // But with the pretty-faces configuration, "sitemap.xml" and "sitemap_index.xml" are accessible directly,
+            // like "https://demo.dataverse.org/sitemap.xml". So "/sitemap/" needs to be added to the "WebSitemapGenerator"
+            // base URL in order to have a valid URL for the sitemap location.
+ wsg = WebSitemapGenerator.builder(dataverseSiteUrl + "/sitemap/", directory).autoValidate(true).dateFormat(dateFormat) + .build(); + } catch (MalformedURLException e) { + logger.warning(String.format(msgErrorFormat, "Dataverse site URL", dataverseSiteUrl, e.getLocalizedMessage())); return; } - Document document = documentBuilder.newDocument(); - - Element urlSet = document.createElement("urlset"); - urlSet.setAttribute("xmlns", "http://www.sitemaps.org/schemas/sitemap/0.9"); - urlSet.setAttribute("xmlns:xhtml", "http://www.w3.org/1999/xhtml"); - document.appendChild(urlSet); for (Dataverse dataverse : dataverses) { if (!dataverse.isReleased()) { continue; } - Element url = document.createElement("url"); - urlSet.appendChild(url); - - Element loc = document.createElement("loc"); - String dataverseAlias = dataverse.getAlias(); - loc.appendChild(document.createTextNode(SystemConfig.getDataverseSiteUrlStatic() + "/dataverse/" + dataverseAlias)); - url.appendChild(loc); - - Element lastmod = document.createElement("lastmod"); - lastmod.appendChild(document.createTextNode(getLastModDate(dataverse))); - url.appendChild(lastmod); + final String dvAlias = dataverse.getAlias(); + final String dataverseUrl = dataverseSiteUrl + "/dataverse/" + dvAlias; + final String lastModDate = getLastModDate(dataverse); + try { + final WebSitemapUrl url = new WebSitemapUrl.Options(dataverseUrl).lastMod(lastModDate).build(); + wsg.addUrl(url); + } catch (MalformedURLException e) { + logger.fine(String.format(msgErrorFormat, "dataverse URL", dataverseUrl, e.getLocalizedMessage())); + } catch (ParseException e) { + logger.fine(String.format(msgErrorW3CFormat, lastModDate, "dataverse alias " + dvAlias, e.getLocalizedMessage())); + } } for (Dataset dataset : datasets) { - if (!dataset.isReleased()) { - continue; - } - if (dataset.isHarvested()) { - continue; - } // The deaccessioned check is last because it has to iterate through dataset versions. - if (dataset.isDeaccessioned()) { + if (!dataset.isReleased() || dataset.isHarvested() || dataset.isDeaccessioned()) { continue; } - Element url = document.createElement("url"); - urlSet.appendChild(url); - - Element loc = document.createElement("loc"); - String datasetPid = dataset.getGlobalId().asString(); - loc.appendChild(document.createTextNode(SystemConfig.getDataverseSiteUrlStatic() + "/dataset.xhtml?persistentId=" + datasetPid)); - url.appendChild(loc); - - Element lastmod = document.createElement("lastmod"); - lastmod.appendChild(document.createTextNode(getLastModDate(dataset))); - url.appendChild(lastmod); - } - - TransformerFactory transformerFactory = TransformerFactory.newInstance(); - Transformer transformer = null; - try { - transformer = transformerFactory.newTransformer(); - } catch (TransformerConfigurationException ex) { - logger.warning("Unable to update sitemap! 
TransformerConfigurationException: " + ex.getLocalizedMessage()); - return; - } - transformer.setOutputProperty(OutputKeys.INDENT, "yes"); - transformer.setOutputProperty("{http://xml.apache.org/xslt}indent-amount", "2"); - DOMSource source = new DOMSource(document); - File directory = new File(sitemapPathString); - if (!directory.exists()) { - directory.mkdir(); - } - - boolean debug = false; - if (debug) { - logger.info("Writing sitemap to console/logs"); - StreamResult consoleResult = new StreamResult(System.out); + final String datasetPid = dataset.getGlobalId().asString(); + final String datasetUrl = dataverseSiteUrl + "/dataset.xhtml?persistentId=" + datasetPid; + final String lastModDate = getLastModDate(dataset); try { - transformer.transform(source, consoleResult); - } catch (TransformerException ex) { - logger.warning("Unable to print sitemap to the console: " + ex.getLocalizedMessage()); + final WebSitemapUrl url = new WebSitemapUrl.Options(datasetUrl).lastMod(lastModDate).build(); + wsg.addUrl(url); + } catch (MalformedURLException e) { + logger.fine(String.format(msgErrorFormat, "dataset URL", datasetUrl, e.getLocalizedMessage())); + } catch (ParseException e) { + logger.fine(String.format(msgErrorW3CFormat, lastModDate, "dataset " + datasetPid, e.getLocalizedMessage())); } } - logger.info("Writing staged sitemap to " + stagedSitemapPathAndFileString); - StreamResult result = new StreamResult(new File(stagedSitemapPathAndFileString)); - try { - transformer.transform(source, result); - } catch (TransformerException ex) { - logger.warning("Unable to update sitemap! Unable to write staged sitemap to " + stagedSitemapPathAndFileString + ". TransformerException: " + ex.getLocalizedMessage()); - return; - } - - logger.info("Checking staged sitemap for well-formedness. The staged file is " + stagedSitemapPathAndFileString); + logger.info(String.format("Writing and checking sitemap file into %s", sitemapPathString)); try { - XmlValidator.validateXmlWellFormed(stagedSitemapPathAndFileString); + wsg.write(); + if (dataverses.size() + datasets.size() > SITEMAP_LIMIT) { + wsg.writeSitemapsWithIndex(); + } } catch (Exception ex) { - logger.warning("Unable to update sitemap! Staged sitemap file is not well-formed XML! The exception for " + stagedSitemapPathAndFileString + " is " + ex.getLocalizedMessage()); - return; - } - - logger.info("Checking staged sitemap against XML schema. The staged file is " + stagedSitemapPathAndFileString); - URL schemaUrl = null; - try { - schemaUrl = new URL("https://www.sitemaps.org/schemas/sitemap/0.9/sitemap.xsd"); - } catch (MalformedURLException ex) { - // This URL is hard coded and it's fine. We should never get MalformedURLException so we just swallow the exception and carry on. - } - try { - XmlValidator.validateXmlSchema(stagedSitemapPathAndFileString, schemaUrl); - } catch (SAXException | IOException ex) { - logger.warning("Unable to update sitemap! Exception caught while checking XML staged file (" + stagedSitemapPathAndFileString + " ) against XML schema: " + ex.getLocalizedMessage()); + final StringBuffer errorMsg = new StringBuffer("Unable to write or validate sitemap ! 
The exception is "); + errorMsg.append(ex.getLocalizedMessage()); + // Add causes messages exception + Throwable cause = ex.getCause(); + // Fix limit to 5 causes + final int causeLimit = 5; + int cpt = 0; + while (cause != null && cpt < causeLimit) { + errorMsg.append(" with cause ").append(cause.getLocalizedMessage()); + cause = ex.getCause(); + cpt = cpt + 1; + } + logger.warning(errorMsg.toString()); return; } - Path finalPath = Paths.get(finalSitemapPathAndFileString); - logger.info("Copying staged sitemap from " + stagedSitemapPathAndFileString + " to " + finalSitemapPathAndFileString); + logger.info(String.format("Remove staged sitemap %s", stagedSitemapPathAndFileString)); try { - Files.move(stagedPath, finalPath, StandardCopyOption.REPLACE_EXISTING); + Files.deleteIfExists(stagedSitemapPath); } catch (IOException ex) { - logger.warning("Unable to update sitemap! Unable to copy staged sitemap from " + stagedSitemapPathAndFileString + " to " + finalSitemapPathAndFileString + ". IOException: " + ex.getLocalizedMessage()); + logger.warning("Unable to delete sitemap staged file! IOException: " + ex.getLocalizedMessage()); return; } @@ -199,12 +145,11 @@ private static String getLastModDate(DvObjectContainer dvObjectContainer) { // TODO: Decide if YYYY-MM-DD is enough. https://www.sitemaps.org/protocol.html // says "The date of last modification of the file. This date should be in W3C Datetime format. // This format allows you to omit the time portion, if desired, and use YYYY-MM-DD." - return new SimpleDateFormat("yyyy-MM-dd").format(dvObjectContainer.getModificationTime()); + return dvObjectContainer.getModificationTime().toLocalDateTime().format(formatter); } public static boolean stageFileExists() { - String sitemapPathString = getSitemapPathString(); - String stagedSitemapPathAndFileString = sitemapPathString + File.separator + SITEMAP_FILENAME_STAGED; + String stagedSitemapPathAndFileString = getSitemapPathString() + File.separator + SITEMAP_FILENAME_STAGED; Path stagedPath = Paths.get(stagedSitemapPathAndFileString); if (Files.exists(stagedPath)) { logger.warning("Unable to update sitemap! The staged file from a previous run already existed. Delete " + stagedSitemapPathAndFileString + " and try again."); @@ -212,7 +157,7 @@ public static boolean stageFileExists() { } return false; } - + /** * Lookup the location where to generate the sitemap. 
* @@ -223,6 +168,6 @@ public static boolean stageFileExists() { */ private static String getSitemapPathString() { return JvmSettings.DOCROOT_DIRECTORY.lookup() + File.separator + "sitemap"; - } + } diff --git a/src/main/java/edu/harvard/iq/dataverse/util/FileUtil.java b/src/main/java/edu/harvard/iq/dataverse/util/FileUtil.java index 8decf74fe13..6c427672e6d 100644 --- a/src/main/java/edu/harvard/iq/dataverse/util/FileUtil.java +++ b/src/main/java/edu/harvard/iq/dataverse/util/FileUtil.java @@ -21,14 +21,8 @@ package edu.harvard.iq.dataverse.util; -import edu.harvard.iq.dataverse.DataFile; +import edu.harvard.iq.dataverse.*; import edu.harvard.iq.dataverse.DataFile.ChecksumType; -import edu.harvard.iq.dataverse.DataFileServiceBean; -import edu.harvard.iq.dataverse.Dataset; -import edu.harvard.iq.dataverse.DatasetVersion; -import edu.harvard.iq.dataverse.Embargo; -import edu.harvard.iq.dataverse.FileMetadata; -import edu.harvard.iq.dataverse.TermsOfUseAndAccess; import edu.harvard.iq.dataverse.dataaccess.DataAccess; import edu.harvard.iq.dataverse.dataaccess.ImageThumbConverter; import edu.harvard.iq.dataverse.dataaccess.S3AccessIO; @@ -86,6 +80,7 @@ import java.util.HashMap; import java.util.List; import java.util.Optional; +import java.util.ResourceBundle; import java.util.UUID; import java.util.logging.Level; import java.util.logging.Logger; @@ -182,6 +177,7 @@ public class FileUtil implements java.io.Serializable { public static final String MIME_TYPE_NETCDF = "application/netcdf"; public static final String MIME_TYPE_XNETCDF = "application/x-netcdf"; public static final String MIME_TYPE_HDF5 = "application/x-hdf5"; + public static final String MIME_TYPE_RO_CRATE = "application/ld+json; profile=\"http://www.w3.org/ns/json-ld#flattened http://www.w3.org/ns/json-ld#compacted https://w3id.org/ro/crate\""; // File type "thumbnail classes" tags: @@ -278,6 +274,11 @@ public static String getUserFriendlyFileType(DataFile dataFile) { if (fileType.equalsIgnoreCase(ShapefileHandler.SHAPEFILE_FILE_TYPE)){ return ShapefileHandler.SHAPEFILE_FILE_TYPE_FRIENDLY_NAME; } + try { + return BundleUtil.getStringFromPropertyFile(fileType,"MimeTypeDisplay" ); + } catch (MissingResourceException e) { + //NOOP: we will try again after trimming ";" + } if (fileType.contains(";")) { fileType = fileType.substring(0, fileType.indexOf(";")); } @@ -292,6 +293,11 @@ public static String getUserFriendlyFileType(DataFile dataFile) { } public static String getIndexableFacetFileType(DataFile dataFile) { + try { + return BundleUtil.getStringFromDefaultPropertyFile(dataFile.getContentType(),"MimeTypeFacets" ); + } catch (MissingResourceException e) { + //NOOP: we will try again after trimming ";" + } String fileType = getFileType(dataFile); try { return BundleUtil.getStringFromDefaultPropertyFile(fileType,"MimeTypeFacets" ); @@ -421,7 +427,10 @@ public static String retestIngestableFileType(File file, String fileType) { } public static String determineFileType(File f, String fileName) throws IOException{ - String fileType = null; + String fileType = lookupFileTypeByFileName(fileName); + if (fileType != null) { + return fileType; + } String fileExtension = getFileExtension(fileName); @@ -480,17 +489,17 @@ public static String determineFileType(File f, String fileName) throws IOExcepti if (fileType != null && fileType.startsWith("text/plain") && STATISTICAL_FILE_EXTENSION.containsKey(fileExtension)) { fileType = STATISTICAL_FILE_EXTENSION.get(fileExtension); } else { - fileType = determineFileTypeByNameAndExtension(fileName); 
+ fileType = lookupFileTypeByExtension(fileName); } logger.fine("mime type recognized by extension: "+fileType); } } else { logger.fine("fileExtension is null"); - String fileTypeByName = lookupFileTypeFromPropertiesFile(fileName); - if(!StringUtil.isEmpty(fileTypeByName)) { - logger.fine(String.format("mime type: %s recognized by filename: %s", fileTypeByName, fileName)); - fileType = fileTypeByName; + final String fileTypeByExtension = lookupFileTypeByExtensionFromPropertiesFile(fileName); + if(!StringUtil.isEmpty(fileTypeByExtension)) { + logger.fine(String.format("mime type: %s recognized by extension: %s", fileTypeByExtension, fileName)); + fileType = fileTypeByExtension; } } @@ -501,24 +510,15 @@ public static String determineFileType(File f, String fileName) throws IOExcepti if ("application/x-gzip".equals(fileType)) { logger.fine("we'll run additional checks on this gzipped file."); - // We want to be able to support gzipped FITS files, same way as - // if they were just regular FITS files: - FileInputStream gzippedIn = new FileInputStream(f); - // (new FileInputStream() can throw a "filen not found" exception; - // however, if we've made it this far, it really means that the - // file does exist and can be opened) - InputStream uncompressedIn = null; - try { - uncompressedIn = new GZIPInputStream(gzippedIn); + try (FileInputStream gzippedIn = new FileInputStream(f); + InputStream uncompressedIn = new GZIPInputStream(gzippedIn)) { if (isFITSFile(uncompressedIn)) { fileType = "application/fits-gzipped"; } } catch (IOException ioex) { - if (uncompressedIn != null) { - try {uncompressedIn.close();} catch (IOException e) {} - } + logger.warning("IOException while processing gzipped FITS file: " + ioex.getMessage()); } - } + } if ("application/zip".equals(fileType)) { // Is this a zipped Shapefile? 
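
// A sketch of the detection order determineFileType() follows after this change
// (illustrative only, not part of the patch; "probeContent" is a hypothetical
// stand-in for the FITS, gzipped-FITS and zip/shapefile content checks above):
//
//     String fileType = lookupFileTypeByFileName(fileName);  // MimeTypeDetectionByFileName.properties;
//     if (fileType == null) {                                // a whole-name match wins outright, which is
//         fileType = probeContent(f);                        // presumably how ro-crate-metadata.json picks up
//     }                                                      // the RO-Crate type added to FileUtil below
//     if (fileType == null) {
//         fileType = lookupFileTypeByExtension(fileName);    // MimetypesFileTypeMap, falling back to
//     }                                                      // MimeTypeDetectionByFileExtension.properties
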
@@ -544,33 +544,41 @@ public static String determineFileType(File f, String fileName) throws IOExcepti return fileType; } - public static String determineFileTypeByNameAndExtension(String fileName) { - String mimetypesFileTypeMapResult = MIME_TYPE_MAP.getContentType(fileName); + public static String determineFileTypeByNameAndExtension(final String fileName) { + final String fileType = lookupFileTypeByFileName(fileName); + if (fileType != null) { + return fileType; + } + return lookupFileTypeByExtension(fileName); + } + + private static String lookupFileTypeByExtension(final String fileName) { + final String mimetypesFileTypeMapResult = MIME_TYPE_MAP.getContentType(fileName); logger.fine("MimetypesFileTypeMap type by extension, for " + fileName + ": " + mimetypesFileTypeMapResult); - if (mimetypesFileTypeMapResult != null) { - if ("application/octet-stream".equals(mimetypesFileTypeMapResult)) { - return lookupFileTypeFromPropertiesFile(fileName); - } else { - return mimetypesFileTypeMapResult; - } - } else { + if (mimetypesFileTypeMapResult == null) { return null; } + if ("application/octet-stream".equals(mimetypesFileTypeMapResult)) { + return lookupFileTypeByExtensionFromPropertiesFile(fileName); + } + return mimetypesFileTypeMapResult; } - public static String lookupFileTypeFromPropertiesFile(String fileName) { - String fileKey = FilenameUtils.getExtension(fileName); - String propertyFileName = "MimeTypeDetectionByFileExtension"; - if(fileKey == null || fileKey.isEmpty()) { - fileKey = fileName; - propertyFileName = "MimeTypeDetectionByFileName"; + private static String lookupFileTypeByFileName(final String fileName) { + return lookupFileTypeFromPropertiesFile("MimeTypeDetectionByFileName", fileName); + } - } - String propertyFileNameOnDisk = propertyFileName + ".properties"; + private static String lookupFileTypeByExtensionFromPropertiesFile(final String fileName) { + final String fileKey = FilenameUtils.getExtension(fileName); + return lookupFileTypeFromPropertiesFile("MimeTypeDetectionByFileExtension", fileKey); + } + + private static String lookupFileTypeFromPropertiesFile(final String propertyFileName, final String fileKey) { + final String propertyFileNameOnDisk = propertyFileName + ".properties"; try { logger.fine("checking " + propertyFileNameOnDisk + " for file key " + fileKey); return BundleUtil.getStringFromPropertyFile(fileKey, propertyFileName); - } catch (MissingResourceException ex) { + } catch (final MissingResourceException ex) { logger.info(fileKey + " is a filename/extension Dataverse doesn't know about. 
Consider adding it to the " + propertyFileNameOnDisk + " file."); return null; } @@ -825,7 +833,8 @@ public static boolean useRecognizedType(String suppliedContentType, String recog || canIngestAsTabular(recognizedType) || recognizedType.equals("application/fits-gzipped") || recognizedType.equalsIgnoreCase(ShapefileHandler.SHAPEFILE_FILE_TYPE) || recognizedType.equalsIgnoreCase(BagItFileHandler.FILE_TYPE) - || recognizedType.equals(MIME_TYPE_ZIP)) { + || recognizedType.equals(MIME_TYPE_ZIP) + || recognizedType.equals(MIME_TYPE_RO_CRATE)) { return true; } return false; @@ -1223,6 +1232,9 @@ public static boolean isPubliclyDownloadable(FileMetadata fileMetadata) { if (isActivelyEmbargoed(fileMetadata)) { return false; } + if (isRetentionExpired(fileMetadata)) { + return false; + } boolean popupReasons = isDownloadPopupRequired(fileMetadata.getDatasetVersion()); if (popupReasons == true) { /** @@ -1776,6 +1788,29 @@ public static boolean isActivelyEmbargoed(List fmdList) { return false; } + public static boolean isRetentionExpired(DataFile df) { + Retention e = df.getRetention(); + if (e != null) { + LocalDate endDate = e.getDateUnavailable(); + if (endDate != null && endDate.isBefore(LocalDate.now())) { + return true; + } + } + return false; + } + + public static boolean isRetentionExpired(FileMetadata fileMetadata) { + return isRetentionExpired(fileMetadata.getDataFile()); + } + + public static boolean isRetentionExpired(List fmdList) { + for (FileMetadata fmd : fmdList) { + if (isRetentionExpired(fmd)) { + return true; + } + } + return false; + } public static String getStorageDriver(DataFile dataFile) { String storageIdentifier = dataFile.getStorageIdentifier(); diff --git a/src/main/java/edu/harvard/iq/dataverse/util/MailUtil.java b/src/main/java/edu/harvard/iq/dataverse/util/MailUtil.java index ccec3e5f09b..36c249de834 100644 --- a/src/main/java/edu/harvard/iq/dataverse/util/MailUtil.java +++ b/src/main/java/edu/harvard/iq/dataverse/util/MailUtil.java @@ -35,7 +35,10 @@ public static String getSubjectTextBasedOnNotification(UserNotification userNoti case CREATEDV: return BundleUtil.getStringFromBundle("notification.email.create.dataverse.subject", rootDvNameAsList); case REQUESTFILEACCESS: - return BundleUtil.getStringFromBundle("notification.email.request.file.access.subject", Arrays.asList(rootDvNameAsList.get(0), datasetDisplayName)); + String userNameFirst = userNotification.getRequestor().getFirstName(); + String userNameLast = userNotification.getRequestor().getLastName(); + String userIdentifier = userNotification.getRequestor().getIdentifier(); + return BundleUtil.getStringFromBundle("notification.email.request.file.access.subject", Arrays.asList(rootDvNameAsList.get(0), userNameFirst, userNameLast, userIdentifier, datasetDisplayName)); case REQUESTEDFILEACCESS: return BundleUtil.getStringFromBundle("notification.email.requested.file.access.subject", Arrays.asList(rootDvNameAsList.get(0), datasetDisplayName)); case GRANTFILEACCESS: diff --git a/src/main/java/edu/harvard/iq/dataverse/util/SystemConfig.java b/src/main/java/edu/harvard/iq/dataverse/util/SystemConfig.java index 3f2f36ea36a..f9801419e47 100644 --- a/src/main/java/edu/harvard/iq/dataverse/util/SystemConfig.java +++ b/src/main/java/edu/harvard/iq/dataverse/util/SystemConfig.java @@ -752,6 +752,7 @@ public enum FileUploadMethods { * DCM stands for Data Capture Module. Right now it supports upload over * rsync+ssh but DCM may support additional methods in the future. 
*/ + @Deprecated(forRemoval = true, since = "2024-07-07") RSYNC("dcm/rsync+ssh"), /** * Traditional Dataverse file handling, which tends to involve users @@ -809,6 +810,7 @@ public enum FileDownloadMethods { * RSAL stands for Repository Storage Abstraction Layer. Downloads don't * go through Glassfish. */ + @Deprecated(forRemoval = true, since = "2024-07-07") RSYNC("rsal/rsync"), NATIVE("native/http"), GLOBUS("globus") @@ -862,6 +864,7 @@ public String toString() { */ public enum TransferProtocols { + @Deprecated(forRemoval = true, since = "2024-07-07") RSYNC("rsync"), /** * POSIX includes NFS. This is related to Key.LocalDataAccessPath in @@ -898,7 +901,8 @@ public boolean isPublicInstall(){ boolean saneDefault = false; return settingsService.isTrueForKey(SettingsServiceBean.Key.PublicInstall, saneDefault); } - + + @Deprecated(forRemoval = true, since = "2024-07-07") public boolean isRsyncUpload(){ return getMethodAvailable(SystemConfig.FileUploadMethods.RSYNC.toString(), true); } @@ -915,7 +919,8 @@ public boolean isWebloaderUpload(){ public boolean isHTTPUpload(){ return getMethodAvailable(SystemConfig.FileUploadMethods.NATIVE.toString(), true); } - + + @Deprecated(forRemoval = true, since = "2024-07-07") public boolean isRsyncOnly(){ String downloadMethods = settingsService.getValueForKey(SettingsServiceBean.Key.DownloadMethods); if(downloadMethods == null){ @@ -931,11 +936,12 @@ public boolean isRsyncOnly(){ return Arrays.asList(uploadMethods.toLowerCase().split("\\s*,\\s*")).size() == 1 && uploadMethods.toLowerCase().equals(SystemConfig.FileUploadMethods.RSYNC.toString()); } } - + + @Deprecated(forRemoval = true, since = "2024-07-07") public boolean isRsyncDownload() { return getMethodAvailable(SystemConfig.FileUploadMethods.RSYNC.toString(), false); } - + public boolean isHTTPDownload() { return getMethodAvailable(SystemConfig.FileUploadMethods.NATIVE.toString(), false); } diff --git a/src/main/java/edu/harvard/iq/dataverse/util/URLTokenUtil.java b/src/main/java/edu/harvard/iq/dataverse/util/URLTokenUtil.java index a3293e0cd28..90557a530c9 100644 --- a/src/main/java/edu/harvard/iq/dataverse/util/URLTokenUtil.java +++ b/src/main/java/edu/harvard/iq/dataverse/util/URLTokenUtil.java @@ -1,6 +1,7 @@ package edu.harvard.iq.dataverse.util; import java.util.Arrays; +import java.util.Random; import java.util.logging.Logger; import java.util.regex.Matcher; import java.util.regex.Pattern; @@ -261,7 +262,9 @@ public JsonObject getParams(JsonObject toolParameters) { public static String getScriptForUrl(String url) { String msg = BundleUtil.getStringFromBundle("externaltools.enable.browser.popups"); - String script = "const newWin = window.open('" + url + "', target='_blank'); if (!newWin || newWin.closed || typeof newWin.closed == \"undefined\") {alert(\"" + msg + "\");}"; + String newWin = "newWin" + (new Random()).nextInt(1000000000); + //Always use a unique identifier so that more than one script can run (or one can be rerun) without conflicts + String script = String.format("const %1$s = window.open('" + url + "', target='_blank'); if (!%1$s || %1$s.closed || typeof %1$s.closed == \"undefined\") {alert(\"" + msg + "\");}", newWin); return script; } diff --git a/src/main/java/edu/harvard/iq/dataverse/util/bagit/OREMap.java b/src/main/java/edu/harvard/iq/dataverse/util/bagit/OREMap.java index aa653a6e360..84bc7834ab9 100644 --- a/src/main/java/edu/harvard/iq/dataverse/util/bagit/OREMap.java +++ b/src/main/java/edu/harvard/iq/dataverse/util/bagit/OREMap.java @@ -1,19 +1,7 @@ package 
edu.harvard.iq.dataverse.util.bagit; -import edu.harvard.iq.dataverse.DataFile; -import edu.harvard.iq.dataverse.Dataset; -import edu.harvard.iq.dataverse.DatasetField; -import edu.harvard.iq.dataverse.DatasetFieldCompoundValue; -import edu.harvard.iq.dataverse.DatasetFieldConstant; -import edu.harvard.iq.dataverse.DatasetFieldServiceBean; -import edu.harvard.iq.dataverse.DatasetFieldType; -import edu.harvard.iq.dataverse.DatasetVersion; +import edu.harvard.iq.dataverse.*; import edu.harvard.iq.dataverse.DatasetVersion.VersionState; -import edu.harvard.iq.dataverse.Dataverse; -import edu.harvard.iq.dataverse.DvObjectContainer; -import edu.harvard.iq.dataverse.Embargo; -import edu.harvard.iq.dataverse.FileMetadata; -import edu.harvard.iq.dataverse.TermsOfUseAndAccess; import edu.harvard.iq.dataverse.branding.BrandingUtil; import edu.harvard.iq.dataverse.export.OAI_OREExporter; import edu.harvard.iq.dataverse.settings.SettingsServiceBean; @@ -236,6 +224,17 @@ public JsonObjectBuilder getOREMapBuilder(boolean aggregationOnly) { } aggRes.add(JsonLDTerm.DVCore("embargoed").getLabel(), embargoObject); } + Retention retention = df.getRetention(); + if(retention!=null) { + String date = retention.getFormattedDateUnavailable(); + String reason= retention.getReason(); + JsonObjectBuilder retentionObject = Json.createObjectBuilder(); + retentionObject.add(JsonLDTerm.DVCore("dateUnavailable").getLabel(), date); + if(reason!=null) { + retentionObject.add(JsonLDTerm.DVCore("reason").getLabel(), reason); + } + aggRes.add(JsonLDTerm.DVCore("retained").getLabel(), retentionObject); + } addIfNotNull(aggRes, JsonLDTerm.directoryLabel, fmd.getDirectoryLabel()); addIfNotNull(aggRes, JsonLDTerm.schemaOrg("version"), fmd.getVersion()); addIfNotNull(aggRes, JsonLDTerm.datasetVersionId, fmd.getDatasetVersion().getId()); diff --git a/src/main/java/edu/harvard/iq/dataverse/util/json/BriefJsonPrinter.java b/src/main/java/edu/harvard/iq/dataverse/util/json/BriefJsonPrinter.java index 3fcaf6b11ff..c16a46a1765 100644 --- a/src/main/java/edu/harvard/iq/dataverse/util/json/BriefJsonPrinter.java +++ b/src/main/java/edu/harvard/iq/dataverse/util/json/BriefJsonPrinter.java @@ -28,6 +28,7 @@ public JsonObjectBuilder json( MetadataBlock blk ) { ? 
null : jsonObjectBuilder().add("id", blk.getId()) .add("displayName", blk.getDisplayName()) + .add("displayOnCreate", blk.isDisplayOnCreate()) .add("name", blk.getName()) ; } diff --git a/src/main/java/edu/harvard/iq/dataverse/util/json/JSONLDUtil.java b/src/main/java/edu/harvard/iq/dataverse/util/json/JSONLDUtil.java index 637f002f5ad..52491a5a7e1 100644 --- a/src/main/java/edu/harvard/iq/dataverse/util/json/JSONLDUtil.java +++ b/src/main/java/edu/harvard/iq/dataverse/util/json/JSONLDUtil.java @@ -466,7 +466,6 @@ private static void addField(DatasetField dsf, JsonArray valArray, DatasetFieldT if(!datasetFieldSvc.isValidCVocValue(dsft, strValue)) { throw new BadRequestException("Invalid values submitted for " + dsft.getName() + " which is limited to specific vocabularies."); } - datasetFieldSvc.registerExternalTerm(cvocMap.get(dsft.getId()), strValue); } DatasetFieldValue datasetFieldValue = new DatasetFieldValue(); diff --git a/src/main/java/edu/harvard/iq/dataverse/util/json/JsonParser.java b/src/main/java/edu/harvard/iq/dataverse/util/json/JsonParser.java index a0bd2fff295..addccc93fe0 100644 --- a/src/main/java/edu/harvard/iq/dataverse/util/json/JsonParser.java +++ b/src/main/java/edu/harvard/iq/dataverse/util/json/JsonParser.java @@ -847,7 +847,6 @@ public void parsePrimitiveValue(DatasetField dsf, DatasetFieldType dft , JsonObj if(!datasetFieldSvc.isValidCVocValue(dft, datasetFieldValue.getValue())) { throw new JsonParseException("Invalid values submitted for " + dft.getName() + " which is limited to specific vocabularies."); } - datasetFieldSvc.registerExternalTerm(cvocMap.get(dft.getId()), datasetFieldValue.getValue()); } vals.add(datasetFieldValue); } @@ -864,7 +863,6 @@ public void parsePrimitiveValue(DatasetField dsf, DatasetFieldType dft , JsonObj if(!datasetFieldSvc.isValidCVocValue(dft, datasetFieldValue.getValue())) { throw new JsonParseException("Invalid values submitted for " + dft.getName() + " which is limited to specific vocabularies."); } - datasetFieldSvc.registerExternalTerm(cvocMap.get(dft.getId()), datasetFieldValue.getValue()); } vals.add(datasetFieldValue); } diff --git a/src/main/java/edu/harvard/iq/dataverse/util/json/JsonPrinter.java b/src/main/java/edu/harvard/iq/dataverse/util/json/JsonPrinter.java index 005ae2f2892..c72dfc1d127 100644 --- a/src/main/java/edu/harvard/iq/dataverse/util/json/JsonPrinter.java +++ b/src/main/java/edu/harvard/iq/dataverse/util/json/JsonPrinter.java @@ -273,7 +273,7 @@ public static JsonObjectBuilder json(Dataverse dv, Boolean hideEmail, Boolean re } if (returnOwners){ bld.add("isPartOf", getOwnersFromDvObject(dv)); - } + } bld.add("permissionRoot", dv.isPermissionRoot()) .add("description", dv.getDescription()) .add("dataverseType", dv.getDataverseType().name()); @@ -292,6 +292,12 @@ public static JsonObjectBuilder json(Dataverse dv, Boolean hideEmail, Boolean re if (dv.getFilePIDsEnabled() != null) { bld.add("filePIDsEnabled", dv.getFilePIDsEnabled()); } + bld.add("isReleased", dv.isReleased()); + + List inputLevels = dv.getDataverseFieldTypeInputLevels(); + if(!inputLevels.isEmpty()) { + bld.add("inputLevels", JsonPrinter.jsonDataverseFieldTypeInputLevels(inputLevels)); + } return bld; } @@ -415,18 +421,25 @@ public static JsonObjectBuilder json(DatasetVersion dsv, boolean includeFiles) { return json(dsv, null, includeFiles, false); } - public static JsonObjectBuilder json(DatasetVersion dsv, List anonymizedFieldTypeNamesList, boolean includeFiles, boolean returnOwners) { + public static JsonObjectBuilder json(DatasetVersion 
dsv, List anonymizedFieldTypeNamesList, + boolean includeFiles, boolean returnOwners) { Dataset dataset = dsv.getDataset(); JsonObjectBuilder bld = jsonObjectBuilder() .add("id", dsv.getId()).add("datasetId", dataset.getId()) .add("datasetPersistentId", dataset.getGlobalId().asString()) .add("storageIdentifier", dataset.getStorageIdentifier()) - .add("versionNumber", dsv.getVersionNumber()).add("versionMinorNumber", dsv.getMinorVersionNumber()) - .add("versionState", dsv.getVersionState().name()).add("versionNote", dsv.getVersionNote()) - .add("archiveNote", dsv.getArchiveNote()).add("deaccessionLink", dsv.getDeaccessionLink()) - .add("distributionDate", dsv.getDistributionDate()).add("productionDate", dsv.getProductionDate()) + .add("versionNumber", dsv.getVersionNumber()) + .add("versionMinorNumber", dsv.getMinorVersionNumber()) + .add("versionState", dsv.getVersionState().name()) + .add("latestVersionPublishingState", dataset.getLatestVersion().getVersionState().name()) + .add("versionNote", dsv.getVersionNote()) + .add("archiveNote", dsv.getArchiveNote()) + .add("deaccessionLink", dsv.getDeaccessionLink()) + .add("distributionDate", dsv.getDistributionDate()) + .add("productionDate", dsv.getProductionDate()) .add("UNF", dsv.getUNF()).add("archiveTime", format(dsv.getArchiveTime())) - .add("lastUpdateTime", format(dsv.getLastUpdateTime())).add("releaseTime", format(dsv.getReleaseTime())) + .add("lastUpdateTime", format(dsv.getLastUpdateTime())) + .add("releaseTime", format(dsv.getReleaseTime())) .add("createTime", format(dsv.getCreateTime())) .add("alternativePersistentId", dataset.getAlternativePersistentIdentifier()) .add("publicationDate", dataset.getPublicationDateFormattedYYYYMMDD()) @@ -580,6 +593,18 @@ public static JsonObjectBuilder json(MetadataBlock block, List fie return blockBld; } + public static JsonArrayBuilder json(List metadataBlocks, boolean returnDatasetFieldTypes, boolean printOnlyDisplayedOnCreateDatasetFieldTypes) { + return json(metadataBlocks, returnDatasetFieldTypes, printOnlyDisplayedOnCreateDatasetFieldTypes, null); + } + + public static JsonArrayBuilder json(List metadataBlocks, boolean returnDatasetFieldTypes, boolean printOnlyDisplayedOnCreateDatasetFieldTypes, Dataverse ownerDataverse) { + JsonArrayBuilder arrayBuilder = Json.createArrayBuilder(); + for (MetadataBlock metadataBlock : metadataBlocks) { + arrayBuilder.add(returnDatasetFieldTypes ? 
json(metadataBlock, printOnlyDisplayedOnCreateDatasetFieldTypes, ownerDataverse) : brief.json(metadataBlock)); + } + return arrayBuilder; + } + public static String typeClassString(DatasetFieldType typ) { if (typ.isControlledVocabulary()) { return "controlledVocabulary"; @@ -602,26 +627,51 @@ public static JsonObject json(DatasetField dfv) { } } - public static JsonObjectBuilder json(MetadataBlock blk) { - JsonObjectBuilder bld = jsonObjectBuilder(); - bld.add("id", blk.getId()); - bld.add("name", blk.getName()); - bld.add("displayName", blk.getDisplayName()); + public static JsonObjectBuilder json(MetadataBlock metadataBlock) { + return json(metadataBlock, false, null); + } - JsonObjectBuilder fieldsBld = jsonObjectBuilder(); - for (DatasetFieldType df : new TreeSet<>(blk.getDatasetFieldTypes())) { - fieldsBld.add(df.getName(), JsonPrinter.json(df)); - } + public static JsonObjectBuilder json(MetadataBlock metadataBlock, boolean printOnlyDisplayedOnCreateDatasetFieldTypes, Dataverse ownerDataverse) { + JsonObjectBuilder jsonObjectBuilder = jsonObjectBuilder(); + jsonObjectBuilder.add("id", metadataBlock.getId()); + jsonObjectBuilder.add("name", metadataBlock.getName()); + jsonObjectBuilder.add("displayName", metadataBlock.getDisplayName()); + jsonObjectBuilder.add("displayOnCreate", metadataBlock.isDisplayOnCreate()); - bld.add("fields", fieldsBld); + JsonObjectBuilder fieldsBuilder = Json.createObjectBuilder(); + Set datasetFieldTypes = new TreeSet<>(metadataBlock.getDatasetFieldTypes()); - return bld; + for (DatasetFieldType datasetFieldType : datasetFieldTypes) { + Long datasetFieldTypeId = datasetFieldType.getId(); + boolean requiredAsInputLevelInOwnerDataverse = ownerDataverse != null && ownerDataverse.isDatasetFieldTypeRequiredAsInputLevel(datasetFieldTypeId); + boolean includedAsInputLevelInOwnerDataverse = ownerDataverse != null && ownerDataverse.isDatasetFieldTypeIncludedAsInputLevel(datasetFieldTypeId); + boolean isNotInputLevelInOwnerDataverse = ownerDataverse != null && !ownerDataverse.isDatasetFieldTypeInInputLevels(datasetFieldTypeId); + + DatasetFieldType parentDatasetFieldType = datasetFieldType.getParentDatasetFieldType(); + boolean isRequired = parentDatasetFieldType == null ? datasetFieldType.isRequired() : parentDatasetFieldType.isRequired(); + + boolean displayCondition = printOnlyDisplayedOnCreateDatasetFieldTypes + ? 
(datasetFieldType.isDisplayOnCreate() || isRequired || requiredAsInputLevelInOwnerDataverse) + : ownerDataverse == null || includedAsInputLevelInOwnerDataverse || isNotInputLevelInOwnerDataverse; + + if (displayCondition) { + fieldsBuilder.add(datasetFieldType.getName(), json(datasetFieldType, ownerDataverse)); + } + } + + jsonObjectBuilder.add("fields", fieldsBuilder); + return jsonObjectBuilder; } public static JsonObjectBuilder json(DatasetFieldType fld) { + return json(fld, null); + } + + public static JsonObjectBuilder json(DatasetFieldType fld, Dataverse ownerDataverse) { JsonObjectBuilder fieldsBld = jsonObjectBuilder(); fieldsBld.add("name", fld.getName()); fieldsBld.add("displayName", fld.getDisplayName()); + fieldsBld.add("displayOnCreate", fld.isDisplayOnCreate()); fieldsBld.add("title", fld.getTitle()); fieldsBld.add("type", fld.getFieldType().toString()); fieldsBld.add("typeClass", typeClassString(fld)); @@ -630,8 +680,11 @@ public static JsonObjectBuilder json(DatasetFieldType fld) { fieldsBld.add("multiple", fld.isAllowMultiples()); fieldsBld.add("isControlledVocabulary", fld.isControlledVocabulary()); fieldsBld.add("displayFormat", fld.getDisplayFormat()); - fieldsBld.add("isRequired", fld.isRequired()); fieldsBld.add("displayOrder", fld.getDisplayOrder()); + + boolean requiredInOwnerDataverse = ownerDataverse != null && ownerDataverse.isDatasetFieldTypeRequiredAsInputLevel(fld.getId()); + fieldsBld.add("isRequired", requiredInOwnerDataverse || fld.isRequired()); + if (fld.isControlledVocabulary()) { // If the field has a controlled vocabulary, // add all values to the resulting JSON @@ -641,10 +694,11 @@ public static JsonObjectBuilder json(DatasetFieldType fld) { } fieldsBld.add("controlledVocabularyValues", jab); } + if (!fld.getChildDatasetFieldTypes().isEmpty()) { JsonObjectBuilder subFieldsBld = jsonObjectBuilder(); for (DatasetFieldType subFld : fld.getChildDatasetFieldTypes()) { - subFieldsBld.add(subFld.getName(), JsonPrinter.json(subFld)); + subFieldsBld.add(subFld.getName(), JsonPrinter.json(subFld, ownerDataverse)); } fieldsBld.add("childFields", subFieldsBld); } @@ -726,6 +780,7 @@ public static JsonObjectBuilder json(DataFile df, FileMetadata fileMetadata, boo String pidString = (filePid!=null)? filePid.asString(): ""; JsonObjectBuilder embargo = df.getEmbargo() != null ? JsonPrinter.json(df.getEmbargo()) : null; + JsonObjectBuilder retention = df.getRetention() != null ? 
JsonPrinter.json(df.getRetention()) : null; NullSafeJsonBuilder builder = jsonObjectBuilder() .add("id", df.getId()) @@ -738,6 +793,7 @@ public static JsonObjectBuilder json(DataFile df, FileMetadata fileMetadata, boo .add("description", fileMetadata.getDescription()) .add("categories", getFileCategories(fileMetadata)) .add("embargo", embargo) + .add("retention", retention) //.add("released", df.isReleased()) .add("storageIdentifier", df.getStorageIdentifier()) .add("originalFileFormat", df.getOriginalFileFormat()) @@ -1143,6 +1199,11 @@ public static JsonObjectBuilder json(Embargo embargo) { embargo.getReason()); } + public static JsonObjectBuilder json(Retention retention) { + return jsonObjectBuilder().add("dateUnavailable", retention.getDateUnavailable().toString()).add("reason", + retention.getReason()); + } + public static JsonObjectBuilder json(License license) { return jsonObjectBuilder() .add("id", license.getId()) @@ -1311,4 +1372,16 @@ private static JsonObjectBuilder jsonLicense(DatasetVersion dsv) { } return licenseJsonObjectBuilder; } + + public static JsonArrayBuilder jsonDataverseFieldTypeInputLevels(List inputLevels) { + JsonArrayBuilder jsonArrayOfInputLevels = Json.createArrayBuilder(); + for (DataverseFieldTypeInputLevel inputLevel : inputLevels) { + NullSafeJsonBuilder inputLevelJsonObject = NullSafeJsonBuilder.jsonObjectBuilder(); + inputLevelJsonObject.add("datasetFieldTypeName", inputLevel.getDatasetFieldType().getName()); + inputLevelJsonObject.add("required", inputLevel.isRequired()); + inputLevelJsonObject.add("include", inputLevel.isInclude()); + jsonArrayOfInputLevels.add(inputLevelJsonObject); + } + return jsonArrayOfInputLevels; + } } diff --git a/src/main/java/edu/harvard/iq/dataverse/validation/URLValidator.java b/src/main/java/edu/harvard/iq/dataverse/validation/URLValidator.java index 285f34d3f8c..8fde76d84e1 100644 --- a/src/main/java/edu/harvard/iq/dataverse/validation/URLValidator.java +++ b/src/main/java/edu/harvard/iq/dataverse/validation/URLValidator.java @@ -41,7 +41,7 @@ public static boolean isURLValid(String value) { * @return true when valid (null is also valid) or false */ public static boolean isURLValid(String value, String[] schemes) { - UrlValidator urlValidator = new UrlValidator(schemes); + UrlValidator urlValidator = new UrlValidator(schemes, UrlValidator.ALLOW_2_SLASHES); return value == null || urlValidator.isValid(value); } diff --git a/src/main/java/propertyFiles/Bundle.properties b/src/main/java/propertyFiles/Bundle.properties index 692ab9e0686..0325a47f626 100644 --- a/src/main/java/propertyFiles/Bundle.properties +++ b/src/main/java/propertyFiles/Bundle.properties @@ -15,6 +15,7 @@ embargoed=Embargoed embargoedaccess=Embargoed with Access embargoedandrestricted=Embargoed and then Restricted embargoedandrestrictedaccess=Embargoed and then Restricted with Access +retentionExpired=Retention Period Expired incomplete=Incomplete metadata valid=Valid find=Find @@ -30,6 +31,12 @@ embargoed.wasthrough=Was embargoed until embargoed.willbeuntil=Draft: will be embargoed until embargo.date.invalid=Date is outside the allowed range: ({0} to {1}) embargo.date.required=An embargo date is required +retention.after=Was retained until +retention.isfrom=Is retained until +retention.willbeafter=Draft: will be retained until +retention.enddateinfo=after which it will no longer be accessible +retention.date.invalid=Date is outside the allowed range: ({0} to {1}) +retention.date.required=A retention period end date is required cancel=Cancel ok=OK 
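On the URLValidator change above: commons-validator rejects consecutive slashes in a URL path by default, so values like https://example.org/a//b failed validation before the ALLOW_2_SLASHES option was passed in. A sketch, assuming commons-validator is on the classpath; the URL is illustrative:

import org.apache.commons.validator.routines.UrlValidator;

public class UrlValidatorDemo {
    public static void main(String[] args) {
        String exampleUrl = "https://example.org/path//with/double/slashes";

        UrlValidator strict = new UrlValidator(new String[] {"http", "https"});
        UrlValidator lenient = new UrlValidator(new String[] {"http", "https"},
                UrlValidator.ALLOW_2_SLASHES);

        System.out.println(strict.isValid(exampleUrl));  // false: "//" in the path is rejected
        System.out.println(lenient.isValid(exampleUrl)); // true: double slashes are tolerated
    }
}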
saveChanges=Save Changes @@ -750,8 +757,8 @@ dashboard.card.datamove.dataset.command.error.indexingProblem=Dataset could not notification.email.create.dataverse.subject={0}: Your dataverse has been created notification.email.create.dataset.subject={0}: Dataset "{1}" has been created notification.email.dataset.created.subject={0}: Dataset "{1}" has been created -notification.email.request.file.access.subject={0}: Access has been requested for a restricted file in dataset "{1}" -notification.email.requested.file.access.subject={0}: You have requested access to a restricted file in dataset "{1}" +notification.email.request.file.access.subject={0}: {1} {2} ({3}) requested access to dataset "{4}" +notification.email.requested.file.access.subject={0}: You have requested access to a restricted file in dataset "{1}" notification.email.grant.file.access.subject={0}: You have been granted access to a restricted file notification.email.rejected.file.access.subject={0}: Your request for access to a restricted file has been rejected notification.email.submit.dataset.subject={0}: Dataset "{1}" has been submitted for review @@ -940,6 +947,7 @@ dataverse.default=(Default) dataverse.metadatalanguage.setatdatasetcreation=Chosen at Dataset Creation dataverse.guestbookentry.atdownload=Guestbook Entry At Download dataverse.guestbookentry.atrequest=Guestbook Entry At Access Request +dataverse.updateinputlevels.error.invalidfieldtypename=Invalid dataset field type name: {0} # rolesAndPermissionsFragment.xhtml # advanced.xhtml @@ -952,18 +960,16 @@ advanced.search.header.datasets=Datasets advanced.search.header.files=Files advanced.search.files.name.tip=The name given to identify the file. advanced.search.files.description.tip=A summary describing the file and its variables. -advanced.search.files.persistentId.tip=The persistent identifier for the file. advanced.search.files.persistentId=Data File Persistent ID -advanced.search.files.persistentId.tip=The unique persistent identifier for a data file, which can be a Handle or DOI in Dataverse. +advanced.search.files.persistentId.tip=The unique persistent identifier for the file. advanced.search.files.fileType=File Type advanced.search.files.fileType.tip=The file type, e.g. Comma Separated Values, Plain Text, R, etc. advanced.search.files.variableName=Variable Name advanced.search.files.variableName.tip=The name of the variable's column in the data frame. advanced.search.files.variableLabel=Variable Label advanced.search.files.variableLabel.tip=A short description of the variable. -advanced.search.datasets.persistentId.tip=The persistent identifier for the Dataset. advanced.search.datasets.persistentId=Persistent Identifier -advanced.search.datasets.persistentId.tip=The Dataset's unique persistent identifier, either a DOI or Handle +advanced.search.datasets.persistentId.tip=The persistent identifier for the Dataset. advanced.search.files.fileTags=File Tags advanced.search.files.fileTags.tip=Terms such "Documentation", "Data", or "Code" that have been applied to files. @@ -1027,7 +1033,7 @@ dataverse.theme.inheritCustomization.title=For this dataverse, use the same them dataverse.theme.inheritCustomization.label=Inherit Theme dataverse.theme.inheritCustomization.checkbox=Inherit theme from {0} dataverse.theme.logo=Logo -dataverse.theme.logo.tip=Supported image types are JPG, TIF, or PNG and should be no larger than 500 KB. The maximum display size for an image file in a dataverse's theme is 940 pixels wide by 120 pixels high. 
+dataverse.theme.logo.tip=Supported image types are JPG and PNG, and must be no larger than 500 KB. The maximum display size for an image file in a dataverse's theme is 940 pixels wide by 120 pixels high. dataverse.theme.logo.format=Logo Format dataverse.theme.logo.format.selectTab.square=Square dataverse.theme.logo.format.selectTab.rectangle=Rectangle @@ -1392,6 +1398,8 @@ dataset.guestbookResponse.respondent=Respondent dataset.guestbookResponse.question=Q dataset.guestbookResponse.answer=A dataset.guestbookResponse.noResponse=(No Response) +dataset.guestbookResponse.requestor.id=authenticatedUserId +dataset.guestbookResponse.requestor.identifier=authenticatedUserIdentifier # dataset.xhtml @@ -1478,7 +1486,8 @@ dataset.submit.failure.inReview=You cannot submit this dataset for review becaus dataset.status.failure.notallowed=Status update failed - label not allowed dataset.status.failure.disabled=Status labeling disabled for this dataset dataset.status.failure.isReleased=Latest version of dataset is already released. Status can only be set on draft versions -dataset.rejectMessage=Return this dataset to contributor for modification. The reason for return entered below will be sent by email to the author. +dataset.rejectMessage=Return this dataset to the contributor for modification. +dataset.rejectMessageReason=The reason for return entered below will be sent by email to the author. dataset.rejectMessage.label=Return to Author Reason dataset.rejectWatermark=Please enter a reason for returning this dataset to its author(s). dataset.reject.enterReason.error=Reason for return to author is required. @@ -1665,17 +1674,19 @@ dataset.noSelectedFiles=Please select one or more files. dataset.noSelectedFilesForDownload=Please select a file or files to be downloaded. dataset.noSelectedFilesForRequestAccess=Please select a file or files for access request. dataset.embargoedSelectedFilesForRequestAccess=Embargoed files cannot be accessed. Please select an unembargoed file or files for your access request. -dataset.inValidSelectedFilesForDownload=Restricted Files Selected -dataset.inValidSelectedFilesForDownloadWithEmbargo=Embargoed and/or Restricted Files Selected -dataset.noValidSelectedFilesForDownload=The selected file(s) may not be downloaded because you have not been granted access. -dataset.mixedSelectedFilesForDownload=The restricted file(s) selected may not be downloaded because you have not been granted access. -dataset.mixedSelectedFilesForDownloadWithEmbargo=The embargoed and/or restricted file(s) selected may not be downloaded because you have not been granted access. -dataset.mixedSelectedFilesForTransfer=Some file(s) cannot be transferred. (They are restricted, embargoed, or not Globus accessible.) +dataset.inValidSelectedFilesForDownload=Inaccessible Files Selected +dataset.inValidSelectedFilesForDownloadWithEmbargo=Inaccessible Files Selected +dataset.inValidSelectedFilesForTransferWithEmbargo=Inaccessible Files Selected +dataset.noValidSelectedFilesForDownload=The selected file(s) may not be downloaded because you have not been granted access, the file(s) have a retention period that has expired, or the files can only be transferred via Globus. +dataset.noValidSelectedFilesForTransfer=The selected file(s) may not be transferred because you have not been granted access, the file(s) have a retention period that has expired, or the files are not Globus accessible.
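On the access-request subject template shown above in this Bundle.properties diff: the bundle string is a java.text.MessageFormat pattern, so the argument list MailUtil now builds (root collection name, requestor first and last name, identifier, dataset name) must match the five placeholders. A runnable sketch with illustrative values:

import java.text.MessageFormat;
import java.util.Arrays;
import java.util.List;

public class SubjectFormatDemo {
    public static void main(String[] args) {
        // Mirrors the new five-argument template; the values here are illustrative.
        String template = "{0}: {1} {2} ({3}) requested access to dataset \"{4}\"";
        List<String> arguments = Arrays.asList(
                "Root", "Jane", "Doe", "@jdoe", "My Dataset");

        // The argument list must line up with the placeholder count in the template.
        System.out.println(MessageFormat.format(template, arguments.toArray()));
        // Root: Jane Doe (@jdoe) requested access to dataset "My Dataset"
    }
}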
+dataset.mixedSelectedFilesForDownload=The selected file(s) may not be downloaded because you have not been granted access or the file(s) have a retention period that has expired. +dataset.mixedSelectedFilesForDownloadWithEmbargo=Any embargoed and/or restricted file(s) selected may not be downloaded because you have not been granted access. Some files may have a retention period that has expired. Some files may only be accessible via Globus. +dataset.mixedSelectedFilesForTransfer=Some file(s) cannot be transferred. (They are restricted, embargoed, with an expired retention period, or not Globus accessible.) dataset.inValidSelectedFilesForTransfer=Ineligible Files Selected dataset.downloadUnrestricted=Click Continue to download the files you have access to download. dataset.transferUnrestricted=Click Continue to transfer the eligible files. -dataset.requestAccessToRestrictedFiles=You may request access to the restricted file(s) by clicking the Request Access button. +dataset.requestAccessToRestrictedFiles=You may request access to any restricted file(s) by clicking the Request Access button. dataset.requestAccessToRestrictedFilesWithEmbargo=Embargoed files cannot be accessed during the embargo period. If your selection contains restricted files, you may request access to them by clicking the Request Access button. dataset.privateurl.infoMessageAuthor=Privately share this dataset before it is published: {0} dataset.privateurl.infoMessageReviewer=This unpublished dataset is being privately shared. @@ -1849,6 +1860,18 @@ file.editEmbargoDialog.newReason=Add a reason... file.editEmbargoDialog.newDate=Select the embargo end-date file.editEmbargoDialog.remove=Remove existing embargo(es) on selected files +file.retention=Retention Period +file.editRetention=Edit Retention Period +file.editRetention.add=Add or Change +file.editRetention.delete=Remove +file.editRetentionDialog.tip=Edit the planned retention period for the selected file or files. Once this dataset version is published, you will need to contact an administrator to change the retention period end date or reason of the file or files. \n After the retention period expires the files become unavailable for download. +file.editRetentionDialog.some.tip=One or more of the selected files have already been published. Contact an administrator to change the retention period date or reason of the file or files. +file.editRetentionDialog.none.tip=The selected file or files have already been published. Contact an administrator to change the retention period date or reason of the file or files. +file.editRetentionDialog.partial.tip=Any changes you make here will not be made to these files. +file.editRetentionDialog.reason.tip=Enter a short reason why this retention period exists +file.editRetentionDialog.newReason=Add a reason... +file.editRetentionDialog.newDate=Select the retention period end date +file.editRetentionDialog.remove=Remove existing retention period(s) on selected files file.setThumbnail=Set Thumbnail file.setThumbnail.header=Set Dataset Thumbnail @@ -1861,6 +1884,7 @@ file.advancedIngestOptions=Advanced Ingest Options file.assignedDataverseImage.success={0} has been saved as the thumbnail for this dataset. file.assignedTabFileTags.success=The tags were successfully added for {0}. file.assignedEmbargo.success=An Embargo was successfully added for {0}. +file.assignedRetention.success=A Retention Period was successfully added for {0}.
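On the retention keys above: the check that decides when a file becomes unavailable mirrors FileUtil.isRetentionExpired earlier in this diff; a retention period is expired only when an end date is set and lies strictly in the past. A standalone sketch:

import java.time.LocalDate;

public class RetentionCheckDemo {

    // Mirrors FileUtil.isRetentionExpired(DataFile): expired only when an
    // end date exists and is strictly before today.
    static boolean isExpired(LocalDate dateUnavailable) {
        return dateUnavailable != null && dateUnavailable.isBefore(LocalDate.now());
    }

    public static void main(String[] args) {
        System.out.println(isExpired(null));                         // false: no retention set
        System.out.println(isExpired(LocalDate.now()));              // false: today is not yet past
        System.out.println(isExpired(LocalDate.now().minusDays(1))); // true: already past
    }
}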
file.tabularDataTags=Tabular Data Tags file.tabularDataTags.tip=Select a tag to describe the type(s) of data this is (survey, time series, geospatial, etc). file.spss-savEncoding=Language Encoding @@ -2119,7 +2143,7 @@ dataset.thumbnailsAndWidget.thumbnails.title=Thumbnail dataset.thumbnailsAndWidget.widgets.title=Widgets dataset.thumbnailsAndWidget.thumbnailImage=Thumbnail Image dataset.thumbnailsAndWidget.thumbnailImage.title=The logo or image file you wish to display as the thumbnail of this dataset. -dataset.thumbnailsAndWidget.thumbnailImage.tip=Supported image types are JPG, TIF, or PNG and should be no larger than {0} KB. The maximum display size for an image file as a dataset thumbnail is 48 pixels wide by 48 pixels high. +dataset.thumbnailsAndWidget.thumbnailImage.tip=Supported image types are JPG and PNG, and must be no larger than {0} KB. The maximum display size for an image file as a dataset thumbnail is 140 pixels wide by 140 pixels high. dataset.thumbnailsAndWidget.thumbnailImage.default=Default Icon dataset.thumbnailsAndWidget.thumbnailImage.selectAvailable=Select Available File dataset.thumbnailsAndWidget.thumbnailImage.selectThumbnail=Select Thumbnail @@ -2180,6 +2204,8 @@ file.metadataTab.fileMetadata.type.label=Type file.metadataTab.fileMetadata.description.label=Description file.metadataTab.fileMetadata.publicationDate.label=Publication Date file.metadataTab.fileMetadata.embargoReason.label=Embargo Reason +file.metadataTab.fileMetadata.retentionDate.label=Retention End Date +file.metadataTab.fileMetadata.retentionReason.label=Retention Reason file.metadataTab.fileMetadata.metadataReleaseDate.label=Metadata Release Date file.metadataTab.fileMetadata.depositDate.label=Deposit Date file.metadataTab.fileMetadata.hierarchy.label=File Path @@ -2313,20 +2339,6 @@ citationFrame.banner.closeIcon=Close this message, go to dataset citationFrame.banner.countdownMessage= This message will close in citationFrame.banner.countdownMessage.seconds=seconds -# Friendly AuthenticationProvider names -authenticationProvider.name.builtin=Dataverse -authenticationProvider.name.null=(provider is unknown) -authenticationProvider.name.github=GitHub -authenticationProvider.name.google=Google -authenticationProvider.name.orcid=ORCiD -authenticationProvider.name.orcid-sandbox=ORCiD Sandbox -authenticationProvider.name.shib=Shibboleth -ingest.csv.invalidHeader=Invalid header row. One of the cells is empty. -ingest.csv.lineMismatch=Mismatch between line counts in first and final passes!, {0} found on first pass, but {1} found on second. -ingest.csv.recordMismatch=Reading mismatch, line {0} of the Data file: {1} delimited values expected, {2} found. -ingest.csv.nullStream=Stream can't be null.
-citationFrame.banner.countdownMessage.seconds=seconds - #file-edit-popup-fragment.xhtml #editFilesFragment.xhtml dataset.access.accessHeader=Restrict Access dataset.access.accessHeader.invalid.state=Define Data Access @@ -2669,6 +2681,7 @@ datasets.api.datasize.ioerror=Fatal IO error while trying to determine the total datasets.api.grant.role.not.found.error=Cannot find role named ''{0}'' in dataverse {1} datasets.api.grant.role.cant.create.assignment.error=Cannot create assignment: {0} datasets.api.grant.role.assignee.not.found.error=Assignee not found +datasets.api.grant.role.assignee.has.role.error=User already has this role for this dataset datasets.api.revoke.role.not.found.error="Role assignment {0} not found" datasets.api.revoke.role.success=Role {0} revoked for assignee {1} in {2} datasets.api.privateurl.error.datasetnotfound=Could not find dataset. @@ -2686,6 +2699,17 @@ datasets.api.globusdownloaddisabled=File transfer from Dataverse via Globus is n datasets.api.globusdownloadnotfound=List of files to transfer not found. datasets.api.globusuploaddisabled=File transfer to Dataverse via Globus is not available for this dataset. datasets.api.pidgenerator.notfound=No PID Generator configured for the given id. +datasets.api.thumbnail.fileToLarge=File is larger than maximum size: {0} +datasets.api.thumbnail.nonDatasetFailed=In setNonDatasetFileAsThumbnail could not generate thumbnail from uploaded file. +datasets.api.thumbnail.notDeleted=User wanted to remove the thumbnail but it still has one! +datasets.api.thumbnail.actionNotSupported=Whatever you are trying to do to the dataset thumbnail is not supported. +datasets.api.thumbnail.nonDatasetsFileIsNull=In setNonDatasetFileAsThumbnail uploadedFile was null. +datasets.api.thumbnail.inputStreamToFile.exception=In setNonDatasetFileAsThumbnail caught exception calling inputStreamToFile: {0} +datasets.api.thumbnail.missing=Dataset thumbnail is unexpectedly absent. +datasets.api.thumbnail.basedOnWrongFileId=Dataset thumbnail should be based on file id {0} but instead it is {1} +datasets.api.thumbnail.fileNotFound=Could not find file based on id supplied: {0} +datasets.api.thumbnail.fileNotSupplied=A file was not selected to be the new dataset thumbnail. +datasets.api.thumbnail.noChange=No changes to save. #Dataverses.java dataverses.api.update.default.contributor.role.failure.role.not.found=Role {0} not found. @@ -2698,6 +2722,7 @@ dataverses.api.move.dataverse.failure.not.published=Published dataverse may not dataverses.api.move.dataverse.error.guestbook=Dataset guestbook is not in target dataverse. dataverses.api.move.dataverse.error.template=Dataverse template is not in target dataverse. dataverses.api.move.dataverse.error.featured=Dataverse is featured in current dataverse. +dataverses.api.delete.featured.collections.successful=Featured dataverses have been removed dataverses.api.move.dataverse.error.metadataBlock=Dataverse metadata block is not in target dataverse. dataverses.api.move.dataverse.error.dataverseLink=Dataverse is linked to target dataverse or one of its parents. dataverses.api.move.dataverse.error.datasetLink=Dataset is linked to target dataverse or one of its parents. @@ -2720,6 +2745,8 @@ access.api.fileAccess.failure.noUser=Could not find user to execute command: {0} access.api.requestAccess.failure.commandError=Problem trying to request access on {0}: {1} access.api.requestAccess.failure.requestExists=An access request for this file on your behalf already exists.
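Stepping back to the JsonPrinter changes earlier in this diff: the new jsonDataverseFieldTypeInputLevels helper emits one object per input level carrying the field-type name and the required/include flags. A sketch of the resulting shape using the jakarta.json builders (needs the jakarta.json API plus an implementation such as Parsson at runtime; the field name and flag values are illustrative):

import jakarta.json.Json;
import jakarta.json.JsonArrayBuilder;

public class InputLevelsJsonDemo {
    public static void main(String[] args) {
        // Shape of one entry produced by JsonPrinter.jsonDataverseFieldTypeInputLevels.
        JsonArrayBuilder inputLevels = Json.createArrayBuilder()
                .add(Json.createObjectBuilder()
                        .add("datasetFieldTypeName", "subject")
                        .add("required", true)
                        .add("include", true));

        System.out.println(inputLevels.build());
        // [{"datasetFieldTypeName":"subject","required":true,"include":true}]
    }
}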
access.api.requestAccess.failure.invalidRequest=You may not request access to this file. It may already be available to you. +access.api.requestAccess.failure.retentionExpired=You may not request access to this file. It is not available because its retention period has ended. + access.api.requestAccess.noKey=You must provide a key to request access to a file. access.api.requestAccess.fileNotFound=Could not find datafile with id {0}. access.api.requestAccess.invalidRequest=This file is already available to you for download or you have a pending request @@ -2859,7 +2886,22 @@ passwdVal.passwdReq.lowercase=lowercase passwdVal.passwdReq.letter=letter passwdVal.passwdReq.numeral=numeral passwdVal.passwdReq.special=special +#mydata API (DataRetrieverAPI.java and MyDataFinder.java) dataretrieverAPI.noMsgResultsFound=Sorry, no results were found. +dataretrieverAPI.authentication.required=Requires authentication. Please log in. +dataretrieverAPI.authentication.required.opt=retrieveMyDataAsJsonString. User not found! Shouldn't be using this anyway. +dataretrieverAPI.user.not.found=No user found for: "{0}" +dataretrieverAPI.solr.error=Sorry! There was an error with the search service. +dataretrieverAPI.solr.error.opt=Sorry! There was a Solr error. +myDataFilterParams.error.no.user=Sorry! No user was found! +myDataFilterParams.error.result.no.role=No results. Please select at least one Role. +myDataFilterParams.error.result.no.dvobject=No results. Please select one of Dataverses, Datasets, Files. +myDataFilterParams.error.result.no.publicationStatus=No results. Please select one of {0}. +myDataFinder.error.result.null=Sorry, the authenticated user ID could not be retrieved. +myDataFinder.error.result.no.role=Sorry, you have no assigned roles. +myDataFinder.error.result.role.empty=Sorry, nothing was found for this role: {0} +myDataFinder.error.result.roles.empty=Sorry, nothing was found for these roles: {0} +myDataFinder.error.result.no.dvobject=Sorry, you have no assigned Dataverses, Datasets, or Files. #xlsxfilereader.java xlsxfilereader.ioexception.parse=Could not parse Excel/XLSX spreadsheet. {0} @@ -2916,6 +2958,7 @@ Public=Public Restricted=Restricted EmbargoedThenPublic=Embargoed then Public EmbargoedThenRestricted=Embargoed then Restricted +RetentionPeriodExpired=Retention Period Expired #metadata source - Facet Label Harvested=Harvested @@ -2968,3 +3011,8 @@ publishDatasetCommand.pidNotReserved=Cannot publish dataset because its persiste api.errors.invalidApiToken=Invalid API token. api.ldninbox.citation.alert={0},

    The {1} has just been notified that the {2}, {3}, cites "{6}" in this repository. api.ldninbox.citation.subject={0}: A Dataset Citation has been reported! + +#Info.java +openapi.exception.invalid.format=Invalid format {0}, currently supported formats are YAML and JSON. +openapi.exception=Supported format definition not found. +openapi.exception.unaligned=Unaligned parameters on Headers [{0}] and Request [{1}] diff --git a/src/main/java/propertyFiles/MimeTypeDetectionByFileName.properties b/src/main/java/propertyFiles/MimeTypeDetectionByFileName.properties index 70b0c4e371e..5c1a22bfd5f 100644 --- a/src/main/java/propertyFiles/MimeTypeDetectionByFileName.properties +++ b/src/main/java/propertyFiles/MimeTypeDetectionByFileName.properties @@ -2,3 +2,5 @@ Makefile=text/x-makefile Snakemake=text/x-snakemake Dockerfile=application/x-docker-file Vagrantfile=application/x-vagrant-file +ro-crate-metadata.json=application/ld+json; profile="http://www.w3.org/ns/json-ld#flattened http://www.w3.org/ns/json-ld#compacted https://w3id.org/ro/crate" +ro-crate-metadata.jsonld=application/ld+json; profile="http://www.w3.org/ns/json-ld#flattened http://www.w3.org/ns/json-ld#compacted https://w3id.org/ro/crate" diff --git a/src/main/java/propertyFiles/MimeTypeDisplay.properties b/src/main/java/propertyFiles/MimeTypeDisplay.properties index 295ac226fa1..8e5a251abbf 100644 --- a/src/main/java/propertyFiles/MimeTypeDisplay.properties +++ b/src/main/java/propertyFiles/MimeTypeDisplay.properties @@ -207,6 +207,7 @@ audio/ogg=OGG Audio audio/wav=Waveform Audio audio/x-wav=Waveform Audio audio/x-wave=Waveform Audio +audio/vnd.wave=Waveform Audio # Video video/avi=AVI Video video/x-msvideo=AVI Video @@ -222,5 +223,6 @@ text/xml-graphml=GraphML Network Data application/octet-stream=Unknown application/x-docker-file=Docker Image File application/x-vagrant-file=Vagrant Image File +application/ld+json;\u0020profile\u003d\u0022http\u003a//www.w3.org/ns/json-ld#flattened\u0020http\u003a//www.w3.org/ns/json-ld#compacted\u0020https\u003a//w3id.org/ro/crate\u0022=RO-Crate metadata # Dataverse-specific application/vnd.dataverse.file-package=Dataverse Package diff --git a/src/main/java/propertyFiles/MimeTypeFacets.properties b/src/main/java/propertyFiles/MimeTypeFacets.properties index aaab66f20ae..0dad8daff4c 100644 --- a/src/main/java/propertyFiles/MimeTypeFacets.properties +++ b/src/main/java/propertyFiles/MimeTypeFacets.properties @@ -209,6 +209,7 @@ audio/ogg=Audio audio/wav=Audio audio/x-wav=Audio audio/x-wave=Audio +audio/vnd.wave=Audio # (anything else that looks like audio/* will also be indexed as facet type "Audio") # Video video/avi=Video @@ -224,5 +225,6 @@ video/webm=Video text/xml-graphml=Network Data # Other application/octet-stream=Unknown +application/ld+json;\u0020profile\u003d\u0022http\u003a//www.w3.org/ns/json-ld#flattened\u0020http\u003a//www.w3.org/ns/json-ld#compacted\u0020https\u003a//w3id.org/ro/crate\u0022=Metadata # Dataverse-specific application/vnd.dataverse.file-package=Data diff --git a/src/main/java/propertyFiles/biomedical.properties b/src/main/java/propertyFiles/biomedical.properties index 1bffed2ee03..7392ba823c4 100644 --- a/src/main/java/propertyFiles/biomedical.properties +++ b/src/main/java/propertyFiles/biomedical.properties @@ -96,7 +96,6 @@ controlledvocabulary.studyAssayMeasurementType.targeted_sequencing=targeted sequ controlledvocabulary.studyAssayMeasurementType.transcription_factor_binding_(chip-seq)=transcription factor binding (ChIP-Seq) 
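On the RO-Crate entries above: the MIME type contains spaces, '=', ':' and quotes, any of which would terminate the key in a .properties file, so MimeTypeDisplay.properties and MimeTypeFacets.properties escape them as \u0020, \u003d, \u003a and \u0022. java.util.Properties only decodes these escapes after splitting the key from the value, which is why lookups can then use the raw content-type string. A sketch with a shortened, illustrative profile URL:

import java.io.IOException;
import java.io.StringReader;
import java.util.Properties;

public class EscapedKeyDemo {
    public static void main(String[] args) throws IOException {
        // One line in the spirit of MimeTypeDisplay.properties: spaces, '=', ':' and
        // quotes inside the key are escaped so the parser does not split on them.
        String line =
            "application/ld+json;\\u0020profile\\u003d\\u0022https\\u003a//w3id.org/ro/crate\\u0022=RO-Crate metadata\n";

        Properties props = new Properties();
        props.load(new StringReader(line));

        // After loading, the key is the plain content-type string again:
        String key = "application/ld+json; profile=\"https://w3id.org/ro/crate\"";
        System.out.println(props.getProperty(key)); // RO-Crate metadata
    }
}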
controlledvocabulary.studyAssayMeasurementType.transcription_factor_binding_site_identification=transcription factor binding site identification controlledvocabulary.studyAssayMeasurementType.transcription_profiling=transcription profiling -controlledvocabulary.studyAssayMeasurementType.transcription_profiling=transcription profiling controlledvocabulary.studyAssayMeasurementType.transcription_profiling_(microarray)=transcription profiling (Microarray) controlledvocabulary.studyAssayMeasurementType.transcription_profiling_(rna-seq)=transcription profiling (RNA-Seq) controlledvocabulary.studyAssayMeasurementType.trap_translational_profiling=TRAP translational profiling diff --git a/src/main/java/propertyFiles/citation.properties b/src/main/java/propertyFiles/citation.properties index f35ede79b50..cb864eb78e9 100644 --- a/src/main/java/propertyFiles/citation.properties +++ b/src/main/java/propertyFiles/citation.properties @@ -22,6 +22,7 @@ datasetfieldtype.dsDescriptionValue.title=Text datasetfieldtype.dsDescriptionDate.title=Date datasetfieldtype.subject.title=Subject datasetfieldtype.keyword.title=Keyword +datasetfieldtype.keywordTermURI.title=Term URI datasetfieldtype.keywordValue.title=Term datasetfieldtype.keywordVocabulary.title=Controlled Vocabulary Name datasetfieldtype.keywordVocabularyURI.title=Controlled Vocabulary URL @@ -100,6 +101,7 @@ datasetfieldtype.dsDescriptionValue.description=A summary describing the purpose datasetfieldtype.dsDescriptionDate.description=The date when the description was added to the Dataset. If the Dataset contains more than one description, e.g. the data producer supplied one description and the data repository supplied another, this date is used to distinguish between the descriptions datasetfieldtype.subject.description=The area of study relevant to the Dataset datasetfieldtype.keyword.description=A key term that describes an important aspect of the Dataset and information about any controlled vocabulary used +datasetfieldtype.keywordTermURI.description=A URI that points to the web presence of the Keyword Term datasetfieldtype.keywordValue.description=A key term that describes important aspects of the Dataset datasetfieldtype.keywordVocabulary.description=The controlled vocabulary used for the keyword term (e.g. 
LCSH, MeSH) datasetfieldtype.keywordVocabularyURI.description=The URL where one can access information about the term's controlled vocabulary @@ -178,6 +180,7 @@ datasetfieldtype.dsDescriptionValue.watermark= datasetfieldtype.dsDescriptionDate.watermark=YYYY-MM-DD datasetfieldtype.subject.watermark= datasetfieldtype.keyword.watermark= +datasetfieldtype.keywordTermURI.watermark=https:// datasetfieldtype.keywordValue.watermark= datasetfieldtype.keywordVocabulary.watermark= datasetfieldtype.keywordVocabularyURI.watermark=https:// diff --git a/src/main/java/propertyFiles/customGSD.properties b/src/main/java/propertyFiles/customGSD.properties index 40dc0328053..2375596fe2f 100644 --- a/src/main/java/propertyFiles/customGSD.properties +++ b/src/main/java/propertyFiles/customGSD.properties @@ -161,7 +161,6 @@ controlledvocabulary.gsdFacultyName.mcloskey,_karen=MCloskey, Karen controlledvocabulary.gsdFacultyName.mehrotra,_rahul=Mehrotra, Rahul controlledvocabulary.gsdFacultyName.menchaca,_alejandra=Menchaca, Alejandra controlledvocabulary.gsdFacultyName.menges,_achim=Menges, Achim -controlledvocabulary.gsdFacultyName.menges,_achim=Menges, Achim controlledvocabulary.gsdFacultyName.michalatos,_panagiotis=Michalatos, Panagiotis controlledvocabulary.gsdFacultyName.moe,_kiel=Moe, Kiel controlledvocabulary.gsdFacultyName.molinsky,_jennifer=Molinsky, Jennifer @@ -507,7 +506,6 @@ controlledvocabulary.gsdCourseName.06323:_brownfields_practicum=06323: Brownfiel controlledvocabulary.gsdCourseName.06333:_aquatic_ecology=06333: Aquatic Ecology controlledvocabulary.gsdCourseName.06335:_phytotechnologies=06335: Phytotechnologies controlledvocabulary.gsdCourseName.06337:_changing_natural_and_built_coastal_environments=06337: Changing Natural and Built Coastal Environments -controlledvocabulary.gsdCourseName.06337:_changing_natural_and_built_coastal_environments=06337: Changing Natural and Built Coastal Environments controlledvocabulary.gsdCourseName.06338:_introduction_to_computational_design=06338: Introduction to Computational Design controlledvocabulary.gsdCourseName.06436:_expanded_mechanisms_/_empirical_materialisms=06436: Expanded Mechanisms / Empirical Materialisms controlledvocabulary.gsdCourseName.06450:_high_performance_buildings_and_systems_integration=06450: High Performance Buildings and Systems Integration diff --git a/src/main/resources/db/migration/V6.2.0.1.sql b/src/main/resources/db/migration/V6.2.0.1.sql new file mode 100644 index 00000000000..cb23d589542 --- /dev/null +++ b/src/main/resources/db/migration/V6.2.0.1.sql @@ -0,0 +1 @@ +ALTER TABLE datafile ADD COLUMN IF NOT EXISTS retention_id BIGINT; \ No newline at end of file diff --git a/src/main/webapp/WEB-INF/pretty-config.xml b/src/main/webapp/WEB-INF/pretty-config.xml index ab5f37a1051..5f8f4877af8 100644 --- a/src/main/webapp/WEB-INF/pretty-config.xml +++ b/src/main/webapp/WEB-INF/pretty-config.xml @@ -27,4 +27,9 @@ + + + + + \ No newline at end of file diff --git a/src/main/webapp/dataset-citation.xhtml b/src/main/webapp/dataset-citation.xhtml index b42dd5e563f..346e39dc463 100644 --- a/src/main/webapp/dataset-citation.xhtml +++ b/src/main/webapp/dataset-citation.xhtml @@ -33,13 +33,13 @@ diff --git a/src/main/webapp/dataset-license-terms.xhtml b/src/main/webapp/dataset-license-terms.xhtml index c54d94442ea..6d5b0a5fe4f 100644 --- a/src/main/webapp/dataset-license-terms.xhtml +++ b/src/main/webapp/dataset-license-terms.xhtml @@ -482,7 +482,7 @@
    - #{bundle['file.dataFilesTab.terms.list.guestbook']}   + #{bundle['file.dataFilesTab.terms.list.guestbook']}  
    @@ -524,7 +524,7 @@ + update=":datasetForm:previewGuestbook" oncomplete="PF('viewGuestbook').show();"/>
    @@ -563,7 +563,7 @@ + update=":datasetForm:previewGuestbook" oncomplete="PF('viewGuestbook').show();"/> diff --git a/src/main/webapp/dataset.xhtml b/src/main/webapp/dataset.xhtml index 7b5db98b9dd..936d354e9d7 100644 --- a/src/main/webapp/dataset.xhtml +++ b/src/main/webapp/dataset.xhtml @@ -32,7 +32,7 @@ and !permissionsWrapper.canIssuePublishDatasetCommand(DatasetPage.dataset)}"/> - + @@ -235,20 +236,21 @@ - -
  • - - #{bundle.transfer} - - - - - -
  • -
    - + +
  • + + #{bundle.transfer} + + + + + +
  • +
    + + @@ -592,7 +594,7 @@
    -
    +
    @@ -1003,8 +1005,10 @@ - + + +

    #{bundle['dataset.share.datasetShare.tip']}

    @@ -1053,10 +1057,10 @@
    - +

    #{bundle['dataset.noValidSelectedFilesForDownload']}

    -

    #{DatasetPage.cantDownloadDueToEmbargo ? bundle['dataset.requestAccessToRestrictedFilesWithEmbargo'] : bundle['dataset.requestAccessToRestrictedFiles']}

    +

    #{DatasetPage.cantDownloadDueToEmbargoOrDVAccess ? bundle['dataset.requestAccessToRestrictedFilesWithEmbargo'] : bundle['dataset.requestAccessToRestrictedFiles']}

    + +

    #{bundle['dataset.noValidSelectedFilesForTransfer']}

    + +

    #{DatasetPage.cantDownloadDueToEmbargoOrDVAccess ? bundle['dataset.requestAccessToRestrictedFilesWithEmbargo'] : bundle['dataset.requestAccessToRestrictedFiles']}

    +
    +
    + +
    +

    #{bundle['file.zip.download.exceeds.limit.info']}

    @@ -1085,8 +1100,8 @@
    - -

    #{DatasetPage.cantDownloadDueToEmbargo ? bundle['dataset.mixedSelectedFilesForDownloadWithEmbargo'] : bundle['dataset.mixedSelectedFilesForDownload']}

    + +

    #{DatasetPage.cantDownloadDueToEmbargoOrDVAccess ? bundle['dataset.mixedSelectedFilesForDownloadWithEmbargo'] : bundle['dataset.mixedSelectedFilesForDownload']}

    @@ -1847,19 +1862,23 @@ + +

    - #{bundle['dataset.rejectMessage']} + #{bundle['dataset.rejectMessage']} #{disableReasonField ? '':bundle['dataset.rejectMessageReason']}

    - - -

    - -

    - - + + + +

    + +

    + + +
+
diff --git a/src/main/webapp/editdatafiles.xhtml b/src/main/webapp/editdatafiles.xhtml
index 02acb224827..be78359e02b 100644
--- a/src/main/webapp/editdatafiles.xhtml
+++ b/src/main/webapp/editdatafiles.xhtml
@@ -75,8 +75,10 @@
-
+
+
+
diff --git a/src/main/webapp/file-download-button-fragment.xhtml b/src/main/webapp/file-download-button-fragment.xhtml
index 9c29fd777a1..cd6a6b06523 100644
--- a/src/main/webapp/file-download-button-fragment.xhtml
+++ b/src/main/webapp/file-download-button-fragment.xhtml
@@ -14,16 +14,18 @@
-
+                       value=" #{dataFileServiceBean.isRetentionExpired(fileMetadata) ? bundle['retentionExpired'] : !fileDownloadHelper.isRestrictedOrEmbargoed(fileMetadata) ? bundle['public'] : (!fileDownloadHelper.canDownloadFile(fileMetadata) ? (!dataFileServiceBean.isActivelyEmbargoed(fileMetadata) ? bundle['restricted'] : bundle['embargoed']) : (!dataFileServiceBean.isActivelyEmbargoed(fileMetadata) ? bundle['restrictedaccess'] : bundle['embargoed']))}"
+                       styleClass="#{dataFileServiceBean.isRetentionExpired(fileMetadata) ? 'text-danger' : !fileDownloadHelper.isRestrictedOrEmbargoed(fileMetadata) ? 'text-success' : (!fileDownloadHelper.canDownloadFile(fileMetadata) ? 'text-danger' : 'text-success')}"/>
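The badge expression above is the whole file-status state machine in one line of EL: an expired retention trumps everything, then public, then the restricted/embargoed variants. Unrolled as plain Java for readability; the record and its boolean flags are simplified stand-ins for the helper calls, not actual Dataverse classes:

```java
// Stand-in flags for the EL checks in the fragment above.
public record FileBadgeState(boolean retentionExpired,
                             boolean restrictedOrEmbargoed,
                             boolean canDownload,
                             boolean activelyEmbargoed) {

    /** Mirrors the value= expression: the bundle key shown on the status badge. */
    public String bundleKey() {
        if (retentionExpired) return "retentionExpired";   // new in this PR, checked first
        if (!restrictedOrEmbargoed) return "public";
        if (!canDownload) return activelyEmbargoed ? "embargoed" : "restricted";
        return activelyEmbargoed ? "embargoed" : "restrictedaccess";
    }

    /** Mirrors the styleClass= expression. */
    public String styleClass() {
        if (retentionExpired) return "text-danger";
        if (!restrictedOrEmbargoed) return "text-success";
        return canDownload ? "text-success" : "text-danger";
    }
}
```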
+                          and fileMetadata.dataFile.owner.fileAccessRequest
+                          and !dataFileServiceBean.isActivelyEmbargoed(fileMetadata)
+                          and !dataFileServiceBean.isRetentionExpired(fileMetadata)}">
+
+
+
+
+
+
+
+
+
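Both the badge and the request-access guard above lean on the new `dataFileServiceBean.isRetentionExpired(fileMetadata)` check, and the guard shows that a request button is suppressed once retention has expired (alongside the existing embargo check). A plausible reading of that check, under the assumption that `dateUnavailable` marks the first day the file is no longer served; the service bean's actual comparison is not shown in this diff:

```java
import java.time.LocalDate;

// Simplified stand-in for the service-bean check used in the fragment above.
public final class RetentionChecks {

    public record Retention(LocalDate dateUnavailable) {}

    /** True once the retention end date has been reached: the file may no longer be served. */
    public static boolean isRetentionExpired(Retention retention) {
        return retention != null && !retention.dateUnavailable().isAfter(LocalDate.now());
    }

    private RetentionChecks() {}
}
```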
diff --git a/src/main/webapp/file-edit-popup-fragment.xhtml b/src/main/webapp/file-edit-popup-fragment.xhtml
index ffc4a1fcef7..3b1141816c8 100644
--- a/src/main/webapp/file-edit-popup-fragment.xhtml
+++ b/src/main/webapp/file-edit-popup-fragment.xhtml
@@ -168,7 +168,83 @@
                         PF('blockDatasetForm').hide();" action="#{bean.clearEmbargoPopup()}" update="#{updateElements}" immediate="true"/>
-
+
+
+
+
+
+        #{bundle['file.editRetentionDialog.tip']}
+
+        #{bundle['file.editRetentionDialog.some.tip']} #{bundle['file.editRetentionDialog.partial.tip']}
+
+        #{bundle['file.editRetentionDialog.none.tip']}
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+        #{bundle['file.editRetentionDialog.reason.tip']}
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
 
     #{bundle['file.deleteFileDialog.immediate']}
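This new ~80-line block adds a retention dialog modeled on the embargo dialog directly above it (note the parallel `clearEmbargoPopup()` handler and the `tip`/`some.tip`/`partial.tip`/`none.tip` keys for selections where only some files can still be edited). A sketch of the bean-side shape such a dialog implies; all names here are assumptions patterned on the embargo equivalents:

```java
import java.time.LocalDate;
import java.util.ArrayList;
import java.util.List;

// Illustrative popup backing logic, patterned on the embargo dialog's bean methods.
public class RetentionPopupBean {

    public static class Retention {
        LocalDate dateUnavailable;
        String reason;
    }

    private Retention selectionRetention = new Retention();
    private final List<Retention> selectedFilesRetentions = new ArrayList<>();

    /** Apply the dialog's date and reason to every file selected for editing. */
    public void saveRetention() {
        for (Retention r : selectedFilesRetentions) {
            r.dateUnavailable = selectionRetention.dateUnavailable;
            r.reason = selectionRetention.reason;
        }
    }

    /** Counterpart of clearEmbargoPopup(): reset dialog state on cancel. */
    public void clearRetentionPopup() {
        selectionRetention = new Retention();
    }
}
```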

diff --git a/src/main/webapp/file-info-fragment.xhtml b/src/main/webapp/file-info-fragment.xhtml
index 72fe279fbf8..dca5c4a8cec 100644
--- a/src/main/webapp/file-info-fragment.xhtml
+++ b/src/main/webapp/file-info-fragment.xhtml
@@ -64,6 +64,7 @@
+
diff --git a/src/main/webapp/file.xhtml b/src/main/webapp/file.xhtml
index f69b5c35afd..835764d9cf5 100644
--- a/src/main/webapp/file.xhtml
+++ b/src/main/webapp/file.xhtml
@@ -43,7 +43,7 @@
 #{FilePage.fileMetadata.label}
-
+
@@ -64,22 +64,23 @@
-
+
-
+
+
-
+
-
+
+                           value="#{bundle['file.DatasetVersion']} #{FilePage.fileMetadata.datasetVersion.versionNumber}.#{FilePage.fileMetadata.datasetVersion.minorVersionNumber}"/>
@@ -98,9 +99,9 @@
-
-
@@ -112,19 +113,19 @@
@@ -145,11 +146,11 @@
-
-
-
@@ -161,19 +162,19 @@
@@ -192,7 +193,7 @@
-
+
-
+
-
+
@@ -504,7 +505,7 @@
-
+
@@ -552,11 +553,18 @@ #{FilePage.file.publicationDateFormattedYYYYMMDD}
 #{bundle['file.metadataTab.fileMetadata.publicationDate.label']}
-        #{!(empty FilePage.file.embargo) ? FilePage.embargoPhrase: ''} #{!(empty FilePage.file.embargo) ? FilePage.file.embargo.dateAvailable : FilePage.file.publicationDateFormattedYYYYMMDD}
+        #{(empty FilePage.file.embargo) and !(empty FilePage.file.released) and !(empty FilePage.file.retention) ? ''.concat(FilePage.file.publicationDateFormattedYYYYMMDD).concat('; ') : ''}
+        #{!(empty FilePage.file.embargo) ? ''.concat(FilePage.embargoPhrase).concat(' ').concat(FilePage.file.embargo.dateAvailable) : ''}
+        #{!(empty FilePage.file.embargo) and !(empty FilePage.file.retention) ? '; ' : ''}
+        #{!(empty FilePage.file.retention) ? ''.concat(FilePage.retentionPhrase)
+            .concat(' ').concat(FilePage.file.retention.dateUnavailable)
+            .concat(', ').concat(bundle['retention.enddateinfo'])
+            : ''}
+        #{(empty FilePage.file.embargo and empty FilePage.file.retention) ? FilePage.file.publicationDateFormattedYYYYMMDD : ''}
 #{FilePage.file.embargo.reason}
@@ -566,6 +574,13 @@
+
+
+    #{bundle['file.metadataTab.fileMetadata.retentionReason.label']}
+    #{FilePage.file.retention.reason}
+
+
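The `concat()` chain above assembles one display line from up to three parts — publication date, embargo phrase, and retention phrase — joined by `'; '`, with `bundle['retention.enddateinfo']` appended to explain that the retention date is when the file becomes unavailable. The same branching is easier to audit as a small helper; method and parameter names are simplified stand-ins for the FilePage getters, and the phrase texts are assumptions:

```java
import java.time.LocalDate;
import java.util.StringJoiner;

// Mirrors the EL concat block: "<published>; <embargo phrase>; <retention phrase>".
public final class DateLineFormatter {

    public static String dateLine(LocalDate published, LocalDate embargoEnd,
                                  LocalDate retentionEnd, String endDateInfo) {
        StringJoiner line = new StringJoiner("; ");
        if (embargoEnd == null && published != null && retentionEnd != null) {
            line.add(published.toString());               // released file that also has a retention
        }
        if (embargoEnd != null) {
            line.add("Available from " + embargoEnd);     // embargo phrase (bundle text assumed)
        }
        if (retentionEnd != null) {
            line.add("Available until " + retentionEnd + ", " + endDateInfo);
        }
        if (embargoEnd == null && retentionEnd == null && published != null) {
            line.add(published.toString());               // plain published date
        }
        return line.toString();
    }

    private DateLineFormatter() {}
}
```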
 #{bundle['file.metadataTab.fileMetadata.size.label']}
@@ -623,9 +638,11 @@
+
-
-
+
+
+
 
     #{bundle['file.share.tip']}
 
@@ -634,7 +651,7 @@ #{bundle.close}
-
+
@@ -645,7 +662,7 @@
-
+
@@ -657,7 +674,7 @@
-
+
@@ -678,7 +695,7 @@ #{bundle.close}
-
+
 
     #{bundle['file.compute.fileAccessDenied']}
 
@@ -690,7 +707,7 @@