diff --git a/.github/ISSUE_TEMPLATE/bug_report.md b/.github/ISSUE_TEMPLATE/bug_report.md index 7e6995d76d9..3dba7d52109 100644 --- a/.github/ISSUE_TEMPLATE/bug_report.md +++ b/.github/ISSUE_TEMPLATE/bug_report.md @@ -14,7 +14,7 @@ Thank you for contributing to the Dataverse Project through the creation of a bu WARNING: If this is a security issue it should be reported privately to security@dataverse.org More information on bug issues and contributions can be found in the "Contributing to Dataverse" page: -https://github.com/IQSS/dataverse/blob/develop/CONTRIBUTING.md#bug-reportsissues +https://guides.dataverse.org/en/latest/contributor/index.html Please fill out as much of the template as you can. Start below this comment section. @@ -44,7 +44,6 @@ Start below this comment section. **Any related open or closed issues to this bug report?** - **Screenshots:** No matter the issue, screenshots are always welcome. @@ -53,3 +52,7 @@ To add a screenshot, please use one of the following formats and/or methods desc * https://help.github.com/en/articles/file-attachments-on-issues-and-pull-requests * + + +**Are you thinking about creating a pull request for this issue?** +Help is always welcome, is this bug something you or your organization plan to fix? diff --git a/.github/ISSUE_TEMPLATE/feature_request.md b/.github/ISSUE_TEMPLATE/feature_request.md index d6248537418..7365cb4317c 100644 --- a/.github/ISSUE_TEMPLATE/feature_request.md +++ b/.github/ISSUE_TEMPLATE/feature_request.md @@ -1,7 +1,7 @@ --- name: Feature request about: Suggest an idea or new feature for the Dataverse software! -title: 'Feature Request/Idea:' +title: 'Feature Request:' labels: 'Type: Feature' assignees: '' @@ -11,7 +11,7 @@ assignees: '' Thank you for contributing to the Dataverse Project through the creation of a feature request! 
More information on ideas/feature requests and contributions can be found in the "Contributing to Dataverse" page: -https://github.com/IQSS/dataverse/blob/develop/CONTRIBUTING.md#ideasfeature-requests +https://guides.dataverse.org/en/latest/contributor/index.html Please fill out as much of the template as you can. Start below this comment section. @@ -34,3 +34,6 @@ Start below this comment section. **Any open or closed issues related to this feature request?** + +**Are you thinking about creating a pull request for this feature?** +Help is always welcome, is this feature something you or your organization plan to implement? diff --git a/.github/ISSUE_TEMPLATE/idea_proposal.md b/.github/ISSUE_TEMPLATE/idea_proposal.md new file mode 100644 index 00000000000..8cb6c7bfafe --- /dev/null +++ b/.github/ISSUE_TEMPLATE/idea_proposal.md @@ -0,0 +1,40 @@ +--- +name: Idea proposal +about: Propose a new idea for discussion to improve the Dataverse software! +title: 'Suggestion:' +labels: 'Type: Suggestion' +assignees: '' + +--- + + + +**Overview of the Suggestion** + + +**What kind of user is the suggestion intended for?** +(Example user roles: API User, Curator, Depositor, Guest, Superuser, Sysadmin) + + +**What inspired this idea?** + + +**What existing behavior do you want changed?** + + +**Any brand new behavior you want to add to Dataverse?** + + +**Any open or closed issues related to this suggestion?** + + +**Are you thinking about creating a pull request for this issue?** +Help is always welcome, is this idea something you or your organization plan to implement? diff --git a/doc/release-notes/8796-fix-license-display-indexing.md b/doc/release-notes/8796-fix-license-display-indexing.md new file mode 100644 index 00000000000..ebded088875 --- /dev/null +++ b/doc/release-notes/8796-fix-license-display-indexing.md @@ -0,0 +1 @@ +When datasets have neither a license nor custom terms of use the display will indicate this. 
Also, these datasets will no longer be indexed as having custom terms. diff --git a/doc/release-notes/8945-ignore-shapefiles-under-hidden-directories-in-zip.md b/doc/release-notes/8945-ignore-shapefiles-under-hidden-directories-in-zip.md new file mode 100644 index 00000000000..145ae5f6d55 --- /dev/null +++ b/doc/release-notes/8945-ignore-shapefiles-under-hidden-directories-in-zip.md @@ -0,0 +1,5 @@ +### Shapefile Handling will now ignore files under a hidden directory within the zip file + +Directories that are hidden will be ignored when determining if a zip file contains Shapefile files. + +For more information, see #8945. \ No newline at end of file diff --git a/doc/release-notes/9081-CC0-waiver-turned-into-custom-license.md b/doc/release-notes/9081-CC0-waiver-turned-into-custom-license.md new file mode 100644 index 00000000000..042b2ec39fd --- /dev/null +++ b/doc/release-notes/9081-CC0-waiver-turned-into-custom-license.md @@ -0,0 +1,6 @@ +In an earlier Dataverse release, Datasets with only 'CC0 Waiver' in termsofuse field were converted to 'Custom License' instead of CC0 1.0 licenses during an automated process. A new process was added to correct this. Only Datasets with no terms other than the one created by the previous process will be modified. +- The existing 'Terms of Use' must be equal to 'This dataset is made available under a Creative Commons CC0 license with the following additional/modified terms and conditions: CC0 Waiver' +- The following terms fields must be empty: Confidentiality Declaration, Special Permissions, Restrictions, Citation Requirements, Depositor Requirements, Conditions, and Disclaimer. +- The License ID must not be assigned. + +This process will set the License ID to that of the CC0 1.0 license and remove the contents of termsofuse field. 
diff --git a/doc/release-notes/api-blocking-filter-json.md b/doc/release-notes/api-blocking-filter-json.md new file mode 100644 index 00000000000..337ff82dd8b --- /dev/null +++ b/doc/release-notes/api-blocking-filter-json.md @@ -0,0 +1,3 @@ +* When any `ApiBlockingFilter` policy applies to a request, the JSON in the body of the error response is now valid JSON. + In case an API client did any special processing to allow it to parse the body, that is no longer necessary. + The status code of such responses has not changed. diff --git a/doc/sphinx-guides/source/developers/making-releases.rst b/doc/sphinx-guides/source/developers/making-releases.rst index e7a59910e56..e436ba9e9d2 100755 --- a/doc/sphinx-guides/source/developers/making-releases.rst +++ b/doc/sphinx-guides/source/developers/making-releases.rst @@ -8,12 +8,12 @@ Making Releases Introduction ------------ -Note: See :doc:`making-library-releases` for how to publish our libraries to Maven Central. - -See :doc:`version-control` for background on our branching strategy. +This document is about releasing the main Dataverse app (https://github.com/IQSS/dataverse). See :doc:`making-library-releases` for how to release our various libraries. Other projects have their own release documentation. The steps below describe making both regular releases and hotfix releases. +Below you'll see branches like "develop" and "master" mentioned. For more on our branching strategy, see :doc:`version-control`. + .. _write-release-notes: Write Release Notes @@ -24,10 +24,10 @@ Developers express the need for an addition to release notes by creating a "rele The task at or near release time is to collect these snippets into a single file. - Create an issue in GitHub to track the work of creating release notes for the upcoming release. -- Create a branch, add a .md file for the release (ex. 
5.10.1 Release Notes) in ``/doc/release-notes`` and write the release notes, making sure to pull content from the release note snippets mentioned above. -- Delete the release note snippets as the content is added to the main release notes file. -- Include instructions to describe the steps required to upgrade the application from the previous version. These must be customized for release numbers and special circumstances such as changes to metadata blocks and infrastructure. -- Take the release notes .md through the regular Code Review and QA process. +- Create a branch, add a .md file for the release (ex. 5.10.1 Release Notes) in ``/doc/release-notes`` and write the release notes, making sure to pull content from the release note snippets mentioned above. Snippets may not include any issue number or pull request number in the text so be sure to copy the number from the filename of the snippet into the final release note. +- Delete (``git rm``) the release note snippets as the content is added to the main release notes file. +- Include instructions describing the steps required to upgrade the application from the previous version. These must be customized for release numbers and special circumstances such as changes to metadata blocks and infrastructure. +- Take the release notes .md through the regular Code Review and QA process. That is, make a pull request. Create a GitHub Issue and Branch for the Release ------------------------------------------------ @@ -70,6 +70,13 @@ Once important tests have passed (compile, unit tests, etc.), merge the pull req If this is a hotfix release, skip this whole "merge develop to master" step (the "develop" branch is not involved until later). +Add Milestone to Pull Requests and Issues +----------------------------------------- + +Often someone is making sure that the proper milestone (e.g. 5.10.1) is being applied to pull requests and issues, but sometimes this falls between the cracks. 
+ +Check for merged pull requests that have no milestone by going to https://github.com/IQSS/dataverse/pulls and entering ``is:pr is:merged no:milestone`` as a query. If you find any, add the milestone to the pull request and any issues it closes. This includes the "merge develop into master" pull request above. + (Optional) Test Docker Images ----------------------------- @@ -106,7 +113,7 @@ Create a Draft Release on GitHub Go to https://github.com/IQSS/dataverse/releases/new to start creating a draft release. - Under "Choose a tag" you will be creating a new tag. Have it start with a "v" such as ``v5.10.1``. Click "Create new tag on publish". -- Under "Target" go to "Recent Commits" and select the merge commit from when you merged ``develop`` into ``master`` above. This commit will appear in ``/api/info/version`` from a running installation. +- Under "Target", choose "master". This commit will appear in ``/api/info/version`` from a running installation. - Under "Release title" use the same name as the tag such as ``v5.10.1``. - In the description, copy and paste the content from the release notes .md file created in the "Write Release Notes" steps above. - Click "Save draft" because we do not want to publish the release yet. @@ -153,6 +160,7 @@ ssh into the dataverse-internal server and do the following: - ``mkdir target`` - ``cp /tmp/dataverse-5.10.1.war target`` - ``cd scripts/installer`` +- ``make clean`` - ``make`` A zip file called ``dvinstall.zip`` should be produced. @@ -175,7 +183,7 @@ Upload the following artifacts to the draft release you created: Deploy on Demo -------------- -Now that you have the release ready to go, give it one final test by deploying it on https://demo.dataverse.org . Note that this is also an opportunity to re-test the upgrade checklist as described in the release note. +Now that you have the release ready to go, consider giving it one final test by deploying it on https://demo.dataverse.org. 
Note that this is also an opportunity to re-test the upgrade checklist as described in the release note. Publish the Release ------------------- @@ -194,7 +202,7 @@ ssh into the guides server and update the symlink to point to the latest release cd /var/www/html/en ln -s 5.10.1 latest - +This step could be done before publishing the release if you'd like to double check that links in the release notes work. Close Milestone on GitHub and Create a New One ---------------------------------------------- diff --git a/doc/sphinx-guides/source/installation/config.rst b/doc/sphinx-guides/source/installation/config.rst index ba92a4180ae..0038c188ea5 100644 --- a/doc/sphinx-guides/source/installation/config.rst +++ b/doc/sphinx-guides/source/installation/config.rst @@ -1292,8 +1292,8 @@ Reported Working S3-Compatible Storage Note that for direct uploads and downloads, Dataverse redirects to the proxy-url but presigns the urls based on the ``dataverse.files..custom-endpoint-url``. Also, note that if you choose to enable ``dataverse.files..download-redirect`` the S3 URLs expire after 60 minutes by default. You can change that minute value to reflect a timeout value that’s more appropriate by using ``dataverse.files..url-expiration-minutes``. `Surf Object Store v2019-10-30 `_ - Set ``dataverse.files..payload-signing=true`` and ``dataverse.files..chunked-encoding=false`` to use Surf Object - Store. + Set ``dataverse.files..payload-signing=true``, ``dataverse.files..chunked-encoding=false`` and ``dataverse.files..path-style-request=true`` to use Surf Object + Store. You will need the Swift client (documented at ) to create the access key and secret key for the S3 interface. Note that the ``dataverse.files..proxy-url`` setting can be used in installations where the object store is proxied, but it should be considered an advanced option that will require significant expertise to properly configure. 
For direct uploads and downloads, Dataverse redirects to the proxy-url but presigns the urls based on the ``dataverse.files..custom-endpoint-url``. diff --git a/src/main/java/edu/harvard/iq/dataverse/DatasetField.java b/src/main/java/edu/harvard/iq/dataverse/DatasetField.java index 31e7758c7d5..4bf6c00f199 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DatasetField.java +++ b/src/main/java/edu/harvard/iq/dataverse/DatasetField.java @@ -54,15 +54,18 @@ public int compare(DatasetField o1, DatasetField o2) { o2.getDatasetFieldType().getDisplayOrder() ); }}; - public static DatasetField createNewEmptyDatasetField(DatasetFieldType dsfType, Object dsv) { + public static DatasetField createNewEmptyDatasetField(DatasetFieldType dsfType, DatasetVersion dsv) { DatasetField dsfv = createNewEmptyDatasetField(dsfType); - //TODO - a better way to handle this? - if (dsv.getClass().getName().equals("edu.harvard.iq.dataverse.DatasetVersion")){ - dsfv.setDatasetVersion((DatasetVersion)dsv); - } else { - dsfv.setTemplate((Template)dsv); - } + dsfv.setDatasetVersion(dsv); + + return dsfv; + } + + public static DatasetField createNewEmptyDatasetField(DatasetFieldType dsfType, Template dsv) { + + DatasetField dsfv = createNewEmptyDatasetField(dsfType); + dsfv.setTemplate(dsv); return dsfv; } @@ -545,9 +548,12 @@ public String toString() { return "edu.harvard.iq.dataverse.DatasetField[ id=" + id + " ]"; } - public DatasetField copy(Object version) { + public DatasetField copy(DatasetVersion version) { return copy(version, null); } + public DatasetField copy(Template template) { + return copy(template, null); + } // originally this was an overloaded method, but we renamed it to get around an issue with Bean Validation // (that looked t overloaded methods, when it meant to look at overriden methods @@ -555,15 +561,15 @@ public DatasetField copyChild(DatasetFieldCompoundValue parent) { return copy(null, parent); } - private DatasetField copy(Object version, DatasetFieldCompoundValue 
parent) { + private DatasetField copy(Object versionOrTemplate, DatasetFieldCompoundValue parent) { DatasetField dsf = new DatasetField(); dsf.setDatasetFieldType(datasetFieldType); - if (version != null) { - if (version.getClass().getName().equals("edu.harvard.iq.dataverse.DatasetVersion")) { - dsf.setDatasetVersion((DatasetVersion) version); + if (versionOrTemplate != null) { + if (versionOrTemplate instanceof DatasetVersion) { + dsf.setDatasetVersion((DatasetVersion) versionOrTemplate); } else { - dsf.setTemplate((Template) version); + dsf.setTemplate((Template) versionOrTemplate); } } diff --git a/src/main/java/edu/harvard/iq/dataverse/DatasetVersionServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/DatasetVersionServiceBean.java index ab23fa779d5..f99b3ee1b53 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DatasetVersionServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/DatasetVersionServiceBean.java @@ -315,6 +315,23 @@ private void msg(String s){ //logger.fine(s); } + public boolean isVersionDefaultCustomTerms(DatasetVersion datasetVersion) { + + if (datasetVersion.getId() != null) { + try { + TermsOfUseAndAccess toua = (TermsOfUseAndAccess) em.createNamedQuery("TermsOfUseAndAccess.findByDatasetVersionIdAndDefaultTerms") + .setParameter("id", datasetVersion.getId()).setParameter("defaultTerms", TermsOfUseAndAccess.DEFAULT_NOTERMS).getSingleResult(); + if (toua != null && datasetVersion.getTermsOfUseAndAccess().getLicense() == null) { + return true; + } + + } catch (NoResultException e) { + return false; + } + } + return false; + } + /** * Does the version identifier in the URL ask for a "DRAFT"? 
* diff --git a/src/main/java/edu/harvard/iq/dataverse/TermsOfUseAndAccess.java b/src/main/java/edu/harvard/iq/dataverse/TermsOfUseAndAccess.java index ee865770dbe..9e48c6c0165 100644 --- a/src/main/java/edu/harvard/iq/dataverse/TermsOfUseAndAccess.java +++ b/src/main/java/edu/harvard/iq/dataverse/TermsOfUseAndAccess.java @@ -17,6 +17,28 @@ import jakarta.persistence.Transient; import edu.harvard.iq.dataverse.license.License; +import jakarta.persistence.NamedQueries; +import jakarta.persistence.NamedQuery; + +@NamedQueries({ + // TermsOfUseAndAccess.findByDatasetVersionIdAndDefaultTerms + // is used to determine if the dataset terms were set by the multi license support update + // as part of the 5.10 release. + + @NamedQuery(name = "TermsOfUseAndAccess.findByDatasetVersionIdAndDefaultTerms", + query = "SELECT o FROM TermsOfUseAndAccess o, DatasetVersion dv WHERE " + + "dv.id =:id " + + "AND dv.termsOfUseAndAccess.id = o.id " + + "AND o.termsOfUse =:defaultTerms " + + "AND o.confidentialityDeclaration IS null " + + "AND o.specialPermissions IS null " + + "AND o.restrictions IS null " + + "AND o.citationRequirements IS null " + + "AND o.depositorRequirements IS null " + + "AND o.conditions IS null " + + "AND o.disclaimer IS null " + ) +}) /** * @@ -26,6 +48,8 @@ @Entity @ValidateTermsOfUseAndAccess public class TermsOfUseAndAccess implements Serializable { + + public static final String DEFAULT_NOTERMS = "This dataset is made available without information on how it can be used. 
You should communicate with the Contact(s) specified before use."; @Id @GeneratedValue(strategy = GenerationType.IDENTITY) diff --git a/src/main/java/edu/harvard/iq/dataverse/api/ApiBlockingFilter.java b/src/main/java/edu/harvard/iq/dataverse/api/ApiBlockingFilter.java index 0e5b8226310..b51b1aa2612 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/ApiBlockingFilter.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/ApiBlockingFilter.java @@ -49,7 +49,7 @@ public void doBlock(ServletRequest sr, ServletResponse sr1, FilterChain fc) thro @Override public void doBlock(ServletRequest sr, ServletResponse sr1, FilterChain fc) throws IOException, ServletException { HttpServletResponse httpResponse = (HttpServletResponse) sr1; - httpResponse.getWriter().println("{ status:\"error\", message:\"Endpoint blocked. Please contact the dataverse administrator\"}" ); + httpResponse.getWriter().println("{ \"status\":\"error\", \"message\":\"Endpoint blocked. Please contact the dataverse administrator\"}" ); httpResponse.setStatus(HttpServletResponse.SC_SERVICE_UNAVAILABLE); httpResponse.setContentType("application/json"); } @@ -67,7 +67,7 @@ public void doBlock(ServletRequest sr, ServletResponse sr1, FilterChain fc) thro fc.doFilter(sr, sr1); } else { HttpServletResponse httpResponse = (HttpServletResponse) sr1; - httpResponse.getWriter().println("{ status:\"error\", message:\"Endpoint available from localhost only. Please contact the dataverse administrator\"}" ); + httpResponse.getWriter().println("{ \"status\":\"error\", \"message\":\"Endpoint available from localhost only. 
Please contact the dataverse administrator\"}" ); httpResponse.setStatus(HttpServletResponse.SC_SERVICE_UNAVAILABLE); httpResponse.setContentType("application/json"); } @@ -102,7 +102,7 @@ public void doBlock(ServletRequest sr, ServletResponse sr1, FilterChain fc) thro if ( block ) { HttpServletResponse httpResponse = (HttpServletResponse) sr1; - httpResponse.getWriter().println("{ status:\"error\", message:\"Endpoint available using API key only. Please contact the dataverse administrator\"}" ); + httpResponse.getWriter().println("{ \"status\":\"error\", \"message\":\"Endpoint available using API key only. Please contact the dataverse administrator\"}" ); httpResponse.setStatus(HttpServletResponse.SC_SERVICE_UNAVAILABLE); httpResponse.setContentType("application/json"); } else { diff --git a/src/main/java/edu/harvard/iq/dataverse/dataset/DatasetUtil.java b/src/main/java/edu/harvard/iq/dataverse/dataset/DatasetUtil.java index 98bd26b51d6..060b8694e9c 100644 --- a/src/main/java/edu/harvard/iq/dataverse/dataset/DatasetUtil.java +++ b/src/main/java/edu/harvard/iq/dataverse/dataset/DatasetUtil.java @@ -653,6 +653,15 @@ public static License getLicense(DatasetVersion dsv) { } public static String getLicenseName(DatasetVersion dsv) { + + DatasetVersionServiceBean datasetVersionService = CDI.current().select(DatasetVersionServiceBean.class).get(); + /* + Special case where there are default custom terms indicating that no actual choice has been made... 
+ */ + if (datasetVersionService.isVersionDefaultCustomTerms(dsv)) { + return BundleUtil.getStringFromBundle("license.none.chosen"); + } + License license = DatasetUtil.getLicense(dsv); return getLocalizedLicenseName(license); } @@ -683,7 +692,16 @@ public static String getLicenseIcon(DatasetVersion dsv) { } public static String getLicenseDescription(DatasetVersion dsv) { + + DatasetVersionServiceBean datasetVersionService = CDI.current().select(DatasetVersionServiceBean.class).get(); + /* + Special case where there are default custom terms indicating that no actual choice has been made... + */ + if (datasetVersionService.isVersionDefaultCustomTerms(dsv)) { + return BundleUtil.getStringFromBundle("license.none.chosen.description"); + } License license = DatasetUtil.getLicense(dsv); + return license != null ? getLocalizedLicenseDetails(license,"DESCRIPTION") : BundleUtil.getStringFromBundle("license.custom.description"); } diff --git a/src/main/java/edu/harvard/iq/dataverse/search/IndexServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/search/IndexServiceBean.java index 26b42734d19..c91eb0bfa7c 100644 --- a/src/main/java/edu/harvard/iq/dataverse/search/IndexServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/search/IndexServiceBean.java @@ -100,6 +100,8 @@ public class IndexServiceBean { @EJB DatasetServiceBean datasetService; @EJB + DatasetVersionServiceBean datasetVersionService; + @EJB BuiltinUserServiceBean dataverseUserServiceBean; @EJB PermissionServiceBean permissionService; @@ -1840,9 +1842,19 @@ private List getDataversePathsFromSegments(List dataversePathSeg private void addLicenseToSolrDoc(SolrInputDocument solrInputDocument, DatasetVersion datasetVersion) { if (datasetVersion != null && datasetVersion.getTermsOfUseAndAccess() != null) { + //test to see if the terms of use are the default set in 5.10 - if so and there's no license then don't add license to solr doc. 
+ //fixes 10513 + if (datasetVersionService.isVersionDefaultCustomTerms(datasetVersion)){ + return; + } + String licenseName = "Custom Terms"; - if(datasetVersion.getTermsOfUseAndAccess().getLicense() != null) { + if (datasetVersion.getTermsOfUseAndAccess().getLicense() != null) { licenseName = datasetVersion.getTermsOfUseAndAccess().getLicense().getName(); + } else if (datasetVersion.getTermsOfUseAndAccess().getTermsOfUse() == null) { + // this fixes #10513 for datasets harvested in oai_dc - these + // have neither the license id, nor any actual custom terms + return; } solrInputDocument.addField(SearchFields.DATASET_LICENSE, licenseName); } diff --git a/src/main/java/edu/harvard/iq/dataverse/util/ShapefileHandler.java b/src/main/java/edu/harvard/iq/dataverse/util/ShapefileHandler.java index 9786fda4217..f1440cc3c02 100644 --- a/src/main/java/edu/harvard/iq/dataverse/util/ShapefileHandler.java +++ b/src/main/java/edu/harvard/iq/dataverse/util/ShapefileHandler.java @@ -15,6 +15,7 @@ import java.util.*; import java.nio.file.Files; +import java.nio.file.Paths; import static java.nio.file.StandardCopyOption.REPLACE_EXISTING; import java.util.logging.Level; import java.util.logging.Logger; @@ -695,33 +696,42 @@ private boolean examineZipfile(FileInputStream zip_file_stream){ this.filesListInDir.clear(); this.filesizeHash.clear(); this.fileGroups.clear(); - - try{ + + try{ ZipInputStream zipStream = new ZipInputStream(zip_file_stream); ZipEntry entry; - + List hiddenDirectories = new ArrayList<>(); while((entry = zipStream.getNextEntry())!=null){ + String zentryFileName = entry.getName(); + boolean isDirectory = entry.isDirectory(); + + Boolean skip = isDirectory || this.isFileToSkip(zentryFileName); + + // check if path is hidden + if (isDirectory && Files.isHidden(Paths.get(zentryFileName))) { + hiddenDirectories.add(zentryFileName); + logger.fine("Ignoring files under hidden directory: " + zentryFileName); + } else { + // check if the path was already found to be 
hidden + for (String hidden : hiddenDirectories) { + if (zentryFileName.startsWith(hidden)) { + skip = true; + break; + } + } + } - String zentryFileName = entry.getName(); - //msg("zip entry: " + entry.getName()); - // Skip files or folders starting with __ - if (this.isFileToSkip(zentryFileName)){ - continue; - } - - if (entry.isDirectory()) { - //String dirpath = outputFolder + "/" + zentryFileName; - //createDirectory(dirpath); - continue; + if (skip) { + continue; } - + String unzipFileName = this.getFileBasename(zentryFileName); if (unzipFileName==null){ logger.warning("Zip Entry Basename is an empty string: " + zentryFileName); continue; } String unzipFolderName = this.getFolderName(zentryFileName); - + String unzipFilePath = unzipFileName; if (unzipFolderName != null) { unzipFilePath = unzipFolderName + "/" + unzipFileName; diff --git a/src/main/java/propertyFiles/Bundle.properties b/src/main/java/propertyFiles/Bundle.properties index 0325a47f626..b645276ceaf 100644 --- a/src/main/java/propertyFiles/Bundle.properties +++ b/src/main/java/propertyFiles/Bundle.properties @@ -1445,6 +1445,8 @@ dataset.exportBtn.itemLabel.json=JSON dataset.exportBtn.itemLabel.oai_ore=OAI_ORE dataset.exportBtn.itemLabel.dataciteOpenAIRE=OpenAIRE dataset.exportBtn.itemLabel.html=DDI HTML Codebook +license.none.chosen=No license or custom terms chosen +license.none.chosen.description=No custom terms have been entered for this dataset license.custom=Custom Dataset Terms license.custom.description=Custom terms specific to this dataset metrics.title=Metrics diff --git a/src/main/resources/db/migration/V6.3.0.1.sql b/src/main/resources/db/migration/V6.3.0.1.sql new file mode 100644 index 00000000000..fd9cd823868 --- /dev/null +++ b/src/main/resources/db/migration/V6.3.0.1.sql @@ -0,0 +1,10 @@ +UPDATE termsofuseandaccess SET license_id = (SELECT license.id FROM license WHERE license.name = 'CC0 1.0'), termsofuse = NULL +WHERE termsofuse = 'This dataset is made available under a Creative 
Commons CC0 license with the following additional/modified terms and conditions: CC0 Waiver' + AND license_id IS null + AND confidentialitydeclaration IS null + AND specialpermissions IS null + AND restrictions IS null + AND citationrequirements IS null + AND depositorrequirements IS null + AND conditions IS null + AND disclaimer IS null; diff --git a/src/main/webapp/dataset-license-terms.xhtml b/src/main/webapp/dataset-license-terms.xhtml index 6d5b0a5fe4f..42a4c3ec5f5 100644 --- a/src/main/webapp/dataset-license-terms.xhtml +++ b/src/main/webapp/dataset-license-terms.xhtml @@ -46,8 +46,8 @@

- - + +

diff --git a/src/main/webapp/datasetLicenseInfoFragment.xhtml b/src/main/webapp/datasetLicenseInfoFragment.xhtml index 257f6b3b12f..a1bd604aa6a 100644 --- a/src/main/webapp/datasetLicenseInfoFragment.xhtml +++ b/src/main/webapp/datasetLicenseInfoFragment.xhtml @@ -22,7 +22,7 @@ xmlns:jsf="http://xmlns.jcp.org/jsf"> target="_blank">#{DatasetUtil:getLicenseName(DatasetPage.workingVersion)} -

diff --git a/src/main/webapp/guestbook-terms-popup-fragment.xhtml b/src/main/webapp/guestbook-terms-popup-fragment.xhtml index 5948047d845..d53c4bf4709 100644 --- a/src/main/webapp/guestbook-terms-popup-fragment.xhtml +++ b/src/main/webapp/guestbook-terms-popup-fragment.xhtml @@ -48,7 +48,7 @@ - diff --git a/src/test/java/edu/harvard/iq/dataverse/DatasetFieldTest.java b/src/test/java/edu/harvard/iq/dataverse/DatasetFieldTest.java new file mode 100644 index 00000000000..97999af3244 --- /dev/null +++ b/src/test/java/edu/harvard/iq/dataverse/DatasetFieldTest.java @@ -0,0 +1,64 @@ +package edu.harvard.iq.dataverse; + +import edu.harvard.iq.dataverse.DatasetFieldType.FieldType; +import edu.harvard.iq.dataverse.mocks.MocksFactory; +import org.junit.jupiter.api.Test; + +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertNotEquals; +import static org.junit.jupiter.api.Assertions.assertTrue; +import static org.junit.jupiter.api.Assertions.assertNull; + + +class DatasetFieldTest { + @Test + void testCreateNewEmptyDatasetField_withEmptyTemplate() { + Template template = new Template(); + + DatasetField field = DatasetField.createNewEmptyDatasetField(new DatasetFieldType("subject", FieldType.TEXT, false), template); + assertTrue(field.getTemplate() == template); + assertTrue(template.getDatasetFields().isEmpty()); + } + + @Test + void testNotEqualDatasetFields() { + DatasetFieldType type1 = new DatasetFieldType("subject", FieldType.TEXT, false); + Template template1 = new Template(); + DatasetField field1 = DatasetField.createNewEmptyDatasetField(type1, template1); + field1.setId(MocksFactory.nextId()); + DatasetFieldType type2 = new DatasetFieldType("subject", FieldType.TEXT, false); + Template template2 = new Template(); + DatasetField field2 = DatasetField.createNewEmptyDatasetField(type2, template2); + field2.setId(MocksFactory.nextId()); + + assertNotEquals(field1, field2); + assertNotEquals(field1, 
template2); + } + + @Test + void testEqualDatasetFields() { + DatasetField field1 = DatasetField.createNewEmptyDatasetField(new DatasetFieldType("subject", FieldType.TEXT, false), new Template()); + field1.setId(100L); + DatasetField field2 = DatasetField.createNewEmptyDatasetField(new DatasetFieldType("subject", FieldType.TEXT, false), new Template()); + + // Fields are not equal before both have IDs set + assertNotEquals(field1, field2); + + field2.setId(100L); + + assertEquals(field1, field2); + } + + @Test + void testCopyDatasetFields() { + DatasetField field1 = DatasetField.createNewEmptyDatasetField(new DatasetFieldType("subject", FieldType.TEXT, false), new Template()); + field1.setId(100L); + DatasetField field2 = field1.copy(field1.getTemplate()); + + assertNull(field2.getId()); + // A copy of a field should not be equal + assertNotEquals(field1, field2); + + assertEquals(field2.getDatasetFieldType(), field1.getDatasetFieldType()); + } +} \ No newline at end of file diff --git a/src/test/java/edu/harvard/iq/dataverse/search/IndexServiceBeanTest.java b/src/test/java/edu/harvard/iq/dataverse/search/IndexServiceBeanTest.java index 92b06e5936f..c062f63e264 100644 --- a/src/test/java/edu/harvard/iq/dataverse/search/IndexServiceBeanTest.java +++ b/src/test/java/edu/harvard/iq/dataverse/search/IndexServiceBeanTest.java @@ -51,6 +51,7 @@ public void setUp() { indexService.settingsService = Mockito.mock(SettingsServiceBean.class); indexService.dataverseService = Mockito.mock(DataverseServiceBean.class); indexService.datasetFieldService = Mockito.mock(DatasetFieldServiceBean.class); + indexService.datasetVersionService = Mockito.mock(DatasetVersionServiceBean.class); BrandingUtil.injectServices(indexService.dataverseService, indexService.settingsService); Mockito.when(indexService.dataverseService.findRootDataverse()).thenReturn(dataverse); diff --git a/src/test/java/edu/harvard/iq/dataverse/util/shapefile/ShapefileHandlerTest.java 
b/src/test/java/edu/harvard/iq/dataverse/util/shapefile/ShapefileHandlerTest.java index f0e538616b2..3c5b4797b0a 100644 --- a/src/test/java/edu/harvard/iq/dataverse/util/shapefile/ShapefileHandlerTest.java +++ b/src/test/java/edu/harvard/iq/dataverse/util/shapefile/ShapefileHandlerTest.java @@ -282,8 +282,14 @@ public void testZippedShapefileWithExtraFiles() throws IOException{ msg("Passed!"); } - + @Test + public void testHiddenFiles() { + // test with shapefiles in hidden directory + ShapefileHandler shp_handler = new ShapefileHandler("src/test/resources/hiddenShapefiles.zip"); + shp_handler.DEBUG= true; + assertFalse(shp_handler.containsShapefile()); + } diff --git a/src/test/resources/hiddenShapefiles.zip b/src/test/resources/hiddenShapefiles.zip new file mode 100644 index 00000000000..c733a2083ae Binary files /dev/null and b/src/test/resources/hiddenShapefiles.zip differ