diff --git a/.env b/.env index d5cffcec0aa..9d604630073 100644 --- a/.env +++ b/.env @@ -1,5 +1,5 @@ APP_IMAGE=gdcc/dataverse:unstable -POSTGRES_VERSION=16 +POSTGRES_VERSION=17 DATAVERSE_DB_USER=dataverse SOLR_VERSION=9.3.0 -SKIP_DEPLOY=0 \ No newline at end of file +SKIP_DEPLOY=0 diff --git a/.github/workflows/deploy_beta_testing.yml b/.github/workflows/deploy_beta_testing.yml index 028f0140cc9..eca8416732a 100644 --- a/.github/workflows/deploy_beta_testing.yml +++ b/.github/workflows/deploy_beta_testing.yml @@ -5,14 +5,18 @@ on: branches: - develop +concurrency: + group: deploy-beta-testing + cancel-in-progress: false + jobs: build: runs-on: ubuntu-latest steps: - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 - - uses: actions/setup-java@v3 + - uses: actions/setup-java@v4 with: distribution: 'zulu' java-version: '17' @@ -32,7 +36,7 @@ jobs: run: echo "war_file=$(ls *.war | head -1)">> $GITHUB_ENV - name: Upload war artifact - uses: actions/upload-artifact@v3 + uses: actions/upload-artifact@v4 with: name: built-app path: ./target/${{ env.war_file }} @@ -42,10 +46,10 @@ jobs: runs-on: ubuntu-latest steps: - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 - name: Download war artifact - uses: actions/download-artifact@v3 + uses: actions/download-artifact@v4 with: name: built-app path: ./ diff --git a/.github/workflows/maven_unit_test.yml b/.github/workflows/maven_unit_test.yml index a94b17a67ba..45180ea7aec 100644 --- a/.github/workflows/maven_unit_test.yml +++ b/.github/workflows/maven_unit_test.yml @@ -32,9 +32,9 @@ jobs: steps: # TODO: As part of #10618 change to setup-maven custom action # Basic setup chores - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 - name: Set up JDK ${{ matrix.jdk }} - uses: actions/setup-java@v3 + uses: actions/setup-java@v4 with: java-version: ${{ matrix.jdk }} distribution: temurin @@ -57,7 +57,7 @@ jobs: # Upload the built war file. For download, it will be wrapped in a ZIP by GitHub. 
# See also https://github.com/actions/upload-artifact#zipped-artifact-downloads - - uses: actions/upload-artifact@v3 + - uses: actions/upload-artifact@v4 with: name: dataverse-java${{ matrix.jdk }}.war path: target/dataverse*.war @@ -67,7 +67,7 @@ jobs: - run: | tar -cvf java-builddir.tar target tar -cvf java-m2-selection.tar ~/.m2/repository/io/gdcc/dataverse-* - - uses: actions/upload-artifact@v3 + - uses: actions/upload-artifact@v4 with: name: java-artifacts path: | @@ -98,16 +98,16 @@ jobs: steps: # TODO: As part of #10618 change to setup-maven custom action # Basic setup chores - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 - name: Set up JDK ${{ matrix.jdk }} - uses: actions/setup-java@v3 + uses: actions/setup-java@v4 with: java-version: ${{ matrix.jdk }} distribution: temurin cache: maven # Get the build output from the unit test job - - uses: actions/download-artifact@v3 + - uses: actions/download-artifact@v4 with: name: java-artifacts - run: | @@ -119,7 +119,7 @@ jobs: # Wrap up and send to coverage job - run: tar -cvf java-reportdir.tar target/site - - uses: actions/upload-artifact@v3 + - uses: actions/upload-artifact@v4 with: name: java-reportdir path: java-reportdir.tar @@ -132,15 +132,15 @@ jobs: steps: # TODO: As part of #10618 change to setup-maven custom action # Basic setup chores - - uses: actions/checkout@v3 - - uses: actions/setup-java@v3 + - uses: actions/checkout@v4 + - uses: actions/setup-java@v4 with: java-version: '17' distribution: temurin cache: maven # Get the build output from the integration test job - - uses: actions/download-artifact@v3 + - uses: actions/download-artifact@v4 with: name: java-reportdir - run: tar -xvf java-reportdir.tar diff --git a/conf/solr/schema.xml b/conf/solr/schema.xml index 2aed50e9998..d5c789c7189 100644 --- a/conf/solr/schema.xml +++ b/conf/solr/schema.xml @@ -234,6 +234,7 @@ + - 6.4 + 6.5 17 UTF-8 @@ -149,7 +149,7 @@ 6.2024.6 - 42.7.2 + 42.7.4 9.4.1 1.12.748 26.30.0 @@ -446,8 +446,8 @@ Once the 
release has been made (tag created), change this back to "${parsedVersion.majorVersion}.${parsedVersion.nextMinorVersion}" (These properties are provided by the build-helper plugin below.) --> - ${parsedVersion.majorVersion}.${parsedVersion.nextMinorVersion} - + + ${revision} diff --git a/pom.xml b/pom.xml index b2344989569..5ecbd7059c1 100644 --- a/pom.xml +++ b/pom.xml @@ -27,7 +27,7 @@ war 1.2.18.4 - 9.22.1 + 10.19.0 1.20.1 5.2.1 2.9.1 @@ -188,6 +188,11 @@ flyway-core ${flyway.version} + + org.flywaydb + flyway-database-postgresql + ${flyway.version} + org.eclipse.persistence @@ -1004,7 +1009,7 @@ true docker-build - 16 + 17 gdcc/dataverse:${app.image.tag} unstable diff --git a/src/main/java/edu/harvard/iq/dataverse/CustomQuestion.java b/src/main/java/edu/harvard/iq/dataverse/CustomQuestion.java index 2cb6f27c3e4..d880da5b4a8 100644 --- a/src/main/java/edu/harvard/iq/dataverse/CustomQuestion.java +++ b/src/main/java/edu/harvard/iq/dataverse/CustomQuestion.java @@ -2,7 +2,7 @@ import java.io.Serializable; import java.util.List; import jakarta.persistence.*; -import org.hibernate.validator.constraints.NotBlank; +import jakarta.validation.constraints.NotBlank; /** * @@ -41,7 +41,7 @@ public void setId(Long id) { private String questionType; @NotBlank(message = "{custom.questiontext}") - @Column( nullable = false ) + @Column( nullable = false, columnDefinition = "TEXT") private String questionString; private boolean required; diff --git a/src/main/java/edu/harvard/iq/dataverse/DataCitation.java b/src/main/java/edu/harvard/iq/dataverse/DataCitation.java index 3977023fc4b..02fb59751fb 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DataCitation.java +++ b/src/main/java/edu/harvard/iq/dataverse/DataCitation.java @@ -792,6 +792,7 @@ private GlobalId getPIDFrom(DatasetVersion dsv, DvObject dv) { if (!dsv.getDataset().isHarvested() || HarvestingClient.HARVEST_STYLE_VDC.equals(dsv.getDataset().getHarvestedFrom().getHarvestStyle()) || 
HarvestingClient.HARVEST_STYLE_ICPSR.equals(dsv.getDataset().getHarvestedFrom().getHarvestStyle()) + || HarvestingClient.HARVEST_STYLE_DEFAULT.equals(dsv.getDataset().getHarvestedFrom().getHarvestStyle()) || HarvestingClient.HARVEST_STYLE_DATAVERSE .equals(dsv.getDataset().getHarvestedFrom().getHarvestStyle())) { if(!isDirect()) { diff --git a/src/main/java/edu/harvard/iq/dataverse/DataFileServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/DataFileServiceBean.java index 98ac8ff387f..937f5693511 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DataFileServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/DataFileServiceBean.java @@ -1407,8 +1407,7 @@ public UploadSessionQuotaLimit getUploadSessionQuotaLimit(DvObjectContainer pare } public boolean isInReleasedVersion(Long id) { - Query query = em.createQuery("SELECT fm.id FROM FileMetadata fm, DvObject dvo WHERE fm.datasetVersion.id=(SELECT dv.id FROM DatasetVersion dv WHERE dv.dataset.id=dvo.owner.id and dv.versionState=edu.harvard.iq.dataverse.DatasetVersion.VersionState.RELEASED ORDER BY dv.versionNumber DESC, dv.minorVersionNumber DESC LIMIT 1) AND dvo.id=fm.dataFile.id AND fm.dataFile.id=:fid"); - query.setParameter("fid", id); + Query query = em.createNativeQuery("SELECT fm.id FROM filemetadata fm WHERE fm.datasetversion_id=(SELECT dv.id FROM datasetversion dv, dvobject dvo WHERE dv.dataset_id=dvo.owner_id AND dv.versionState='RELEASED' and dvo.id=" + id + " ORDER BY dv.versionNumber DESC, dv.minorVersionNumber DESC LIMIT 1) AND fm.datafile_id=" + id); try { query.getSingleResult(); diff --git a/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java b/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java index 8522f2733c7..33a093c8044 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java +++ b/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java @@ -101,6 +101,7 @@ import jakarta.faces.view.ViewScoped; import jakarta.inject.Inject; import jakarta.inject.Named; +import 
jakarta.persistence.OptimisticLockException; import org.apache.commons.lang3.StringUtils; import org.primefaces.event.FileUploadEvent; @@ -1935,13 +1936,13 @@ public void updateOwnerDataverse() { if (selectedHostDataverse != null && selectedHostDataverse.getId() != null) { ownerId = selectedHostDataverse.getId(); dataset.setOwner(selectedHostDataverse); - logger.info("New host dataverse id: "+ownerId); + logger.info("New host dataverse id: " + ownerId); // discard the dataset already created //If a global ID was already assigned, as is true for direct upload, keep it (if files were already uploaded, they are at the path corresponding to the existing global id) GlobalId gid = dataset.getGlobalId(); dataset = new Dataset(); - if(gid!=null) { - dataset.setGlobalId(gid); + if (gid != null) { + dataset.setGlobalId(gid); } // initiate from scratch: (isolate the creation of a new dataset in its own method?) @@ -2287,8 +2288,17 @@ private String init(boolean initFull) { JsfHelper.addWarningMessage(message); } } + if(isAnonymizedAccess()){ + dataverseHeaderFragment.setBreadcrumbs(new ArrayList<>()); + } return null; } + + public void viewActionInitBreadcrumbs(){ + if(!isAnonymizedAccess()){ + dataverseHeaderFragment.initBreadcrumbs(dataset); + } + } private void displayWorkflowComments() { List comments = workingVersion.getWorkflowComments(); @@ -2888,6 +2898,9 @@ private String releaseDataset(boolean minor) { // the lock info system. 
JsfHelper.addErrorMessage(ex.getLocalizedMessage()); } + if(ex.getCause()!=null && ex.getCause() instanceof OptimisticLockException) { + JsfHelper.addErrorMessage(BundleUtil.getStringFromBundle("dataset.message.parallelPublishError")); + } logger.severe(ex.getMessage()); } @@ -4002,6 +4015,10 @@ public String save() { Throwable cause = ex; while (cause.getCause()!= null) { cause = cause.getCause(); + if (cause != null && cause instanceof OptimisticLockException) { + JsfHelper.addErrorMessage(BundleUtil.getStringFromBundle("dataset.message.parallelUpdateError")); + return null; + } error.append(cause).append(" "); error.append(cause.getMessage()).append(" "); } @@ -4011,6 +4028,15 @@ public String save() { } catch (CommandException ex) { //FacesContext.getCurrentInstance().addMessage(null, new FacesMessage(FacesMessage.SEVERITY_ERROR, "Dataset Save Failed", " - " + ex.toString())); logger.log(Level.SEVERE, "CommandException, when attempting to update the dataset: " + ex.getMessage(), ex); + Throwable cause = ex; + while (cause.getCause()!= null) { + cause = cause.getCause(); + logger.info("Cause is: " + cause.getClass().getName() + ", Message: " + cause.getMessage()); + if (cause != null && cause instanceof OptimisticLockException) { + JsfHelper.addErrorMessage(BundleUtil.getStringFromBundle("dataset.message.parallelUpdateError")); + return null; + } + } populateDatasetUpdateFailureMessage(); return returnToDraftVersion(); } diff --git a/src/main/java/edu/harvard/iq/dataverse/DatasetVersion.java b/src/main/java/edu/harvard/iq/dataverse/DatasetVersion.java index 0433c425fd2..a7bbc7c3ad4 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DatasetVersion.java +++ b/src/main/java/edu/harvard/iq/dataverse/DatasetVersion.java @@ -102,6 +102,10 @@ public int compare(DatasetVersion o1, DatasetVersion o2) { } } }; + public static final JsonObjectBuilder compareVersions(DatasetVersion originalVersion, DatasetVersion newVersion) { + DatasetVersionDifference diff = new 
DatasetVersionDifference(newVersion, originalVersion); + return diff.compareVersionsAsJson(); + } // TODO: Determine the UI implications of various version states //IMPORTANT: If you add a new value to this enum, you will also have to modify the @@ -1390,7 +1394,10 @@ public List getRelatedPublications() { relatedPublication.setIdNumber(subField.getDisplayValue()); break; case DatasetFieldConstant.publicationRelationType: - relatedPublication.setRelationType(subField.getDisplayValue()); + List values = subField.getValues_nondisplay(); + if (!values.isEmpty()) { + relatedPublication.setRelationType(values.get(0)); //only one value allowed + } break; } } diff --git a/src/main/java/edu/harvard/iq/dataverse/DatasetVersionDifference.java b/src/main/java/edu/harvard/iq/dataverse/DatasetVersionDifference.java index eca0c84ae84..c5d6c31386c 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DatasetVersionDifference.java +++ b/src/main/java/edu/harvard/iq/dataverse/DatasetVersionDifference.java @@ -5,16 +5,24 @@ import edu.harvard.iq.dataverse.datavariable.VariableMetadataUtil; import edu.harvard.iq.dataverse.util.StringUtil; +import java.text.SimpleDateFormat; import java.util.ArrayList; import java.util.Collections; import java.util.List; +import java.util.Map; +import java.util.Map.Entry; import java.util.Set; import java.util.logging.Logger; +import edu.harvard.iq.dataverse.util.json.NullSafeJsonBuilder; +import jakarta.json.Json; +import jakarta.json.JsonArrayBuilder; +import jakarta.json.JsonObjectBuilder; import org.apache.commons.lang3.StringUtils; import edu.harvard.iq.dataverse.util.BundleUtil; import java.util.Arrays; import java.util.Date; +import java.util.HashMap; import java.util.HashSet; import java.util.Iterator; import java.util.LinkedList; @@ -34,14 +42,13 @@ public final class DatasetVersionDifference { private List addedFiles = new ArrayList<>(); private List removedFiles = new ArrayList<>(); private List changedFileMetadata = new ArrayList<>(); + 
private Map>> changedFileMetadataDiff = new HashMap<>(); private List changedVariableMetadata = new ArrayList<>(); private List replacedFiles = new ArrayList<>(); private List changedTermsAccess = new ArrayList<>(); private List summaryDataForNote = new ArrayList<>(); private List blockDataForNote = new ArrayList<>(); - private VariableMetadataUtil variableMetadataUtil; - private List differenceSummaryGroups = new ArrayList<>(); public List getDifferenceSummaryGroups() { @@ -106,54 +113,73 @@ public DatasetVersionDifference(DatasetVersion newVersion, DatasetVersion origin addToSummary(null, dsfn); } } - - // TODO: ? - // It looks like we are going through the filemetadatas in both versions, - // *sequentially* (i.e. at the cost of O(N*M)), to select the lists of - // changed, deleted and added files between the 2 versions... But why - // are we doing it, if we are doing virtually the same thing inside - // the initDatasetFilesDifferenceList(), below - but in a more efficient - // way (sorting both lists, then goint through them in parallel, at the - // cost of (N+M) max.? - // -- 4.6 Nov. 
2016 - + long startTime = System.currentTimeMillis(); + Map originalFileMetadataMap = new HashMap<>(); + Map previousIDtoFileMetadataMap = new HashMap<>(); for (FileMetadata fmdo : originalVersion.getFileMetadatas()) { - boolean deleted = true; - for (FileMetadata fmdn : newVersion.getFileMetadatas()) { - if (fmdo.getDataFile().equals(fmdn.getDataFile())) { - deleted = false; - if (!compareFileMetadatas(fmdo, fmdn)) { - changedFileMetadata.add(fmdo); - changedFileMetadata.add(fmdn); - } - if (!variableMetadataUtil.compareVariableMetadata(fmdo,fmdn) || !compareVarGroup(fmdo, fmdn)) { - changedVariableMetadata.add(fmdo); - changedVariableMetadata.add(fmdn); - } - break; - } - } - if (deleted) { - removedFiles.add(fmdo); - } + originalFileMetadataMap.put(fmdo.getDataFile().getId(), fmdo); } + for (FileMetadata fmdn : newVersion.getFileMetadatas()) { - boolean added = true; - for (FileMetadata fmdo : originalVersion.getFileMetadatas()) { - if (fmdo.getDataFile().equals(fmdn.getDataFile())) { - added = false; - break; + DataFile ndf = fmdn.getDataFile(); + Long id = ndf.getId(); + FileMetadata fmdo = originalFileMetadataMap.get(id); + //If this file was in the original version + if(fmdo!= null) { + //Check for differences + Map> fileMetadataDiff = compareFileMetadatas(fmdo, fmdn); + if (!fileMetadataDiff.isEmpty()) { + changedFileMetadata.add(fmdo); + changedFileMetadata.add(fmdn); + // TODO: find a better key for the map. 
needs to be something that doesn't change + changedFileMetadataDiff.put(fmdo, fileMetadataDiff); + } + if (!VariableMetadataUtil.compareVariableMetadata(fmdo,fmdn) || !compareVarGroup(fmdo, fmdn)) { + changedVariableMetadata.add(fmdo); + changedVariableMetadata.add(fmdn); + } + // And drop it from the list since it can't be a deleted file + originalFileMetadataMap.remove(id); + } else { + //It wasn't in the original version + Long prevID = ndf.getPreviousDataFileId(); + //It might be a replacement file or an added file + if(prevID != null) { + //Add it to a map so we can check later to see if it's a replacement + previousIDtoFileMetadataMap.put(prevID, fmdn); + } else { + //Otherwise make it an added file now + addedFiles.add(fmdn); } } - if (added) { - addedFiles.add(fmdn); + } + //Finally check any remaining files from the original version that weren't in the new version + for (Long removedId : originalFileMetadataMap.keySet()) { + //See if it has been replaced + FileMetadata replacingFmd = previousIDtoFileMetadataMap.get(removedId); + FileMetadata fmdRemoved = originalFileMetadataMap.get(removedId); + if (replacingFmd != null) { + //This is a replacement + replacedFiles.add(new FileMetadata[] { fmdRemoved, replacingFmd }); + //Drop it from the map + previousIDtoFileMetadataMap.remove(removedId); + } else { + //This is a removed file + removedFiles.add(fmdRemoved); } - } - getReplacedFiles(); + } + // Any fms left are not updating existing files and aren't replacing a file, but + // they are claiming a previous file id.
That shouldn't be possible, but this will + // make sure they get listed in the difference if they do + for (Entry entry : previousIDtoFileMetadataMap.entrySet()) { + logger.warning("Previous file id claimed for a new file: fmd id: " + entry.getValue() + ", previous file id: " + entry.getKey()); + addedFiles.add(entry.getValue()); + } + + logger.fine("Main difference loop execution time: " + (System.currentTimeMillis() - startTime) + " ms"); initDatasetFilesDifferencesList(); - //Sort within blocks by datasetfieldtype dispaly order then.... - //sort via metadatablock order - citation first... + //Sort within blocks by datasetfieldtype display order for (List blockList : detailDataByBlock) { Collections.sort(blockList, (DatasetField[] l1, DatasetField[] l2) -> { DatasetField dsfa = l1[0]; //(DatasetField[]) l1.get(0); @@ -163,6 +189,17 @@ public DatasetVersionDifference(DatasetVersion newVersion, DatasetVersion origin return Integer.valueOf(a).compareTo(b); }); } + //Sort existing compoundValues by datasetfieldtype display order + for (List blockList : detailDataByBlock) { + for (DatasetField[] dfarr : blockList) { + for (DatasetField df : dfarr) { + for (DatasetFieldCompoundValue dfcv : df.getDatasetFieldCompoundValues()) { + Collections.sort(dfcv.getChildDatasetFields(), DatasetField.DisplayOrder); + } + } + } + } + //Sort via metadatablock order Collections.sort(detailDataByBlock, (List l1, List l2) -> { DatasetField dsfa[] = (DatasetField[]) l1.get(0); DatasetField dsfb[] = (DatasetField[]) l2.get(0); @@ -173,294 +210,62 @@ public DatasetVersionDifference(DatasetVersion newVersion, DatasetVersion origin getTermsDifferences(); } - private void getReplacedFiles() { - if (addedFiles.isEmpty() || removedFiles.isEmpty()) { - return; - } - List addedToReplaced = new ArrayList<>(); - List removedToReplaced = new ArrayList<>(); - for (FileMetadata added : addedFiles) { - DataFile addedDF = added.getDataFile(); - Long replacedId = addedDF.getPreviousDataFileId(); - if 
(added.getDataFile().getPreviousDataFileId() != null){ - } - for (FileMetadata removed : removedFiles) { - DataFile test = removed.getDataFile(); - if (test.getId().equals(replacedId)) { - addedToReplaced.add(added); - removedToReplaced.add(removed); - FileMetadata[] replacedArray = new FileMetadata[2]; - replacedArray[0] = removed; - replacedArray[1] = added; - replacedFiles.add(replacedArray); - } - } - } - if(addedToReplaced.isEmpty()){ - } else{ - addedToReplaced.stream().forEach((delete) -> { - addedFiles.remove(delete); - }); - removedToReplaced.stream().forEach((delete) -> { - removedFiles.remove(delete); - }); - } - } + private void getTermsDifferences() { - changedTermsAccess = new ArrayList<>(); - if (newVersion.getTermsOfUseAndAccess() != null && originalVersion.getTermsOfUseAndAccess() != null) { - if (!StringUtil.nullToEmpty(newVersion.getTermsOfUseAndAccess().getTermsOfUse()).equals(StringUtil.nullToEmpty(originalVersion.getTermsOfUseAndAccess().getTermsOfUse()))) { - String diffLabel = BundleUtil.getStringFromBundle("file.dataFilesTab.terms.list.termsOfUse.header"); - changedTermsAccess = addToTermsChangedList(changedTermsAccess, diffLabel, StringUtil.nullToEmpty(originalVersion.getTermsOfUseAndAccess().getTermsOfUse()), StringUtil.nullToEmpty(newVersion.getTermsOfUseAndAccess().getTermsOfUse())); - } - if (!StringUtil.nullToEmpty(newVersion.getTermsOfUseAndAccess().getConfidentialityDeclaration()).equals(StringUtil.nullToEmpty(originalVersion.getTermsOfUseAndAccess().getConfidentialityDeclaration()))) { - String diffLabel = BundleUtil.getStringFromBundle("file.dataFilesTab.terms.list.termsOfUse.addInfo.declaration"); - changedTermsAccess = addToTermsChangedList(changedTermsAccess, diffLabel, StringUtil.nullToEmpty(originalVersion.getTermsOfUseAndAccess().getConfidentialityDeclaration()), - StringUtil.nullToEmpty(newVersion.getTermsOfUseAndAccess().getConfidentialityDeclaration())); - } - if 
(!StringUtil.nullToEmpty(newVersion.getTermsOfUseAndAccess().getSpecialPermissions()).equals(StringUtil.nullToEmpty(originalVersion.getTermsOfUseAndAccess().getSpecialPermissions()))) { - String diffLabel = BundleUtil.getStringFromBundle("file.dataFilesTab.terms.list.termsOfUse.addInfo.permissions"); - changedTermsAccess = addToTermsChangedList(changedTermsAccess, diffLabel, StringUtil.nullToEmpty(originalVersion.getTermsOfUseAndAccess().getSpecialPermissions()), - StringUtil.nullToEmpty(newVersion.getTermsOfUseAndAccess().getSpecialPermissions())); - } - if (!StringUtil.nullToEmpty(newVersion.getTermsOfUseAndAccess().getRestrictions()).equals(StringUtil.nullToEmpty(originalVersion.getTermsOfUseAndAccess().getRestrictions()))) { - String diffLabel = BundleUtil.getStringFromBundle("file.dataFilesTab.terms.list.termsOfUse.addInfo.restrictions"); - changedTermsAccess = addToTermsChangedList(changedTermsAccess, diffLabel, StringUtil.nullToEmpty(originalVersion.getTermsOfUseAndAccess().getRestrictions()), - StringUtil.nullToEmpty(newVersion.getTermsOfUseAndAccess().getRestrictions())); - - } - if (!StringUtil.nullToEmpty(newVersion.getTermsOfUseAndAccess().getCitationRequirements()).equals(StringUtil.nullToEmpty(originalVersion.getTermsOfUseAndAccess().getCitationRequirements()))) { - String diffLabel = BundleUtil.getStringFromBundle("file.dataFilesTab.terms.list.termsOfUse.addInfo.citationRequirements"); - changedTermsAccess = addToTermsChangedList(changedTermsAccess, diffLabel, StringUtil.nullToEmpty(originalVersion.getTermsOfUseAndAccess().getCitationRequirements()), - StringUtil.nullToEmpty(newVersion.getTermsOfUseAndAccess().getCitationRequirements())); - } - if (!StringUtil.nullToEmpty(newVersion.getTermsOfUseAndAccess().getDepositorRequirements()).equals(StringUtil.nullToEmpty(originalVersion.getTermsOfUseAndAccess().getDepositorRequirements()))) { - String diffLabel = 
BundleUtil.getStringFromBundle("file.dataFilesTab.terms.list.termsOfUse.addInfo.depositorRequirements"); - changedTermsAccess = addToTermsChangedList(changedTermsAccess, diffLabel, StringUtil.nullToEmpty(originalVersion.getTermsOfUseAndAccess().getDepositorRequirements()), - StringUtil.nullToEmpty(newVersion.getTermsOfUseAndAccess().getDepositorRequirements())); - } - if (!StringUtil.nullToEmpty(newVersion.getTermsOfUseAndAccess().getConditions()).equals(StringUtil.nullToEmpty(originalVersion.getTermsOfUseAndAccess().getConditions()))) { - String diffLabel = BundleUtil.getStringFromBundle("file.dataFilesTab.terms.list.termsOfUse.addInfo.conditions"); - changedTermsAccess = addToTermsChangedList(changedTermsAccess, diffLabel, StringUtil.nullToEmpty(originalVersion.getTermsOfUseAndAccess().getConditions()), - StringUtil.nullToEmpty(newVersion.getTermsOfUseAndAccess().getConditions())); - } - if (!StringUtil.nullToEmpty(newVersion.getTermsOfUseAndAccess().getDisclaimer()).equals(StringUtil.nullToEmpty(originalVersion.getTermsOfUseAndAccess().getDisclaimer()))) { - String diffLabel = BundleUtil.getStringFromBundle("file.dataFilesTab.terms.list.termsOfUse.addInfo.disclaimer"); - changedTermsAccess = addToTermsChangedList(changedTermsAccess, diffLabel, StringUtil.nullToEmpty(originalVersion.getTermsOfUseAndAccess().getDisclaimer()), StringUtil.nullToEmpty(newVersion.getTermsOfUseAndAccess().getDisclaimer())); - } - - if (!StringUtil.nullToEmpty(newVersion.getTermsOfUseAndAccess().getTermsOfAccess()).equals(StringUtil.nullToEmpty(originalVersion.getTermsOfUseAndAccess().getTermsOfAccess()))) { - String diffLabel = BundleUtil.getStringFromBundle("file.dataFilesTab.terms.list.termsOfAccess.termsOfsAccess"); - changedTermsAccess = addToTermsChangedList(changedTermsAccess, diffLabel, StringUtil.nullToEmpty(originalVersion.getTermsOfUseAndAccess().getTermsOfAccess()), - StringUtil.nullToEmpty(newVersion.getTermsOfUseAndAccess().getTermsOfAccess())); - } - if 
(!StringUtil.nullToEmpty(newVersion.getTermsOfUseAndAccess().getDataAccessPlace()).equals(StringUtil.nullToEmpty(originalVersion.getTermsOfUseAndAccess().getDataAccessPlace()))) { - String diffLabel = BundleUtil.getStringFromBundle("file.dataFilesTab.terms.list.termsOfAccess.addInfo.dataAccessPlace"); - changedTermsAccess = addToTermsChangedList(changedTermsAccess, diffLabel, StringUtil.nullToEmpty(originalVersion.getTermsOfUseAndAccess().getDataAccessPlace()), - StringUtil.nullToEmpty(newVersion.getTermsOfUseAndAccess().getDataAccessPlace())); - } - if (!StringUtil.nullToEmpty(newVersion.getTermsOfUseAndAccess().getOriginalArchive()).equals(StringUtil.nullToEmpty(originalVersion.getTermsOfUseAndAccess().getOriginalArchive()))) { - String diffLabel = BundleUtil.getStringFromBundle("file.dataFilesTab.terms.list.termsOfAccess.addInfo.originalArchive"); - changedTermsAccess = addToTermsChangedList(changedTermsAccess, diffLabel, StringUtil.nullToEmpty(originalVersion.getTermsOfUseAndAccess().getOriginalArchive()), - StringUtil.nullToEmpty(newVersion.getTermsOfUseAndAccess().getOriginalArchive())); - } - if (!StringUtil.nullToEmpty(newVersion.getTermsOfUseAndAccess().getAvailabilityStatus()).equals(StringUtil.nullToEmpty(originalVersion.getTermsOfUseAndAccess().getAvailabilityStatus()))) { - String diffLabel = BundleUtil.getStringFromBundle("file.dataFilesTab.terms.list.termsOfAccess.addInfo.availabilityStatus"); - changedTermsAccess = addToTermsChangedList(changedTermsAccess, diffLabel, StringUtil.nullToEmpty(originalVersion.getTermsOfUseAndAccess().getAvailabilityStatus()), - StringUtil.nullToEmpty(newVersion.getTermsOfUseAndAccess().getAvailabilityStatus())); - } - if (!StringUtil.nullToEmpty(newVersion.getTermsOfUseAndAccess().getContactForAccess()).equals(StringUtil.nullToEmpty(originalVersion.getTermsOfUseAndAccess().getContactForAccess()))) { - String diffLabel = 
BundleUtil.getStringFromBundle("file.dataFilesTab.terms.list.termsOfAccess.addInfo.contactForAccess"); - changedTermsAccess = addToTermsChangedList(changedTermsAccess, diffLabel, StringUtil.nullToEmpty(originalVersion.getTermsOfUseAndAccess().getContactForAccess()), - StringUtil.nullToEmpty(newVersion.getTermsOfUseAndAccess().getContactForAccess())); - } - if (!StringUtil.nullToEmpty(newVersion.getTermsOfUseAndAccess().getSizeOfCollection()).equals(StringUtil.nullToEmpty(originalVersion.getTermsOfUseAndAccess().getSizeOfCollection()))) { - String diffLabel = BundleUtil.getStringFromBundle("file.dataFilesTab.terms.list.termsOfAccess.addInfo.sizeOfCollection"); - changedTermsAccess = addToTermsChangedList(changedTermsAccess, diffLabel, StringUtil.nullToEmpty(originalVersion.getTermsOfUseAndAccess().getSizeOfCollection()), - StringUtil.nullToEmpty(newVersion.getTermsOfUseAndAccess().getSizeOfCollection())); - } - if (!StringUtil.nullToEmpty(newVersion.getTermsOfUseAndAccess().getStudyCompletion()).equals(StringUtil.nullToEmpty(originalVersion.getTermsOfUseAndAccess().getStudyCompletion()))) { - String diffLabel = BundleUtil.getStringFromBundle("file.dataFilesTab.terms.list.termsOfAccess.addInfo.studyCompletion"); - changedTermsAccess = addToTermsChangedList(changedTermsAccess, diffLabel, StringUtil.nullToEmpty(originalVersion.getTermsOfUseAndAccess().getStudyCompletion()), - StringUtil.nullToEmpty(newVersion.getTermsOfUseAndAccess().getStudyCompletion())); - } + TermsOfUseAndAccess originalTerms = originalVersion.getTermsOfUseAndAccess(); + if(originalTerms == null) { + originalTerms = new TermsOfUseAndAccess(); + } + // newTerms should never be null + TermsOfUseAndAccess newTerms = newVersion.getTermsOfUseAndAccess(); + if(newTerms == null) { + logger.warning("New version does not have TermsOfUseAndAccess"); + newTerms = new TermsOfUseAndAccess(); } - - if (newVersion.getTermsOfUseAndAccess() != null && originalVersion.getTermsOfUseAndAccess() == null) { - if 
(!StringUtil.nullToEmpty(newVersion.getTermsOfUseAndAccess().getTermsOfUse()).isEmpty()) { - String diffLabel = BundleUtil.getStringFromBundle("file.dataFilesTab.terms.list.termsOfUse.header"); - changedTermsAccess = addToTermsChangedList(changedTermsAccess, diffLabel, "", StringUtil.nullToEmpty(newVersion.getTermsOfUseAndAccess().getTermsOfUse())); - } - if (!StringUtil.nullToEmpty(newVersion.getTermsOfUseAndAccess().getConfidentialityDeclaration()).isEmpty()){ - String diffLabel = BundleUtil.getStringFromBundle("file.dataFilesTab.terms.list.termsOfUse.addInfo.declaration"); - changedTermsAccess = addToTermsChangedList(changedTermsAccess, diffLabel, "", - StringUtil.nullToEmpty(newVersion.getTermsOfUseAndAccess().getConfidentialityDeclaration())); - } - if (!StringUtil.nullToEmpty(newVersion.getTermsOfUseAndAccess().getSpecialPermissions()).isEmpty()){ - String diffLabel = BundleUtil.getStringFromBundle("file.dataFilesTab.terms.list.termsOfUse.addInfo.permissions"); - changedTermsAccess = addToTermsChangedList(changedTermsAccess, diffLabel, "", - StringUtil.nullToEmpty(newVersion.getTermsOfUseAndAccess().getSpecialPermissions())); - } - if (!StringUtil.nullToEmpty(newVersion.getTermsOfUseAndAccess().getRestrictions()).isEmpty()){ - String diffLabel = BundleUtil.getStringFromBundle("file.dataFilesTab.terms.list.termsOfUse.addInfo.restrictions"); - changedTermsAccess = addToTermsChangedList(changedTermsAccess, diffLabel, "", - StringUtil.nullToEmpty(newVersion.getTermsOfUseAndAccess().getRestrictions())); - } - if (!StringUtil.nullToEmpty(newVersion.getTermsOfUseAndAccess().getCitationRequirements()).isEmpty()){ - String diffLabel = BundleUtil.getStringFromBundle("file.dataFilesTab.terms.list.termsOfUse.addInfo.citationRequirements"); - changedTermsAccess = addToTermsChangedList(changedTermsAccess, diffLabel, "", - StringUtil.nullToEmpty(newVersion.getTermsOfUseAndAccess().getCitationRequirements())); - } - if 
(!StringUtil.nullToEmpty(newVersion.getTermsOfUseAndAccess().getDepositorRequirements()).isEmpty()){ - String diffLabel = BundleUtil.getStringFromBundle("file.dataFilesTab.terms.list.termsOfUse.addInfo.depositorRequirements"); - changedTermsAccess = addToTermsChangedList(changedTermsAccess, diffLabel, "", - StringUtil.nullToEmpty(newVersion.getTermsOfUseAndAccess().getDepositorRequirements())); - } - if (!StringUtil.nullToEmpty(newVersion.getTermsOfUseAndAccess().getConditions()).isEmpty()){ - String diffLabel = BundleUtil.getStringFromBundle("file.dataFilesTab.terms.list.termsOfUse.addInfo.conditions"); - changedTermsAccess = addToTermsChangedList(changedTermsAccess, diffLabel, "", - StringUtil.nullToEmpty(newVersion.getTermsOfUseAndAccess().getConditions())); - } - if (!StringUtil.nullToEmpty(newVersion.getTermsOfUseAndAccess().getDisclaimer()).isEmpty()){ - String diffLabel = BundleUtil.getStringFromBundle("file.dataFilesTab.terms.list.termsOfUse.addInfo.disclaimer"); - changedTermsAccess = addToTermsChangedList(changedTermsAccess, diffLabel, "", StringUtil.nullToEmpty(newVersion.getTermsOfUseAndAccess().getDisclaimer())); - } - if (!StringUtil.nullToEmpty(newVersion.getTermsOfUseAndAccess().getTermsOfAccess()).isEmpty()){ - String diffLabel = BundleUtil.getStringFromBundle("file.dataFilesTab.terms.list.termsOfAccess.termsOfsAccess"); - changedTermsAccess = addToTermsChangedList(changedTermsAccess, diffLabel, "", - StringUtil.nullToEmpty(newVersion.getTermsOfUseAndAccess().getTermsOfAccess())); - } - if (!StringUtil.nullToEmpty(newVersion.getTermsOfUseAndAccess().getDataAccessPlace()).isEmpty()){ - String diffLabel = BundleUtil.getStringFromBundle("file.dataFilesTab.terms.list.termsOfAccess.addInfo.dataAccessPlace"); - changedTermsAccess = addToTermsChangedList(changedTermsAccess, diffLabel, "", - StringUtil.nullToEmpty(newVersion.getTermsOfUseAndAccess().getDataAccessPlace())); - } - if 
(!StringUtil.nullToEmpty(newVersion.getTermsOfUseAndAccess().getOriginalArchive()).isEmpty()){ - String diffLabel = BundleUtil.getStringFromBundle("file.dataFilesTab.terms.list.termsOfAccess.addInfo.originalArchive"); - changedTermsAccess = addToTermsChangedList(changedTermsAccess, diffLabel, "", - StringUtil.nullToEmpty(newVersion.getTermsOfUseAndAccess().getOriginalArchive())); - } - if (!StringUtil.nullToEmpty(newVersion.getTermsOfUseAndAccess().getAvailabilityStatus()).isEmpty()){ - String diffLabel = BundleUtil.getStringFromBundle("file.dataFilesTab.terms.list.termsOfAccess.addInfo.availabilityStatus"); - changedTermsAccess = addToTermsChangedList(changedTermsAccess, diffLabel, "", - StringUtil.nullToEmpty(newVersion.getTermsOfUseAndAccess().getAvailabilityStatus())); - } - if (!StringUtil.nullToEmpty(newVersion.getTermsOfUseAndAccess().getContactForAccess()).isEmpty()){ - String diffLabel = BundleUtil.getStringFromBundle("file.dataFilesTab.terms.list.termsOfAccess.addInfo.contactForAccess"); - changedTermsAccess = addToTermsChangedList(changedTermsAccess, diffLabel, "", - StringUtil.nullToEmpty(newVersion.getTermsOfUseAndAccess().getContactForAccess())); - } - if (!StringUtil.nullToEmpty(newVersion.getTermsOfUseAndAccess().getSizeOfCollection()).isEmpty()){ - String diffLabel = BundleUtil.getStringFromBundle("file.dataFilesTab.terms.list.termsOfAccess.addInfo.sizeOfCollection"); - changedTermsAccess = addToTermsChangedList(changedTermsAccess, diffLabel, "", - StringUtil.nullToEmpty(newVersion.getTermsOfUseAndAccess().getSizeOfCollection())); - } - if (!StringUtil.nullToEmpty(newVersion.getTermsOfUseAndAccess().getStudyCompletion()).isEmpty()){ - String diffLabel = BundleUtil.getStringFromBundle("file.dataFilesTab.terms.list.termsOfAccess.addInfo.studyCompletion"); - changedTermsAccess = addToTermsChangedList(changedTermsAccess, diffLabel, "", - StringUtil.nullToEmpty(newVersion.getTermsOfUseAndAccess().getStudyCompletion())); - } - } - - if 
(newVersion.getTermsOfUseAndAccess() == null && originalVersion.getTermsOfUseAndAccess() != null) { - if (!StringUtil.nullToEmpty(originalVersion.getTermsOfUseAndAccess().getTermsOfUse()).isEmpty()) { - String diffLabel = BundleUtil.getStringFromBundle("file.dataFilesTab.terms.list.termsOfUse.header"); - changedTermsAccess = addToTermsChangedList(changedTermsAccess, diffLabel, StringUtil.nullToEmpty(originalVersion.getTermsOfUseAndAccess().getTermsOfUse()), ""); - } - if (!StringUtil.nullToEmpty(originalVersion.getTermsOfUseAndAccess().getConfidentialityDeclaration()).isEmpty()){ - String diffLabel = BundleUtil.getStringFromBundle("file.dataFilesTab.terms.list.termsOfUse.addInfo.declaration"); - changedTermsAccess = addToTermsChangedList(changedTermsAccess, diffLabel, - StringUtil.nullToEmpty(originalVersion.getTermsOfUseAndAccess().getConfidentialityDeclaration()), ""); - } - if (!StringUtil.nullToEmpty(originalVersion.getTermsOfUseAndAccess().getSpecialPermissions()).isEmpty()){ - String diffLabel = BundleUtil.getStringFromBundle("file.dataFilesTab.terms.list.termsOfUse.addInfo.permissions"); - changedTermsAccess = addToTermsChangedList(changedTermsAccess, diffLabel, - StringUtil.nullToEmpty(originalVersion.getTermsOfUseAndAccess().getSpecialPermissions()), ""); - } - if (!StringUtil.nullToEmpty(originalVersion.getTermsOfUseAndAccess().getRestrictions()).isEmpty()){ - String diffLabel = BundleUtil.getStringFromBundle("file.dataFilesTab.terms.list.termsOfUse.addInfo.restrictions"); - changedTermsAccess = addToTermsChangedList(changedTermsAccess, diffLabel, - StringUtil.nullToEmpty(originalVersion.getTermsOfUseAndAccess().getRestrictions()), ""); - } - if (!StringUtil.nullToEmpty(originalVersion.getTermsOfUseAndAccess().getCitationRequirements()).isEmpty()){ - String diffLabel = BundleUtil.getStringFromBundle("file.dataFilesTab.terms.list.termsOfUse.addInfo.citationRequirements"); - changedTermsAccess = addToTermsChangedList(changedTermsAccess, diffLabel, - 
StringUtil.nullToEmpty(originalVersion.getTermsOfUseAndAccess().getCitationRequirements()), ""); - } - if (!StringUtil.nullToEmpty(originalVersion.getTermsOfUseAndAccess().getDepositorRequirements()).isEmpty()){ - String diffLabel = BundleUtil.getStringFromBundle("file.dataFilesTab.terms.list.termsOfUse.addInfo.depositorRequirements"); - changedTermsAccess = addToTermsChangedList(changedTermsAccess, diffLabel, - StringUtil.nullToEmpty(originalVersion.getTermsOfUseAndAccess().getDepositorRequirements()), ""); - } - if (!StringUtil.nullToEmpty(originalVersion.getTermsOfUseAndAccess().getConditions()).isEmpty()){ - String diffLabel = BundleUtil.getStringFromBundle("file.dataFilesTab.terms.list.termsOfUse.addInfo.conditions"); - changedTermsAccess = addToTermsChangedList(changedTermsAccess, diffLabel, - StringUtil.nullToEmpty(originalVersion.getTermsOfUseAndAccess().getConditions()), ""); - } - if (!StringUtil.nullToEmpty(originalVersion.getTermsOfUseAndAccess().getDisclaimer()).isEmpty()){ - String diffLabel = BundleUtil.getStringFromBundle("file.dataFilesTab.terms.list.termsOfUse.addInfo.disclaimer"); - changedTermsAccess = addToTermsChangedList(changedTermsAccess, diffLabel, StringUtil.nullToEmpty(originalVersion.getTermsOfUseAndAccess().getDisclaimer()), ""); - } - if (!StringUtil.nullToEmpty(originalVersion.getTermsOfUseAndAccess().getTermsOfAccess()).isEmpty()){ - String diffLabel = BundleUtil.getStringFromBundle("file.dataFilesTab.terms.list.termsOfAccess.termsOfsAccess"); - changedTermsAccess = addToTermsChangedList(changedTermsAccess, diffLabel, - StringUtil.nullToEmpty(originalVersion.getTermsOfUseAndAccess().getTermsOfAccess()), ""); - } - if (!StringUtil.nullToEmpty(originalVersion.getTermsOfUseAndAccess().getDataAccessPlace()).isEmpty()){ - String diffLabel = BundleUtil.getStringFromBundle("file.dataFilesTab.terms.list.termsOfAccess.addInfo.dataAccessPlace"); - changedTermsAccess = addToTermsChangedList(changedTermsAccess, diffLabel, - 
StringUtil.nullToEmpty(originalVersion.getTermsOfUseAndAccess().getDataAccessPlace()), ""); - } - if (!StringUtil.nullToEmpty(originalVersion.getTermsOfUseAndAccess().getOriginalArchive()).isEmpty()){ - String diffLabel = BundleUtil.getStringFromBundle("file.dataFilesTab.terms.list.termsOfAccess.addInfo.originalArchive"); - changedTermsAccess = addToTermsChangedList(changedTermsAccess, diffLabel, - StringUtil.nullToEmpty(originalVersion.getTermsOfUseAndAccess().getOriginalArchive()), ""); - } - if (!StringUtil.nullToEmpty(originalVersion.getTermsOfUseAndAccess().getAvailabilityStatus()).isEmpty()){ - String diffLabel = BundleUtil.getStringFromBundle("file.dataFilesTab.terms.list.termsOfAccess.addInfo.availabilityStatus"); - changedTermsAccess = addToTermsChangedList(changedTermsAccess, diffLabel, - StringUtil.nullToEmpty(originalVersion.getTermsOfUseAndAccess().getAvailabilityStatus()), ""); - } - if (!StringUtil.nullToEmpty(originalVersion.getTermsOfUseAndAccess().getContactForAccess()).isEmpty()){ - String diffLabel = BundleUtil.getStringFromBundle("file.dataFilesTab.terms.list.termsOfAccess.addInfo.contactForAccess"); - changedTermsAccess = addToTermsChangedList(changedTermsAccess, diffLabel, - StringUtil.nullToEmpty(originalVersion.getTermsOfUseAndAccess().getContactForAccess()), ""); - } - if (!StringUtil.nullToEmpty(originalVersion.getTermsOfUseAndAccess().getSizeOfCollection()).isEmpty()){ - String diffLabel = BundleUtil.getStringFromBundle("file.dataFilesTab.terms.list.termsOfAccess.addInfo.sizeOfCollection"); - changedTermsAccess = addToTermsChangedList(changedTermsAccess, diffLabel, - StringUtil.nullToEmpty(originalVersion.getTermsOfUseAndAccess().getSizeOfCollection()), ""); - } - if (!StringUtil.nullToEmpty(originalVersion.getTermsOfUseAndAccess().getStudyCompletion()).isEmpty()){ - String diffLabel = BundleUtil.getStringFromBundle("file.dataFilesTab.terms.list.termsOfAccess.addInfo.studyCompletion"); - changedTermsAccess = 
addToTermsChangedList(changedTermsAccess, diffLabel, - StringUtil.nullToEmpty(originalVersion.getTermsOfUseAndAccess().getStudyCompletion()), ""); - } - } - } - - private DifferenceSummaryItem createSummaryItem(){ - return null; - } - - private List addToSummaryGroup(String displayName, DifferenceSummaryItem differenceSummaryItem){ - return null; + checkAndAddToChangeList(originalTerms.getTermsOfUse(), newTerms.getTermsOfUse(), + BundleUtil.getStringFromBundle("file.dataFilesTab.terms.list.termsOfUse.header")); + checkAndAddToChangeList(originalTerms.getConfidentialityDeclaration(), newTerms.getConfidentialityDeclaration(), + BundleUtil.getStringFromBundle("file.dataFilesTab.terms.list.termsOfUse.addInfo.declaration")); + checkAndAddToChangeList(originalTerms.getSpecialPermissions(), newTerms.getSpecialPermissions(), + BundleUtil.getStringFromBundle("file.dataFilesTab.terms.list.termsOfUse.addInfo.permissions")); + checkAndAddToChangeList(originalTerms.getRestrictions(), newTerms.getRestrictions(), + BundleUtil.getStringFromBundle("file.dataFilesTab.terms.list.termsOfUse.addInfo.restrictions")); + checkAndAddToChangeList(originalTerms.getCitationRequirements(), newTerms.getCitationRequirements(), + BundleUtil.getStringFromBundle("file.dataFilesTab.terms.list.termsOfUse.addInfo.citationRequirements")); + checkAndAddToChangeList(originalTerms.getDepositorRequirements(), newTerms.getDepositorRequirements(), + BundleUtil.getStringFromBundle("file.dataFilesTab.terms.list.termsOfUse.addInfo.depositorRequirements")); + checkAndAddToChangeList(originalTerms.getConditions(), newTerms.getConditions(), + BundleUtil.getStringFromBundle("file.dataFilesTab.terms.list.termsOfUse.addInfo.conditions")); + checkAndAddToChangeList(originalTerms.getDisclaimer(), newTerms.getDisclaimer(), + BundleUtil.getStringFromBundle("file.dataFilesTab.terms.list.termsOfUse.addInfo.disclaimer")); + checkAndAddToChangeList(originalTerms.getTermsOfAccess(), newTerms.getTermsOfAccess(), + 
BundleUtil.getStringFromBundle("file.dataFilesTab.terms.list.termsOfAccess.termsOfsAccess")); + checkAndAddToChangeList(originalTerms.getDataAccessPlace(), newTerms.getDataAccessPlace(), + BundleUtil.getStringFromBundle("file.dataFilesTab.terms.list.termsOfAccess.addInfo.dataAccessPlace")); + checkAndAddToChangeList(originalTerms.getOriginalArchive(), newTerms.getOriginalArchive(), + BundleUtil.getStringFromBundle("file.dataFilesTab.terms.list.termsOfAccess.addInfo.originalArchive")); + checkAndAddToChangeList(originalTerms.getAvailabilityStatus(), newTerms.getAvailabilityStatus(), + BundleUtil.getStringFromBundle("file.dataFilesTab.terms.list.termsOfAccess.addInfo.availabilityStatus")); + checkAndAddToChangeList(originalTerms.getContactForAccess(), newTerms.getContactForAccess(), + BundleUtil.getStringFromBundle("file.dataFilesTab.terms.list.termsOfAccess.addInfo.contactForAccess")); + checkAndAddToChangeList(originalTerms.getSizeOfCollection(), newTerms.getSizeOfCollection(), + BundleUtil.getStringFromBundle("file.dataFilesTab.terms.list.termsOfAccess.addInfo.sizeOfCollection")); + checkAndAddToChangeList(originalTerms.getStudyCompletion(), newTerms.getStudyCompletion(), + BundleUtil.getStringFromBundle("file.dataFilesTab.terms.list.termsOfAccess.addInfo.studyCompletion")); } - - private List addToTermsChangedList(List listIn, String label, String origVal, String newVal) { - String[] diffArray; - diffArray = new String[3]; - diffArray[0] = label; - diffArray[1] = origVal; - diffArray[2] = newVal; - listIn.add(diffArray); - return listIn; + + private void checkAndAddToChangeList(String originalTerm, String newTerm, + String termLabel) { + originalTerm = StringUtil.nullToEmpty(originalTerm); + newTerm = StringUtil.nullToEmpty(newTerm); + if(!originalTerm.equals(newTerm)) { + changedTermsAccess.add(new String[]{termLabel, originalTerm, newTerm}); + } } - private void addToList(List listIn, DatasetField dsfo, DatasetField dsfn) { DatasetField[] dsfArray; dsfArray = 
new DatasetField[2]; @@ -523,7 +328,7 @@ private void addToNoteSummary(DatasetField dsfo, int added, int deleted, int cha summaryDataForNote.add(noteArray); } - private boolean compareVarGroup(FileMetadata fmdo, FileMetadata fmdn) { + static boolean compareVarGroup(FileMetadata fmdo, FileMetadata fmdn) { List vglo = fmdo.getVarGroups(); List vgln = fmdn.getVarGroups(); @@ -533,7 +338,7 @@ private boolean compareVarGroup(FileMetadata fmdo, FileMetadata fmdn) { int count = 0; for (VarGroup vgo : vglo) { for (VarGroup vgn : vgln) { - if (!variableMetadataUtil.checkDiff(vgo.getLabel(), vgn.getLabel())) { + if (!VariableMetadataUtil.checkDiff(vgo.getLabel(), vgn.getLabel())) { Set dvo = vgo.getVarsInGroup(); Set dvn = vgn.getVarsInGroup(); if (dvo.equals(dvn)) { @@ -551,25 +356,34 @@ private boolean compareVarGroup(FileMetadata fmdo, FileMetadata fmdn) { } } - public static boolean compareFileMetadatas(FileMetadata fmdo, FileMetadata fmdn) { - + public static Map> compareFileMetadatas(FileMetadata fmdo, FileMetadata fmdn) { + Map> fileMetadataChanged = new HashMap<>(); if (!StringUtils.equals(StringUtil.nullToEmpty(fmdo.getDescription()), StringUtil.nullToEmpty(fmdn.getDescription()))) { - return false; + fileMetadataChanged.put("Description", + List.of(StringUtil.nullToEmpty(fmdo.getDescription()), StringUtil.nullToEmpty(fmdn.getDescription()))); } if (!StringUtils.equals(fmdo.getCategoriesByName().toString(), fmdn.getCategoriesByName().toString())) { - return false; + fileMetadataChanged.put("Categories", + List.of(fmdo.getCategoriesByName().toString(), fmdn.getCategoriesByName().toString())); } if (!StringUtils.equals(fmdo.getLabel(), fmdn.getLabel())) { - return false; + fileMetadataChanged.put("Label", + List.of(fmdo.getLabel(), fmdn.getLabel())); } if (!StringUtils.equals(fmdo.getProvFreeForm(), fmdn.getProvFreeForm())) { - return false; + fileMetadataChanged.put("ProvFreeForm", + List.of(fmdo.getProvFreeForm(), fmdn.getProvFreeForm())); } - - return 
fmdo.isRestricted() == fmdn.isRestricted(); + + if (fmdo.isRestricted() != fmdn.isRestricted()) { + fileMetadataChanged.put("isRestricted", + List.of(String.valueOf(fmdo.isRestricted()), String.valueOf(fmdn.isRestricted()))); + } + + return fileMetadataChanged; } private void compareValues(DatasetField originalField, DatasetField newField, boolean compound) { @@ -1819,4 +1633,138 @@ private static boolean fieldsAreDifferent(DatasetField originalField, DatasetFie } return false; } + + List getChangedVariableMetadata() { + return changedVariableMetadata; + } + + List getReplacedFiles() { + return replacedFiles; + } + public JsonObjectBuilder compareVersionsAsJson() { + JsonObjectBuilder job = new NullSafeJsonBuilder(); + JsonObjectBuilder jobVersion = new NullSafeJsonBuilder(); + jobVersion.add("versionNumber", originalVersion.getFriendlyVersionNumber()); + jobVersion.add("lastUpdatedDate", new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss'Z'").format(originalVersion.getLastUpdateTime())); + job.add("oldVersion", jobVersion); + jobVersion = new NullSafeJsonBuilder(); + jobVersion.add("versionNumber", newVersion.getFriendlyVersionNumber()); + jobVersion.add("lastUpdatedDate", new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss'Z'").format(newVersion.getLastUpdateTime())); + job.add("newVersion", jobVersion); + + if (!this.detailDataByBlock.isEmpty()) { + JsonArrayBuilder jabMetadata = Json.createArrayBuilder(); + for (List blocks : detailDataByBlock) { + JsonObjectBuilder jobMetadata = new NullSafeJsonBuilder(); + JsonArrayBuilder jab = Json.createArrayBuilder(); + String blockDisplay = blocks.get(0)[0].getDatasetFieldType().getMetadataBlock().getDisplayName(); + for (DatasetField[] dsfArray : blocks) { + JsonObjectBuilder jb = new NullSafeJsonBuilder(); + jb.add("fieldName", dsfArray[0].getDatasetFieldType().getTitle()); + if (dsfArray[0].getDatasetFieldType().isPrimitive()) { + jb.add("oldValue", dsfArray[0].getRawValue()); + } else { + jb.add("oldValue", 
dsfArray[0].getCompoundRawValue()); + } + if (dsfArray[1].getDatasetFieldType().isPrimitive()) { + jb.add("newValue", dsfArray[1].getRawValue()); + } else { + jb.add("newValue", dsfArray[1].getCompoundRawValue()); + } + jab.add(jb); + } + jobMetadata.add("blockName", blockDisplay); + jobMetadata.add("changed", jab); + jabMetadata.add(jobMetadata); + } + job.add("metadataChanges", jabMetadata); + } + + // Format added, removed, and modified files + JsonArrayBuilder jabDiffFiles = Json.createArrayBuilder(); + if (!addedFiles.isEmpty()) { + JsonArrayBuilder jab = Json.createArrayBuilder(); + addedFiles.forEach(f -> { + jab.add(filesDiffJson(f)); + }); + job.add("filesAdded", jab); + } + if (!removedFiles.isEmpty()) { + JsonArrayBuilder jab = Json.createArrayBuilder(); + removedFiles.forEach(f -> { + jab.add(filesDiffJson(f)); + }); + job.add("filesRemoved", jab); + } + if (!replacedFiles.isEmpty()) { + JsonArrayBuilder jabReplaced = Json.createArrayBuilder(); + replacedFiles.forEach(fm -> { + if (fm.length == 2) { + JsonObjectBuilder jobReplaced = new NullSafeJsonBuilder(); + jobReplaced.add("oldFile", filesDiffJson(fm[0])); + jobReplaced.add("newFile", filesDiffJson(fm[1])); + jabReplaced.add(jobReplaced); + } + }); + job.add("filesReplaced", jabReplaced); + } + if (!changedFileMetadata.isEmpty()) { + changedFileMetadataDiff.entrySet().forEach(entry -> { + JsonArrayBuilder jab = Json.createArrayBuilder(); + JsonObjectBuilder jobChanged = new NullSafeJsonBuilder(); + jobChanged.add("fileName", entry.getKey().getDataFile().getDisplayName()); + jobChanged.add(entry.getKey().getDataFile().getChecksumType().name(), entry.getKey().getDataFile().getChecksumValue()); + jobChanged.add("fileId", entry.getKey().getDataFile().getId()); + entry.getValue().entrySet().forEach(e -> { + JsonObjectBuilder jobDiffField = new NullSafeJsonBuilder(); + jobDiffField.add("fieldName",e.getKey()); + jobDiffField.add("oldValue",e.getValue().get(0)); + 
jobDiffField.add("newValue",e.getValue().get(1)); + jab.add(jobDiffField); + }); + jobChanged.add("changed", jab); + jabDiffFiles.add(jobChanged); + }); + job.add("fileChanges", jabDiffFiles); + } + + // Format Terms Of Access changes + if (!changedTermsAccess.isEmpty()) { + JsonObjectBuilder jobTOA = new NullSafeJsonBuilder(); + JsonArrayBuilder jab = Json.createArrayBuilder(); + changedTermsAccess.forEach(toa -> { + JsonObjectBuilder jobValue = new NullSafeJsonBuilder(); + jobValue.add("fieldName",toa[0]); + jobValue.add("oldValue",toa[1]); + jobValue.add("newValue",toa[2]); + jab.add(jobValue); + }); + jobTOA.add("changed", jab); + job.add("TermsOfAccess", jobTOA); + } + + return job; + } + private JsonObjectBuilder filesDiffJson(FileMetadata fileMetadata) { + NullSafeJsonBuilder job = new NullSafeJsonBuilder(); + DataFile df = fileMetadata.getDataFile(); + job.add("fileName", df.getDisplayName()) + .add("filePath", fileMetadata.getDirectoryLabel()) + .add(df.getChecksumType().name(), df.getChecksumValue()) + .add("type",df.getContentType()) + .add("fileId", df.getId()) + .add("description", fileMetadata.getDescription()) + .add("isRestricted", df.isRestricted()); + if (fileMetadata.getCategories() != null && !fileMetadata.getCategories().isEmpty()) { + JsonArrayBuilder jabCategories = Json.createArrayBuilder(); + fileMetadata.getCategories().forEach(c -> jabCategories.add(c.getName())); + job.add("categories", jabCategories); + } + if (df.getTags() != null && !df.getTags().isEmpty()) { + JsonArrayBuilder jabTags = Json.createArrayBuilder(); + df.getTags().forEach(t -> jabTags.add(t.getTypeLabel())); + job.add("tags", jabTags); + } + return job; + } } diff --git a/src/main/java/edu/harvard/iq/dataverse/Dataverse.java b/src/main/java/edu/harvard/iq/dataverse/Dataverse.java index 86e2e0207c1..1f11725e581 100644 --- a/src/main/java/edu/harvard/iq/dataverse/Dataverse.java +++ b/src/main/java/edu/harvard/iq/dataverse/Dataverse.java @@ -595,6 +595,10 @@ public void 
setMetadataBlocks(List metadataBlocks) { this.metadataBlocks = new ArrayList<>(metadataBlocks); } + public void clearMetadataBlocks() { + this.metadataBlocks.clear(); + } + public List getCitationDatasetFieldTypes() { return citationDatasetFieldTypes; } diff --git a/src/main/java/edu/harvard/iq/dataverse/DataverseFacetServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/DataverseFacetServiceBean.java index 5c77989f6d6..56f522fa816 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DataverseFacetServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/DataverseFacetServiceBean.java @@ -42,23 +42,24 @@ public void delete(DataverseFacet dataverseFacet) { cache.invalidate(); } - public void deleteFacetsFor( Dataverse d ) { - em.createNamedQuery("DataverseFacet.removeByOwnerId") - .setParameter("ownerId", d.getId()) - .executeUpdate(); + public void deleteFacetsFor(Dataverse d) { + em.createNamedQuery("DataverseFacet.removeByOwnerId") + .setParameter("ownerId", d.getId()) + .executeUpdate(); cache.invalidate(d.getId()); - - } - + + } + public DataverseFacet create(int displayOrder, DatasetFieldType fieldType, Dataverse ownerDv) { DataverseFacet dataverseFacet = new DataverseFacet(); - + dataverseFacet.setDisplayOrder(displayOrder); dataverseFacet.setDatasetFieldType(fieldType); dataverseFacet.setDataverse(ownerDv); - ownerDv.getDataverseFacets().add(dataverseFacet); em.persist(dataverseFacet); + + cache.invalidate(ownerDv.getId()); return dataverseFacet; } diff --git a/src/main/java/edu/harvard/iq/dataverse/DvObject.java b/src/main/java/edu/harvard/iq/dataverse/DvObject.java index a4882f772d6..5dab43fbdbd 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DvObject.java +++ b/src/main/java/edu/harvard/iq/dataverse/DvObject.java @@ -27,9 +27,9 @@ @NamedQuery(name = "DvObject.ownedObjectsById", query="SELECT COUNT(obj) FROM DvObject obj WHERE obj.owner.id=:id"), @NamedQuery(name = "DvObject.findByGlobalId", - query = "SELECT o FROM DvObject o WHERE 
o.identifier=:identifier and o.authority=:authority and o.protocol=:protocol and o.dtype=:dtype"), + query = "SELECT o FROM DvObject o WHERE UPPER(o.identifier)=UPPER(:identifier) and o.authority=:authority and o.protocol=:protocol and o.dtype=:dtype"), @NamedQuery(name = "DvObject.findIdByGlobalId", - query = "SELECT o.id FROM DvObject o WHERE o.identifier=:identifier and o.authority=:authority and o.protocol=:protocol and o.dtype=:dtype"), + query = "SELECT o.id FROM DvObject o WHERE UPPER(o.identifier)=UPPER(:identifier) and o.authority=:authority and o.protocol=:protocol and o.dtype=:dtype"), @NamedQuery(name = "DvObject.findByAlternativeGlobalId", query = "SELECT o FROM DvObject o, AlternativePersistentIdentifier a WHERE o.id = a.dvObject.id and a.identifier=:identifier and a.authority=:authority and a.protocol=:protocol and o.dtype=:dtype"), @@ -37,7 +37,7 @@ query = "SELECT o.id FROM DvObject o, AlternativePersistentIdentifier a WHERE o.id = a.dvObject.id and a.identifier=:identifier and a.authority=:authority and a.protocol=:protocol and o.dtype=:dtype"), @NamedQuery(name = "DvObject.findByProtocolIdentifierAuthority", - query = "SELECT o FROM DvObject o WHERE o.identifier=:identifier and o.authority=:authority and o.protocol=:protocol"), + query = "SELECT o FROM DvObject o WHERE UPPER(o.identifier)=UPPER(:identifier) and o.authority=:authority and o.protocol=:protocol"), @NamedQuery(name = "DvObject.findByOwnerId", query = "SELECT o FROM DvObject o WHERE o.owner.id=:ownerId order by o.dtype desc, o.id"), @NamedQuery(name = "DvObject.findByAuthenticatedUserId", @@ -53,7 +53,8 @@ @Table(indexes = {@Index(columnList="dtype") , @Index(columnList="owner_id") , @Index(columnList="creator_id") - , @Index(columnList="releaseuser_id")}, + , @Index(columnList="releaseuser_id") + , @Index(columnList="authority,protocol, UPPER(identifier)", name="INDEX_DVOBJECT_authority_protocol_upper_identifier")}, uniqueConstraints = {@UniqueConstraint(columnNames = 
{"authority,protocol,identifier"}),@UniqueConstraint(columnNames = {"owner_id,storageidentifier"})}) public abstract class DvObject extends DataverseEntity implements java.io.Serializable { diff --git a/src/main/java/edu/harvard/iq/dataverse/GuestbookPage.java b/src/main/java/edu/harvard/iq/dataverse/GuestbookPage.java index f54b1fb6117..0922af007fb 100644 --- a/src/main/java/edu/harvard/iq/dataverse/GuestbookPage.java +++ b/src/main/java/edu/harvard/iq/dataverse/GuestbookPage.java @@ -322,7 +322,7 @@ public String save() { logger.info("Guestbook Page Command Exception. Dataverse: " + dataverse.getName()); logger.info(ex.toString()); FacesContext.getCurrentInstance().addMessage(null, new FacesMessage(FacesMessage.SEVERITY_FATAL, BundleUtil.getStringFromBundle("guestbook.save.fail"), " - " + ex.toString())); - //logger.severe(ex.getMessage()); + return null; } editMode = null; String msg = (create)? BundleUtil.getStringFromBundle("guestbook.create"): BundleUtil.getStringFromBundle("guestbook.save"); diff --git a/src/main/java/edu/harvard/iq/dataverse/GuestbookResponse.java b/src/main/java/edu/harvard/iq/dataverse/GuestbookResponse.java index 1ea7d02791d..830c7740e34 100644 --- a/src/main/java/edu/harvard/iq/dataverse/GuestbookResponse.java +++ b/src/main/java/edu/harvard/iq/dataverse/GuestbookResponse.java @@ -15,6 +15,8 @@ import java.util.ArrayList; import java.util.Date; import java.util.List; + +import edu.harvard.iq.dataverse.validation.ValidateEmail; import jakarta.persistence.*; import jakarta.validation.constraints.Size; import java.util.Collections; @@ -80,8 +82,8 @@ public class GuestbookResponse implements Serializable { @Size(max = 255, message = "{guestbook.response.nameLength}") private String name; - // TODO: Consider using EMailValidator as well. 
@Size(max = 255, message = "{guestbook.response.nameLength}") + @ValidateEmail(message = "{user.invalidEmail}") private String email; @Size(max = 255, message = "{guestbook.response.nameLength}") @@ -198,7 +200,8 @@ public String getEmail() { } public void setEmail(String email) { - this.email = email; + // ValidateEmail requires NULL or valid email. Empty String will fail validation + this.email = (email == null || email.trim().isEmpty()) ? null : email; } public Guestbook getGuestbook() { diff --git a/src/main/java/edu/harvard/iq/dataverse/MailServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/MailServiceBean.java index 2995c0c5f47..c67a0293847 100644 --- a/src/main/java/edu/harvard/iq/dataverse/MailServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/MailServiceBean.java @@ -283,7 +283,7 @@ public Boolean sendNotificationEmail(UserNotification notification, String comme if (objectOfNotification != null){ String messageText = getMessageTextBasedOnNotification(notification, objectOfNotification, comment, requestor); String subjectText = MailUtil.getSubjectTextBasedOnNotification(notification, objectOfNotification); - if (!(messageText.isEmpty() || subjectText.isEmpty())){ + if (!(StringUtils.isEmpty(messageText) || StringUtils.isEmpty(subjectText))){ retval = sendSystemEmail(emailAddress, subjectText, messageText, isHtmlContent); } else { logger.warning("Skipping " + notification.getType() + " notification, because couldn't get valid message"); diff --git a/src/main/java/edu/harvard/iq/dataverse/Shib.java b/src/main/java/edu/harvard/iq/dataverse/Shib.java index a3dfbf81512..121d03ef0c7 100644 --- a/src/main/java/edu/harvard/iq/dataverse/Shib.java +++ b/src/main/java/edu/harvard/iq/dataverse/Shib.java @@ -421,6 +421,9 @@ private String getValueFromAssertion(String key) { Object attribute = request.getAttribute(key); if (attribute != null) { String attributeValue = attribute.toString(); + 
if(systemConfig.isShibAttributeCharacterSetConversionEnabled()) { + attributeValue = new String(attributeValue.getBytes(StandardCharsets.ISO_8859_1), StandardCharsets.UTF_8); + } String trimmedValue = attributeValue.trim(); if (!trimmedValue.isEmpty()) { logger.fine("The SAML assertion for \"" + key + "\" (optional) was \"" + attributeValue + "\" and was trimmed to \"" + trimmedValue + "\"."); diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Admin.java b/src/main/java/edu/harvard/iq/dataverse/api/Admin.java index 54e5eaf7b84..152bcf5066e 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/Admin.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/Admin.java @@ -1,28 +1,11 @@ package edu.harvard.iq.dataverse.api; -import edu.harvard.iq.dataverse.BannerMessage; -import edu.harvard.iq.dataverse.BannerMessageServiceBean; -import edu.harvard.iq.dataverse.BannerMessageText; -import edu.harvard.iq.dataverse.DataFile; -import edu.harvard.iq.dataverse.DataFileServiceBean; -import edu.harvard.iq.dataverse.Dataset; -import edu.harvard.iq.dataverse.DatasetServiceBean; -import edu.harvard.iq.dataverse.DatasetVersion; -import edu.harvard.iq.dataverse.DatasetVersionServiceBean; -import edu.harvard.iq.dataverse.Dataverse; -import edu.harvard.iq.dataverse.DataverseRequestServiceBean; -import edu.harvard.iq.dataverse.DataverseServiceBean; -import edu.harvard.iq.dataverse.DataverseSession; -import edu.harvard.iq.dataverse.DvObject; -import edu.harvard.iq.dataverse.DvObjectServiceBean; +import edu.harvard.iq.dataverse.*; import edu.harvard.iq.dataverse.api.auth.AuthRequired; import edu.harvard.iq.dataverse.settings.JvmSettings; import edu.harvard.iq.dataverse.util.StringUtil; +import edu.harvard.iq.dataverse.util.json.NullSafeJsonBuilder; import edu.harvard.iq.dataverse.validation.EMailValidator; -import edu.harvard.iq.dataverse.EjbDataverseEngine; -import edu.harvard.iq.dataverse.Template; -import edu.harvard.iq.dataverse.TemplateServiceBean; -import 
edu.harvard.iq.dataverse.UserServiceBean; import edu.harvard.iq.dataverse.actionlogging.ActionLogRecord; import edu.harvard.iq.dataverse.api.dto.RoleDTO; import edu.harvard.iq.dataverse.authorization.AuthenticatedUserDisplayInfo; @@ -66,8 +49,9 @@ import java.io.InputStream; import java.io.StringReader; import java.nio.charset.StandardCharsets; -import java.util.Map; +import java.util.*; import java.util.Map.Entry; +import java.util.function.Predicate; import java.util.logging.Level; import java.util.logging.Logger; import jakarta.ejb.EJB; @@ -81,7 +65,6 @@ import org.apache.commons.io.IOUtils; -import java.util.List; import edu.harvard.iq.dataverse.authorization.AuthTestDataServiceBean; import edu.harvard.iq.dataverse.authorization.AuthenticationProvidersRegistrationServiceBean; import edu.harvard.iq.dataverse.authorization.DataverseRole; @@ -118,9 +101,7 @@ import static edu.harvard.iq.dataverse.util.json.JsonPrinter.json; import static edu.harvard.iq.dataverse.util.json.JsonPrinter.rolesToJson; import static edu.harvard.iq.dataverse.util.json.JsonPrinter.toJsonArray; -import java.util.ArrayList; -import java.util.Arrays; -import java.util.Date; + import jakarta.inject.Inject; import jakarta.json.JsonArray; import jakarta.persistence.Query; @@ -128,7 +109,6 @@ import jakarta.ws.rs.WebApplicationException; import jakarta.ws.rs.core.StreamingOutput; import java.nio.file.Paths; -import java.util.TreeMap; /** * Where the secure, setup API calls live. 
@@ -139,7 +119,7 @@ @Path("admin") public class Admin extends AbstractApiBean { - private static final Logger logger = Logger.getLogger(Admin.class.getName()); + private static final Logger logger = Logger.getLogger(Admin.class.getName()); @EJB AuthenticationProvidersRegistrationServiceBean authProvidersRegistrationSvc; @@ -184,53 +164,53 @@ public class Admin extends AbstractApiBean { @Inject DataverseSession session; - public static final String listUsersPartialAPIPath = "list-users"; - public static final String listUsersFullAPIPath = "/api/admin/" + listUsersPartialAPIPath; - - @Path("settings") - @GET - public Response listAllSettings() { - JsonObjectBuilder bld = jsonObjectBuilder(); - settingsSvc.listAll().forEach(s -> bld.add(s.getName(), s.getContent())); - return ok(bld); - } - - @Path("settings/{name}") - @PUT - public Response putSetting(@PathParam("name") String name, String content) { - Setting s = settingsSvc.set(name, content); - return ok(jsonObjectBuilder().add(s.getName(), s.getContent())); - } - - @Path("settings/{name}/lang/{lang}") - @PUT - public Response putSettingLang(@PathParam("name") String name, @PathParam("lang") String lang, String content) { - Setting s = settingsSvc.set(name, lang, content); - return ok("Setting " + name + " - " + lang + " - added."); - } - - @Path("settings/{name}") - @GET - public Response getSetting(@PathParam("name") String name) { - String s = settingsSvc.get(name); - - return (s != null) ? 
ok(s) : notFound("Setting " + name + " not found"); - } - - @Path("settings/{name}") - @DELETE - public Response deleteSetting(@PathParam("name") String name) { - settingsSvc.delete(name); - - return ok("Setting " + name + " deleted."); - } - - @Path("settings/{name}/lang/{lang}") - @DELETE - public Response deleteSettingLang(@PathParam("name") String name, @PathParam("lang") String lang) { - settingsSvc.delete(name, lang); - return ok("Setting " + name + " - " + lang + " deleted."); - } + public static final String listUsersPartialAPIPath = "list-users"; + public static final String listUsersFullAPIPath = "/api/admin/" + listUsersPartialAPIPath; + + @Path("settings") + @GET + public Response listAllSettings() { + JsonObjectBuilder bld = jsonObjectBuilder(); + settingsSvc.listAll().forEach(s -> bld.add(s.getName(), s.getContent())); + return ok(bld); + } + + @Path("settings/{name}") + @PUT + public Response putSetting(@PathParam("name") String name, String content) { + Setting s = settingsSvc.set(name, content); + return ok(jsonObjectBuilder().add(s.getName(), s.getContent())); + } + + @Path("settings/{name}/lang/{lang}") + @PUT + public Response putSettingLang(@PathParam("name") String name, @PathParam("lang") String lang, String content) { + Setting s = settingsSvc.set(name, lang, content); + return ok("Setting " + name + " - " + lang + " - added."); + } + + @Path("settings/{name}") + @GET + public Response getSetting(@PathParam("name") String name) { + String s = settingsSvc.get(name); + + return (s != null) ? 
ok(s) : notFound("Setting " + name + " not found"); + } + + @Path("settings/{name}") + @DELETE + public Response deleteSetting(@PathParam("name") String name) { + settingsSvc.delete(name); + + return ok("Setting " + name + " deleted."); + } + + @Path("settings/{name}/lang/{lang}") + @DELETE + public Response deleteSettingLang(@PathParam("name") String name, @PathParam("lang") String lang) { + settingsSvc.delete(name, lang); + return ok("Setting " + name + " - " + lang + " deleted."); + } @Path("template/{id}") @DELETE @@ -301,130 +281,130 @@ public Response findTemplates(@PathParam("alias") String alias) { } - @Path("authenticationProviderFactories") - @GET - public Response listAuthProviderFactories() { - return ok(authSvc.listProviderFactories().stream() - .map(f -> jsonObjectBuilder().add("alias", f.getAlias()).add("info", f.getInfo())) - .collect(toJsonArray())); - } - - @Path("authenticationProviders") - @GET - public Response listAuthProviders() { - return ok(em.createNamedQuery("AuthenticationProviderRow.findAll", AuthenticationProviderRow.class) - .getResultList().stream().map(r -> json(r)).collect(toJsonArray())); - } - - @Path("authenticationProviders") - @POST - public Response addProvider(AuthenticationProviderRow row) { - try { - AuthenticationProviderRow managed = em.find(AuthenticationProviderRow.class, row.getId()); - if (managed != null) { - managed = em.merge(row); - } else { - em.persist(row); - managed = row; - } - if (managed.isEnabled()) { - AuthenticationProvider provider = authProvidersRegistrationSvc.loadProvider(managed); - authProvidersRegistrationSvc.deregisterProvider(provider.getId()); - authProvidersRegistrationSvc.registerProvider(provider); - } - return created("/api/admin/authenticationProviders/" + managed.getId(), json(managed)); - } catch (AuthorizationSetupException e) { - return error(Response.Status.INTERNAL_SERVER_ERROR, e.getMessage()); - } - } - - @Path("authenticationProviders/{id}") - @GET - public Response 
showProvider(@PathParam("id") String id) { - AuthenticationProviderRow row = em.find(AuthenticationProviderRow.class, id); - return (row != null) ? ok(json(row)) - : error(Status.NOT_FOUND, "Can't find authetication provider with id '" + id + "'"); - } - - @POST - @Path("authenticationProviders/{id}/:enabled") - public Response enableAuthenticationProvider_deprecated(@PathParam("id") String id, String body) { - return enableAuthenticationProvider(id, body); - } - - @PUT - @Path("authenticationProviders/{id}/enabled") - @Produces("application/json") - public Response enableAuthenticationProvider(@PathParam("id") String id, String body) { - body = body.trim(); - if (!Util.isBoolean(body)) { - return error(Response.Status.BAD_REQUEST, "Illegal value '" + body + "'. Use 'true' or 'false'"); - } - boolean enable = Util.isTrue(body); - - AuthenticationProviderRow row = em.find(AuthenticationProviderRow.class, id); - if (row == null) { - return notFound("Can't find authentication provider with id '" + id + "'"); - } - - row.setEnabled(enable); - em.merge(row); - - if (enable) { - // enable a provider - if (authSvc.getAuthenticationProvider(id) != null) { - return ok(String.format("Authentication provider '%s' already enabled", id)); - } - try { - authProvidersRegistrationSvc.registerProvider(authProvidersRegistrationSvc.loadProvider(row)); - return ok(String.format("Authentication Provider %s enabled", row.getId())); - - } catch (AuthenticationProviderFactoryNotFoundException ex) { - return notFound(String.format("Can't instantiate provider, as there's no factory with alias %s", - row.getFactoryAlias())); - } catch (AuthorizationSetupException ex) { - logger.log(Level.WARNING, "Error instantiating authentication provider: " + ex.getMessage(), ex); - return error(Status.INTERNAL_SERVER_ERROR, - String.format("Can't instantiate provider: %s", ex.getMessage())); - } - - } else { - // disable a provider - authProvidersRegistrationSvc.deregisterProvider(id); - return 
ok("Authentication Provider '" + id + "' disabled. " - + (authSvc.getAuthenticationProviderIds().isEmpty() - ? "WARNING: no enabled authentication providers left." - : "")); - } - } - - @GET - @Path("authenticationProviders/{id}/enabled") - public Response checkAuthenticationProviderEnabled(@PathParam("id") String id) { - List prvs = em - .createNamedQuery("AuthenticationProviderRow.findById", AuthenticationProviderRow.class) - .setParameter("id", id).getResultList(); - if (prvs.isEmpty()) { - return notFound("Can't find a provider with id '" + id + "'."); - } else { - return ok(Boolean.toString(prvs.get(0).isEnabled())); - } - } - - @DELETE - @Path("authenticationProviders/{id}/") - public Response deleteAuthenticationProvider(@PathParam("id") String id) { - authProvidersRegistrationSvc.deregisterProvider(id); - AuthenticationProviderRow row = em.find(AuthenticationProviderRow.class, id); - if (row != null) { - em.remove(row); - } - - return ok("AuthenticationProvider " + id + " deleted. " - + (authSvc.getAuthenticationProviderIds().isEmpty() - ? "WARNING: no enabled authentication providers left." 
- : "")); - } + @Path("authenticationProviderFactories") + @GET + public Response listAuthProviderFactories() { + return ok(authSvc.listProviderFactories().stream() + .map(f -> jsonObjectBuilder().add("alias", f.getAlias()).add("info", f.getInfo())) + .collect(toJsonArray())); + } + + @Path("authenticationProviders") + @GET + public Response listAuthProviders() { + return ok(em.createNamedQuery("AuthenticationProviderRow.findAll", AuthenticationProviderRow.class) + .getResultList().stream().map(r -> json(r)).collect(toJsonArray())); + } + + @Path("authenticationProviders") + @POST + public Response addProvider(AuthenticationProviderRow row) { + try { + AuthenticationProviderRow managed = em.find(AuthenticationProviderRow.class, row.getId()); + if (managed != null) { + managed = em.merge(row); + } else { + em.persist(row); + managed = row; + } + if (managed.isEnabled()) { + AuthenticationProvider provider = authProvidersRegistrationSvc.loadProvider(managed); + authProvidersRegistrationSvc.deregisterProvider(provider.getId()); + authProvidersRegistrationSvc.registerProvider(provider); + } + return created("/api/admin/authenticationProviders/" + managed.getId(), json(managed)); + } catch (AuthorizationSetupException e) { + return error(Response.Status.INTERNAL_SERVER_ERROR, e.getMessage()); + } + } + + @Path("authenticationProviders/{id}") + @GET + public Response showProvider(@PathParam("id") String id) { + AuthenticationProviderRow row = em.find(AuthenticationProviderRow.class, id); + return (row != null) ? 
ok(json(row)) + : error(Status.NOT_FOUND, "Can't find authetication provider with id '" + id + "'"); + } + + @POST + @Path("authenticationProviders/{id}/:enabled") + public Response enableAuthenticationProvider_deprecated(@PathParam("id") String id, String body) { + return enableAuthenticationProvider(id, body); + } + + @PUT + @Path("authenticationProviders/{id}/enabled") + @Produces("application/json") + public Response enableAuthenticationProvider(@PathParam("id") String id, String body) { + body = body.trim(); + if (!Util.isBoolean(body)) { + return error(Response.Status.BAD_REQUEST, "Illegal value '" + body + "'. Use 'true' or 'false'"); + } + boolean enable = Util.isTrue(body); + + AuthenticationProviderRow row = em.find(AuthenticationProviderRow.class, id); + if (row == null) { + return notFound("Can't find authentication provider with id '" + id + "'"); + } + + row.setEnabled(enable); + em.merge(row); + + if (enable) { + // enable a provider + if (authSvc.getAuthenticationProvider(id) != null) { + return ok(String.format("Authentication provider '%s' already enabled", id)); + } + try { + authProvidersRegistrationSvc.registerProvider(authProvidersRegistrationSvc.loadProvider(row)); + return ok(String.format("Authentication Provider %s enabled", row.getId())); + + } catch (AuthenticationProviderFactoryNotFoundException ex) { + return notFound(String.format("Can't instantiate provider, as there's no factory with alias %s", + row.getFactoryAlias())); + } catch (AuthorizationSetupException ex) { + logger.log(Level.WARNING, "Error instantiating authentication provider: " + ex.getMessage(), ex); + return error(Status.INTERNAL_SERVER_ERROR, + String.format("Can't instantiate provider: %s", ex.getMessage())); + } + + } else { + // disable a provider + authProvidersRegistrationSvc.deregisterProvider(id); + return ok("Authentication Provider '" + id + "' disabled. " + + (authSvc.getAuthenticationProviderIds().isEmpty() + ? 
"WARNING: no enabled authentication providers left." + : "")); + } + } + + @GET + @Path("authenticationProviders/{id}/enabled") + public Response checkAuthenticationProviderEnabled(@PathParam("id") String id) { + List prvs = em + .createNamedQuery("AuthenticationProviderRow.findById", AuthenticationProviderRow.class) + .setParameter("id", id).getResultList(); + if (prvs.isEmpty()) { + return notFound("Can't find a provider with id '" + id + "'."); + } else { + return ok(Boolean.toString(prvs.get(0).isEnabled())); + } + } + + @DELETE + @Path("authenticationProviders/{id}/") + public Response deleteAuthenticationProvider(@PathParam("id") String id) { + authProvidersRegistrationSvc.deregisterProvider(id); + AuthenticationProviderRow row = em.find(AuthenticationProviderRow.class, id); + if (row != null) { + em.remove(row); + } + + return ok("AuthenticationProvider " + id + " deleted. " + + (authSvc.getAuthenticationProviderIds().isEmpty() + ? "WARNING: no enabled authentication providers left." 
+ : "")); + } @GET @Path("authenticatedUsers/{identifier}/") @@ -509,520 +489,520 @@ private Response deactivateAuthenticatedUser(AuthenticatedUser userToDisable) { } } - @POST - @Path("publishDataverseAsCreator/{id}") - public Response publishDataverseAsCreator(@PathParam("id") long id) { - try { - Dataverse dataverse = dataverseSvc.find(id); - if (dataverse != null) { - AuthenticatedUser authenticatedUser = dataverse.getCreator(); - return ok(json(execCommand( - new PublishDataverseCommand(createDataverseRequest(authenticatedUser), dataverse)))); - } else { - return error(Status.BAD_REQUEST, "Could not find dataverse with id " + id); - } - } catch (WrappedResponse wr) { - return wr.getResponse(); - } - } - - @Deprecated - @GET - @AuthRequired - @Path("authenticatedUsers") - public Response listAuthenticatedUsers(@Context ContainerRequestContext crc) { - try { - AuthenticatedUser user = getRequestAuthenticatedUserOrDie(crc); - if (!user.isSuperuser()) { - return error(Response.Status.FORBIDDEN, "Superusers only."); - } - } catch (WrappedResponse ex) { - return error(Response.Status.FORBIDDEN, "Superusers only."); - } - JsonArrayBuilder userArray = Json.createArrayBuilder(); - authSvc.findAllAuthenticatedUsers().stream().forEach((user) -> { - userArray.add(json(user)); - }); - return ok(userArray); - } - - @GET - @AuthRequired - @Path(listUsersPartialAPIPath) - @Produces({ "application/json" }) - public Response filterAuthenticatedUsers( - @Context ContainerRequestContext crc, - @QueryParam("searchTerm") String searchTerm, - @QueryParam("selectedPage") Integer selectedPage, - @QueryParam("itemsPerPage") Integer itemsPerPage, - @QueryParam("sortKey") String sortKey - ) { - - User authUser = getRequestUser(crc); - - if (!authUser.isSuperuser()) { - return error(Response.Status.FORBIDDEN, - BundleUtil.getStringFromBundle("dashboard.list_users.api.auth.not_superuser")); - } - - UserListMaker userListMaker = new UserListMaker(userService); - - // String sortKey = null; 
- UserListResult userListResult = userListMaker.runUserSearch(searchTerm, itemsPerPage, selectedPage, sortKey); - - return ok(userListResult.toJSON()); - } - - /** - * @todo Make this support creation of BuiltInUsers. - * - * @todo Add way more error checking. Only the happy path is tested by AdminIT. - */ - @POST - @Path("authenticatedUsers") - public Response createAuthenicatedUser(JsonObject jsonObject) { - logger.fine("JSON in: " + jsonObject); - String persistentUserId = jsonObject.getString("persistentUserId"); - String identifier = jsonObject.getString("identifier"); - String proposedAuthenticatedUserIdentifier = identifier.replaceFirst("@", ""); - String firstName = jsonObject.getString("firstName"); - String lastName = jsonObject.getString("lastName"); - String emailAddress = jsonObject.getString("email"); - String position = null; - String affiliation = null; - UserRecordIdentifier userRecordId = new UserRecordIdentifier(jsonObject.getString("authenticationProviderId"), - persistentUserId); - AuthenticatedUserDisplayInfo userDisplayInfo = new AuthenticatedUserDisplayInfo(firstName, lastName, - emailAddress, affiliation, position); - boolean generateUniqueIdentifier = true; - AuthenticatedUser authenticatedUser = authSvc.createAuthenticatedUser(userRecordId, - proposedAuthenticatedUserIdentifier, userDisplayInfo, true); - return ok(json(authenticatedUser)); - } + @POST + @Path("publishDataverseAsCreator/{id}") + public Response publishDataverseAsCreator(@PathParam("id") long id) { + try { + Dataverse dataverse = dataverseSvc.find(id); + if (dataverse != null) { + AuthenticatedUser authenticatedUser = dataverse.getCreator(); + return ok(json(execCommand( + new PublishDataverseCommand(createDataverseRequest(authenticatedUser), dataverse)))); + } else { + return error(Status.BAD_REQUEST, "Could not find dataverse with id " + id); + } + } catch (WrappedResponse wr) { + return wr.getResponse(); + } + } + + @Deprecated + @GET + @AuthRequired + 
@Path("authenticatedUsers") + public Response listAuthenticatedUsers(@Context ContainerRequestContext crc) { + try { + AuthenticatedUser user = getRequestAuthenticatedUserOrDie(crc); + if (!user.isSuperuser()) { + return error(Response.Status.FORBIDDEN, "Superusers only."); + } + } catch (WrappedResponse ex) { + return error(Response.Status.FORBIDDEN, "Superusers only."); + } + JsonArrayBuilder userArray = Json.createArrayBuilder(); + authSvc.findAllAuthenticatedUsers().stream().forEach((user) -> { + userArray.add(json(user)); + }); + return ok(userArray); + } + + @GET + @AuthRequired + @Path(listUsersPartialAPIPath) + @Produces({ "application/json" }) + public Response filterAuthenticatedUsers( + @Context ContainerRequestContext crc, + @QueryParam("searchTerm") String searchTerm, + @QueryParam("selectedPage") Integer selectedPage, + @QueryParam("itemsPerPage") Integer itemsPerPage, + @QueryParam("sortKey") String sortKey + ) { + + User authUser = getRequestUser(crc); + + if (!authUser.isSuperuser()) { + return error(Response.Status.FORBIDDEN, + BundleUtil.getStringFromBundle("dashboard.list_users.api.auth.not_superuser")); + } + + UserListMaker userListMaker = new UserListMaker(userService); + + // String sortKey = null; + UserListResult userListResult = userListMaker.runUserSearch(searchTerm, itemsPerPage, selectedPage, sortKey); + + return ok(userListResult.toJSON()); + } + + /** + * @todo Make this support creation of BuiltInUsers. + * + * @todo Add way more error checking. Only the happy path is tested by AdminIT. 
+ */ + @POST + @Path("authenticatedUsers") + public Response createAuthenicatedUser(JsonObject jsonObject) { + logger.fine("JSON in: " + jsonObject); + String persistentUserId = jsonObject.getString("persistentUserId"); + String identifier = jsonObject.getString("identifier"); + String proposedAuthenticatedUserIdentifier = identifier.replaceFirst("@", ""); + String firstName = jsonObject.getString("firstName"); + String lastName = jsonObject.getString("lastName"); + String emailAddress = jsonObject.getString("email"); + String position = null; + String affiliation = null; + UserRecordIdentifier userRecordId = new UserRecordIdentifier(jsonObject.getString("authenticationProviderId"), + persistentUserId); + AuthenticatedUserDisplayInfo userDisplayInfo = new AuthenticatedUserDisplayInfo(firstName, lastName, + emailAddress, affiliation, position); + boolean generateUniqueIdentifier = true; + AuthenticatedUser authenticatedUser = authSvc.createAuthenticatedUser(userRecordId, + proposedAuthenticatedUserIdentifier, userDisplayInfo, true); + return ok(json(authenticatedUser)); + } //TODO: Delete this endpoint after 4.9.3. Was updated with change in docs. --MAD - /** - * curl -X PUT -d "shib@mailinator.com" - * http://localhost:8080/api/admin/authenticatedUsers/id/11/convertShibToBuiltIn - * - * @deprecated We have documented this API endpoint so we'll keep in around for - * a while but we should encourage everyone to switch to the - * "convertRemoteToBuiltIn" endpoint and then remove this - * Shib-specfic one. 
- */ - @PUT - @AuthRequired - @Path("authenticatedUsers/id/{id}/convertShibToBuiltIn") - @Deprecated - public Response convertShibUserToBuiltin(@Context ContainerRequestContext crc, @PathParam("id") Long id, String newEmailAddress) { - try { - AuthenticatedUser user = getRequestAuthenticatedUserOrDie(crc); - if (!user.isSuperuser()) { - return error(Response.Status.FORBIDDEN, "Superusers only."); - } - } catch (WrappedResponse ex) { - return error(Response.Status.FORBIDDEN, "Superusers only."); - } - try { - BuiltinUser builtinUser = authSvc.convertRemoteToBuiltIn(id, newEmailAddress); - if (builtinUser == null) { - return error(Response.Status.BAD_REQUEST, "User id " + id - + " could not be converted from Shibboleth to BuiltIn. An Exception was not thrown."); - } + /** + * curl -X PUT -d "shib@mailinator.com" + * http://localhost:8080/api/admin/authenticatedUsers/id/11/convertShibToBuiltIn + * + * @deprecated We have documented this API endpoint so we'll keep in around for + * a while but we should encourage everyone to switch to the + * "convertRemoteToBuiltIn" endpoint and then remove this + * Shib-specfic one. + */ + @PUT + @AuthRequired + @Path("authenticatedUsers/id/{id}/convertShibToBuiltIn") + @Deprecated + public Response convertShibUserToBuiltin(@Context ContainerRequestContext crc, @PathParam("id") Long id, String newEmailAddress) { + try { + AuthenticatedUser user = getRequestAuthenticatedUserOrDie(crc); + if (!user.isSuperuser()) { + return error(Response.Status.FORBIDDEN, "Superusers only."); + } + } catch (WrappedResponse ex) { + return error(Response.Status.FORBIDDEN, "Superusers only."); + } + try { + BuiltinUser builtinUser = authSvc.convertRemoteToBuiltIn(id, newEmailAddress); + if (builtinUser == null) { + return error(Response.Status.BAD_REQUEST, "User id " + id + + " could not be converted from Shibboleth to BuiltIn. 
An Exception was not thrown."); + } AuthenticatedUser authUser = authSvc.getAuthenticatedUser(builtinUser.getUserName()); - JsonObjectBuilder output = Json.createObjectBuilder(); - output.add("email", authUser.getEmail()); - output.add("username", builtinUser.getUserName()); - return ok(output); - } catch (Throwable ex) { - StringBuilder sb = new StringBuilder(); - sb.append(ex + " "); - while (ex.getCause() != null) { - ex = ex.getCause(); - sb.append(ex + " "); - } - String msg = "User id " + id - + " could not be converted from Shibboleth to BuiltIn. Details from Exception: " + sb; - logger.info(msg); - return error(Response.Status.BAD_REQUEST, msg); - } - } - - @PUT - @AuthRequired - @Path("authenticatedUsers/id/{id}/convertRemoteToBuiltIn") - public Response convertOAuthUserToBuiltin(@Context ContainerRequestContext crc, @PathParam("id") Long id, String newEmailAddress) { - try { - AuthenticatedUser user = getRequestAuthenticatedUserOrDie(crc); - if (!user.isSuperuser()) { - return error(Response.Status.FORBIDDEN, "Superusers only."); - } - } catch (WrappedResponse ex) { - return error(Response.Status.FORBIDDEN, "Superusers only."); - } - try { - BuiltinUser builtinUser = authSvc.convertRemoteToBuiltIn(id, newEmailAddress); + JsonObjectBuilder output = Json.createObjectBuilder(); + output.add("email", authUser.getEmail()); + output.add("username", builtinUser.getUserName()); + return ok(output); + } catch (Throwable ex) { + StringBuilder sb = new StringBuilder(); + sb.append(ex + " "); + while (ex.getCause() != null) { + ex = ex.getCause(); + sb.append(ex + " "); + } + String msg = "User id " + id + + " could not be converted from Shibboleth to BuiltIn. 
Details from Exception: " + sb; + logger.info(msg); + return error(Response.Status.BAD_REQUEST, msg); + } + } + + @PUT + @AuthRequired + @Path("authenticatedUsers/id/{id}/convertRemoteToBuiltIn") + public Response convertOAuthUserToBuiltin(@Context ContainerRequestContext crc, @PathParam("id") Long id, String newEmailAddress) { + try { + AuthenticatedUser user = getRequestAuthenticatedUserOrDie(crc); + if (!user.isSuperuser()) { + return error(Response.Status.FORBIDDEN, "Superusers only."); + } + } catch (WrappedResponse ex) { + return error(Response.Status.FORBIDDEN, "Superusers only."); + } + try { + BuiltinUser builtinUser = authSvc.convertRemoteToBuiltIn(id, newEmailAddress); //AuthenticatedUser authUser = authService.getAuthenticatedUser(aUser.getUserName()); - if (builtinUser == null) { - return error(Response.Status.BAD_REQUEST, "User id " + id - + " could not be converted from remote to BuiltIn. An Exception was not thrown."); - } + if (builtinUser == null) { + return error(Response.Status.BAD_REQUEST, "User id " + id + + " could not be converted from remote to BuiltIn. An Exception was not thrown."); + } AuthenticatedUser authUser = authSvc.getAuthenticatedUser(builtinUser.getUserName()); - JsonObjectBuilder output = Json.createObjectBuilder(); - output.add("email", authUser.getEmail()); - output.add("username", builtinUser.getUserName()); - return ok(output); - } catch (Throwable ex) { - StringBuilder sb = new StringBuilder(); - sb.append(ex + " "); - while (ex.getCause() != null) { - ex = ex.getCause(); - sb.append(ex + " "); - } - String msg = "User id " + id + " could not be converted from remote to BuiltIn. Details from Exception: " - + sb; - logger.info(msg); - return error(Response.Status.BAD_REQUEST, msg); - } - } - - /** - * This is used in testing via AdminIT.java but we don't expect sysadmins to use - * this. 
- */ - @PUT - @AuthRequired - @Path("authenticatedUsers/convert/builtin2shib") - public Response builtin2shib(@Context ContainerRequestContext crc, String content) { - logger.info("entering builtin2shib..."); - try { - AuthenticatedUser userToRunThisMethod = getRequestAuthenticatedUserOrDie(crc); - if (!userToRunThisMethod.isSuperuser()) { - return error(Response.Status.FORBIDDEN, "Superusers only."); - } - } catch (WrappedResponse ex) { - return error(Response.Status.FORBIDDEN, "Superusers only."); - } - boolean disabled = false; - if (disabled) { - return error(Response.Status.BAD_REQUEST, "API endpoint disabled."); - } - AuthenticatedUser builtInUserToConvert = null; - String emailToFind; - String password; - String authuserId = "0"; // could let people specify id on authuser table. probably better to let them - // tell us their - String newEmailAddressToUse; - try { - String[] args = content.split(":"); - emailToFind = args[0]; - password = args[1]; - newEmailAddressToUse = args[2]; - // authuserId = args[666]; - } catch (ArrayIndexOutOfBoundsException ex) { - return error(Response.Status.BAD_REQUEST, "Problem with content <<<" + content + ">>>: " + ex.toString()); - } - AuthenticatedUser existingAuthUserFoundByEmail = shibService.findAuthUserByEmail(emailToFind); - String existing = "NOT FOUND"; - if (existingAuthUserFoundByEmail != null) { - builtInUserToConvert = existingAuthUserFoundByEmail; - existing = existingAuthUserFoundByEmail.getIdentifier(); - } else { - long longToLookup = Long.parseLong(authuserId); - AuthenticatedUser specifiedUserToConvert = authSvc.findByID(longToLookup); - if (specifiedUserToConvert != null) { - builtInUserToConvert = specifiedUserToConvert; - } else { - return error(Response.Status.BAD_REQUEST, - "No user to convert. 
We couldn't find a *single* existing user account based on " + emailToFind - + " and no user was found using specified id " + longToLookup); - } - } - String shibProviderId = ShibAuthenticationProvider.PROVIDER_ID; - Map randomUser = authTestDataService.getRandomUser(); - // String eppn = UUID.randomUUID().toString().substring(0, 8); - String eppn = randomUser.get("eppn"); - String idPEntityId = randomUser.get("idp"); - String notUsed = null; - String separator = "|"; - UserIdentifier newUserIdentifierInLookupTable = new UserIdentifier(idPEntityId + separator + eppn, notUsed); - String overwriteFirstName = randomUser.get("firstName"); - String overwriteLastName = randomUser.get("lastName"); - String overwriteEmail = randomUser.get("email"); - overwriteEmail = newEmailAddressToUse; - logger.info("overwriteEmail: " + overwriteEmail); - boolean validEmail = EMailValidator.isEmailValid(overwriteEmail); - if (!validEmail) { - // See https://github.com/IQSS/dataverse/issues/2998 - return error(Response.Status.BAD_REQUEST, "invalid email: " + overwriteEmail); - } - /** - * @todo If affiliation is not null, put it in RoleAssigneeDisplayInfo - * constructor. - */ - /** - * Here we are exercising (via an API test) shibService.getAffiliation with the - * TestShib IdP and a non-production DevShibAccountType. 
- */ - idPEntityId = ShibUtil.testShibIdpEntityId; - String overwriteAffiliation = shibService.getAffiliation(idPEntityId, - ShibServiceBean.DevShibAccountType.RANDOM); - logger.info("overwriteAffiliation: " + overwriteAffiliation); - /** - * @todo Find a place to put "position" in the authenticateduser table: - * https://github.com/IQSS/dataverse/issues/1444#issuecomment-74134694 - */ - String overwritePosition = "staff;student"; - AuthenticatedUserDisplayInfo displayInfo = new AuthenticatedUserDisplayInfo(overwriteFirstName, - overwriteLastName, overwriteEmail, overwriteAffiliation, overwritePosition); - JsonObjectBuilder response = Json.createObjectBuilder(); - JsonArrayBuilder problems = Json.createArrayBuilder(); - if (password != null) { - response.add("password supplied", password); - boolean knowsExistingPassword = false; - BuiltinUser oldBuiltInUser = builtinUserService.findByUserName(builtInUserToConvert.getUserIdentifier()); - if (oldBuiltInUser != null) { + JsonObjectBuilder output = Json.createObjectBuilder(); + output.add("email", authUser.getEmail()); + output.add("username", builtinUser.getUserName()); + return ok(output); + } catch (Throwable ex) { + StringBuilder sb = new StringBuilder(); + sb.append(ex + " "); + while (ex.getCause() != null) { + ex = ex.getCause(); + sb.append(ex + " "); + } + String msg = "User id " + id + " could not be converted from remote to BuiltIn. Details from Exception: " + + sb; + logger.info(msg); + return error(Response.Status.BAD_REQUEST, msg); + } + } + + /** + * This is used in testing via AdminIT.java but we don't expect sysadmins to use + * this. 
+ */ + @PUT + @AuthRequired + @Path("authenticatedUsers/convert/builtin2shib") + public Response builtin2shib(@Context ContainerRequestContext crc, String content) { + logger.info("entering builtin2shib..."); + try { + AuthenticatedUser userToRunThisMethod = getRequestAuthenticatedUserOrDie(crc); + if (!userToRunThisMethod.isSuperuser()) { + return error(Response.Status.FORBIDDEN, "Superusers only."); + } + } catch (WrappedResponse ex) { + return error(Response.Status.FORBIDDEN, "Superusers only."); + } + boolean disabled = false; + if (disabled) { + return error(Response.Status.BAD_REQUEST, "API endpoint disabled."); + } + AuthenticatedUser builtInUserToConvert = null; + String emailToFind; + String password; + String authuserId = "0"; // could let people specify id on authuser table. probably better to let them + // tell us their + String newEmailAddressToUse; + try { + String[] args = content.split(":"); + emailToFind = args[0]; + password = args[1]; + newEmailAddressToUse = args[2]; + // authuserId = args[666]; + } catch (ArrayIndexOutOfBoundsException ex) { + return error(Response.Status.BAD_REQUEST, "Problem with content <<<" + content + ">>>: " + ex.toString()); + } + AuthenticatedUser existingAuthUserFoundByEmail = shibService.findAuthUserByEmail(emailToFind); + String existing = "NOT FOUND"; + if (existingAuthUserFoundByEmail != null) { + builtInUserToConvert = existingAuthUserFoundByEmail; + existing = existingAuthUserFoundByEmail.getIdentifier(); + } else { + long longToLookup = Long.parseLong(authuserId); + AuthenticatedUser specifiedUserToConvert = authSvc.findByID(longToLookup); + if (specifiedUserToConvert != null) { + builtInUserToConvert = specifiedUserToConvert; + } else { + return error(Response.Status.BAD_REQUEST, + "No user to convert. 
We couldn't find a *single* existing user account based on " + emailToFind + + " and no user was found using specified id " + longToLookup); + } + } + String shibProviderId = ShibAuthenticationProvider.PROVIDER_ID; + Map randomUser = authTestDataService.getRandomUser(); + // String eppn = UUID.randomUUID().toString().substring(0, 8); + String eppn = randomUser.get("eppn"); + String idPEntityId = randomUser.get("idp"); + String notUsed = null; + String separator = "|"; + UserIdentifier newUserIdentifierInLookupTable = new UserIdentifier(idPEntityId + separator + eppn, notUsed); + String overwriteFirstName = randomUser.get("firstName"); + String overwriteLastName = randomUser.get("lastName"); + String overwriteEmail = randomUser.get("email"); + overwriteEmail = newEmailAddressToUse; + logger.info("overwriteEmail: " + overwriteEmail); + boolean validEmail = EMailValidator.isEmailValid(overwriteEmail); + if (!validEmail) { + // See https://github.com/IQSS/dataverse/issues/2998 + return error(Response.Status.BAD_REQUEST, "invalid email: " + overwriteEmail); + } + /** + * @todo If affiliation is not null, put it in RoleAssigneeDisplayInfo + * constructor. + */ + /** + * Here we are exercising (via an API test) shibService.getAffiliation with the + * TestShib IdP and a non-production DevShibAccountType. 
+ */ + idPEntityId = ShibUtil.testShibIdpEntityId; + String overwriteAffiliation = shibService.getAffiliation(idPEntityId, + ShibServiceBean.DevShibAccountType.RANDOM); + logger.info("overwriteAffiliation: " + overwriteAffiliation); + /** + * @todo Find a place to put "position" in the authenticateduser table: + * https://github.com/IQSS/dataverse/issues/1444#issuecomment-74134694 + */ + String overwritePosition = "staff;student"; + AuthenticatedUserDisplayInfo displayInfo = new AuthenticatedUserDisplayInfo(overwriteFirstName, + overwriteLastName, overwriteEmail, overwriteAffiliation, overwritePosition); + JsonObjectBuilder response = Json.createObjectBuilder(); + JsonArrayBuilder problems = Json.createArrayBuilder(); + if (password != null) { + response.add("password supplied", password); + boolean knowsExistingPassword = false; + BuiltinUser oldBuiltInUser = builtinUserService.findByUserName(builtInUserToConvert.getUserIdentifier()); + if (oldBuiltInUser != null) { if (builtInUserToConvert.isDeactivated()) { problems.add("builtin account has been deactivated"); return error(Status.BAD_REQUEST, problems.build().toString()); } - String usernameOfBuiltinAccountToConvert = oldBuiltInUser.getUserName(); - response.add("old username", usernameOfBuiltinAccountToConvert); - AuthenticatedUser authenticatedUser = authSvc.canLogInAsBuiltinUser(usernameOfBuiltinAccountToConvert, - password); - if (authenticatedUser != null) { - knowsExistingPassword = true; - AuthenticatedUser convertedUser = authSvc.convertBuiltInToShib(builtInUserToConvert, shibProviderId, - newUserIdentifierInLookupTable); - if (convertedUser != null) { - /** - * @todo Display name is not being overwritten. 
Logic must be in Shib backing - * bean - */ - AuthenticatedUser updatedInfoUser = authSvc.updateAuthenticatedUser(convertedUser, displayInfo); - if (updatedInfoUser != null) { - response.add("display name overwritten with", updatedInfoUser.getName()); - } else { - problems.add("couldn't update display info"); - } - } else { - problems.add("unable to convert user"); - } - } - } else { - problems.add("couldn't find old username"); - } - if (!knowsExistingPassword) { - String message = "User doesn't know password."; - problems.add(message); - /** - * @todo Someday we should make a errorResponse method that takes JSON arrays - * and objects. - */ - return error(Status.BAD_REQUEST, problems.build().toString()); - } - // response.add("knows existing password", knowsExistingPassword); - } - - response.add("user to convert", builtInUserToConvert.getIdentifier()); - response.add("existing user found by email (prompt to convert)", existing); - response.add("changing to this provider", shibProviderId); - response.add("value to overwrite old first name", overwriteFirstName); - response.add("value to overwrite old last name", overwriteLastName); - response.add("value to overwrite old email address", overwriteEmail); - if (overwriteAffiliation != null) { - response.add("affiliation", overwriteAffiliation); - } - response.add("problems", problems); - return ok(response); - } - - /** - * This is used in testing via AdminIT.java but we don't expect sysadmins to use - * this. 
- */ - @PUT - @AuthRequired - @Path("authenticatedUsers/convert/builtin2oauth") - public Response builtin2oauth(@Context ContainerRequestContext crc, String content) { - logger.info("entering builtin2oauth..."); - try { - AuthenticatedUser userToRunThisMethod = getRequestAuthenticatedUserOrDie(crc); - if (!userToRunThisMethod.isSuperuser()) { - return error(Response.Status.FORBIDDEN, "Superusers only."); - } - } catch (WrappedResponse ex) { - return error(Response.Status.FORBIDDEN, "Superusers only."); - } - boolean disabled = false; - if (disabled) { - return error(Response.Status.BAD_REQUEST, "API endpoint disabled."); - } - AuthenticatedUser builtInUserToConvert = null; - String emailToFind; - String password; - String authuserId = "0"; // could let people specify id on authuser table. probably better to let them - // tell us their - String newEmailAddressToUse; - String newProviderId; - String newPersistentUserIdInLookupTable; - logger.info("content: " + content); - try { - String[] args = content.split(":"); - emailToFind = args[0]; - password = args[1]; - newEmailAddressToUse = args[2]; - newProviderId = args[3]; - newPersistentUserIdInLookupTable = args[4]; - // authuserId = args[666]; - } catch (ArrayIndexOutOfBoundsException ex) { - return error(Response.Status.BAD_REQUEST, "Problem with content <<<" + content + ">>>: " + ex.toString()); - } - AuthenticatedUser existingAuthUserFoundByEmail = shibService.findAuthUserByEmail(emailToFind); - String existing = "NOT FOUND"; - if (existingAuthUserFoundByEmail != null) { - builtInUserToConvert = existingAuthUserFoundByEmail; - existing = existingAuthUserFoundByEmail.getIdentifier(); - } else { - long longToLookup = Long.parseLong(authuserId); - AuthenticatedUser specifiedUserToConvert = authSvc.findByID(longToLookup); - if (specifiedUserToConvert != null) { - builtInUserToConvert = specifiedUserToConvert; - } else { - return error(Response.Status.BAD_REQUEST, - "No user to convert. 
We couldn't find a *single* existing user account based on " + emailToFind - + " and no user was found using specified id " + longToLookup); - } - } - // String shibProviderId = ShibAuthenticationProvider.PROVIDER_ID; - Map randomUser = authTestDataService.getRandomUser(); - // String eppn = UUID.randomUUID().toString().substring(0, 8); - String eppn = randomUser.get("eppn"); - String idPEntityId = randomUser.get("idp"); - String notUsed = null; - String separator = "|"; - // UserIdentifier newUserIdentifierInLookupTable = new - // UserIdentifier(idPEntityId + separator + eppn, notUsed); - UserIdentifier newUserIdentifierInLookupTable = new UserIdentifier(newPersistentUserIdInLookupTable, notUsed); - String overwriteFirstName = randomUser.get("firstName"); - String overwriteLastName = randomUser.get("lastName"); - String overwriteEmail = randomUser.get("email"); - overwriteEmail = newEmailAddressToUse; - logger.info("overwriteEmail: " + overwriteEmail); - boolean validEmail = EMailValidator.isEmailValid(overwriteEmail); - if (!validEmail) { - // See https://github.com/IQSS/dataverse/issues/2998 - return error(Response.Status.BAD_REQUEST, "invalid email: " + overwriteEmail); - } - /** - * @todo If affiliation is not null, put it in RoleAssigneeDisplayInfo - * constructor. - */ - /** - * Here we are exercising (via an API test) shibService.getAffiliation with the - * TestShib IdP and a non-production DevShibAccountType. 
- */ - // idPEntityId = ShibUtil.testShibIdpEntityId; - // String overwriteAffiliation = shibService.getAffiliation(idPEntityId, - // ShibServiceBean.DevShibAccountType.RANDOM); - String overwriteAffiliation = null; - logger.info("overwriteAffiliation: " + overwriteAffiliation); - /** - * @todo Find a place to put "position" in the authenticateduser table: - * https://github.com/IQSS/dataverse/issues/1444#issuecomment-74134694 - */ - String overwritePosition = "staff;student"; - AuthenticatedUserDisplayInfo displayInfo = new AuthenticatedUserDisplayInfo(overwriteFirstName, - overwriteLastName, overwriteEmail, overwriteAffiliation, overwritePosition); - JsonObjectBuilder response = Json.createObjectBuilder(); - JsonArrayBuilder problems = Json.createArrayBuilder(); - if (password != null) { - response.add("password supplied", password); - boolean knowsExistingPassword = false; - BuiltinUser oldBuiltInUser = builtinUserService.findByUserName(builtInUserToConvert.getUserIdentifier()); - if (oldBuiltInUser != null) { - String usernameOfBuiltinAccountToConvert = oldBuiltInUser.getUserName(); - response.add("old username", usernameOfBuiltinAccountToConvert); - AuthenticatedUser authenticatedUser = authSvc.canLogInAsBuiltinUser(usernameOfBuiltinAccountToConvert, - password); - if (authenticatedUser != null) { - knowsExistingPassword = true; - AuthenticatedUser convertedUser = authSvc.convertBuiltInUserToRemoteUser(builtInUserToConvert, - newProviderId, newUserIdentifierInLookupTable); - if (convertedUser != null) { - /** - * @todo Display name is not being overwritten. 
Logic must be in Shib backing - * bean - */ - AuthenticatedUser updatedInfoUser = authSvc.updateAuthenticatedUser(convertedUser, displayInfo); - if (updatedInfoUser != null) { - response.add("display name overwritten with", updatedInfoUser.getName()); - } else { - problems.add("couldn't update display info"); - } - } else { - problems.add("unable to convert user"); - } - } - } else { - problems.add("couldn't find old username"); - } - if (!knowsExistingPassword) { - String message = "User doesn't know password."; - problems.add(message); - /** - * @todo Someday we should make a errorResponse method that takes JSON arrays - * and objects. - */ - return error(Status.BAD_REQUEST, problems.build().toString()); - } - // response.add("knows existing password", knowsExistingPassword); - } - - response.add("user to convert", builtInUserToConvert.getIdentifier()); - response.add("existing user found by email (prompt to convert)", existing); - response.add("changing to this provider", newProviderId); - response.add("value to overwrite old first name", overwriteFirstName); - response.add("value to overwrite old last name", overwriteLastName); - response.add("value to overwrite old email address", overwriteEmail); - if (overwriteAffiliation != null) { - response.add("affiliation", overwriteAffiliation); - } - response.add("problems", problems); - return ok(response); - } - - - - - @Path("roles") - @POST - public Response createNewBuiltinRole(RoleDTO roleDto) { - ActionLogRecord alr = new ActionLogRecord(ActionLogRecord.ActionType.Admin, "createBuiltInRole") - .setInfo(roleDto.getAlias() + ":" + roleDto.getDescription()); - try { - return ok(json(rolesSvc.save(roleDto.asRole()))); - } catch (Exception e) { - alr.setActionResult(ActionLogRecord.Result.InternalError); - alr.setInfo(alr.getInfo() + "// " + e.getMessage()); - return error(Response.Status.INTERNAL_SERVER_ERROR, e.getMessage()); - } finally { - actionLogSvc.log(alr); - } - } - - @Path("roles") - @GET - public 
Response listBuiltinRoles() { - try { - return ok(rolesToJson(rolesSvc.findBuiltinRoles())); - } catch (Exception e) { - return error(Response.Status.INTERNAL_SERVER_ERROR, e.getMessage()); - } - } + String usernameOfBuiltinAccountToConvert = oldBuiltInUser.getUserName(); + response.add("old username", usernameOfBuiltinAccountToConvert); + AuthenticatedUser authenticatedUser = authSvc.canLogInAsBuiltinUser(usernameOfBuiltinAccountToConvert, + password); + if (authenticatedUser != null) { + knowsExistingPassword = true; + AuthenticatedUser convertedUser = authSvc.convertBuiltInToShib(builtInUserToConvert, shibProviderId, + newUserIdentifierInLookupTable); + if (convertedUser != null) { + /** + * @todo Display name is not being overwritten. Logic must be in Shib backing + * bean + */ + AuthenticatedUser updatedInfoUser = authSvc.updateAuthenticatedUser(convertedUser, displayInfo); + if (updatedInfoUser != null) { + response.add("display name overwritten with", updatedInfoUser.getName()); + } else { + problems.add("couldn't update display info"); + } + } else { + problems.add("unable to convert user"); + } + } + } else { + problems.add("couldn't find old username"); + } + if (!knowsExistingPassword) { + String message = "User doesn't know password."; + problems.add(message); + /** + * @todo Someday we should make a errorResponse method that takes JSON arrays + * and objects. 
+ */ + return error(Status.BAD_REQUEST, problems.build().toString()); + } + // response.add("knows existing password", knowsExistingPassword); + } + + response.add("user to convert", builtInUserToConvert.getIdentifier()); + response.add("existing user found by email (prompt to convert)", existing); + response.add("changing to this provider", shibProviderId); + response.add("value to overwrite old first name", overwriteFirstName); + response.add("value to overwrite old last name", overwriteLastName); + response.add("value to overwrite old email address", overwriteEmail); + if (overwriteAffiliation != null) { + response.add("affiliation", overwriteAffiliation); + } + response.add("problems", problems); + return ok(response); + } + + /** + * This is used in testing via AdminIT.java but we don't expect sysadmins to use + * this. + */ + @PUT + @AuthRequired + @Path("authenticatedUsers/convert/builtin2oauth") + public Response builtin2oauth(@Context ContainerRequestContext crc, String content) { + logger.info("entering builtin2oauth..."); + try { + AuthenticatedUser userToRunThisMethod = getRequestAuthenticatedUserOrDie(crc); + if (!userToRunThisMethod.isSuperuser()) { + return error(Response.Status.FORBIDDEN, "Superusers only."); + } + } catch (WrappedResponse ex) { + return error(Response.Status.FORBIDDEN, "Superusers only."); + } + boolean disabled = false; + if (disabled) { + return error(Response.Status.BAD_REQUEST, "API endpoint disabled."); + } + AuthenticatedUser builtInUserToConvert = null; + String emailToFind; + String password; + String authuserId = "0"; // could let people specify id on authuser table. 
probably better to let them + // tell us their + String newEmailAddressToUse; + String newProviderId; + String newPersistentUserIdInLookupTable; + logger.info("content: " + content); + try { + String[] args = content.split(":"); + emailToFind = args[0]; + password = args[1]; + newEmailAddressToUse = args[2]; + newProviderId = args[3]; + newPersistentUserIdInLookupTable = args[4]; + // authuserId = args[666]; + } catch (ArrayIndexOutOfBoundsException ex) { + return error(Response.Status.BAD_REQUEST, "Problem with content <<<" + content + ">>>: " + ex.toString()); + } + AuthenticatedUser existingAuthUserFoundByEmail = shibService.findAuthUserByEmail(emailToFind); + String existing = "NOT FOUND"; + if (existingAuthUserFoundByEmail != null) { + builtInUserToConvert = existingAuthUserFoundByEmail; + existing = existingAuthUserFoundByEmail.getIdentifier(); + } else { + long longToLookup = Long.parseLong(authuserId); + AuthenticatedUser specifiedUserToConvert = authSvc.findByID(longToLookup); + if (specifiedUserToConvert != null) { + builtInUserToConvert = specifiedUserToConvert; + } else { + return error(Response.Status.BAD_REQUEST, + "No user to convert. 
We couldn't find a *single* existing user account based on " + emailToFind + + " and no user was found using specified id " + longToLookup); + } + } + // String shibProviderId = ShibAuthenticationProvider.PROVIDER_ID; + Map randomUser = authTestDataService.getRandomUser(); + // String eppn = UUID.randomUUID().toString().substring(0, 8); + String eppn = randomUser.get("eppn"); + String idPEntityId = randomUser.get("idp"); + String notUsed = null; + String separator = "|"; + // UserIdentifier newUserIdentifierInLookupTable = new + // UserIdentifier(idPEntityId + separator + eppn, notUsed); + UserIdentifier newUserIdentifierInLookupTable = new UserIdentifier(newPersistentUserIdInLookupTable, notUsed); + String overwriteFirstName = randomUser.get("firstName"); + String overwriteLastName = randomUser.get("lastName"); + String overwriteEmail = randomUser.get("email"); + overwriteEmail = newEmailAddressToUse; + logger.info("overwriteEmail: " + overwriteEmail); + boolean validEmail = EMailValidator.isEmailValid(overwriteEmail); + if (!validEmail) { + // See https://github.com/IQSS/dataverse/issues/2998 + return error(Response.Status.BAD_REQUEST, "invalid email: " + overwriteEmail); + } + /** + * @todo If affiliation is not null, put it in RoleAssigneeDisplayInfo + * constructor. + */ + /** + * Here we are exercising (via an API test) shibService.getAffiliation with the + * TestShib IdP and a non-production DevShibAccountType. 
+ */ + // idPEntityId = ShibUtil.testShibIdpEntityId; + // String overwriteAffiliation = shibService.getAffiliation(idPEntityId, + // ShibServiceBean.DevShibAccountType.RANDOM); + String overwriteAffiliation = null; + logger.info("overwriteAffiliation: " + overwriteAffiliation); + /** + * @todo Find a place to put "position" in the authenticateduser table: + * https://github.com/IQSS/dataverse/issues/1444#issuecomment-74134694 + */ + String overwritePosition = "staff;student"; + AuthenticatedUserDisplayInfo displayInfo = new AuthenticatedUserDisplayInfo(overwriteFirstName, + overwriteLastName, overwriteEmail, overwriteAffiliation, overwritePosition); + JsonObjectBuilder response = Json.createObjectBuilder(); + JsonArrayBuilder problems = Json.createArrayBuilder(); + if (password != null) { + response.add("password supplied", password); + boolean knowsExistingPassword = false; + BuiltinUser oldBuiltInUser = builtinUserService.findByUserName(builtInUserToConvert.getUserIdentifier()); + if (oldBuiltInUser != null) { + String usernameOfBuiltinAccountToConvert = oldBuiltInUser.getUserName(); + response.add("old username", usernameOfBuiltinAccountToConvert); + AuthenticatedUser authenticatedUser = authSvc.canLogInAsBuiltinUser(usernameOfBuiltinAccountToConvert, + password); + if (authenticatedUser != null) { + knowsExistingPassword = true; + AuthenticatedUser convertedUser = authSvc.convertBuiltInUserToRemoteUser(builtInUserToConvert, + newProviderId, newUserIdentifierInLookupTable); + if (convertedUser != null) { + /** + * @todo Display name is not being overwritten. 
Logic must be in Shib backing + * bean + */ + AuthenticatedUser updatedInfoUser = authSvc.updateAuthenticatedUser(convertedUser, displayInfo); + if (updatedInfoUser != null) { + response.add("display name overwritten with", updatedInfoUser.getName()); + } else { + problems.add("couldn't update display info"); + } + } else { + problems.add("unable to convert user"); + } + } + } else { + problems.add("couldn't find old username"); + } + if (!knowsExistingPassword) { + String message = "User doesn't know password."; + problems.add(message); + /** + * @todo Someday we should make a errorResponse method that takes JSON arrays + * and objects. + */ + return error(Status.BAD_REQUEST, problems.build().toString()); + } + // response.add("knows existing password", knowsExistingPassword); + } + + response.add("user to convert", builtInUserToConvert.getIdentifier()); + response.add("existing user found by email (prompt to convert)", existing); + response.add("changing to this provider", newProviderId); + response.add("value to overwrite old first name", overwriteFirstName); + response.add("value to overwrite old last name", overwriteLastName); + response.add("value to overwrite old email address", overwriteEmail); + if (overwriteAffiliation != null) { + response.add("affiliation", overwriteAffiliation); + } + response.add("problems", problems); + return ok(response); + } + + + + + @Path("roles") + @POST + public Response createNewBuiltinRole(RoleDTO roleDto) { + ActionLogRecord alr = new ActionLogRecord(ActionLogRecord.ActionType.Admin, "createBuiltInRole") + .setInfo(roleDto.getAlias() + ":" + roleDto.getDescription()); + try { + return ok(json(rolesSvc.save(roleDto.asRole()))); + } catch (Exception e) { + alr.setActionResult(ActionLogRecord.Result.InternalError); + alr.setInfo(alr.getInfo() + "// " + e.getMessage()); + return error(Response.Status.INTERNAL_SERVER_ERROR, e.getMessage()); + } finally { + actionLogSvc.log(alr); + } + } + + @Path("roles") + @GET + public 
Response listBuiltinRoles() { + try { + return ok(rolesToJson(rolesSvc.findBuiltinRoles())); + } catch (Exception e) { + return error(Response.Status.INTERNAL_SERVER_ERROR, e.getMessage()); + } + } @DELETE - @AuthRequired + @AuthRequired @Path("roles/{id}") public Response deleteRole(@Context ContainerRequestContext crc, @PathParam("id") String id) { @@ -1284,77 +1264,77 @@ public void write(OutputStream os) throws IOException, return Response.ok(stream).build(); } - @Path("assignments/assignees/{raIdtf: .*}") - @GET - public Response getAssignmentsFor(@PathParam("raIdtf") String raIdtf) { - - JsonArrayBuilder arr = Json.createArrayBuilder(); - roleAssigneeSvc.getAssignmentsFor(raIdtf).forEach(a -> arr.add(json(a))); - - return ok(arr); - } - - /** - * This method is used in integration tests. - * - * @param userId - * The database id of an AuthenticatedUser. - * @return The confirm email token. - */ - @Path("confirmEmail/{userId}") - @GET - public Response getConfirmEmailToken(@PathParam("userId") long userId) { - AuthenticatedUser user = authSvc.findByID(userId); - if (user != null) { - ConfirmEmailData confirmEmailData = confirmEmailSvc.findSingleConfirmEmailDataByUser(user); - if (confirmEmailData != null) { - return ok(Json.createObjectBuilder().add("token", confirmEmailData.getToken())); - } - } - return error(Status.BAD_REQUEST, "Could not find confirm email token for user " + userId); - } - - /** - * This method is used in integration tests. - * - * @param userId - * The database id of an AuthenticatedUser. 
- */ - @Path("confirmEmail/{userId}") - @POST - public Response startConfirmEmailProcess(@PathParam("userId") long userId) { - AuthenticatedUser user = authSvc.findByID(userId); - if (user != null) { - try { - ConfirmEmailInitResponse confirmEmailInitResponse = confirmEmailSvc.beginConfirm(user); - ConfirmEmailData confirmEmailData = confirmEmailInitResponse.getConfirmEmailData(); - return ok(Json.createObjectBuilder().add("tokenCreated", confirmEmailData.getCreated().toString()) - .add("identifier", user.getUserIdentifier())); - } catch (ConfirmEmailException ex) { - return error(Status.BAD_REQUEST, - "Could not start confirm email process for user " + userId + ": " + ex.getLocalizedMessage()); - } - } - return error(Status.BAD_REQUEST, "Could not find user based on " + userId); - } - - /** - * This method is used by an integration test in UsersIT.java to exercise bug - * https://github.com/IQSS/dataverse/issues/3287 . Not for use by users! - */ - @Path("convertUserFromBcryptToSha1") - @POST - public Response convertUserFromBcryptToSha1(String json) { - JsonReader jsonReader = Json.createReader(new StringReader(json)); - JsonObject object = jsonReader.readObject(); - jsonReader.close(); - BuiltinUser builtinUser = builtinUserService.find(new Long(object.getInt("builtinUserId"))); - builtinUser.updateEncryptedPassword("4G7xxL9z11/JKN4jHPn4g9iIQck=", 0); // password is "sha-1Pass", 0 means - // SHA-1 - BuiltinUser savedUser = builtinUserService.save(builtinUser); - return ok("foo: " + savedUser); - - } + @Path("assignments/assignees/{raIdtf: .*}") + @GET + public Response getAssignmentsFor(@PathParam("raIdtf") String raIdtf) { + + JsonArrayBuilder arr = Json.createArrayBuilder(); + roleAssigneeSvc.getAssignmentsFor(raIdtf).forEach(a -> arr.add(json(a))); + + return ok(arr); + } + + /** + * This method is used in integration tests. + * + * @param userId + * The database id of an AuthenticatedUser. + * @return The confirm email token. 
+ */ + @Path("confirmEmail/{userId}") + @GET + public Response getConfirmEmailToken(@PathParam("userId") long userId) { + AuthenticatedUser user = authSvc.findByID(userId); + if (user != null) { + ConfirmEmailData confirmEmailData = confirmEmailSvc.findSingleConfirmEmailDataByUser(user); + if (confirmEmailData != null) { + return ok(Json.createObjectBuilder().add("token", confirmEmailData.getToken())); + } + } + return error(Status.BAD_REQUEST, "Could not find confirm email token for user " + userId); + } + + /** + * This method is used in integration tests. + * + * @param userId + * The database id of an AuthenticatedUser. + */ + @Path("confirmEmail/{userId}") + @POST + public Response startConfirmEmailProcess(@PathParam("userId") long userId) { + AuthenticatedUser user = authSvc.findByID(userId); + if (user != null) { + try { + ConfirmEmailInitResponse confirmEmailInitResponse = confirmEmailSvc.beginConfirm(user); + ConfirmEmailData confirmEmailData = confirmEmailInitResponse.getConfirmEmailData(); + return ok(Json.createObjectBuilder().add("tokenCreated", confirmEmailData.getCreated().toString()) + .add("identifier", user.getUserIdentifier())); + } catch (ConfirmEmailException ex) { + return error(Status.BAD_REQUEST, + "Could not start confirm email process for user " + userId + ": " + ex.getLocalizedMessage()); + } + } + return error(Status.BAD_REQUEST, "Could not find user based on " + userId); + } + + /** + * This method is used by an integration test in UsersIT.java to exercise bug + * https://github.com/IQSS/dataverse/issues/3287 . Not for use by users! 
+ */ + @Path("convertUserFromBcryptToSha1") + @POST + public Response convertUserFromBcryptToSha1(String json) { + JsonReader jsonReader = Json.createReader(new StringReader(json)); + JsonObject object = jsonReader.readObject(); + jsonReader.close(); + BuiltinUser builtinUser = builtinUserService.find(new Long(object.getInt("builtinUserId"))); + builtinUser.updateEncryptedPassword("4G7xxL9z11/JKN4jHPn4g9iIQck=", 0); // password is "sha-1Pass", 0 means + // SHA-1 + BuiltinUser savedUser = builtinUserService.save(builtinUser); + return ok("foo: " + savedUser); + + } @Path("permissions/{dvo}") @AuthRequired @@ -1375,43 +1355,43 @@ public Response findPermissonsOn(@Context final ContainerRequestContext crc, @Pa } } - @Path("assignee/{idtf}") - @GET - public Response findRoleAssignee(@PathParam("idtf") String idtf) { - RoleAssignee ra = roleAssigneeSvc.getRoleAssignee(idtf); - return (ra == null) ? notFound("Role Assignee '" + idtf + "' not found.") : ok(json(ra.getDisplayInfo())); - } - - @Path("datasets/integrity/{datasetVersionId}/fixmissingunf") - @POST - public Response fixUnf(@PathParam("datasetVersionId") String datasetVersionId, - @QueryParam("forceRecalculate") boolean forceRecalculate) { - JsonObjectBuilder info = datasetVersionSvc.fixMissingUnf(datasetVersionId, forceRecalculate); - return ok(info); - } - - @Path("datafiles/integrity/fixmissingoriginaltypes") - @GET - public Response fixMissingOriginalTypes() { - JsonObjectBuilder info = Json.createObjectBuilder(); - - List affectedFileIds = fileService.selectFilesWithMissingOriginalTypes(); - - if (affectedFileIds.isEmpty()) { - info.add("message", - "All the tabular files in the database already have the original types set correctly; exiting."); - } else { - for (Long fileid : affectedFileIds) { - logger.fine("found file id: " + fileid); - } - info.add("message", "Found " + affectedFileIds.size() - + " tabular files with missing original types. 
Kicking off an async job that will repair the files in the background."); - } - - ingestService.fixMissingOriginalTypes(affectedFileIds); - - return ok(info); - } + @Path("assignee/{idtf}") + @GET + public Response findRoleAssignee(@PathParam("idtf") String idtf) { + RoleAssignee ra = roleAssigneeSvc.getRoleAssignee(idtf); + return (ra == null) ? notFound("Role Assignee '" + idtf + "' not found.") : ok(json(ra.getDisplayInfo())); + } + + @Path("datasets/integrity/{datasetVersionId}/fixmissingunf") + @POST + public Response fixUnf(@PathParam("datasetVersionId") String datasetVersionId, + @QueryParam("forceRecalculate") boolean forceRecalculate) { + JsonObjectBuilder info = datasetVersionSvc.fixMissingUnf(datasetVersionId, forceRecalculate); + return ok(info); + } + + @Path("datafiles/integrity/fixmissingoriginaltypes") + @GET + public Response fixMissingOriginalTypes() { + JsonObjectBuilder info = Json.createObjectBuilder(); + + List affectedFileIds = fileService.selectFilesWithMissingOriginalTypes(); + + if (affectedFileIds.isEmpty()) { + info.add("message", + "All the tabular files in the database already have the original types set correctly; exiting."); + } else { + for (Long fileid : affectedFileIds) { + logger.fine("found file id: " + fileid); + } + info.add("message", "Found " + affectedFileIds.size() + + " tabular files with missing original types. Kicking off an async job that will repair the files in the background."); + } + + ingestService.fixMissingOriginalTypes(affectedFileIds); + + return ok(info); + } @Path("datafiles/integrity/fixmissingoriginalsizes") @GET @@ -1441,60 +1421,60 @@ public Response fixMissingOriginalSizes(@QueryParam("limit") Integer limit) { return ok(info); } - /** - * This method is used in API tests, called from UtilIt.java. 
- */ - @GET - @Path("datasets/thumbnailMetadata/{id}") - public Response getDatasetThumbnailMetadata(@PathParam("id") Long idSupplied) { - Dataset dataset = datasetSvc.find(idSupplied); - if (dataset == null) { - return error(Response.Status.NOT_FOUND, "Could not find dataset based on id supplied: " + idSupplied + "."); - } - JsonObjectBuilder data = Json.createObjectBuilder(); - DatasetThumbnail datasetThumbnail = dataset.getDatasetThumbnail(ImageThumbConverter.DEFAULT_CARDIMAGE_SIZE); - data.add("isUseGenericThumbnail", dataset.isUseGenericThumbnail()); - data.add("datasetLogoPresent", DatasetUtil.isDatasetLogoPresent(dataset, ImageThumbConverter.DEFAULT_CARDIMAGE_SIZE)); - if (datasetThumbnail != null) { - data.add("datasetThumbnailBase64image", datasetThumbnail.getBase64image()); - DataFile dataFile = datasetThumbnail.getDataFile(); - if (dataFile != null) { - /** - * @todo Change this from a String to a long. - */ - data.add("dataFileId", dataFile.getId().toString()); - } - } - return ok(data); - } - - /** - * validatePassword - *

- * Validate a password with an API call - * - * @param password - * The password - * @return A response with the validation result. - */ - @Path("validatePassword") - @POST - public Response validatePassword(String password) { - - final List errors = passwordValidatorService.validate(password, new Date(), false); - final JsonArrayBuilder errorArray = Json.createArrayBuilder(); - errors.forEach(errorArray::add); - return ok(Json.createObjectBuilder().add("password", password).add("errors", errorArray)); - } - - @GET - @Path("/isOrcid") - public Response isOrcidEnabled() { - return authSvc.isOrcidEnabled() ? ok("Orcid is enabled") : ok("no orcid for you."); - } + /** + * This method is used in API tests, called from UtilIt.java. + */ + @GET + @Path("datasets/thumbnailMetadata/{id}") + public Response getDatasetThumbnailMetadata(@PathParam("id") Long idSupplied) { + Dataset dataset = datasetSvc.find(idSupplied); + if (dataset == null) { + return error(Response.Status.NOT_FOUND, "Could not find dataset based on id supplied: " + idSupplied + "."); + } + JsonObjectBuilder data = Json.createObjectBuilder(); + DatasetThumbnail datasetThumbnail = dataset.getDatasetThumbnail(ImageThumbConverter.DEFAULT_CARDIMAGE_SIZE); + data.add("isUseGenericThumbnail", dataset.isUseGenericThumbnail()); + data.add("datasetLogoPresent", DatasetUtil.isDatasetLogoPresent(dataset, ImageThumbConverter.DEFAULT_CARDIMAGE_SIZE)); + if (datasetThumbnail != null) { + data.add("datasetThumbnailBase64image", datasetThumbnail.getBase64image()); + DataFile dataFile = datasetThumbnail.getDataFile(); + if (dataFile != null) { + /** + * @todo Change this from a String to a long. + */ + data.add("dataFileId", dataFile.getId().toString()); + } + } + return ok(data); + } + + /** + * validatePassword + *

+ * Validate a password with an API call + * + * @param password + * The password + * @return A response with the validation result. + */ + @Path("validatePassword") + @POST + public Response validatePassword(String password) { + + final List errors = passwordValidatorService.validate(password, new Date(), false); + final JsonArrayBuilder errorArray = Json.createArrayBuilder(); + errors.forEach(errorArray::add); + return ok(Json.createObjectBuilder().add("password", password).add("errors", errorArray)); + } + + @GET + @Path("/isOrcid") + public Response isOrcidEnabled() { + return authSvc.isOrcidEnabled() ? ok("Orcid is enabled") : ok("no orcid for you."); + } @POST - @AuthRequired + @AuthRequired @Path("{id}/reregisterHDLToPID") public Response reregisterHdlToPID(@Context ContainerRequestContext crc, @PathParam("id") String id) { logger.info("Starting to reregister " + id + " Dataset Id. (from hdl to doi)" + new Date()); @@ -1825,7 +1805,7 @@ public Response updateHashValues(@Context ContainerRequestContext crc, @PathPara } @POST - @AuthRequired + @AuthRequired @Path("/computeDataFileHashValue/{fileId}/algorithm/{alg}") public Response computeDataFileHashValue(@Context ContainerRequestContext crc, @PathParam("fileId") String fileId, @PathParam("alg") String alg) { @@ -1887,7 +1867,7 @@ public Response computeDataFileHashValue(@Context ContainerRequestContext crc, @ } @POST - @AuthRequired + @AuthRequired @Path("/validateDataFileHashValue/{fileId}") public Response validateDataFileHashValue(@Context ContainerRequestContext crc, @PathParam("fileId") String fileId) { @@ -1954,7 +1934,7 @@ public Response validateDataFileHashValue(@Context ContainerRequestContext crc, } @POST - @AuthRequired + @AuthRequired @Path("/submitDatasetVersionToArchive/{id}/{version}") public Response submitDatasetVersionToArchive(@Context ContainerRequestContext crc, @PathParam("id") String dsid, @PathParam("version") String versionNumber) { @@ -2027,7 +2007,7 @@ public void run() { * 
@return */ @POST - @AuthRequired + @AuthRequired @Path("/archiveAllUnarchivedDatasetVersions") public Response archiveAllUnarchivedDatasetVersions(@Context ContainerRequestContext crc, @QueryParam("listonly") boolean listonly, @QueryParam("limit") Integer limit, @QueryParam("latestonly") boolean latestonly) { @@ -2126,7 +2106,7 @@ public Response clearMetricsCacheByName(@PathParam("name") String name) { } @GET - @AuthRequired + @AuthRequired @Path("/dataverse/{alias}/addRoleAssignmentsToChildren") public Response addRoleAssignementsToChildren(@Context ContainerRequestContext crc, @PathParam("alias") String alias) throws WrappedResponse { Dataverse owner = dataverseSvc.findByAlias(alias); @@ -2157,90 +2137,90 @@ public Response addRoleAssignementsToChildren(@Context ContainerRequestContext c } @GET - @AuthRequired + @AuthRequired @Path("/dataverse/{alias}/storageDriver") public Response getStorageDriver(@Context ContainerRequestContext crc, @PathParam("alias") String alias) throws WrappedResponse { - Dataverse dataverse = dataverseSvc.findByAlias(alias); - if (dataverse == null) { - return error(Response.Status.NOT_FOUND, "Could not find dataverse based on alias supplied: " + alias + "."); - } - try { - AuthenticatedUser user = getRequestAuthenticatedUserOrDie(crc); - if (!user.isSuperuser()) { - return error(Response.Status.FORBIDDEN, "Superusers only."); - } - } catch (WrappedResponse wr) { - return wr.getResponse(); - } - //Note that this returns what's set directly on this dataverse. 
If null/DataAccess.UNDEFINED_STORAGE_DRIVER_IDENTIFIER, the user would have to recurse the chain of parents to find the effective storageDriver - return ok(dataverse.getStorageDriverId()); + Dataverse dataverse = dataverseSvc.findByAlias(alias); + if (dataverse == null) { + return error(Response.Status.NOT_FOUND, "Could not find dataverse based on alias supplied: " + alias + "."); + } + try { + AuthenticatedUser user = getRequestAuthenticatedUserOrDie(crc); + if (!user.isSuperuser()) { + return error(Response.Status.FORBIDDEN, "Superusers only."); + } + } catch (WrappedResponse wr) { + return wr.getResponse(); + } + //Note that this returns what's set directly on this dataverse. If null/DataAccess.UNDEFINED_STORAGE_DRIVER_IDENTIFIER, the user would have to recurse the chain of parents to find the effective storageDriver + return ok(dataverse.getStorageDriverId()); } @PUT - @AuthRequired + @AuthRequired @Path("/dataverse/{alias}/storageDriver") public Response setStorageDriver(@Context ContainerRequestContext crc, @PathParam("alias") String alias, String label) throws WrappedResponse { - Dataverse dataverse = dataverseSvc.findByAlias(alias); - if (dataverse == null) { - return error(Response.Status.NOT_FOUND, "Could not find dataverse based on alias supplied: " + alias + "."); - } - try { - AuthenticatedUser user = getRequestAuthenticatedUserOrDie(crc); - if (!user.isSuperuser()) { - return error(Response.Status.FORBIDDEN, "Superusers only."); - } - } catch (WrappedResponse wr) { - return wr.getResponse(); - } - for (Entry store: DataAccess.getStorageDriverLabels().entrySet()) { - if(store.getKey().equals(label)) { - dataverse.setStorageDriverId(store.getValue()); - return ok("Storage set to: " + store.getKey() + "/" + store.getValue()); - } - } - return error(Response.Status.BAD_REQUEST, - "No Storage Driver found for : " + label); + Dataverse dataverse = dataverseSvc.findByAlias(alias); + if (dataverse == null) { + return error(Response.Status.NOT_FOUND, "Could 
not find dataverse based on alias supplied: " + alias + "."); + } + try { + AuthenticatedUser user = getRequestAuthenticatedUserOrDie(crc); + if (!user.isSuperuser()) { + return error(Response.Status.FORBIDDEN, "Superusers only."); + } + } catch (WrappedResponse wr) { + return wr.getResponse(); + } + for (Entry store: DataAccess.getStorageDriverLabels().entrySet()) { + if(store.getKey().equals(label)) { + dataverse.setStorageDriverId(store.getValue()); + return ok("Storage set to: " + store.getKey() + "/" + store.getValue()); + } + } + return error(Response.Status.BAD_REQUEST, + "No Storage Driver found for : " + label); } @DELETE - @AuthRequired + @AuthRequired @Path("/dataverse/{alias}/storageDriver") public Response resetStorageDriver(@Context ContainerRequestContext crc, @PathParam("alias") String alias) throws WrappedResponse { - Dataverse dataverse = dataverseSvc.findByAlias(alias); - if (dataverse == null) { - return error(Response.Status.NOT_FOUND, "Could not find dataverse based on alias supplied: " + alias + "."); - } - try { - AuthenticatedUser user = getRequestAuthenticatedUserOrDie(crc); - if (!user.isSuperuser()) { - return error(Response.Status.FORBIDDEN, "Superusers only."); - } - } catch (WrappedResponse wr) { - return wr.getResponse(); - } - dataverse.setStorageDriverId(""); - return ok("Storage reset to default: " + DataAccess.DEFAULT_STORAGE_DRIVER_IDENTIFIER); + Dataverse dataverse = dataverseSvc.findByAlias(alias); + if (dataverse == null) { + return error(Response.Status.NOT_FOUND, "Could not find dataverse based on alias supplied: " + alias + "."); + } + try { + AuthenticatedUser user = getRequestAuthenticatedUserOrDie(crc); + if (!user.isSuperuser()) { + return error(Response.Status.FORBIDDEN, "Superusers only."); + } + } catch (WrappedResponse wr) { + return wr.getResponse(); + } + dataverse.setStorageDriverId(""); + return ok("Storage reset to default: " + DataAccess.DEFAULT_STORAGE_DRIVER_IDENTIFIER); } @GET - @AuthRequired + 
@AuthRequired @Path("/dataverse/storageDrivers") public Response listStorageDrivers(@Context ContainerRequestContext crc) throws WrappedResponse { - try { - AuthenticatedUser user = getRequestAuthenticatedUserOrDie(crc); - if (!user.isSuperuser()) { - return error(Response.Status.FORBIDDEN, "Superusers only."); - } - } catch (WrappedResponse wr) { - return wr.getResponse(); - } - JsonObjectBuilder bld = jsonObjectBuilder(); - DataAccess.getStorageDriverLabels().entrySet().forEach(s -> bld.add(s.getKey(), s.getValue())); - return ok(bld); + try { + AuthenticatedUser user = getRequestAuthenticatedUserOrDie(crc); + if (!user.isSuperuser()) { + return error(Response.Status.FORBIDDEN, "Superusers only."); + } + } catch (WrappedResponse wr) { + return wr.getResponse(); + } + JsonObjectBuilder bld = jsonObjectBuilder(); + DataAccess.getStorageDriverLabels().entrySet().forEach(s -> bld.add(s.getKey(), s.getValue())); + return ok(bld); } @GET - @AuthRequired + @AuthRequired @Path("/dataverse/{alias}/curationLabelSet") public Response getCurationLabelSet(@Context ContainerRequestContext crc, @PathParam("alias") String alias) throws WrappedResponse { Dataverse dataverse = dataverseSvc.findByAlias(alias); @@ -2262,7 +2242,7 @@ public Response getCurationLabelSet(@Context ContainerRequestContext crc, @PathP } @PUT - @AuthRequired + @AuthRequired @Path("/dataverse/{alias}/curationLabelSet") public Response setCurationLabelSet(@Context ContainerRequestContext crc, @PathParam("alias") String alias, @QueryParam("name") String name) throws WrappedResponse { Dataverse dataverse = dataverseSvc.findByAlias(alias); @@ -2293,7 +2273,7 @@ public Response setCurationLabelSet(@Context ContainerRequestContext crc, @PathP } @DELETE - @AuthRequired + @AuthRequired @Path("/dataverse/{alias}/curationLabelSet") public Response resetCurationLabelSet(@Context ContainerRequestContext crc, @PathParam("alias") String alias) throws WrappedResponse { Dataverse dataverse = 
dataverseSvc.findByAlias(alias); @@ -2313,7 +2293,7 @@ public Response resetCurationLabelSet(@Context ContainerRequestContext crc, @Pat } @GET - @AuthRequired + @AuthRequired @Path("/dataverse/curationLabelSets") public Response listCurationLabelSets(@Context ContainerRequestContext crc) throws WrappedResponse { try { @@ -2423,7 +2403,7 @@ public Response getBannerMessages(@PathParam("id") Long id) throws WrappedRespon } @POST - @AuthRequired + @AuthRequired @Consumes("application/json") @Path("/requestSignedUrl") public Response getSignedUrl(@Context ContainerRequestContext crc, JsonObject urlInfo) { @@ -2541,4 +2521,160 @@ public Response getFeatureFlag(@PathParam("flag") String flagIn) { } } + @GET + @AuthRequired + @Path("/datafiles/auditFiles") + public Response getAuditFiles(@Context ContainerRequestContext crc, + @QueryParam("firstId") Long firstId, @QueryParam("lastId") Long lastId, + @QueryParam("datasetIdentifierList") String datasetIdentifierList) throws WrappedResponse { + try { + AuthenticatedUser user = getRequestAuthenticatedUserOrDie(crc); + if (!user.isSuperuser()) { + return error(Response.Status.FORBIDDEN, "Superusers only."); + } + } catch (WrappedResponse wr) { + return wr.getResponse(); + } + + int datasetsChecked = 0; + long startId = (firstId == null ? 0 : firstId); + long endId = (lastId == null ? 
Long.MAX_VALUE : lastId); + + List datasetIdentifiers; + if (datasetIdentifierList == null || datasetIdentifierList.isEmpty()) { + datasetIdentifiers = Collections.emptyList(); + } else { + startId = 0; + endId = Long.MAX_VALUE; + datasetIdentifiers = List.of(datasetIdentifierList.split(",")); + } + if (endId < startId) { + return badRequest("Invalid Parameters: lastId must be equal to or greater than firstId"); + } + + NullSafeJsonBuilder jsonObjectBuilder = NullSafeJsonBuilder.jsonObjectBuilder(); + JsonArrayBuilder jsonDatasetsArrayBuilder = Json.createArrayBuilder(); + JsonArrayBuilder jsonFailuresArrayBuilder = Json.createArrayBuilder(); + + if (startId > 0) { + jsonObjectBuilder.add("firstId", startId); + } + if (endId < Long.MAX_VALUE) { + jsonObjectBuilder.add("lastId", endId); + } + + // compile the list of ids to process + List datasetIds; + if (datasetIdentifiers.isEmpty()) { + datasetIds = datasetService.findAllLocalDatasetIds(); + } else { + datasetIds = new ArrayList<>(datasetIdentifiers.size()); + JsonArrayBuilder jab = Json.createArrayBuilder(); + datasetIdentifiers.forEach(id -> { + String dId = id.trim(); + jab.add(dId); + Dataset d = datasetService.findByGlobalId(dId); + if (d != null) { + datasetIds.add(d.getId()); + } else { + NullSafeJsonBuilder job = NullSafeJsonBuilder.jsonObjectBuilder(); + job.add("datasetIdentifier",dId); + job.add("reason","Not Found"); + jsonFailuresArrayBuilder.add(job); + } + }); + jsonObjectBuilder.add("datasetIdentifierList", jab); + } + + for (Long datasetId : datasetIds) { + if (datasetId < startId) { + continue; + } else if (datasetId > endId) { + break; + } + Dataset dataset; + try { + dataset = findDatasetOrDie(String.valueOf(datasetId)); + datasetsChecked++; + } catch (WrappedResponse e) { + NullSafeJsonBuilder job = NullSafeJsonBuilder.jsonObjectBuilder(); + job.add("datasetId", datasetId); + job.add("reason", e.getMessage()); + jsonFailuresArrayBuilder.add(job); + continue; + } + + List missingFiles = new 
ArrayList<>(); + List missingFileMetadata = new ArrayList<>(); + try { + Predicate filter = s -> true; + StorageIO datasetIO = DataAccess.getStorageIO(dataset); + final List result = datasetIO.cleanUp(filter, true); + // add files that are in dataset files but not in cleanup result or DataFiles with missing FileMetadata + dataset.getFiles().forEach(df -> { + try { + StorageIO datafileIO = df.getStorageIO(); + String storageId = df.getStorageIdentifier(); + FileMetadata fm = df.getFileMetadata(); + if (!datafileIO.exists()) { + missingFiles.add(storageId + "," + (fm != null ? + (fm.getDirectoryLabel() != null || !fm.getDirectoryLabel().isEmpty() ? "directoryLabel,"+fm.getDirectoryLabel()+"," : "") + +"label,"+fm.getLabel() : "type,"+df.getContentType())); + } + if (fm == null) { + missingFileMetadata.add(storageId + ",dataFileId," + df.getId()); + } + } catch (IOException e) { + NullSafeJsonBuilder job = NullSafeJsonBuilder.jsonObjectBuilder(); + job.add("dataFileId", df.getId()); + job.add("reason", e.getMessage()); + jsonFailuresArrayBuilder.add(job); + } + }); + } catch (IOException e) { + NullSafeJsonBuilder job = NullSafeJsonBuilder.jsonObjectBuilder(); + job.add("datasetId", datasetId); + job.add("reason", e.getMessage()); + jsonFailuresArrayBuilder.add(job); + } + + JsonObjectBuilder job = Json.createObjectBuilder(); + if (!missingFiles.isEmpty() || !missingFileMetadata.isEmpty()) { + job.add("id", dataset.getId()); + job.add("pid", dataset.getProtocol() + ":" + dataset.getAuthority() + "/" + dataset.getIdentifier()); + job.add("persistentURL", dataset.getPersistentURL()); + if (!missingFileMetadata.isEmpty()) { + JsonArrayBuilder jabMissingFileMetadata = Json.createArrayBuilder(); + missingFileMetadata.forEach(mm -> { + String[] missingMetadata = mm.split(","); + NullSafeJsonBuilder jobj = NullSafeJsonBuilder.jsonObjectBuilder() + .add("storageIdentifier", missingMetadata[0]) + .add(missingMetadata[1], missingMetadata[2]); + jabMissingFileMetadata.add(jobj); 
+ }); + job.add("missingFileMetadata", jabMissingFileMetadata); + } + if (!missingFiles.isEmpty()) { + JsonArrayBuilder jabMissingFiles = Json.createArrayBuilder(); + missingFiles.forEach(mf -> { + String[] missingFile = mf.split(","); + NullSafeJsonBuilder jobj = NullSafeJsonBuilder.jsonObjectBuilder() + .add("storageIdentifier", missingFile[0]); + for (int i = 2; i < missingFile.length; i+=2) { + jobj.add(missingFile[i-1], missingFile[i]); + } + jabMissingFiles.add(jobj); + }); + job.add("missingFiles", jabMissingFiles); + } + jsonDatasetsArrayBuilder.add(job); + } + } + + jsonObjectBuilder.add("datasetsChecked", datasetsChecked); + jsonObjectBuilder.add("datasets", jsonDatasetsArrayBuilder); + jsonObjectBuilder.add("failures", jsonFailuresArrayBuilder); + + return ok(jsonObjectBuilder); + } } diff --git a/src/main/java/edu/harvard/iq/dataverse/api/DatasetFieldServiceApi.java b/src/main/java/edu/harvard/iq/dataverse/api/DatasetFieldServiceApi.java index 01c51dc2b4c..907295ad848 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/DatasetFieldServiceApi.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/DatasetFieldServiceApi.java @@ -126,7 +126,7 @@ public Response getByName(@PathParam("name") String name) { String solrFieldSearchable = dsf.getSolrField().getNameSearchable(); String solrFieldFacetable = dsf.getSolrField().getNameFacetable(); String metadataBlock = dsf.getMetadataBlock().getName(); - String uri=dsf.getUri(); + String uri = dsf.getUri(); boolean hasParent = dsf.isHasParent(); boolean allowsMultiples = dsf.isAllowMultiples(); boolean isRequired = dsf.isRequired(); @@ -243,7 +243,9 @@ public Response loadDatasetFields(File file) { br = new BufferedReader(new FileReader("/" + file)); while ((line = br.readLine()) != null) { lineNumber++; - values = line.split(splitBy); + values = Arrays.stream(line.split(splitBy)) + .map(String::trim) + .toArray(String[]::new); if (values[0].startsWith("#")) { // Header row switch (values[0]) { case 
"#metadataBlock": @@ -326,7 +328,7 @@ public Response loadDatasetFields(File file) { */ public String getGeneralErrorMessage(HeaderType header, int lineNumber, String message) { List arguments = new ArrayList<>(); - arguments.add(header.name()); + arguments.add(header != null ? header.name() : "unknown"); arguments.add(String.valueOf(lineNumber)); arguments.add(message); return BundleUtil.getStringFromBundle("api.admin.datasetfield.load.GeneralErrorMessage", arguments); @@ -334,9 +336,9 @@ public String getGeneralErrorMessage(HeaderType header, int lineNumber, String m /** * Turn ArrayIndexOutOfBoundsException into an informative error message - * @param lineNumber * @param header - * @param e + * @param lineNumber + * @param wrongIndex * @return */ public String getArrayIndexOutOfBoundMessage(HeaderType header, diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java b/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java index 369a22fe8d7..2ec10816acc 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java @@ -2171,8 +2171,32 @@ public Response getAssignments(@Context ContainerRequestContext crc, @PathParam( @GET @AuthRequired + @Deprecated(forRemoval = true, since = "2024-10-17") @Path("{id}/privateUrl") public Response getPrivateUrlData(@Context ContainerRequestContext crc, @PathParam("id") String idSupplied) { + return getPreviewUrlData(crc, idSupplied); + } + + @POST + @AuthRequired + @Deprecated(forRemoval = true, since = "2024-10-17") + @Path("{id}/privateUrl") + public Response createPrivateUrl(@Context ContainerRequestContext crc, @PathParam("id") String idSupplied, @DefaultValue("false") @QueryParam("anonymizedAccess") boolean anonymizedAccess) { + return createPreviewUrl(crc, idSupplied, anonymizedAccess); + } + + @DELETE + @AuthRequired + @Deprecated(forRemoval = true, since = "2024-10-17") + @Path("{id}/privateUrl") + public Response deletePrivateUrl(@Context 
ContainerRequestContext crc, @PathParam("id") String idSupplied) { + return deletePreviewUrl(crc, idSupplied); + } + + @GET + @AuthRequired + @Path("{id}/previewUrl") + public Response getPreviewUrlData(@Context ContainerRequestContext crc, @PathParam("id") String idSupplied) { return response( req -> { PrivateUrl privateUrl = execCommand(new GetPrivateUrlCommand(req, findDatasetOrDie(idSupplied))); return (privateUrl != null) ? ok(json(privateUrl)) @@ -2182,8 +2206,8 @@ public Response getPrivateUrlData(@Context ContainerRequestContext crc, @PathPar @POST @AuthRequired - @Path("{id}/privateUrl") - public Response createPrivateUrl(@Context ContainerRequestContext crc, @PathParam("id") String idSupplied,@DefaultValue("false") @QueryParam ("anonymizedAccess") boolean anonymizedAccess) { + @Path("{id}/previewUrl") + public Response createPreviewUrl(@Context ContainerRequestContext crc, @PathParam("id") String idSupplied,@DefaultValue("false") @QueryParam ("anonymizedAccess") boolean anonymizedAccess) { if(anonymizedAccess && settingsSvc.getValueForKey(SettingsServiceBean.Key.AnonymizedFieldTypeNames)==null) { throw new NotAcceptableException("Anonymized Access not enabled"); } @@ -2194,8 +2218,8 @@ public Response createPrivateUrl(@Context ContainerRequestContext crc, @PathPara @DELETE @AuthRequired - @Path("{id}/privateUrl") - public Response deletePrivateUrl(@Context ContainerRequestContext crc, @PathParam("id") String idSupplied) { + @Path("{id}/previewUrl") + public Response deletePreviewUrl(@Context ContainerRequestContext crc, @PathParam("id") String idSupplied) { return response( req -> { Dataset dataset = findDatasetOrDie(idSupplied); PrivateUrl privateUrl = execCommand(new GetPrivateUrlCommand(req, dataset)); @@ -2208,6 +2232,7 @@ public Response deletePrivateUrl(@Context ContainerRequestContext crc, @PathPara }, getRequestUser(crc)); } + @GET @AuthRequired @Path("{id}/thumbnail/candidates") @@ -2992,6 +3017,26 @@ public Response cleanStorage(@Context 
ContainerRequestContext crc, @PathParam("i } + @GET + @AuthRequired + @Path("{id}/versions/{versionId1}/compare/{versionId2}") + public Response getCompareVersions(@Context ContainerRequestContext crc, @PathParam("id") String id, + @PathParam("versionId1") String versionId1, + @PathParam("versionId2") String versionId2, + @Context UriInfo uriInfo, @Context HttpHeaders headers) { + try { + DataverseRequest req = createDataverseRequest(getRequestUser(crc)); + DatasetVersion dsv1 = getDatasetVersionOrDie(req, versionId1, findDatasetOrDie(id), uriInfo, headers); + DatasetVersion dsv2 = getDatasetVersionOrDie(req, versionId2, findDatasetOrDie(id), uriInfo, headers); + if (dsv1.getCreateTime().getTime() > dsv2.getCreateTime().getTime()) { + return error(BAD_REQUEST, BundleUtil.getStringFromBundle("dataset.version.compare.incorrect.order")); + } + return ok(DatasetVersion.compareVersions(dsv1, dsv2)); + } catch (WrappedResponse wr) { + return wr.getResponse(); + } + } + private static Set getDatasetFilenames(Dataset dataset) { Set files = new HashSet<>(); for (DataFile dataFile: dataset.getFiles()) { @@ -4833,6 +4878,33 @@ public Response getPrivateUrlDatasetVersion(@PathParam("privateUrlToken") String } return ok(responseJson); } + + @GET + @Path("previewUrlDatasetVersion/{previewUrlToken}") + public Response getPreviewUrlDatasetVersion(@PathParam("previewUrlToken") String previewUrlToken, @QueryParam("returnOwners") boolean returnOwners) { + PrivateUrlUser privateUrlUser = privateUrlService.getPrivateUrlUserFromToken(previewUrlToken); + if (privateUrlUser == null) { + return notFound("Private URL user not found"); + } + boolean isAnonymizedAccess = privateUrlUser.hasAnonymizedAccess(); + String anonymizedFieldTypeNames = settingsSvc.getValueForKey(SettingsServiceBean.Key.AnonymizedFieldTypeNames); + if(isAnonymizedAccess && anonymizedFieldTypeNames == null) { + throw new NotAcceptableException("Anonymized Access not enabled"); + } + DatasetVersion dsv = 
privateUrlService.getDraftDatasetVersionFromToken(previewUrlToken); + if (dsv == null || dsv.getId() == null) { + return notFound("Dataset version not found"); + } + JsonObjectBuilder responseJson; + if (isAnonymizedAccess) { + List anonymizedFieldTypeNamesList = new ArrayList<>(Arrays.asList(anonymizedFieldTypeNames.split(",\\s"))); + responseJson = json(dsv, anonymizedFieldTypeNamesList, true, returnOwners); + } else { + responseJson = json(dsv, null, true, returnOwners); + } + return ok(responseJson); + } + @GET @Path("privateUrlDatasetVersion/{privateUrlToken}/citation") @@ -4845,6 +4917,18 @@ public Response getPrivateUrlDatasetVersionCitation(@PathParam("privateUrlToken" return (dsv == null || dsv.getId() == null) ? notFound("Dataset version not found") : ok(dsv.getCitation(true, privateUrlUser.hasAnonymizedAccess())); } + + @GET + @Path("previewUrlDatasetVersion/{previewUrlToken}/citation") + public Response getPreviewUrlDatasetVersionCitation(@PathParam("previewUrlToken") String previewUrlToken) { + PrivateUrlUser privateUrlUser = privateUrlService.getPrivateUrlUserFromToken(previewUrlToken); + if (privateUrlUser == null) { + return notFound("Private URL user not found"); + } + DatasetVersion dsv = privateUrlService.getDraftDatasetVersionFromToken(previewUrlToken); + return (dsv == null || dsv.getId() == null) ? 
notFound("Dataset version not found") + : ok(dsv.getCitation(true, privateUrlUser.hasAnonymizedAccess())); + } @GET @AuthRequired diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Dataverses.java b/src/main/java/edu/harvard/iq/dataverse/api/Dataverses.java index f05bba8830e..f864a5a9d1c 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/Dataverses.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/Dataverses.java @@ -195,7 +195,7 @@ public Response updateDataverse(@Context ContainerRequestContext crc, String bod List facets = parseFacets(body); AuthenticatedUser u = getRequestAuthenticatedUserOrDie(crc); - dataverse = execCommand(new UpdateDataverseCommand(dataverse, facets, null, createDataverseRequest(u), inputLevels, metadataBlocks, updatedDataverseDTO)); + dataverse = execCommand(new UpdateDataverseCommand(dataverse, facets, null, createDataverseRequest(u), inputLevels, metadataBlocks, updatedDataverseDTO, true)); return ok(json(dataverse)); } catch (WrappedResponse ww) { diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Info.java b/src/main/java/edu/harvard/iq/dataverse/api/Info.java index 257519677d3..2439c996816 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/Info.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/Info.java @@ -12,12 +12,17 @@ import jakarta.ws.rs.Produces; import org.apache.commons.io.IOUtils; +import edu.harvard.iq.dataverse.export.ExportService; import edu.harvard.iq.dataverse.settings.JvmSettings; import edu.harvard.iq.dataverse.settings.SettingsServiceBean; import edu.harvard.iq.dataverse.util.BundleUtil; import edu.harvard.iq.dataverse.util.SystemConfig; +import io.gdcc.spi.export.Exporter; +import io.gdcc.spi.export.ExportException; +import io.gdcc.spi.export.XMLExporter; import jakarta.ejb.EJB; import jakarta.json.Json; +import jakarta.json.JsonObjectBuilder; import jakarta.json.JsonValue; import jakarta.ws.rs.GET; import jakarta.ws.rs.Path; @@ -92,6 +97,32 @@ public Response getZipDownloadLimit() { 
return ok(zipDownloadLimit); } + @GET + @Path("exportFormats") + public Response getExportFormats() { + JsonObjectBuilder responseModel = Json.createObjectBuilder(); + ExportService instance = ExportService.getInstance(); + for (String[] labels : instance.getExportersLabels()) { + try { + Exporter exporter = instance.getExporter(labels[1]); + JsonObjectBuilder exporterObject = Json.createObjectBuilder().add("displayName", labels[0]) + .add("mediaType", exporter.getMediaType()).add("isHarvestable", exporter.isHarvestable()) + .add("isVisibleInUserInterface", exporter.isAvailableToUsers()); + if (exporter instanceof XMLExporter xmlExporter) { + exporterObject.add("XMLNameSpace", xmlExporter.getXMLNameSpace()) + .add("XMLSchemaLocation", xmlExporter.getXMLSchemaLocation()) + .add("XMLSchemaVersion", xmlExporter.getXMLSchemaVersion()); + } + responseModel.add(labels[1], exporterObject); + } + catch (ExportException ex){ + logger.warning("Failed to get: " + labels[1]); + logger.warning(ex.getLocalizedMessage()); + } + } + return ok(responseModel); + } + private Response getSettingResponseByKey(SettingsServiceBean.Key key) { String setting = settingsService.getValueForKey(key); if (setting != null) { diff --git a/src/main/java/edu/harvard/iq/dataverse/api/SavedSearches.java b/src/main/java/edu/harvard/iq/dataverse/api/SavedSearches.java index 33a11a2df23..e6519c9ff36 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/SavedSearches.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/SavedSearches.java @@ -181,7 +181,7 @@ public Response delete(@PathParam("id") long doomedId, @QueryParam("unlink") boo try { wasDeleted = savedSearchSvc.delete(doomedId, unlink); } catch (Exception e) { - return error(INTERNAL_SERVER_ERROR, "Problem while trying to unlink links of saved search id " + doomedId); + return error(INTERNAL_SERVER_ERROR, "Problem while trying to unlink links of saved search id " + doomedId + ". 
Exception: " + e.getLocalizedMessage()); } if (wasDeleted) { diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Search.java b/src/main/java/edu/harvard/iq/dataverse/api/Search.java index 6b9fcb38305..f86f9f446fa 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/Search.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/Search.java @@ -175,7 +175,7 @@ public Response search( JsonArrayBuilder itemsArrayBuilder = Json.createArrayBuilder(); List solrSearchResults = solrQueryResponse.getSolrSearchResults(); for (SolrSearchResult solrSearchResult : solrSearchResults) { - itemsArrayBuilder.add(solrSearchResult.json(showRelevance, showEntityIds, showApiUrls, metadataFields, getDatasetFileCount(solrSearchResult))); + itemsArrayBuilder.add(solrSearchResult.json(showRelevance, showEntityIds, showApiUrls, metadataFields)); } JsonObjectBuilder spelling_alternatives = Json.createObjectBuilder(); @@ -229,15 +229,6 @@ public Response search( } } - private Long getDatasetFileCount(SolrSearchResult solrSearchResult) { - DvObject dvObject = solrSearchResult.getEntity(); - if (dvObject.isInstanceofDataset()) { - DatasetVersion datasetVersion = ((Dataset) dvObject).getVersionFromId(solrSearchResult.getDatasetVersionId()); - return datasetVersionFilesServiceBean.getFileMetadataCount(datasetVersion); - } - return null; - } - private User getUser(ContainerRequestContext crc) throws WrappedResponse { User userToExecuteSearchAs = GuestUser.get(); try { diff --git a/src/main/java/edu/harvard/iq/dataverse/api/imports/ImportDDIServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/api/imports/ImportDDIServiceBean.java index 85d4868605d..35d35316f73 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/imports/ImportDDIServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/imports/ImportDDIServiceBean.java @@ -210,7 +210,7 @@ private void processDDI(ImportType importType, XMLStreamReader xmlr, DatasetDTO // study description section. 
we'll use the one we found in // the codeBook entry: FieldDTO otherIdValue = FieldDTO.createPrimitiveFieldDTO("otherIdValue", codeBookLevelId); - FieldDTO otherId = FieldDTO.createCompoundFieldDTO("otherId", otherIdValue); + FieldDTO otherId = FieldDTO.createMultipleCompoundFieldDTO("otherId", otherIdValue); citationBlock.getFields().add(otherId); } diff --git a/src/main/java/edu/harvard/iq/dataverse/api/imports/ImportGenericServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/api/imports/ImportGenericServiceBean.java index d32a548c8bf..aa5b25e3967 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/imports/ImportGenericServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/imports/ImportGenericServiceBean.java @@ -150,12 +150,16 @@ public DatasetDTO processXML( XMLStreamReader xmlr, ForeignMetadataFormatMapping } - // Helper method for importing harvested Dublin Core xml. + // Helper methods for importing harvested Dublin Core xml. // Dublin Core is considered a mandatory, built in metadata format mapping. // It is distributed as required content, in reference_data.sql. // Note that arbitrary formatting tags are supported for the outer xml // wrapper. -- L.A. 4.5 public DatasetDTO processOAIDCxml(String DcXmlToParse) throws XMLStreamException { + return processOAIDCxml(DcXmlToParse, null, false); + } + + public DatasetDTO processOAIDCxml(String DcXmlToParse, String oaiIdentifier, boolean preferSuppliedIdentifier) throws XMLStreamException { // look up DC metadata mapping: ForeignMetadataFormatMapping dublinCoreMapping = findFormatMappingByName(DCTERMS); @@ -185,18 +189,37 @@ public DatasetDTO processOAIDCxml(String DcXmlToParse) throws XMLStreamException datasetDTO.getDatasetVersion().setVersionState(DatasetVersion.VersionState.RELEASED); - // Our DC import handles the contents of the dc:identifier field - // as an "other id". 
In the context of OAI harvesting, we expect - // the identifier to be a global id, so we need to rearrange that: + // In some cases, the identifier that we want to use for the dataset is + // already supplied to the method explicitly. For example, in some + // harvesting cases we'll want to use the OAI identifier (the identifier + // from the

section of the OAI record) for that purpose, without + // expecting to find a valid persistent id in the body of the DC record: - String identifier = getOtherIdFromDTO(datasetDTO.getDatasetVersion()); - logger.fine("Imported identifier: "+identifier); + String globalIdentifier; - String globalIdentifier = reassignIdentifierAsGlobalId(identifier, datasetDTO); - logger.fine("Detected global identifier: "+globalIdentifier); + if (oaiIdentifier != null) { + logger.fine("Attempting to use " + oaiIdentifier + " as the persistentId of the imported dataset"); + + globalIdentifier = reassignIdentifierAsGlobalId(oaiIdentifier, datasetDTO); + } else { + // Our DC import handles the contents of the dc:identifier field + // as an "other id". Unless we are using an externally supplied + // global id, we will be using the first such "other id" that we + // can parse and recognize as the global id for the imported dataset + // (note that this is the default behavior during harvesting), + // so we need to reaassign it accordingly: + String identifier = selectIdentifier(datasetDTO.getDatasetVersion(), oaiIdentifier, preferSuppliedIdentifier); + logger.fine("Imported identifier: " + identifier); + + globalIdentifier = reassignIdentifierAsGlobalId(identifier, datasetDTO); + logger.fine("Detected global identifier: " + globalIdentifier); + } if (globalIdentifier == null) { - throw new EJBException("Failed to find a global identifier in the OAI_DC XML record."); + String exceptionMsg = oaiIdentifier == null ? + "Failed to find a global identifier in the OAI_DC XML record." 
: + "Failed to parse the supplied identifier as a valid Persistent Id"; + throw new EJBException(exceptionMsg); } return datasetDTO; @@ -205,8 +228,17 @@ public DatasetDTO processOAIDCxml(String DcXmlToParse) throws XMLStreamException private void processXMLElement(XMLStreamReader xmlr, String currentPath, String openingTag, ForeignMetadataFormatMapping foreignFormatMapping, DatasetDTO datasetDTO) throws XMLStreamException { logger.fine("entering processXMLElement; ("+currentPath+")"); - - for (int event = xmlr.next(); event != XMLStreamConstants.END_DOCUMENT; event = xmlr.next()) { + + while (xmlr.hasNext()) { + + int event; + try { + event = xmlr.next(); + } catch (XMLStreamException ex) { + logger.warning("Error occurred in the XML parsing : " + ex.getMessage()); + continue; // Skip Undeclared namespace prefix and Unexpected close tag related to com.ctc.wstx.exc.WstxParsingException + } + if (event == XMLStreamConstants.START_ELEMENT) { String currentElement = xmlr.getLocalName(); @@ -335,8 +367,20 @@ private FieldDTO makeDTO(DatasetFieldType dataverseFieldType, FieldDTO value, St return value; } - private String getOtherIdFromDTO(DatasetVersionDTO datasetVersionDTO) { + public String selectIdentifier(DatasetVersionDTO datasetVersionDTO, String suppliedIdentifier) { + return selectIdentifier(datasetVersionDTO, suppliedIdentifier, false); + } + + private String selectIdentifier(DatasetVersionDTO datasetVersionDTO, String suppliedIdentifier, boolean preferSuppliedIdentifier) { List otherIds = new ArrayList<>(); + + if (suppliedIdentifier != null && preferSuppliedIdentifier) { + // This supplied identifier (in practice, his is likely the OAI-PMH + // identifier from the
section) will be our first + // choice candidate for the pid of the imported dataset: + otherIds.add(suppliedIdentifier); + } + for (Map.Entry entry : datasetVersionDTO.getMetadataBlocks().entrySet()) { String key = entry.getKey(); MetadataBlockDTO value = entry.getValue(); @@ -354,6 +398,16 @@ private String getOtherIdFromDTO(DatasetVersionDTO datasetVersionDTO) { } } } + + if (suppliedIdentifier != null && !preferSuppliedIdentifier) { + // Unless specifically instructed to prefer this extra identifier + // (in practice, this is likely the OAI-PMH identifier from the + //
section), we will try to use it as the *last* + // possible candidate for the pid, so, adding it to the end of the + // list: + otherIds.add(suppliedIdentifier); + } + if (!otherIds.isEmpty()) { // We prefer doi or hdl identifiers like "doi:10.7910/DVN/1HE30F" for (String otherId : otherIds) { diff --git a/src/main/java/edu/harvard/iq/dataverse/api/imports/ImportServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/api/imports/ImportServiceBean.java index b203738a9fd..7dc2aed799e 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/imports/ImportServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/imports/ImportServiceBean.java @@ -208,7 +208,13 @@ public JsonObjectBuilder handleFile(DataverseRequest dataverseRequest, Dataverse } @TransactionAttribute(TransactionAttributeType.REQUIRES_NEW) - public Dataset doImportHarvestedDataset(DataverseRequest dataverseRequest, HarvestingClient harvestingClient, String harvestIdentifier, String metadataFormat, File metadataFile, Date oaiDateStamp, PrintWriter cleanupLog) throws ImportException, IOException { + public Dataset doImportHarvestedDataset(DataverseRequest dataverseRequest, + HarvestingClient harvestingClient, + String harvestIdentifier, + String metadataFormat, + File metadataFile, + Date oaiDateStamp, + PrintWriter cleanupLog) throws ImportException, IOException { if (harvestingClient == null || harvestingClient.getDataverse() == null) { throw new ImportException("importHarvestedDataset called with a null harvestingClient, or an invalid harvestingClient."); } @@ -244,8 +250,8 @@ public Dataset doImportHarvestedDataset(DataverseRequest dataverseRequest, Harve } else if ("dc".equalsIgnoreCase(metadataFormat) || "oai_dc".equals(metadataFormat)) { logger.fine("importing DC "+metadataFile.getAbsolutePath()); try { - String xmlToParse = new String(Files.readAllBytes(metadataFile.toPath())); - dsDTO = importGenericService.processOAIDCxml(xmlToParse); + String xmlToParse = new 
String(Files.readAllBytes(metadataFile.toPath())); + dsDTO = importGenericService.processOAIDCxml(xmlToParse, harvestIdentifier, harvestingClient.isUseOaiIdentifiersAsPids()); } catch (IOException | XMLStreamException e) { throw new ImportException("Failed to process Dublin Core XML record: "+ e.getClass() + " (" + e.getMessage() + ")"); } @@ -313,7 +319,7 @@ public Dataset doImportHarvestedDataset(DataverseRequest dataverseRequest, Harve // Creating a new dataset from scratch: harvestedDataset = parser.parseDataset(obj); - + harvestedDataset.setHarvestedFrom(harvestingClient); harvestedDataset.setHarvestIdentifier(harvestIdentifier); diff --git a/src/main/java/edu/harvard/iq/dataverse/dataaccess/GlobusOverlayAccessIO.java b/src/main/java/edu/harvard/iq/dataverse/dataaccess/GlobusOverlayAccessIO.java index 3bf2107e52b..d0da66c38e0 100644 --- a/src/main/java/edu/harvard/iq/dataverse/dataaccess/GlobusOverlayAccessIO.java +++ b/src/main/java/edu/harvard/iq/dataverse/dataaccess/GlobusOverlayAccessIO.java @@ -215,7 +215,7 @@ public long retrieveSizeFromMedia() { JsonArray dataArray = responseJson.getJsonArray("DATA"); if (dataArray != null && dataArray.size() != 0) { //File found - return (long) responseJson.getJsonArray("DATA").getJsonObject(0).getJsonNumber("size").longValueExact(); + return (long) dataArray.getJsonObject(0).getJsonNumber("size").longValueExact(); } } else { logger.warning("Response from " + get.getURI().toString() + " was " diff --git a/src/main/java/edu/harvard/iq/dataverse/dataaccess/S3AccessIO.java b/src/main/java/edu/harvard/iq/dataverse/dataaccess/S3AccessIO.java index d2fdec7b323..5b9e496281f 100644 --- a/src/main/java/edu/harvard/iq/dataverse/dataaccess/S3AccessIO.java +++ b/src/main/java/edu/harvard/iq/dataverse/dataaccess/S3AccessIO.java @@ -753,6 +753,12 @@ public Path getFileSystemPath() throws UnsupportedDataAccessOperationException { @Override public boolean exists() { + try { + key = getMainFileKey(); + } catch (IOException e) { + 
logger.warning("Caught an IOException in S3AccessIO.exists(): " + e.getMessage()); + return false; + } String destinationKey = null; if (dvObject instanceof DataFile) { destinationKey = key; diff --git a/src/main/java/edu/harvard/iq/dataverse/datasetutility/AddReplaceFileHelper.java b/src/main/java/edu/harvard/iq/dataverse/datasetutility/AddReplaceFileHelper.java index a470f08f736..6b98848021c 100644 --- a/src/main/java/edu/harvard/iq/dataverse/datasetutility/AddReplaceFileHelper.java +++ b/src/main/java/edu/harvard/iq/dataverse/datasetutility/AddReplaceFileHelper.java @@ -136,8 +136,6 @@ public class AddReplaceFileHelper{ private String newFileName; // step 30 private String newFileContentType; // step 30 private String newStorageIdentifier; // step 30 - private String newCheckSum; // step 30 - private ChecksumType newCheckSumType; //step 30 // -- Optional private DataFile fileToReplace; // step 25 @@ -146,6 +144,7 @@ public class AddReplaceFileHelper{ private DatasetVersion clone; List initialFileList; List finalFileList; + private boolean trustSuppliedFileSizes; // ----------------------------------- // Ingested files @@ -610,15 +609,9 @@ private boolean runAddReplacePhase1(Dataset owner, return false; } - if(optionalFileParams != null) { - if(optionalFileParams.hasCheckSum()) { - newCheckSum = optionalFileParams.getCheckSum(); - newCheckSumType = optionalFileParams.getCheckSumType(); - } - } msgt("step_030_createNewFilesViaIngest"); - if (!this.step_030_createNewFilesViaIngest()){ + if (!this.step_030_createNewFilesViaIngest(optionalFileParams)){ return false; } @@ -1191,7 +1184,7 @@ private boolean step_007_auto_isReplacementInLatestVersion(DataFile existingFile } - private boolean step_030_createNewFilesViaIngest(){ + private boolean step_030_createNewFilesViaIngest(OptionalFileParams optionalFileParams){ if (this.hasError()){ return false; @@ -1203,21 +1196,28 @@ private boolean step_030_createNewFilesViaIngest(){ //Don't repeatedly update the clone (losing 
changes) in multifile case clone = workingVersion.cloneDatasetVersion(); } + + Long suppliedFileSize = null; + String newCheckSum = null; + ChecksumType newCheckSumType = null; + + + if (optionalFileParams != null) { + if (optionalFileParams.hasCheckSum()) { + newCheckSum = optionalFileParams.getCheckSum(); + newCheckSumType = optionalFileParams.getCheckSumType(); + } + if (trustSuppliedFileSizes && optionalFileParams.hasFileSize()) { + suppliedFileSize = optionalFileParams.getFileSize(); + } + } + try { - /*CreateDataFileResult result = FileUtil.createDataFiles(workingVersion, - this.newFileInputStream, - this.newFileName, - this.newFileContentType, - this.newStorageIdentifier, - this.newCheckSum, - this.newCheckSumType, - this.systemConfig);*/ - UploadSessionQuotaLimit quota = null; if (systemConfig.isStorageQuotasEnforced()) { quota = fileService.getUploadSessionQuotaLimit(dataset); } - Command cmd = new CreateNewDataFilesCommand(dvRequest, workingVersion, newFileInputStream, newFileName, newFileContentType, newStorageIdentifier, quota, newCheckSum, newCheckSumType); + Command cmd = new CreateNewDataFilesCommand(dvRequest, workingVersion, newFileInputStream, newFileName, newFileContentType, newStorageIdentifier, quota, newCheckSum, newCheckSumType, suppliedFileSize); CreateDataFileResult createDataFilesResult = commandEngine.submit(cmd); initialFileList = createDataFilesResult.getDataFiles(); @@ -2033,9 +2033,15 @@ public void setDuplicateFileWarning(String duplicateFileWarning) { * @param jsonData - an array of jsonData entries (one per file) using the single add file jsonData format * @param dataset * @param authUser + * @param trustSuppliedSizes - whether to accept the fileSize values passed + * in jsonData (we don't want to trust the users of the S3 direct + * upload API with that information - we will verify the status of + * the files in the S3 bucket and confirm the sizes in the process. 
+ * we do want GlobusService to be able to pass the file sizes, since + * they are obtained and verified via a Globus API lookup). * @return */ - public Response addFiles(String jsonData, Dataset dataset, User authUser) { + public Response addFiles(String jsonData, Dataset dataset, User authUser, boolean trustSuppliedFileSizes) { msgt("(addFilesToDataset) jsonData: " + jsonData.toString()); JsonArrayBuilder jarr = Json.createArrayBuilder(); @@ -2044,6 +2050,7 @@ public Response addFiles(String jsonData, Dataset dataset, User authUser) { int totalNumberofFiles = 0; int successNumberofFiles = 0; + this.trustSuppliedFileSizes = trustSuppliedFileSizes; // ----------------------------------------------------------- // Read jsonData and Parse files information from jsondata : // ----------------------------------------------------------- @@ -2176,6 +2183,10 @@ public Response addFiles(String jsonData, Dataset dataset, User authUser) { .add("data", Json.createObjectBuilder().add("Files", jarr).add("Result", result)).build() ).build(); } + public Response addFiles(String jsonData, Dataset dataset, User authUser) { + return addFiles(jsonData, dataset, authUser, false); + } + /** * Replace multiple files with prepositioned replacements as listed in the * jsonData. Works with direct upload, Globus, and other out-of-band methods. diff --git a/src/main/java/edu/harvard/iq/dataverse/datasetutility/OptionalFileParams.java b/src/main/java/edu/harvard/iq/dataverse/datasetutility/OptionalFileParams.java index 959dbc4e262..54844160163 100644 --- a/src/main/java/edu/harvard/iq/dataverse/datasetutility/OptionalFileParams.java +++ b/src/main/java/edu/harvard/iq/dataverse/datasetutility/OptionalFileParams.java @@ -39,6 +39,12 @@ * - Provenance related information * * @author rmp553 + * @todo (?) We may want to consider renaming this class to DataFileParams or + * DataFileInfo... 
it was originally created to encode some bits of info - + * the file "tags" specifically, that didn't fit in elsewhere in the normal + * workflow; but it's been expanded to cover pretty much everything else associated + * with DataFiles and it's not really "optional" anymore when, for example, used + * in the direct upload workflow. (?) */ public class OptionalFileParams { @@ -76,6 +82,8 @@ public class OptionalFileParams { public static final String MIME_TYPE_ATTR_NAME = "mimeType"; private String checkSumValue; private ChecksumType checkSumType; + public static final String FILE_SIZE_ATTR_NAME = "fileSize"; + private Long fileSize; public static final String LEGACY_CHECKSUM_ATTR_NAME = "md5Hash"; public static final String CHECKSUM_OBJECT_NAME = "checksum"; public static final String CHECKSUM_OBJECT_TYPE = "@type"; @@ -268,6 +276,18 @@ public String getCheckSum() { public ChecksumType getCheckSumType() { return checkSumType; } + + public boolean hasFileSize() { + return fileSize != null; + } + + public Long getFileSize() { + return fileSize; + } + + public void setFileSize(long fileSize) { + this.fileSize = fileSize; + } /** * Set tags @@ -416,7 +436,13 @@ else if ((jsonObj.has(CHECKSUM_OBJECT_NAME)) && (!jsonObj.get(CHECKSUM_OBJECT_NA this.checkSumType = ChecksumType.fromString(((JsonObject) jsonObj.get(CHECKSUM_OBJECT_NAME)).get(CHECKSUM_OBJECT_TYPE).getAsString()); } - + // ------------------------------- + // get file size as a Long, if supplied + // ------------------------------- + if ((jsonObj.has(FILE_SIZE_ATTR_NAME)) && (!jsonObj.get(FILE_SIZE_ATTR_NAME).isJsonNull())){ + + this.fileSize = jsonObj.get(FILE_SIZE_ATTR_NAME).getAsLong(); + } // ------------------------------- // get tags // ------------------------------- diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/AbstractWriteDataverseCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/AbstractWriteDataverseCommand.java index 40c2abf5d21..91f3a5b823c 
100644 --- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/AbstractWriteDataverseCommand.java +++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/AbstractWriteDataverseCommand.java @@ -19,13 +19,15 @@ abstract class AbstractWriteDataverseCommand extends AbstractCommand private final List inputLevels; private final List facets; protected final List metadataBlocks; + private final boolean resetRelationsOnNullValues; public AbstractWriteDataverseCommand(Dataverse dataverse, Dataverse affectedDataverse, DataverseRequest request, List facets, List inputLevels, - List metadataBlocks) { + List metadataBlocks, + boolean resetRelationsOnNullValues) { super(request, affectedDataverse); this.dataverse = dataverse; if (facets != null) { @@ -43,42 +45,61 @@ public AbstractWriteDataverseCommand(Dataverse dataverse, } else { this.metadataBlocks = null; } + this.resetRelationsOnNullValues = resetRelationsOnNullValues; } @Override public Dataverse execute(CommandContext ctxt) throws CommandException { dataverse = innerExecute(ctxt); + processMetadataBlocks(); + processFacets(ctxt); + processInputLevels(ctxt); + + return ctxt.dataverses().save(dataverse); + } + + private void processMetadataBlocks() { if (metadataBlocks != null && !metadataBlocks.isEmpty()) { dataverse.setMetadataBlockRoot(true); dataverse.setMetadataBlocks(metadataBlocks); + } else if (resetRelationsOnNullValues) { + dataverse.setMetadataBlockRoot(false); + dataverse.clearMetadataBlocks(); } + } + private void processFacets(CommandContext ctxt) { if (facets != null) { ctxt.facets().deleteFacetsFor(dataverse); - + dataverse.setDataverseFacets(new ArrayList<>()); + if (!facets.isEmpty()) { dataverse.setFacetRoot(true); } - int i = 0; - for (DatasetFieldType df : facets) { - ctxt.facets().create(i++, df, dataverse); + for (int i = 0; i < facets.size(); i++) { + ctxt.facets().create(i, facets.get(i), dataverse); } + } else if (resetRelationsOnNullValues) { + 
ctxt.facets().deleteFacetsFor(dataverse); + dataverse.setFacetRoot(false); } + } + private void processInputLevels(CommandContext ctxt) { if (inputLevels != null) { if (!inputLevels.isEmpty()) { dataverse.addInputLevelsMetadataBlocksIfNotPresent(inputLevels); } ctxt.fieldTypeInputLevels().deleteFacetsFor(dataverse); - for (DataverseFieldTypeInputLevel inputLevel : inputLevels) { + inputLevels.forEach(inputLevel -> { inputLevel.setDataverse(dataverse); ctxt.fieldTypeInputLevels().create(inputLevel); - } + }); + } else if (resetRelationsOnNullValues) { + ctxt.fieldTypeInputLevels().deleteFacetsFor(dataverse); } - - return ctxt.dataverses().save(dataverse); } abstract protected Dataverse innerExecute(CommandContext ctxt) throws IllegalCommandException; diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CreateDataverseCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CreateDataverseCommand.java index 145cfb6199c..3728f3ee6ce 100644 --- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CreateDataverseCommand.java +++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CreateDataverseCommand.java @@ -39,7 +39,7 @@ public CreateDataverseCommand(Dataverse created, List facets, List inputLevels, List metadataBlocks) { - super(created, created.getOwner(), request, facets, inputLevels, metadataBlocks); + super(created, created.getOwner(), request, facets, inputLevels, metadataBlocks, false); } @Override diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CreateNewDataFilesCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CreateNewDataFilesCommand.java index 76939751899..e9a2025b112 100644 --- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CreateNewDataFilesCommand.java +++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CreateNewDataFilesCommand.java @@ -93,6 +93,10 @@ public CreateNewDataFilesCommand(DataverseRequest aRequest, DatasetVersion 
versi this(aRequest, version, inputStream, fileName, suppliedContentType, newStorageIdentifier, quota, newCheckSum, newCheckSumType, null, null); } + public CreateNewDataFilesCommand(DataverseRequest aRequest, DatasetVersion version, InputStream inputStream, String fileName, String suppliedContentType, String newStorageIdentifier, UploadSessionQuotaLimit quota, String newCheckSum, DataFile.ChecksumType newCheckSumType, Long newFileSize) { + this(aRequest, version, inputStream, fileName, suppliedContentType, newStorageIdentifier, quota, newCheckSum, newCheckSumType, newFileSize, null); + } + // This version of the command must be used when files are created in the // context of creating a brand new dataset (from the Add Dataset page): diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CuratePublishedDatasetVersionCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CuratePublishedDatasetVersionCommand.java index e6e8279a314..e378e2e2ef7 100644 --- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CuratePublishedDatasetVersionCommand.java +++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CuratePublishedDatasetVersionCommand.java @@ -131,7 +131,7 @@ public Dataset execute(CommandContext ctxt) throws CommandException { throw new IllegalCommandException(BundleUtil.getStringFromBundle("datasetversion.update.failure"), this); } else { - metadataUpdated = DatasetVersionDifference.compareFileMetadatas(publishedFmd, draftFmd); + metadataUpdated = !DatasetVersionDifference.compareFileMetadatas(publishedFmd, draftFmd).isEmpty(); publishedFmd.setLabel(draftFmd.getLabel()); publishedFmd.setDescription(draftFmd.getDescription()); publishedFmd.setCategories(draftFmd.getCategories()); diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/PublishDatasetCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/PublishDatasetCommand.java index 1ac41105237..902bea7f833 100644 --- 
a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/PublishDatasetCommand.java +++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/PublishDatasetCommand.java @@ -11,6 +11,9 @@ import edu.harvard.iq.dataverse.engine.command.exception.IllegalCommandException; import edu.harvard.iq.dataverse.workflow.Workflow; import edu.harvard.iq.dataverse.workflow.WorkflowContext.TriggerType; + +import jakarta.persistence.OptimisticLockException; + import java.util.Optional; import java.util.logging.Logger; import static java.util.stream.Collectors.joining; @@ -105,10 +108,15 @@ public PublishDatasetResult execute(CommandContext ctxt) throws CommandException Optional prePubWf = ctxt.workflows().getDefaultWorkflow(TriggerType.PrePublishDataset); if ( prePubWf.isPresent() ) { // We start a workflow - theDataset = ctxt.em().merge(theDataset); - ctxt.em().flush(); - ctxt.workflows().start(prePubWf.get(), buildContext(theDataset, TriggerType.PrePublishDataset, datasetExternallyReleased), true); - return new PublishDatasetResult(theDataset, Status.Workflow); + try { + theDataset = ctxt.em().merge(theDataset); + ctxt.em().flush(); + ctxt.workflows().start(prePubWf.get(), + buildContext(theDataset, TriggerType.PrePublishDataset, datasetExternallyReleased), true); + return new PublishDatasetResult(theDataset, Status.Workflow); + } catch (OptimisticLockException e) { + throw new CommandException(e.getMessage(), e, this); + } } else{ // We will skip trying to register the global identifiers for datafiles @@ -157,7 +165,12 @@ public PublishDatasetResult execute(CommandContext ctxt) throws CommandException lock.setInfo(info); ctxt.datasets().addDatasetLock(theDataset, lock); } - theDataset = ctxt.em().merge(theDataset); + try { + theDataset = ctxt.em().merge(theDataset); + } catch (OptimisticLockException e) { + ctxt.datasets().removeDatasetLocks(theDataset, DatasetLock.Reason.finalizePublication); + throw new CommandException(e.getMessage(), e, this); + } // The call to 
FinalizePublicationCommand has been moved to the new @onSuccess() // method: //ctxt.datasets().callFinalizePublishCommandAsynchronously(theDataset.getId(), ctxt, request, datasetExternallyReleased); diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/UpdateDataverseCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/UpdateDataverseCommand.java index 55cc3708097..6dc4ab4d00d 100644 --- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/UpdateDataverseCommand.java +++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/UpdateDataverseCommand.java @@ -32,7 +32,7 @@ public UpdateDataverseCommand(Dataverse dataverse, List featuredDataverses, DataverseRequest request, List inputLevels) { - this(dataverse, facets, featuredDataverses, request, inputLevels, null, null); + this(dataverse, facets, featuredDataverses, request, inputLevels, null, null, false); } public UpdateDataverseCommand(Dataverse dataverse, @@ -41,8 +41,9 @@ public UpdateDataverseCommand(Dataverse dataverse, DataverseRequest request, List inputLevels, List metadataBlocks, - DataverseDTO updatedDataverseDTO) { - super(dataverse, dataverse, request, facets, inputLevels, metadataBlocks); + DataverseDTO updatedDataverseDTO, + boolean resetRelationsOnNullValues) { + super(dataverse, dataverse, request, facets, inputLevels, metadataBlocks, resetRelationsOnNullValues); if (featuredDataverses != null) { this.featuredDataverseList = new ArrayList<>(featuredDataverses); } else { diff --git a/src/main/java/edu/harvard/iq/dataverse/export/DDIExportServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/export/DDIExportServiceBean.java index edd01ae98a3..d76020cb8d8 100644 --- a/src/main/java/edu/harvard/iq/dataverse/export/DDIExportServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/export/DDIExportServiceBean.java @@ -98,8 +98,10 @@ public class DDIExportServiceBean { public static final String LEVEL_FILE = "file"; public static final String 
NOTE_TYPE_UNF = "VDC:UNF"; public static final String NOTE_TYPE_TAG = "DATAVERSE:TAG"; + public static final String NOTE_TYPE_FILEDESCRIPTION = "DATAVERSE:FILEDESC"; public static final String NOTE_SUBJECT_UNF = "Universal Numeric Fingerprint"; public static final String NOTE_SUBJECT_TAG = "Data File Tag"; + public static final String NOTE_SUBJECT_FILEDESCRIPTION = "DataFile Description"; /* * Internal service objects: @@ -742,11 +744,6 @@ private void createFileDscr(XMLStreamWriter xmlw, Set excludedFieldSet, xmlw.writeEndElement(); // fileName } - /* - xmlw.writeStartElement("fileCont"); - xmlw.writeCharacters( df.getContentType() ); - xmlw.writeEndElement(); // fileCont - */ // dimensions if (checkField("dimensns", excludedFieldSet, includedFieldSet)) { if (dt.getCaseQuantity() != null || dt.getVarQuantity() != null || dt.getRecordsPerCase() != null) { @@ -801,26 +798,6 @@ private void createFileDscr(XMLStreamWriter xmlw, Set excludedFieldSet, xmlw.writeEndElement(); // notes } - /* - xmlw.writeStartElement("notes"); - writeAttribute( xmlw, "type", "vdc:category" ); - xmlw.writeCharacters( fm.getCategory() ); - xmlw.writeEndElement(); // notes - */ - // A special note for LOCKSS crawlers indicating the restricted - // status of the file: - - /* - if (tdf != null && isRestrictedFile(tdf)) { - xmlw.writeStartElement("notes"); - writeAttribute( xmlw, "type", NOTE_TYPE_LOCKSS_CRAWL ); - writeAttribute( xmlw, "level", LEVEL_FILE ); - writeAttribute( xmlw, "subject", NOTE_SUBJECT_LOCKSS_PERM ); - xmlw.writeCharacters( "restricted" ); - xmlw.writeEndElement(); // notes - - } - */ if (checkField("tags", excludedFieldSet, includedFieldSet) && df.getTags() != null) { for (int i = 0; i < df.getTags().size(); i++) { xmlw.writeStartElement("notes"); @@ -831,6 +808,17 @@ private void createFileDscr(XMLStreamWriter xmlw, Set excludedFieldSet, xmlw.writeEndElement(); // notes } } + + // A dedicated node for the Description entry + if (!StringUtil.isEmpty(fm.getDescription())) { 
+ xmlw.writeStartElement("notes"); + xmlw.writeAttribute("level", LEVEL_FILE); + xmlw.writeAttribute("type", NOTE_TYPE_FILEDESCRIPTION); + xmlw.writeAttribute("subject", NOTE_SUBJECT_FILEDESCRIPTION); + xmlw.writeCharacters(fm.getDescription()); + xmlw.writeEndElement(); // notes + } + xmlw.writeEndElement(); // fileDscr } diff --git a/src/main/java/edu/harvard/iq/dataverse/export/ddi/DdiExportUtil.java b/src/main/java/edu/harvard/iq/dataverse/export/ddi/DdiExportUtil.java index f5efc448090..05ddbe83e78 100644 --- a/src/main/java/edu/harvard/iq/dataverse/export/ddi/DdiExportUtil.java +++ b/src/main/java/edu/harvard/iq/dataverse/export/ddi/DdiExportUtil.java @@ -14,8 +14,10 @@ import static edu.harvard.iq.dataverse.export.DDIExportServiceBean.LEVEL_FILE; import static edu.harvard.iq.dataverse.export.DDIExportServiceBean.NOTE_SUBJECT_TAG; import static edu.harvard.iq.dataverse.export.DDIExportServiceBean.NOTE_SUBJECT_UNF; +import static edu.harvard.iq.dataverse.export.DDIExportServiceBean.NOTE_SUBJECT_FILEDESCRIPTION; import static edu.harvard.iq.dataverse.export.DDIExportServiceBean.NOTE_TYPE_TAG; import static edu.harvard.iq.dataverse.export.DDIExportServiceBean.NOTE_TYPE_UNF; +import static edu.harvard.iq.dataverse.export.DDIExportServiceBean.NOTE_TYPE_FILEDESCRIPTION; import edu.harvard.iq.dataverse.export.DDIExporter; import edu.harvard.iq.dataverse.pidproviders.PidUtil; import edu.harvard.iq.dataverse.settings.SettingsServiceBean; @@ -1901,6 +1903,8 @@ private static void createFileDscr(XMLStreamWriter xmlw, JsonArray fileDetails) xmlw.writeEndElement(); // notes } + // If any tabular tags are present, each is formatted in a + // dedicated note: if (fileJson.containsKey("tabularTags")) { JsonArray tags = fileJson.getJsonArray("tabularTags"); for (int j = 0; j < tags.size(); j++) { @@ -1912,6 +1916,17 @@ private static void createFileDscr(XMLStreamWriter xmlw, JsonArray fileDetails) xmlw.writeEndElement(); // notes } } + + // Adding a dedicated node for the 
description entry (for + // non-tabular files we format it under the field) + if (fileJson.containsKey("description")) { + xmlw.writeStartElement("notes"); + xmlw.writeAttribute("level", LEVEL_FILE); + xmlw.writeAttribute("type", NOTE_TYPE_FILEDESCRIPTION); + xmlw.writeAttribute("subject", NOTE_SUBJECT_FILEDESCRIPTION); + xmlw.writeCharacters(fileJson.getString("description")); + xmlw.writeEndElement(); // notes + } // TODO: add the remaining fileDscr elements! xmlw.writeEndElement(); // fileDscr diff --git a/src/main/java/edu/harvard/iq/dataverse/globus/GlobusServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/globus/GlobusServiceBean.java index ac3c81622fc..58992805dc8 100644 --- a/src/main/java/edu/harvard/iq/dataverse/globus/GlobusServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/globus/GlobusServiceBean.java @@ -74,6 +74,7 @@ import edu.harvard.iq.dataverse.util.URLTokenUtil; import edu.harvard.iq.dataverse.util.UrlSignerUtil; import edu.harvard.iq.dataverse.util.json.JsonUtil; +import jakarta.json.JsonNumber; import jakarta.json.JsonReader; import jakarta.persistence.EntityManager; import jakarta.persistence.PersistenceContext; @@ -284,6 +285,52 @@ private int makeDir(GlobusEndpoint endpoint, String dir) { return result.status; } + private Map lookupFileSizes(GlobusEndpoint endpoint, String dir) { + MakeRequestResponse result; + + try { + logger.fine("Attempting to look up the contents of the Globus folder "+dir); + URL url = new URL( + "https://transfer.api.globusonline.org/v0.10/operation/endpoint/" + endpoint.getId() + + "/ls?path=" + dir); + result = makeRequest(url, "Bearer", endpoint.getClientToken(), "GET", null); + + switch (result.status) { + case 200: + logger.fine("Looked up directory " + dir + " successfully."); + break; + default: + logger.warning("Status " + result.status + " received when looking up dir " + dir); + logger.fine("Response: " + result.jsonResponse); + return null; + } + } catch (MalformedURLException ex) { + // 
Misconfiguration + logger.warning("Failed to list the contents of the directory "+ dir + " on endpoint " + endpoint.getId()); + return null; + } + + Map ret = new HashMap<>(); + + JsonObject listObject = JsonUtil.getJsonObject(result.jsonResponse); + JsonArray dataArray = listObject.getJsonArray("DATA"); + + if (dataArray != null && !dataArray.isEmpty()) { + for (int i = 0; i < dataArray.size(); i++) { + String dataType = dataArray.getJsonObject(i).getString("DATA_TYPE", null); + if (dataType != null && dataType.equals("file")) { + // is it safe to assume that any entry with a valid "DATA_TYPE": "file" + // will also have valid "name" and "size" entries? + String fileName = dataArray.getJsonObject(i).getString("name"); + long fileSize = dataArray.getJsonObject(i).getJsonNumber("size").longValueExact(); + ret.put(fileName, fileSize); + } + } + } + + return ret; + } + private int requestPermission(GlobusEndpoint endpoint, Dataset dataset, Permissions permissions) { Gson gson = new GsonBuilder().create(); MakeRequestResponse result = null; @@ -938,9 +985,20 @@ private void processUploadedFiles(JsonArray filesJsonArray, Dataset dataset, Aut inputList.add(fileId + "IDsplit" + fullPath + "IDsplit" + fileName); } + + Map fileSizeMap = null; + + if (filesJsonArray.size() >= systemConfig.getGlobusBatchLookupSize()) { + // Look up the sizes of all the files in the dataset folder, to avoid + // looking them up one by one later: + // @todo: we should only be doing this if this is a managed store, probably (?) 
+ GlobusEndpoint endpoint = getGlobusEndpoint(dataset); + fileSizeMap = lookupFileSizes(endpoint, endpoint.getBasePath()); + } // calculateMissingMetadataFields: checksum, mimetype JsonObject newfilesJsonObject = calculateMissingMetadataFields(inputList, myLogger); + JsonArray newfilesJsonArray = newfilesJsonObject.getJsonArray("files"); logger.fine("Size: " + newfilesJsonArray.size()); logger.fine("Val: " + JsonUtil.prettyPrint(newfilesJsonArray.getJsonObject(0))); @@ -964,20 +1022,26 @@ private void processUploadedFiles(JsonArray filesJsonArray, Dataset dataset, Aut if (newfileJsonObject != null) { logger.fine("List Size: " + newfileJsonObject.size()); // if (!newfileJsonObject.get(0).getString("hash").equalsIgnoreCase("null")) { - JsonPatch path = Json.createPatchBuilder() + JsonPatch patch = Json.createPatchBuilder() .add("/md5Hash", newfileJsonObject.get(0).getString("hash")).build(); - fileJsonObject = path.apply(fileJsonObject); - path = Json.createPatchBuilder() + fileJsonObject = patch.apply(fileJsonObject); + patch = Json.createPatchBuilder() .add("/mimeType", newfileJsonObject.get(0).getString("mime")).build(); - fileJsonObject = path.apply(fileJsonObject); + fileJsonObject = patch.apply(fileJsonObject); + // If we already know the size of this file on the Globus end, + // we'll pass it to /addFiles, to avoid looking up file sizes + // one by one: + if (fileSizeMap != null && fileSizeMap.get(fileId) != null) { + Long uploadedFileSize = fileSizeMap.get(fileId); + myLogger.info("Found size for file " + fileId + ": " + uploadedFileSize + " bytes"); + patch = Json.createPatchBuilder() + .add("/fileSize", Json.createValue(uploadedFileSize)).build(); + fileJsonObject = patch.apply(fileJsonObject); + } else { + logger.fine("No file size entry found for file "+fileId); + } addFilesJsonData.add(fileJsonObject); countSuccess++; - // } else { - // globusLogger.info(fileName - // + " will be skipped from adding to dataset by second API due to missing - // values "); 
- // countError++; - // } } else { myLogger.info(fileName + " will be skipped from adding to dataset in the final AddReplaceFileHelper.addFiles() call. "); @@ -1029,7 +1093,7 @@ private void processUploadedFiles(JsonArray filesJsonArray, Dataset dataset, Aut // The old code had 2 sec. of sleep, so ... Thread.sleep(2000); - Response addFilesResponse = addFileHelper.addFiles(newjsonData, dataset, authUser); + Response addFilesResponse = addFileHelper.addFiles(newjsonData, dataset, authUser, true); if (addFilesResponse == null) { logger.info("null response from addFiles call"); @@ -1211,7 +1275,7 @@ private GlobusTaskState globusStatusCheck(GlobusEndpoint endpoint, String taskId return task; } - public JsonObject calculateMissingMetadataFields(List inputList, Logger globusLogger) + private JsonObject calculateMissingMetadataFields(List inputList, Logger globusLogger) throws InterruptedException, ExecutionException, IOException { List> hashvalueCompletableFutures = inputList.stream() @@ -1230,7 +1294,7 @@ public JsonObject calculateMissingMetadataFields(List inputList, Logger }); JsonArrayBuilder filesObject = (JsonArrayBuilder) completableFuture.get(); - + JsonObject output = Json.createObjectBuilder().add("files", filesObject).build(); return output; diff --git a/src/main/java/edu/harvard/iq/dataverse/harvest/client/HarvestingClient.java b/src/main/java/edu/harvard/iq/dataverse/harvest/client/HarvestingClient.java index 0667f5594ce..7280b6af129 100644 --- a/src/main/java/edu/harvard/iq/dataverse/harvest/client/HarvestingClient.java +++ b/src/main/java/edu/harvard/iq/dataverse/harvest/client/HarvestingClient.java @@ -252,8 +252,16 @@ public void setAllowHarvestingMissingCVV(boolean allowHarvestingMissingCVV) { this.allowHarvestingMissingCVV = allowHarvestingMissingCVV; } - // TODO: do we need "orphanRemoval=true"? -- L.A. 4.4 - // TODO: should it be @OrderBy("startTime")? -- L.A. 
4.4 + private boolean useOaiIdAsPid; + + public boolean isUseOaiIdentifiersAsPids() { + return useOaiIdAsPid; + } + + public void setUseOaiIdentifiersAsPids(boolean useOaiIdAsPid) { + this.useOaiIdAsPid = useOaiIdAsPid; + } + @OneToMany(mappedBy="harvestingClient", cascade={CascadeType.REMOVE, CascadeType.MERGE, CascadeType.PERSIST}) @OrderBy("id") private List harvestHistory; diff --git a/src/main/java/edu/harvard/iq/dataverse/ingest/IngestServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/ingest/IngestServiceBean.java index b42fd950528..71c498a4d0b 100644 --- a/src/main/java/edu/harvard/iq/dataverse/ingest/IngestServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/ingest/IngestServiceBean.java @@ -344,10 +344,20 @@ public List saveAndAddFilesToDataset(DatasetVersion version, try { StorageIO dataAccess = DataAccess.getStorageIO(dataFile); //Populate metadata - dataAccess.open(DataAccessOption.READ_ACCESS); - // (the .open() above makes a remote call to check if - // the file exists and obtains its size) - confirmedFileSize = dataAccess.getSize(); + + // There are direct upload sub-cases where the file size + // is already known at this point. For example, direct uploads + // to S3 that go through the jsf dataset page. Or the Globus + // uploads, where the file sizes are looked up in bulk on + // the completion of the remote upload task. + if (dataFile.getFilesize() >= 0) { + confirmedFileSize = dataFile.getFilesize(); + } else { + dataAccess.open(DataAccessOption.READ_ACCESS); + // (the .open() above makes a remote call to check if + // the file exists and obtains its size) + confirmedFileSize = dataAccess.getSize(); + } // For directly-uploaded files, we will perform the file size // limit and quota checks here. 
Perform them *again*, in @@ -362,13 +372,16 @@ public List saveAndAddFilesToDataset(DatasetVersion version, if (fileSizeLimit == null || confirmedFileSize < fileSizeLimit) { //set file size - logger.fine("Setting file size: " + confirmedFileSize); - dataFile.setFilesize(confirmedFileSize); + if (dataFile.getFilesize() < 0) { + logger.fine("Setting file size: " + confirmedFileSize); + dataFile.setFilesize(confirmedFileSize); + } if (dataAccess instanceof S3AccessIO) { ((S3AccessIO) dataAccess).removeTempTag(); } savedSuccess = true; + logger.info("directly uploaded file successfully saved. file size: "+dataFile.getFilesize()); } } catch (IOException ioex) { logger.warning("Failed to get file size, storage id, or failed to remove the temp tag on the saved S3 object" + dataFile.getStorageIdentifier() + " (" diff --git a/src/main/java/edu/harvard/iq/dataverse/pidproviders/handle/HandlePidProvider.java b/src/main/java/edu/harvard/iq/dataverse/pidproviders/handle/HandlePidProvider.java index 9d61663d034..1f03d8a6cfb 100644 --- a/src/main/java/edu/harvard/iq/dataverse/pidproviders/handle/HandlePidProvider.java +++ b/src/main/java/edu/harvard/iq/dataverse/pidproviders/handle/HandlePidProvider.java @@ -59,6 +59,11 @@ * service. * As of now, it only does the registration updates, to accommodate * the modifyRegistration datasets API sub-command. + * + * Note that while Handles are nominally case sensitive, handle.net is + * configured to be case-insensitive and Dataverse makes case-insensitive + * database look-ups to find Handles (See #11003). That said, database + * entries are stored in the case matching the configuration of the provider. 
*/ public class HandlePidProvider extends AbstractPidProvider { diff --git a/src/main/java/edu/harvard/iq/dataverse/pidproviders/perma/PermaLinkPidProvider.java b/src/main/java/edu/harvard/iq/dataverse/pidproviders/perma/PermaLinkPidProvider.java index 7b55292350f..2cc0d41ede7 100644 --- a/src/main/java/edu/harvard/iq/dataverse/pidproviders/perma/PermaLinkPidProvider.java +++ b/src/main/java/edu/harvard/iq/dataverse/pidproviders/perma/PermaLinkPidProvider.java @@ -24,6 +24,9 @@ * overridable by a configurable parameter to support use of an external * resolver. * + * Note that while PermaLinks are nominally case sensitive, Dataverse makes + * case-insensitive database look-ups to find them (See #11003). That said, database + * entries are stored in the case matching the configuration of the provider. */ public class PermaLinkPidProvider extends AbstractPidProvider { diff --git a/src/main/java/edu/harvard/iq/dataverse/privateurl/PrivateUrl.java b/src/main/java/edu/harvard/iq/dataverse/privateurl/PrivateUrl.java index beb676f60d1..63b5bf03ea7 100644 --- a/src/main/java/edu/harvard/iq/dataverse/privateurl/PrivateUrl.java +++ b/src/main/java/edu/harvard/iq/dataverse/privateurl/PrivateUrl.java @@ -30,7 +30,7 @@ public class PrivateUrl { public PrivateUrl(RoleAssignment roleAssignment, Dataset dataset, String dataverseSiteUrl) { this.token = roleAssignment.getPrivateUrlToken(); - this.link = dataverseSiteUrl + "/privateurl.xhtml?token=" + token; + this.link = dataverseSiteUrl + "/previewurl.xhtml?token=" + token; this.dataset = dataset; this.roleAssignment = roleAssignment; } diff --git a/src/main/java/edu/harvard/iq/dataverse/privateurl/PrivateUrlPage.java b/src/main/java/edu/harvard/iq/dataverse/privateurl/PrivateUrlPage.java index 9af4bb6af9e..17c622be9e2 100644 --- a/src/main/java/edu/harvard/iq/dataverse/privateurl/PrivateUrlPage.java +++ b/src/main/java/edu/harvard/iq/dataverse/privateurl/PrivateUrlPage.java @@ -1,6 +1,10 @@ package 
edu.harvard.iq.dataverse.privateurl; +import edu.harvard.iq.dataverse.Dataset; +import edu.harvard.iq.dataverse.DatasetServiceBean; +import edu.harvard.iq.dataverse.DataverseRequestServiceBean; import edu.harvard.iq.dataverse.DataverseSession; +import edu.harvard.iq.dataverse.PermissionsWrapper; import edu.harvard.iq.dataverse.authorization.users.PrivateUrlUser; import java.io.Serializable; import java.util.logging.Logger; @@ -20,8 +24,14 @@ public class PrivateUrlPage implements Serializable { @EJB PrivateUrlServiceBean privateUrlService; + @EJB + DatasetServiceBean datasetServiceBean; @Inject DataverseSession session; + @Inject + PermissionsWrapper permissionsWrapper; + @Inject + DataverseRequestServiceBean dvRequestService; /** * The unique string used to look up a PrivateUrlUser and the associated @@ -34,7 +44,16 @@ public String init() { PrivateUrlRedirectData privateUrlRedirectData = privateUrlService.getPrivateUrlRedirectDataFromToken(token); String draftDatasetPageToBeRedirectedTo = privateUrlRedirectData.getDraftDatasetPageToBeRedirectedTo() + "&faces-redirect=true"; PrivateUrlUser privateUrlUser = privateUrlRedirectData.getPrivateUrlUser(); - session.setUser(privateUrlUser); + boolean sessionUserCanViewUnpublishedDataset = false; + if (session.getUser().isAuthenticated()){ + Long datasetId = privateUrlUser.getDatasetId(); + Dataset dataset = datasetServiceBean.find(datasetId); + sessionUserCanViewUnpublishedDataset = permissionsWrapper.canViewUnpublishedDataset(dvRequestService.getDataverseRequest(), dataset); + } + if(!sessionUserCanViewUnpublishedDataset){ + //Only Reset if user cannot view this Draft Version + session.setUser(privateUrlUser); + } logger.info("Redirecting PrivateUrlUser '" + privateUrlUser.getIdentifier() + "' to " + draftDatasetPageToBeRedirectedTo); return draftDatasetPageToBeRedirectedTo; } catch (Exception ex) { diff --git a/src/main/java/edu/harvard/iq/dataverse/privateurl/package-info.java 
b/src/main/java/edu/harvard/iq/dataverse/privateurl/package-info.java index 6e939c1bb6d..1310e0eb199 100644 --- a/src/main/java/edu/harvard/iq/dataverse/privateurl/package-info.java +++ b/src/main/java/edu/harvard/iq/dataverse/privateurl/package-info.java @@ -1,19 +1,19 @@ /** - * Private URL for unpublished datasets. + * Preview URL for unpublished datasets. *

- * The Private URL feature has been implemented as a specialized role assignment + * The Preview (formerly Private) URL feature has been implemented as a specialized role assignment * with an associated token that permits read-only access to the metadata and * all files (regardless of if the files are restricted or not) of a draft * version of a dataset. *

- * As of this note, a second option - to create a Private URL that provides an + * As of this note, a second option - to create a Preview URL that provides an * anonymized view of the dataset has been added. This option works the same as * the original except that it hides author names in the citation block, hides * the values for an admin specified list of metadata fields, disables citation * downloads, and disables API access (except for file and file thumbnail * downloads which are used by the UI). *

- * The primary use case for a Private URL is for journal editors to send a link + * The primary use case for a Preview URL is for journal editors to send a link * to reviewers of a dataset before publication. In most cases, these journal * editors do not permit depositors to publish on their own, which is to say * they only allow depositors to have the "Contributor" role on the datasets @@ -24,42 +24,42 @@ * the depositor, who is in charge of both the security of the dataset and the * timing of when the dataset is published. *

- * A secondary use case for a Private URL is for depositors who have the ability + * A secondary use case for a Preview URL is for depositors who have the ability * to manage permissions on their dataset (depositors who have the "Curator" or * "Admin" role, which grants much more power than the "Contributor" role) to * send a link to coauthors or other trusted parties to preview the dataset * before the depositors publish the dataset on their own. For better security, * these depositors could ask their coauthors to create Dataverse accounts and - * assign roles to them directly, rather than using a Private URL which requires + * assign roles to them directly, rather than using a Preview URL which requires * no username or password. *

* As of this note, a second option aimed specifically at the review use case - - * to create a Private URL that provides an anonymized view of the dataset - has + * to create a Preview URL that provides an anonymized view of the dataset - has * been added. This option works the same as the original except that it hides * author names in the citation block, hides the values for an admin specified * list of metadata fields, disables citation downloads, and disables API access * (except for file and file thumbnail downloads which are used by the UI). *

- * The token associated with the Private URL role assignment that can be used + * The token associated with the Preview URL role assignment that can be used * either in the GUI or, for the non-anonymized-access option, via the API to * elevate privileges beyond what a "Guest" can see. The ability to use a - * Private URL token via API was added mostly to facilitate automated testing of - * the feature but the far more common case is expected to be use of the Private + * Preview URL token via API was added mostly to facilitate automated testing of + * the feature but the far more common case is expected to be use of the Preview * URL token in a link that is clicked to open a browser, similar to links * shared via Dropbox, Google, etc. *

- * When reviewers click a Private URL their browser sessions are set to the + * When reviewers click a Preview URL their browser sessions are set to the * "{@link edu.harvard.iq.dataverse.authorization.users.PrivateUrlUser}" that * has the "Member" role only on the dataset in question and redirected to that * dataset, where they will see an indication in blue at the top of the page * that they are viewing an unpublished dataset. If the reviewer happens to be * logged into Dataverse already, clicking the link will log them out because * the review is meant to be blind. Because the dataset is always in draft when - * a Private URL is in effect, no downloads or any other activity by the - * reviewer are logged to the guestbook. All reviewers click the same Private + * a Preview URL is in effect, no downloads or any other activity by the + * reviewer are logged to the guestbook. All reviewers click the same Preview * URL containing the same token, and with the exception of an IP address being * logged, it should be impossible to trace which reviewers have clicked a - * Private URL. If the reviewer navigates to the home page, the session is set + * Preview URL. If the reviewer navigates to the home page, the session is set * to the Guest user and they will see what a Guest would see. *

* The "Member" role is used because it contains the necessary read-only @@ -76,51 +76,51 @@ * version. A Member can also download restricted files that have been deleted * from previously published versions. *

- * Likewise, when a Private URL token is used via API, commands are executed + * Likewise, when a Preview URL token is used via API, commands are executed * using the "PrivateUrlUser" that has the "Member" role only on the dataset in * question. This means that read-only operations such as downloads of the - * dataset's files are permitted. The Search API does not respect the Private + * dataset's files are permitted. The Search API does not respect the Preview * URL token but you can download files using the Access API, and, with the * non-anonymized-access option, download unpublished metadata using the Native * API. *

- * A Private URL cannot be created for a published version of a dataset. In the + * A Preview URL cannot be created for a published version of a dataset. In the * GUI, you will be reminded of this fact with a popup. The API will explain * this as well. *

- * An anonymized-access Private URL can't be created if any published dataset + * An anonymized-access Preview URL can't be created if any published dataset * version exists. The primary reason for this is that, since datasets have * DOIs, the full metadata about published versions is available directly from * the DOI provider. (While the metadata for that version could be somewhat * different, in practice it would probably provide a means of identifying * some/all of the authors). *

- * If a draft dataset containing a Private URL is - * published, the Private URL is deleted. This means that reviewers who click + * If a draft dataset containing a Preview URL is + * published, the Preview URL is deleted. This means that reviewers who click * the link after publication will see a 404. *

- * If a post-publication draft containing a Private URL is deleted, the Private + * If a post-publication draft containing a Preview URL is deleted, the Preview * URL is deleted. This is to ensure that if a new draft is created in the * future, a new token will be used. *

- * The creation and deletion of a Private URL are limited to the "Curator" and + * The creation and deletion of a Preview URL are limited to the "Curator" and * "Admin" roles because only those roles have the permission called * "ManageDatasetPermissions", which is the permission used by the * "AssignRoleCommand" and "RevokeRoleCommand" commands. If you have the - * permission to create or delete a Private URL, the fact that a Private URL is + * permission to create or delete a Preview URL, the fact that a Preview URL is * enabled for a dataset will be indicated in blue at the top of the page. * Success messages are shown at the top of the page when you create or delete a - * Private URL. In the GUI, deleting a Private URL is called "disabling" and you + * Preview URL. In the GUI, deleting a Preview URL is called "disabling" and you * will be prompted for a confirmation. No matter what you call it the role is - * revoked. You can also delete a Private URL by revoking the role. + * revoked. You can also delete a Preview URL by revoking the role. *

* A "Contributor" does not have the "ManageDatasetPermissions" permission and - * cannot see "Permissions" nor "Private URL" under the "Edit" menu of their - * dataset. When a Curator or Admin has enabled a Private URL on a Contributor's - * dataset, the Contributor does not see a visual indication that a Private URL + * cannot see "Permissions" nor "Preview URL" under the "Edit" menu of their + * dataset. When a Curator or Admin has enabled a Preview URL on a Contributor's + * dataset, the Contributor does not see a visual indication that a Preview URL * has been enabled for their dataset. *

- * There is no way for an "Admin" or "Curator" to see when a Private URL was + * There is no way for an "Admin" or "Curator" to see when a Preview URL was * created or deleted for a dataset but someone who has access to the database * can see that the following commands are logged to the "actionlogrecord" * database table: @@ -129,7 +129,7 @@ *

  • {@link edu.harvard.iq.dataverse.engine.command.impl.CreatePrivateUrlCommand}
  • *
  • {@link edu.harvard.iq.dataverse.engine.command.impl.DeletePrivateUrlCommand}
  • * - * See also the Private URL To Unpublished Dataset BRD at * https://docs.google.com/document/d/1FT47QkZKcmjSgRnePaJO2g1nzcotLyN3Yb2ORvBr6cs/edit?usp=sharing */ diff --git a/src/main/java/edu/harvard/iq/dataverse/search/IndexServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/search/IndexServiceBean.java index d0dcf3461cf..4efd339ee46 100644 --- a/src/main/java/edu/harvard/iq/dataverse/search/IndexServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/search/IndexServiceBean.java @@ -135,6 +135,9 @@ public class IndexServiceBean { @EJB DatasetFieldServiceBean datasetFieldService; + @Inject + DatasetVersionFilesServiceBean datasetVersionFilesServiceBean; + public static final String solrDocIdentifierDataverse = "dataverse_"; public static final String solrDocIdentifierFile = "datafile_"; public static final String solrDocIdentifierDataset = "dataset_"; @@ -1018,6 +1021,8 @@ public SolrInputDocuments toSolrDocs(IndexableDataset indexableDataset, SetFeature Request/Idea: Harvest metadata values that aren't from a list of controlled values #9992 @@ -1296,7 +1299,6 @@ public SolrInputDocuments toSolrDocs(IndexableDataset indexableDataset, Set findPermissionsInSolrOnly() throws SearchException { String dtype = dvObjectService.getDtype(id); if (dtype == null) { permissionInSolrOnly.add(docId); - } - if (dtype.equals(DType.Dataset.getDType())) { + }else if (dtype.equals(DType.Dataset.getDType())) { List states = datasetService.getVersionStates(id); if (states != null) { String latestState = states.get(states.size() - 1); @@ -2252,7 +2253,7 @@ public List findPermissionsInSolrOnly() throws SearchException { } else if (dtype.equals(DType.DataFile.getDType())) { List states = dataFileService.findVersionStates(id); Set strings = states.stream().map(VersionState::toString).collect(Collectors.toSet()); - logger.fine("States for " + docId + ": " + String.join(", ", strings)); + logger.finest("States for " + docId + ": " + String.join(", ", strings)); if 
(docId.endsWith("draft_permission")) { if (!states.contains(VersionState.DRAFT)) { permissionInSolrOnly.add(docId); @@ -2266,7 +2267,7 @@ public List findPermissionsInSolrOnly() throws SearchException { permissionInSolrOnly.add(docId); } else { if (!dataFileService.isInReleasedVersion(id)) { - logger.fine("Adding doc " + docId + " to list of permissions in Solr only"); + logger.finest("Adding doc " + docId + " to list of permissions in Solr only"); permissionInSolrOnly.add(docId); } } diff --git a/src/main/java/edu/harvard/iq/dataverse/search/SearchFields.java b/src/main/java/edu/harvard/iq/dataverse/search/SearchFields.java index ef27a5eefaf..1f1137016f2 100644 --- a/src/main/java/edu/harvard/iq/dataverse/search/SearchFields.java +++ b/src/main/java/edu/harvard/iq/dataverse/search/SearchFields.java @@ -291,5 +291,6 @@ more targeted results for just datasets. The format is YYYY (i.e. public static final String DATASET_VALID = "datasetValid"; public static final String DATASET_LICENSE = "license"; + public static final String FILE_COUNT = "fileCount"; } diff --git a/src/main/java/edu/harvard/iq/dataverse/search/SearchServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/search/SearchServiceBean.java index de75c88009f..3fd97d418c0 100644 --- a/src/main/java/edu/harvard/iq/dataverse/search/SearchServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/search/SearchServiceBean.java @@ -497,7 +497,8 @@ public SolrQueryResponse search( Long retentionEndDate = (Long) solrDocument.getFieldValue(SearchFields.RETENTION_END_DATE); // Boolean datasetValid = (Boolean) solrDocument.getFieldValue(SearchFields.DATASET_VALID); - + Long fileCount = (Long) solrDocument.getFieldValue(SearchFields.FILE_COUNT); + List matchedFields = new ArrayList<>(); SolrSearchResult solrSearchResult = new SolrSearchResult(query, name); @@ -570,6 +571,7 @@ public SolrQueryResponse search( solrSearchResult.setDeaccessionReason(deaccessionReason); solrSearchResult.setDvTree(dvTree); 
solrSearchResult.setDatasetValid(datasetValid); + solrSearchResult.setFileCount(fileCount); if (Boolean.TRUE.equals((Boolean) solrDocument.getFieldValue(SearchFields.IS_HARVESTED))) { solrSearchResult.setHarvested(true); diff --git a/src/main/java/edu/harvard/iq/dataverse/search/SolrSearchResult.java b/src/main/java/edu/harvard/iq/dataverse/search/SolrSearchResult.java index 27900bac63f..8802555affd 100644 --- a/src/main/java/edu/harvard/iq/dataverse/search/SolrSearchResult.java +++ b/src/main/java/edu/harvard/iq/dataverse/search/SolrSearchResult.java @@ -78,6 +78,10 @@ public class SolrSearchResult { private String citation; private String citationHtml; private String datasetType; + /** + * Only Dataset can have a file count + */ + private Long fileCount; /** * Files and datasets might have a UNF. Dataverses don't. */ @@ -456,10 +460,10 @@ public JsonObjectBuilder getJsonForMyData(boolean isValid) { } // getJsonForMydata public JsonObjectBuilder json(boolean showRelevance, boolean showEntityIds, boolean showApiUrls) { - return json(showRelevance, showEntityIds, showApiUrls, null, null); + return json(showRelevance, showEntityIds, showApiUrls, null); } - public JsonObjectBuilder json(boolean showRelevance, boolean showEntityIds, boolean showApiUrls, List metadataFields, Long datasetFileCount) { + public JsonObjectBuilder json(boolean showRelevance, boolean showEntityIds, boolean showApiUrls, List metadataFields) { if (this.type == null) { return jsonObjectBuilder(); } @@ -597,7 +601,7 @@ public JsonObjectBuilder json(boolean showRelevance, boolean showEntityIds, bool subjects.add(subject); } nullSafeJsonBuilder.add("subjects", subjects); - nullSafeJsonBuilder.add("fileCount", datasetFileCount); + nullSafeJsonBuilder.add("fileCount", this.fileCount); nullSafeJsonBuilder.add("versionId", dv.getId()); nullSafeJsonBuilder.add("versionState", dv.getVersionState().toString()); if (this.isPublishedState()) { @@ -1348,4 +1352,12 @@ public boolean isValid(Predicate 
canUpdateDataset) { } return !canUpdateDataset.test(this); } + + public Long getFileCount() { + return fileCount; + } + + public void setFileCount(Long fileCount) { + this.fileCount = fileCount; + } } diff --git a/src/main/java/edu/harvard/iq/dataverse/settings/SettingsServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/settings/SettingsServiceBean.java index 8ed96690e84..b5eb483c2c8 100644 --- a/src/main/java/edu/harvard/iq/dataverse/settings/SettingsServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/settings/SettingsServiceBean.java @@ -539,6 +539,12 @@ Whether Harvesting (OAI) service is enabled * */ GlobusSingleFileTransfer, + /** Lower limit of the number of files in a Globus upload task where + * the batch mode should be utilized in looking up the file information + * on the remote end node (file sizes, primarily), instead of individual + * lookups. + */ + GlobusBatchLookupSize, /** * Optional external executables to run on the metadata for dataverses * and datasets being published; as an extra validation step, to diff --git a/src/main/java/edu/harvard/iq/dataverse/util/BundleUtil.java b/src/main/java/edu/harvard/iq/dataverse/util/BundleUtil.java index 922e6ff5d28..771cf5fd0f0 100644 --- a/src/main/java/edu/harvard/iq/dataverse/util/BundleUtil.java +++ b/src/main/java/edu/harvard/iq/dataverse/util/BundleUtil.java @@ -111,7 +111,7 @@ public static ResourceBundle getResourceBundle(String propertyFileName, Locale c ClassLoader loader = getClassLoader(filesRootDirectory); bundle = ResourceBundle.getBundle(propertyFileName, currentLocale, loader); } catch (MissingResourceException mre) { - logger.warning("No property file named " + propertyFileName + "_" + currentLocale.getLanguage() + logger.fine("No property file named " + propertyFileName + "_" + currentLocale.getLanguage() + " found in " + filesRootDirectory + ", using untranslated values"); bundle = ResourceBundle.getBundle("propertyFiles/" + propertyFileName, currentLocale); } diff --git 
a/src/main/java/edu/harvard/iq/dataverse/util/SystemConfig.java b/src/main/java/edu/harvard/iq/dataverse/util/SystemConfig.java index 434b3bd8f8f..e769cacfdb1 100644 --- a/src/main/java/edu/harvard/iq/dataverse/util/SystemConfig.java +++ b/src/main/java/edu/harvard/iq/dataverse/util/SystemConfig.java @@ -78,6 +78,7 @@ public class SystemConfig { public static final long defaultZipDownloadLimit = 104857600L; // 100MB private static final int defaultMultipleUploadFilesLimit = 1000; private static final int defaultLoginSessionTimeout = 480; // = 8 hours + private static final int defaultGlobusBatchLookupSize = 50; private String buildNumber = null; @@ -954,6 +955,11 @@ public boolean isGlobusFileDownload() { return (isGlobusDownload() && settingsService.isTrueForKey(SettingsServiceBean.Key.GlobusSingleFileTransfer, false)); } + public int getGlobusBatchLookupSize() { + String batchSizeOption = settingsService.getValueForKey(SettingsServiceBean.Key.GlobusBatchLookupSize); + return getIntLimitFromStringOrDefault(batchSizeOption, defaultGlobusBatchLookupSize); + } + private Boolean getMethodAvailable(String method, boolean upload) { String methods = settingsService.getValueForKey( upload ? SettingsServiceBean.Key.UploadMethods : SettingsServiceBean.Key.DownloadMethods); diff --git a/src/main/java/edu/harvard/iq/dataverse/util/json/JsonParser.java b/src/main/java/edu/harvard/iq/dataverse/util/json/JsonParser.java index 8552389525d..232b7431a24 100644 --- a/src/main/java/edu/harvard/iq/dataverse/util/json/JsonParser.java +++ b/src/main/java/edu/harvard/iq/dataverse/util/json/JsonParser.java @@ -1007,7 +1007,6 @@ public void parseControlledVocabularyValue(DatasetField dsf, DatasetFieldType cv if (cvv == null) { if (allowHarvestingMissingCVV) { // we need to process this as a primitive value - logger.warning(">>>> Value '" + strValue + "' does not exist in type '" + cvvType.getName() + "'. 
Processing as primitive per setting override."); parsePrimitiveValue(dsf, cvvType , json); return; } else { @@ -1053,6 +1052,7 @@ public String parseHarvestingClient(JsonObject obj, HarvestingClient harvestingC harvestingClient.setHarvestingSet(obj.getString("set",null)); harvestingClient.setCustomHttpHeaders(obj.getString("customHeaders", null)); harvestingClient.setAllowHarvestingMissingCVV(obj.getBoolean("allowHarvestingMissingCVV", false)); + harvestingClient.setUseOaiIdentifiersAsPids(obj.getBoolean("useOaiIdentifiersAsPids", false)); return dataverseAlias; } diff --git a/src/main/java/edu/harvard/iq/dataverse/util/json/JsonPrinter.java b/src/main/java/edu/harvard/iq/dataverse/util/json/JsonPrinter.java index 1bdee48b14d..91af13c79a3 100644 --- a/src/main/java/edu/harvard/iq/dataverse/util/json/JsonPrinter.java +++ b/src/main/java/edu/harvard/iq/dataverse/util/json/JsonPrinter.java @@ -276,7 +276,9 @@ public static JsonObjectBuilder json(Dataverse dv, Boolean hideEmail, Boolean re } bld.add("permissionRoot", dv.isPermissionRoot()) .add("description", dv.getDescription()) - .add("dataverseType", dv.getDataverseType().name()); + .add("dataverseType", dv.getDataverseType().name()) + .add("isMetadataBlockRoot", dv.isMetadataBlockRoot()) + .add("isFacetRoot", dv.isFacetRoot()); if (dv.getOwner() != null) { bld.add("ownerId", dv.getOwner().getId()); } @@ -1011,6 +1013,7 @@ public static JsonObjectBuilder json(HarvestingClient harvestingClient) { add("status", harvestingClient.isHarvestingNow() ? "inProgress" : "inActive"). add("customHeaders", harvestingClient.getCustomHttpHeaders()). add("allowHarvestingMissingCVV", harvestingClient.getAllowHarvestingMissingCVV()). + add("useOaiIdentifiersAsPids", harvestingClient.isUseOaiIdentifiersAsPids()). add("lastHarvest", harvestingClient.getLastHarvestTime() == null ? null : harvestingClient.getLastHarvestTime().toString()). add("lastResult", harvestingClient.getLastResult()). 
add("lastSuccessful", harvestingClient.getLastSuccessfulHarvestTime() == null ? null : harvestingClient.getLastSuccessfulHarvestTime().toString()). diff --git a/src/main/java/propertyFiles/Bundle.properties b/src/main/java/propertyFiles/Bundle.properties index 149e6a7e828..c47356008ff 100644 --- a/src/main/java/propertyFiles/Bundle.properties +++ b/src/main/java/propertyFiles/Bundle.properties @@ -307,7 +307,13 @@ notification.typeDescription.WORKFLOW_FAILURE=External workflow run has failed notification.typeDescription.STATUSUPDATED=Status of dataset has been updated notification.typeDescription.DATASETCREATED=Dataset was created by user notification.typeDescription.DATASETMENTIONED=Dataset was referenced in remote system - +notification.typeDescription.GLOBUSUPLOADCOMPLETED=Globus upload is completed +notification.typeDescription.GLOBUSUPLOADCOMPLETEDWITHERRORS=Globus upload completed with errors +notification.typeDescription.GLOBUSDOWNLOADCOMPLETED=Globus download is completed +notification.typeDescription.GLOBUSDOWNLOADCOMPLETEDWITHERRORS=Globus download completed with errors +notification.typeDescription.GLOBUSUPLOADLOCALFAILURE=Globus upload failed, internal error +notification.typeDescription.GLOBUSUPLOADREMOTEFAILURE=Globus upload failed, remote transfer error +notification.typeDescription.REQUESTEDFILEACCESS=File access requested groupAndRoles.manageTips=Here is where you can access and manage all the groups you belong to, and the roles you have been assigned. user.message.signup.label=Create Account user.message.signup.tip=Why have a Dataverse account? To create your own dataverse and customize it, add datasets, or request access to restricted files. @@ -837,7 +843,8 @@ notification.email.datasetWasMentioned=Hello {0},

    The {1} has just been notification.email.datasetWasMentioned.subject={0}: A Dataset Relationship has been reported! notification.email.globus.uploadCompleted.subject={0}: Files uploaded successfully via Globus and verified notification.email.globus.downloadCompleted.subject={0}: Files downloaded successfully via Globus -notification.email.globus.uploadCompletedWithErrors.subject={0}: Uploaded files via Globus with errors +notification.email.globus.downloadCompletedWithErrors.subject={0}: Globus download task completed, errors encountered +notification.email.globus.uploadCompletedWithErrors.subject={0}: Globus upload task completed with errors notification.email.globus.uploadFailedRemotely.subject={0}: Failed to upload files via Globus notification.email.globus.uploadFailedLocally.subject={0}: Failed to add files uploaded via Globus to dataset # dataverse.xhtml @@ -1457,14 +1464,14 @@ dataset.editBtn.itemLabel.metadata=Metadata dataset.editBtn.itemLabel.terms=Terms dataset.editBtn.itemLabel.permissions=Permissions dataset.editBtn.itemLabel.thumbnailsAndWidgets=Thumbnails + Widgets -dataset.editBtn.itemLabel.privateUrl=Private URL +dataset.editBtn.itemLabel.privateUrl=Preview URL dataset.editBtn.itemLabel.permissionsDataset=Dataset dataset.editBtn.itemLabel.permissionsFile=Restricted Files dataset.editBtn.itemLabel.deleteDataset=Delete Dataset dataset.editBtn.itemLabel.deleteDraft=Delete Draft Version dataset.editBtn.itemLabel.deaccession=Deaccession Dataset dataset.exportBtn=Export Metadata -dataset.exportBtn.itemLabel.ddi=DDI +dataset.exportBtn.itemLabel.ddi=DDI Codebook v2 dataset.exportBtn.itemLabel.dublinCore=Dublin Core dataset.exportBtn.itemLabel.schemaDotOrg=Schema.org JSON-LD dataset.exportBtn.itemLabel.datacite=DataCite @@ -1669,6 +1676,8 @@ dataset.message.createFailure=The dataset could not be created. dataset.message.termsFailure=The dataset terms could not be updated. 
dataset.message.label.fileAccess=Publicly-accessible storage dataset.message.publicInstall=Files in this dataset may be readable outside Dataverse, restricted and embargoed access are disabled +dataset.message.parallelUpdateError=Changes cannot be saved. This dataset has been edited since this page was opened. To continue, copy your changes, refresh the page to see the recent updates, and re-enter any changes you want to save. +dataset.message.parallelPublishError=Publishing is blocked. This dataset has been edited since this page was opened. To publish it, refresh the page to see the recent updates, and publish again. dataset.metadata.publicationDate=Publication Date dataset.metadata.publicationDate.tip=The publication date of a Dataset. dataset.metadata.citationDate=Citation Date @@ -1721,23 +1730,36 @@ dataset.transferUnrestricted=Click Continue to transfer the elligible files. dataset.requestAccessToRestrictedFiles=You may request access to any restricted file(s) by clicking the Request Access button. dataset.requestAccessToRestrictedFilesWithEmbargo=Embargoed files cannot be accessed during the embargo period. If your selection contains restricted files, you may request access to them by clicking the Request Access button. dataset.privateurl.infoMessageAuthor=Privately share this dataset before it is published: {0} -dataset.privateurl.infoMessageReviewer=This unpublished dataset is being privately shared. -dataset.privateurl.header=Unpublished Dataset Private URL -dataset.privateurl.tip=Use a Private URL to allow those without Dataverse accounts to access your unpublished dataset. For more information about the Private URL feature, please refer to the User Guide. -dataset.privateurl.absent=Private URL has not been created. -dataset.privateurl.createPrivateUrl=Create Private URL +dataset.privateurl.infoMessageReviewer=You are viewing a preview of this unpublished dataset version. 
+dataset.privateurl.header=Unpublished Dataset Preview URL +dataset.privateurl.tip=To cite this data in publications, use the dataset's persistent ID instead of this URL. For more information about the Preview URL feature, please refer to the User Guide. +dataset.privateurl.onlyone=Only one Preview URL can be active for a single dataset. +dataset.privateurl.absent=Preview URL has not been created. +dataset.privateurl.general.button.label=Create General Preview URL +dataset.privateurl.general.description=Create a URL that others can use to review this dataset version before it is published. They will be able to access all files in the dataset and see all metadata, including metadata that may identify the dataset's authors. +dataset.privateurl.general.title=General Preview +dataset.privateurl.anonymous.title=Anonymous Preview +dataset.privateurl.anonymous.button.label=Create Anonymous Preview URL +dataset.privateurl.anonymous.description=Create a URL that others can use to access an anonymized view of this unpublished dataset version. Metadata that could identify the dataset author will not be displayed. Non-identifying metadata will be visible. +dataset.privateurl.anonymous.description.paragraph.two=The dataset's files are not changed and will be accessible if they're not restricted. Users of the Anonymous Preview URL will not be able to see the name of the Dataverse that this dataset is in but will be able to see the name of the repository, which might expose the dataset authors' identities. +dataset.privateurl.createPrivateUrl=Create Preview URL +dataset.privateurl.introduction=You can create a Preview URL to copy and share with others who will not need a repository account to review this unpublished dataset version. Once the dataset is published or if the URL is disabled, the URL will no longer work and will point to a "Page not found" page. 
dataset.privateurl.createPrivateUrl.anonymized=Create URL for Anonymized Access dataset.privateurl.createPrivateUrl.anonymized.unavailable=Anonymized Access is not available once a version of the dataset has been published -dataset.privateurl.disablePrivateUrl=Disable Private URL -dataset.privateurl.disablePrivateUrlConfirm=Yes, Disable Private URL -dataset.privateurl.disableConfirmationText=Are you sure you want to disable the Private URL? If you have shared the Private URL with others they will no longer be able to use it to access your unpublished dataset. -dataset.privateurl.cannotCreate=Private URL can only be used with unpublished versions of datasets. -dataset.privateurl.roleassigeeTitle=Private URL Enabled +dataset.privateurl.disablePrivateUrl=Disable Preview URL +dataset.privateurl.disableGeneralPreviewUrl=Disable General Preview URL +dataset.privateurl.disableAnonPreviewUrl=Disable Anonymous Preview URL +dataset.privateurl.disablePrivateUrlConfirm=Yes, Disable Preview URL +dataset.privateurl.disableGeneralPreviewUrlConfirm=Yes, Disable General Preview URL +dataset.privateurl.disableAnonPreviewUrlConfirm=Yes, Disable Anonymous Preview URL +dataset.privateurl.disableConfirmationText=Are you sure you want to disable the Preview URL? If you have shared the Preview URL with others they will no longer be able to use it to access your unpublished dataset. +dataset.privateurl.cannotCreate=Preview URL can only be used with unpublished versions of datasets. +dataset.privateurl.roleassigeeTitle=Preview URL Enabled dataset.privateurl.createdSuccess=Success! -dataset.privateurl.full=This Private URL provides full read access to the dataset -dataset.privateurl.anonymized=This Private URL provides access to the anonymized dataset -dataset.privateurl.disabledSuccess=You have successfully disabled the Private URL for this unpublished dataset. -dataset.privateurl.noPermToCreate=To create a Private URL you must have the following permissions: {0}. 
+dataset.privateurl.full=This Preview URL provides full read access to the dataset +dataset.privateurl.anonymized=This Preview URL provides access to the anonymized dataset +dataset.privateurl.disabledSuccess=You have successfully disabled the Preview URL for this unpublished dataset. +dataset.privateurl.noPermToCreate=To create a Preview URL you must have the following permissions: {0}. dataset.externalstatus.header=Curation Status Changed dataset.externalstatus.removed=Curation Status Removed dataset.externalstatus.info=Curation Status is now "{0}" @@ -1934,7 +1956,7 @@ file.downloadBtn.format.all=All File Formats + Information file.downloadBtn.format.tab=Tab-Delimited file.downloadBtn.format.original={0} (Original File Format) file.downloadBtn.format.rdata=RData -file.downloadBtn.format.var=Variable Metadata +file.downloadBtn.format.var=DDI Codebook v2 file.downloadBtn.format.citation=Data File Citation file.download.filetype.unknown=Original File Format file.more.information.link=Link to more file information for @@ -2498,6 +2520,7 @@ dataset.version.file.changed=Files (Changed File Metadata: {0} dataset.version.file.changed2=; Changed File Metadata: {0} dataset.version.variablemetadata.changed=Variable Metadata (Changed Variable Metadata: {0} dataset.version.variablemetadata.changed2=; Changed Variable Metadata: {0} +dataset.version.compare.incorrect.order=Compare requires the older dataset version to be listed first. #DataversePage.java dataverse.item.required=Required @@ -2719,8 +2742,8 @@ datasets.api.grant.role.assignee.has.role.error=User already has this role for t datasets.api.revoke.role.not.found.error="Role assignment {0} not found" datasets.api.revoke.role.success=Role {0} revoked for assignee {1} in {2} datasets.api.privateurl.error.datasetnotfound=Could not find dataset. -datasets.api.privateurl.error.alreadyexists=Private URL already exists for this dataset. 
-datasets.api.privateurl.error.notdraft=Can't create Private URL because the latest version of this dataset is not a draft. +datasets.api.privateurl.error.alreadyexists=Preview URL already exists for this dataset. +datasets.api.privateurl.error.notdraft=Can't create Preview URL because the latest version of this dataset is not a draft. datasets.api.privateurl.anonymized.error.released=Can't create a URL for anonymized access because this dataset has been published. datasets.api.creationdate=Date Created datasets.api.modificationdate=Last Modified Date diff --git a/src/main/resources/db/migration/V6.4.0.1.sql b/src/main/resources/db/migration/V6.4.0.1.sql new file mode 100644 index 00000000000..0bcd87dd736 --- /dev/null +++ b/src/main/resources/db/migration/V6.4.0.1.sql @@ -0,0 +1,4 @@ +-- Adding a case-insensitive index related to #11003 +-- + +CREATE UNIQUE INDEX IF NOT EXISTS INDEX_DVOBJECT_authority_protocol_upper_identifier ON dvobject (authority, protocol, UPPER(identifier)); \ No newline at end of file diff --git a/src/main/resources/db/migration/V6.4.0.2.sql b/src/main/resources/db/migration/V6.4.0.2.sql new file mode 100644 index 00000000000..bc4a85b278f --- /dev/null +++ b/src/main/resources/db/migration/V6.4.0.2.sql @@ -0,0 +1,2 @@ +-- #10118 +ALTER TABLE customquestion ALTER COLUMN questionstring TYPE text; diff --git a/src/main/resources/db/migration/V6.4.0.3.sql b/src/main/resources/db/migration/V6.4.0.3.sql new file mode 100644 index 00000000000..307d8ed206c --- /dev/null +++ b/src/main/resources/db/migration/V6.4.0.3.sql @@ -0,0 +1,2 @@ +-- Add this boolean flag to accommodate a new harvesting client feature +ALTER TABLE harvestingclient ADD COLUMN IF NOT EXISTS useOaiIdAsPid BOOLEAN DEFAULT FALSE; diff --git a/src/main/webapp/dataset.xhtml b/src/main/webapp/dataset.xhtml index 6de0f00e94e..9426884d349 100644 --- a/src/main/webapp/dataset.xhtml +++ b/src/main/webapp/dataset.xhtml @@ -20,6 +20,7 @@ + - + @@ -834,7 +835,6 @@ /> @@ -995,7 +995,6 @@

    +

    #{bundle['dataset.privateurl.introduction']}

    +

    +

    +

    #{bundle['dataset.privateurl.onlyone']}

    +

    + +

    +

    +

    +

    #{bundle['dataset.privateurl.general.description']}

    + + +
    +
    + +

    + #{privateUrlLink} +

    +
    +
    +
    + + +
    +
    + +

    + +

    +

    +

    +

    #{bundle['dataset.privateurl.anonymous.description']}

    +

    #{bundle['dataset.privateurl.anonymous.description.paragraph.two']} #{bundle['dataset.privateurl.createPrivateUrl.anonymized.unavailable']}.

    + + + +

    + +
    +
    + +

    + #{privateUrlLink} +

    +
    +
    +
    + + +
    +
    +

    #{bundle['dataset.privateurl.absent']}

    @@ -1200,17 +1273,11 @@

    -
    - - - - - -
    +

    #{bundle['dataset.privateurl.cannotCreate']}

    @@ -1224,7 +1291,10 @@

    #{bundle['dataset.privateurl.disableConfirmationText']}

    - + + + + diff --git a/src/main/webapp/filesFragment.xhtml b/src/main/webapp/filesFragment.xhtml index 117710cfd53..154700f7cf4 100644 --- a/src/main/webapp/filesFragment.xhtml +++ b/src/main/webapp/filesFragment.xhtml @@ -301,24 +301,24 @@
  • - + +
  • - + -
  • diff --git a/src/main/webapp/metadataFragment.xhtml b/src/main/webapp/metadataFragment.xhtml index 723f95148cd..f8367ce01f8 100755 --- a/src/main/webapp/metadataFragment.xhtml +++ b/src/main/webapp/metadataFragment.xhtml @@ -130,7 +130,7 @@ - +
  • -
    +
    #{bundle['mydataFragment.resultsByUserName']} - +
    @@ -150,4 +150,4 @@
    - \ No newline at end of file + diff --git a/src/main/webapp/previewurl.xhtml b/src/main/webapp/previewurl.xhtml new file mode 100644 index 00000000000..980d775506b --- /dev/null +++ b/src/main/webapp/previewurl.xhtml @@ -0,0 +1,24 @@ + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/src/main/webapp/resources/iqbs/messages.xhtml b/src/main/webapp/resources/iqbs/messages.xhtml index bd17cf34d21..f8e1f5e8e9d 100644 --- a/src/main/webapp/resources/iqbs/messages.xhtml +++ b/src/main/webapp/resources/iqbs/messages.xhtml @@ -63,7 +63,7 @@ Server: - #{systemConfig.dataverseServer} + #{systemConfig.dataverseSiteUrl} #{msg.rendered()} diff --git a/src/main/webapp/resources/js/mydata.js b/src/main/webapp/resources/js/mydata.js index 899ba6637e2..c731d6772ac 100644 --- a/src/main/webapp/resources/js/mydata.js +++ b/src/main/webapp/resources/js/mydata.js @@ -391,7 +391,7 @@ function submit_my_data_search(){ // -------------------------------- // ah, but with the horribly coded xhtml page, we can't use form tags... //var formData = $('#mydata_filter_form').serialize(); - var formData = $("#my_data_filter_column :input").serialize() + '&' + $("#my_data_filter_column2 :input").serialize() ; + var formData = $("#my_data_filter_column :input").serialize() + '&' + $("#my_data_filter_column3 :input").serialize()+ '&' + $("#my_data_filter_column2 :input").serialize() ; // For debugging, show the search params if (MYDATA_DEBUG_ON){ diff --git a/src/main/webapp/search-include-fragment.xhtml b/src/main/webapp/search-include-fragment.xhtml index 505fe681363..fc224443a8e 100644 --- a/src/main/webapp/search-include-fragment.xhtml +++ b/src/main/webapp/search-include-fragment.xhtml @@ -582,7 +582,7 @@ - +
    diff --git a/src/test/java/edu/harvard/iq/dataverse/DatasetFieldTypeTest.java b/src/test/java/edu/harvard/iq/dataverse/DatasetFieldTypeTest.java index a235c9b0061..588bf5294e5 100644 --- a/src/test/java/edu/harvard/iq/dataverse/DatasetFieldTypeTest.java +++ b/src/test/java/edu/harvard/iq/dataverse/DatasetFieldTypeTest.java @@ -63,17 +63,17 @@ public void testIsSanitizeHtml() { //if textbox then sanitize - allow tags instance.setFieldType(DatasetFieldType.FieldType.TEXTBOX); result = instance.isSanitizeHtml(); - assertEquals(true, result); + assertTrue(result); //if textbox then don't sanitize - allow tags instance.setFieldType(DatasetFieldType.FieldType.EMAIL); result = instance.isSanitizeHtml(); - assertEquals(false, result); + assertFalse(result); //URL, too instance.setFieldType(DatasetFieldType.FieldType.URL); result = instance.isSanitizeHtml(); - assertEquals(true, result); + assertTrue(result); } @Test @@ -102,7 +102,7 @@ public void testIsEscapeOutputText(){ //URL, too instance.setFieldType(DatasetFieldType.FieldType.URL); result = instance.isEscapeOutputText(); - assertEquals(false, result); + assertFalse(result); } @@ -121,7 +121,7 @@ public void testGetSolrField(){ parent.setAllowMultiples(true); instance.setParentDatasetFieldType(parent); solrField = instance.getSolrField(); - assertEquals(true, solrField.isAllowedToBeMultivalued()); + assertTrue(solrField.isAllowedToBeMultivalued()); } diff --git a/src/test/java/edu/harvard/iq/dataverse/DatasetFieldValueValidatorTest.java b/src/test/java/edu/harvard/iq/dataverse/DatasetFieldValueValidatorTest.java index 3f85acc1f87..b753f534c6b 100644 --- a/src/test/java/edu/harvard/iq/dataverse/DatasetFieldValueValidatorTest.java +++ b/src/test/java/edu/harvard/iq/dataverse/DatasetFieldValueValidatorTest.java @@ -52,63 +52,63 @@ public void testIsValid() { //Make string too long - should fail. 
value.setValue("asdfgX"); result = instance.isValid(value, ctx); - assertEquals(false, result); + assertFalse(result); //Make string too long - should fail. value.setValue("asdf"); result = instance.isValid(value, ctx); - assertEquals(false, result); + assertFalse(result); //Now lets try Dates dft.setFieldType(DatasetFieldType.FieldType.DATE); dft.setValidationFormat(null); value.setValue("1999AD"); result = instance.isValid(value, ctx); - assertEquals(true, result); + assertTrue(result); value.setValue("44BCE"); result = instance.isValid(value, ctx); - assertEquals(true, result); + assertTrue(result); value.setValue("2004-10-27"); result = instance.isValid(value, ctx); - assertEquals(true, result); + assertTrue(result); value.setValue("2002-08"); result = instance.isValid(value, ctx); - assertEquals(true, result); + assertTrue(result); value.setValue("[1999?]"); result = instance.isValid(value, ctx); - assertEquals(true, result); + assertTrue(result); value.setValue("Blergh"); result = instance.isValid(value, ctx); - assertEquals(false, result); + assertFalse(result); //Float dft.setFieldType(DatasetFieldType.FieldType.FLOAT); value.setValue("44"); result = instance.isValid(value, ctx); - assertEquals(true, result); + assertTrue(result); value.setValue("44 1/2"); result = instance.isValid(value, ctx); - assertEquals(false, result); + assertFalse(result); //Integer dft.setFieldType(DatasetFieldType.FieldType.INT); value.setValue("44"); result = instance.isValid(value, ctx); - assertEquals(true, result); + assertTrue(result); value.setValue("-44"); result = instance.isValid(value, ctx); - assertEquals(true, result); + assertTrue(result); value.setValue("12.14"); result = instance.isValid(value, ctx); - assertEquals(false, result); + assertFalse(result); } @Test diff --git a/src/test/java/edu/harvard/iq/dataverse/DatasetVersionDifferenceTest.java b/src/test/java/edu/harvard/iq/dataverse/DatasetVersionDifferenceTest.java new file mode 100644 index 
00000000000..0ba8dde8aa0 --- /dev/null +++ b/src/test/java/edu/harvard/iq/dataverse/DatasetVersionDifferenceTest.java @@ -0,0 +1,460 @@ +package edu.harvard.iq.dataverse; + +import edu.harvard.iq.dataverse.branding.BrandingUtilTest; +import edu.harvard.iq.dataverse.datavariable.VariableMetadata; +import edu.harvard.iq.dataverse.datavariable.VariableMetadataUtil; +import edu.harvard.iq.dataverse.license.License; +import edu.harvard.iq.dataverse.pidproviders.doi.AbstractDOIProvider; +import edu.harvard.iq.dataverse.util.BundleUtil; + +import java.net.URI; +import java.sql.Timestamp; +import java.text.ParseException; +import java.text.SimpleDateFormat; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Date; +import java.util.List; +import java.util.logging.Logger; + +import static org.assertj.core.util.DateUtil.now; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertTrue; + +import edu.harvard.iq.dataverse.util.json.JsonUtil; +import io.restassured.path.json.JsonPath; +import jakarta.json.JsonObject; +import jakarta.json.JsonObjectBuilder; +import org.junit.jupiter.api.BeforeAll; +import org.junit.jupiter.api.AfterAll; +import org.junit.jupiter.api.Test; + +public class DatasetVersionDifferenceTest { + + private static final Logger logger = Logger.getLogger(DatasetVersion.class.getCanonicalName()); + + private static List addedFiles; + private static List removedFiles; + private static List changedFileMetadata; + private static List changedVariableMetadata; + private static List replacedFiles; + private static Long fileId = Long.valueOf(0); + + @BeforeAll + public static void setUp() { + BrandingUtilTest.setupMocks(); + } + + @AfterAll + public static void tearDown() { + BrandingUtilTest.setupMocks(); + } + + @Test + public void testDifferencing() { + Dataset dataset = new Dataset(); + License license = new License("CC0 1.0", + "You can copy, modify, distribute and perform the 
work, even for commercial purposes, all without asking permission.", + URI.create("http://creativecommons.org/publicdomain/zero/1.0"), URI.create("/resources/images/cc0.png"), + true, 1l); + license.setDefault(true); + dataset.setProtocol("doi"); + dataset.setAuthority("10.5072/FK2"); + dataset.setIdentifier("LK0D1H"); + DatasetVersion datasetVersion = new DatasetVersion(); + datasetVersion.setDataset(dataset); + datasetVersion.setVersionState(DatasetVersion.VersionState.RELEASED); + datasetVersion.setVersionNumber(1L); + datasetVersion.setTermsOfUseAndAccess(new TermsOfUseAndAccess()); + DatasetVersion datasetVersion2 = new DatasetVersion(); + datasetVersion2.setDataset(dataset); + datasetVersion2.setVersionState(DatasetVersion.VersionState.DRAFT); + + // Published version's two files + DataFile dataFile = new DataFile(); + dataFile.setId(1L); + DataFile dataFile2 = new DataFile(); + dataFile2.setId(2L); + + FileMetadata fileMetadata1 = createFileMetadata(10L, datasetVersion, dataFile, "file1.txt"); + fileMetadata1.setLabel("file1.txt"); + + FileMetadata fileMetadata2 = createFileMetadata(20L, datasetVersion, dataFile2, "file2.txt"); + + // Draft version - same two files with one label change + FileMetadata fileMetadata3 = fileMetadata1.createCopy(); + fileMetadata3.setId(30L); + + FileMetadata fileMetadata4 = fileMetadata2.createCopy(); + fileMetadata4.setLabel("file3.txt"); + fileMetadata4.setId(40L); + + List fileMetadatas = new ArrayList<>(Arrays.asList(fileMetadata1, fileMetadata2)); + datasetVersion.setFileMetadatas(fileMetadatas); + List fileMetadatas2 = new ArrayList<>(Arrays.asList(fileMetadata3, fileMetadata4)); + datasetVersion2.setFileMetadatas(fileMetadatas2); + + SimpleDateFormat dateFmt = new SimpleDateFormat("yyyyMMdd"); + Date publicationDate; + try { + publicationDate = dateFmt.parse("19551105"); + datasetVersion.setReleaseTime(publicationDate); + dataset.setPublicationDate(new Timestamp(publicationDate.getTime())); + } catch (ParseException e) { 
+ // TODO Auto-generated catch block + e.printStackTrace(); + } + List versionList = new ArrayList<>(Arrays.asList(datasetVersion, datasetVersion2)); + dataset.setVersions(versionList); + + // One file has a changed label + List expectedAddedFiles = new ArrayList<>(); + List expectedRemovedFiles = new ArrayList<>(); + ; + List expectedChangedFileMetadata = Arrays.asList(fileMetadata2, fileMetadata4); + List expectedChangedVariableMetadata = new ArrayList<>(); + List expectedReplacedFiles = new ArrayList<>(); + List changedTerms = new ArrayList<>(); + + compareResults(datasetVersion, datasetVersion2, expectedAddedFiles, expectedRemovedFiles, + expectedChangedFileMetadata, expectedChangedVariableMetadata, expectedReplacedFiles, changedTerms); + // change label for first file as well + fileMetadata3.setLabel("file1_updated.txt"); + expectedChangedFileMetadata = Arrays.asList(fileMetadata1, fileMetadata3, fileMetadata2, fileMetadata4); + compareResults(datasetVersion, datasetVersion2, expectedAddedFiles, expectedRemovedFiles, + expectedChangedFileMetadata, expectedChangedVariableMetadata, expectedReplacedFiles, changedTerms); + // Add one change to variable metadata + fileMetadata3.setVariableMetadatas(Arrays.asList(new VariableMetadata())); + expectedChangedVariableMetadata = Arrays.asList(fileMetadata1, fileMetadata3); + compareResults(datasetVersion, datasetVersion2, expectedAddedFiles, expectedRemovedFiles, + expectedChangedFileMetadata, expectedChangedVariableMetadata, expectedReplacedFiles, changedTerms); + // Replaced File + DataFile replacingFile = new DataFile(); + replacingFile.setId(3L); + replacingFile.setPreviousDataFileId(1L); + fileMetadata3.setDataFile(replacingFile); + expectedChangedFileMetadata = Arrays.asList(fileMetadata2, fileMetadata4); + expectedChangedVariableMetadata = new ArrayList<>(); + + FileMetadata[] filePair = new FileMetadata[2]; + filePair[0] = fileMetadata1; + filePair[1] = fileMetadata3; + expectedReplacedFiles = new ArrayList<>(); 
+ expectedReplacedFiles.add(filePair); + compareResults(datasetVersion, datasetVersion2, expectedAddedFiles, expectedRemovedFiles, + expectedChangedFileMetadata, expectedChangedVariableMetadata, expectedReplacedFiles, changedTerms); + + // Add a new file + DataFile newFile = new DataFile(); + newFile.setId(3L); + FileMetadata fileMetadata5 = createFileMetadata(50L, datasetVersion2, newFile, "newFile.txt"); + datasetVersion2.getFileMetadatas().add(fileMetadata5); + expectedAddedFiles = Arrays.asList(fileMetadata5); + compareResults(datasetVersion, datasetVersion2, expectedAddedFiles, expectedRemovedFiles, + expectedChangedFileMetadata, expectedChangedVariableMetadata, expectedReplacedFiles, changedTerms); + + // Remove a file + datasetVersion2.getFileMetadatas().remove(fileMetadata4); + expectedRemovedFiles = Arrays.asList(fileMetadata2); + expectedChangedFileMetadata = new ArrayList<>(); + compareResults(datasetVersion, datasetVersion2, expectedAddedFiles, expectedRemovedFiles, + expectedChangedFileMetadata, expectedChangedVariableMetadata, expectedReplacedFiles, changedTerms); + + // Set the published version's TermsOfUseAndAccess to a non-null value + TermsOfUseAndAccess termsOfUseAndAccess = new TermsOfUseAndAccess(); + datasetVersion.setTermsOfUseAndAccess(termsOfUseAndAccess); + + compareResults(datasetVersion, datasetVersion2, expectedAddedFiles, expectedRemovedFiles, + expectedChangedFileMetadata, expectedChangedVariableMetadata, expectedReplacedFiles, changedTerms); + + // Set the draft version's TermsOfUseAndAccess to a non-null value + + datasetVersion2.setTermsOfUseAndAccess(new TermsOfUseAndAccess()); + + compareResults(datasetVersion, datasetVersion2, expectedAddedFiles, expectedRemovedFiles, + expectedChangedFileMetadata, expectedChangedVariableMetadata, expectedReplacedFiles, changedTerms); + + // Set a term field + + datasetVersion2.getTermsOfUseAndAccess().setTermsOfUse("Terms o' Use"); + String[] termField = new String[] { + 
BundleUtil.getStringFromBundle("file.dataFilesTab.terms.list.termsOfUse.header"), "", "Terms o' Use" }; + changedTerms.add(termField); + + compareResults(datasetVersion, datasetVersion2, expectedAddedFiles, expectedRemovedFiles, + expectedChangedFileMetadata, expectedChangedVariableMetadata, expectedReplacedFiles, changedTerms); + + // Set a term field in the original version + + datasetVersion.getTermsOfUseAndAccess().setDisclaimer("Not our fault"); + String[] termField2 = new String[] { + BundleUtil.getStringFromBundle("file.dataFilesTab.terms.list.termsOfUse.addInfo.disclaimer"), + "Not our fault", "" }; + changedTerms.add(termField2); + + compareResults(datasetVersion, datasetVersion2, expectedAddedFiles, expectedRemovedFiles, + expectedChangedFileMetadata, expectedChangedVariableMetadata, expectedReplacedFiles, changedTerms); + + } + + private FileMetadata createFileMetadata(long id, DatasetVersion datasetVersion, DataFile dataFile, String label) { + FileMetadata fileMetadata = new FileMetadata(); + fileMetadata.setId(id); + fileMetadata.setDatasetVersion(datasetVersion); + fileMetadata.setDataFile(dataFile); + fileMetadata.setLabel(label); + fileMetadata.setCategories(new ArrayList<>()); + return fileMetadata; + } + + /** + * CompareResults is currently testing the output of the + * DatasetVersionDifference class with the manually created expected results + * included as parameters and with the results of the less efficient algorithm + * it is replacing. Once we're collectively convinced that the tests here are + * correct (i.e. the manually created expected* parameters are set correctly for + * each use case), we could drop running the originalCalculateDifference method + * and just compare with the expected* results. 
+ * + * @param changedTerms + */ + private void compareResults(DatasetVersion datasetVersion, DatasetVersion datasetVersion2, + List expectedAddedFiles, List expectedRemovedFiles, + List expectedChangedFileMetadata, List expectedChangedVariableMetadata, + List expectedReplacedFiles, List changedTerms) { + DatasetVersionDifference diff = new DatasetVersionDifference(datasetVersion2, datasetVersion); + // Run the original algorithm + originalCalculateDifference(datasetVersion2, datasetVersion); + // Compare the old and new algorithms + assertEquals(addedFiles, diff.getAddedFiles()); + assertEquals(removedFiles, diff.getRemovedFiles()); + assertEquals(changedFileMetadata, diff.getChangedFileMetadata()); + assertEquals(changedVariableMetadata, diff.getChangedVariableMetadata()); + assertEquals(replacedFiles.size(), diff.getReplacedFiles().size()); + for (int i = 0; i < replacedFiles.size(); i++) { + assertEquals(replacedFiles.get(i)[0], diff.getReplacedFiles().get(i)[0]); + assertEquals(replacedFiles.get(i)[1], diff.getReplacedFiles().get(i)[1]); + } + + // Also compare the new algorithm with the manually created expected* values for + // the test cases + assertEquals(expectedAddedFiles, diff.getAddedFiles()); + assertEquals(expectedRemovedFiles, diff.getRemovedFiles()); + assertEquals(expectedChangedFileMetadata, diff.getChangedFileMetadata()); + assertEquals(expectedChangedVariableMetadata, diff.getChangedVariableMetadata()); + assertEquals(expectedReplacedFiles.size(), diff.getReplacedFiles().size()); + for (int i = 0; i < expectedReplacedFiles.size(); i++) { + assertEquals(expectedReplacedFiles.get(i)[0], diff.getReplacedFiles().get(i)[0]); + assertEquals(expectedReplacedFiles.get(i)[1], diff.getReplacedFiles().get(i)[1]); + } + + assertEquals(changedTerms.size(), diff.getChangedTermsAccess().size()); + for (int i = 0; i < changedTerms.size(); i++) { + String[] diffArray = diff.getChangedTermsAccess().get(i); + assertEquals(changedTerms.get(i)[0], diffArray[0]); + 
assertEquals(changedTerms.get(i)[1], diffArray[1]); + assertEquals(changedTerms.get(i)[2], diffArray[2]); + } + } + + @Deprecated + // This is the "Original" difference calculation from DatasetVersionDifference + // It is included here to help verify that the new implementation is the same as + // the original + private static void originalCalculateDifference(DatasetVersion newVersion, DatasetVersion originalVersion) { + + addedFiles = new ArrayList<>(); + removedFiles = new ArrayList<>(); + changedFileMetadata = new ArrayList<>(); + changedVariableMetadata = new ArrayList<>(); + replacedFiles = new ArrayList<>(); + long startTime = System.currentTimeMillis(); + // TODO: ? + // It looks like we are going through the filemetadatas in both versions, + // *sequentially* (i.e. at the cost of O(N*M)), to select the lists of + // changed, deleted and added files between the 2 versions... But why + // are we doing it, if we are doing virtually the same thing inside + // the initDatasetFilesDifferenceList(), below - but in a more efficient + // way (sorting both lists, then going through them in parallel, at the + // cost of (N+M) max.? + // -- 4.6 Nov.
2016 + + for (FileMetadata fmdo : originalVersion.getFileMetadatas()) { + boolean deleted = true; + for (FileMetadata fmdn : newVersion.getFileMetadatas()) { + if (fmdo.getDataFile().equals(fmdn.getDataFile())) { + deleted = false; + if (!DatasetVersionDifference.compareFileMetadatas(fmdo, fmdn).isEmpty()) { + changedFileMetadata.add(fmdo); + changedFileMetadata.add(fmdn); + } + if (!VariableMetadataUtil.compareVariableMetadata(fmdo, fmdn) + || !DatasetVersionDifference.compareVarGroup(fmdo, fmdn)) { + changedVariableMetadata.add(fmdo); + changedVariableMetadata.add(fmdn); + } + break; + } + } + if (deleted) { + removedFiles.add(fmdo); + } + } + for (FileMetadata fmdn : newVersion.getFileMetadatas()) { + boolean added = true; + for (FileMetadata fmdo : originalVersion.getFileMetadatas()) { + if (fmdo.getDataFile().equals(fmdn.getDataFile())) { + added = false; + break; + } + } + if (added) { + addedFiles.add(fmdn); + } + } + + getReplacedFiles(); + logger.info("Difference Loop Execution time: " + (System.currentTimeMillis() - startTime) + " ms"); + + } + + @Deprecated + // This is used only in the original algorithm and was removed from + // DatasetVersionDifference + private static void getReplacedFiles() { + if (addedFiles.isEmpty() || removedFiles.isEmpty()) { + return; + } + List addedToReplaced = new ArrayList<>(); + List removedToReplaced = new ArrayList<>(); + for (FileMetadata added : addedFiles) { + DataFile addedDF = added.getDataFile(); + Long replacedId = addedDF.getPreviousDataFileId(); + if (added.getDataFile().getPreviousDataFileId() != null) { + } + for (FileMetadata removed : removedFiles) { + DataFile test = removed.getDataFile(); + if (test.getId().equals(replacedId)) { + addedToReplaced.add(added); + removedToReplaced.add(removed); + FileMetadata[] replacedArray = new FileMetadata[2]; + replacedArray[0] = removed; + replacedArray[1] = added; + replacedFiles.add(replacedArray); + } + } + } + if (addedToReplaced.isEmpty()) { + } else { + 
addedToReplaced.stream().forEach((delete) -> { + addedFiles.remove(delete); + }); + removedToReplaced.stream().forEach((delete) -> { + removedFiles.remove(delete); + }); + } + } + + @Test + public void testCompareVersionsAsJson() { + + Dataverse dv = new Dataverse(); + Dataset ds = new Dataset(); + ds.setOwner(dv); + ds.setGlobalId(new GlobalId(AbstractDOIProvider.DOI_PROTOCOL,"10.5072","FK2/BYM3IW", "/", AbstractDOIProvider.DOI_RESOLVER_URL, null)); + + DatasetVersion dv1 = initDatasetVersion(0L, ds, DatasetVersion.VersionState.RELEASED); + DatasetVersion dv2 = initDatasetVersion(1L, ds, DatasetVersion.VersionState.DRAFT); + ds.setVersions(List.of(dv1, dv2)); + + TermsOfUseAndAccess toa = new TermsOfUseAndAccess(); + toa.setDisclaimer("disclaimer"); + dv2.setTermsOfUseAndAccess(toa); + DatasetField dsf = new DatasetField(); + dsf.setDatasetFieldType(new DatasetFieldType("Author", DatasetFieldType.FieldType.TEXT, true)); + MetadataBlock mb = new MetadataBlock(); + mb.setDisplayName("testMetadataBlock"); + dsf.getDatasetFieldType().setMetadataBlock(mb); + dsf.setSingleValue("TEST"); + dv2.getDatasetFields().add(dsf); + // modify file at index 0 + dv2.getFileMetadatas().get(0).setRestricted(!dv2.getFileMetadatas().get(2).isRestricted()); + + FileMetadata addedFile = initFile(dv2); // add a new file + FileMetadata removedFile = dv2.getFileMetadatas().get(1); // remove the second file + dv2.getFileMetadatas().remove(1); + FileMetadata replacedFile = dv2.getFileMetadatas().get(1); // the third file is now at index 1 since the second file was removed + FileMetadata replacementFile = initFile(dv2, replacedFile.getDataFile().getId()); // replace the third file with a new file + dv2.getFileMetadatas().remove(1); + + DatasetVersionDifference dvd = new DatasetVersionDifference(dv2, dv1); + + JsonObjectBuilder json = dvd.compareVersionsAsJson(); + JsonObject obj = json.build(); + System.out.println(JsonUtil.prettyPrint(obj)); + + JsonPath dataFile = 
JsonPath.from(JsonUtil.prettyPrint(obj)); + assertTrue("TEST".equalsIgnoreCase(dataFile.getString("metadataChanges[0].changed[0].newValue"))); + assertTrue(addedFile.getLabel().equalsIgnoreCase(dataFile.getString("filesAdded[0].fileName"))); + assertTrue(removedFile.getLabel().equalsIgnoreCase(dataFile.getString("filesRemoved[0].fileName"))); + assertTrue(replacedFile.getLabel().equalsIgnoreCase(dataFile.getString("filesReplaced[0].oldFile.fileName"))); + assertTrue(replacementFile.getLabel().equalsIgnoreCase(dataFile.getString("filesReplaced[0].newFile.fileName"))); + assertTrue("true".equalsIgnoreCase(dataFile.getString("fileChanges[0].changed[0].newValue"))); + assertTrue("disclaimer".equalsIgnoreCase(dataFile.getString("TermsOfAccess.changed[0].newValue"))); + } + private DatasetVersion initDatasetVersion(Long id, Dataset ds, DatasetVersion.VersionState vs) { + DatasetVersion dv = new DatasetVersion(); + dv.setDataset(ds); + dv.setVersion(1L); + dv.setVersionState(vs); + dv.setMinorVersionNumber(0L); + if (vs == DatasetVersion.VersionState.RELEASED) { + dv.setVersionNumber(1L); + dv.setVersion(1L); + dv.setReleaseTime(now()); + } + dv.setId(id); + dv.setCreateTime(now()); + dv.setLastUpdateTime(now()); + dv.setTermsOfUseAndAccess(new TermsOfUseAndAccess()); + dv.setFileMetadatas(initFiles(dv)); + return dv; + } + private List initFiles(DatasetVersion dsv) { + List fileMetadatas = new ArrayList<>(); + fileId = 0L; + for (int i=0; i < 10; i++) { + FileMetadata fm = initFile(dsv); + fileMetadatas.add(fm); + } + return fileMetadatas; + } + private FileMetadata initFile(DatasetVersion dsv) { + return initFile(dsv, null); + } + private FileMetadata initFile(DatasetVersion dsv, Long prevId) { + Long id = fileId++; + FileMetadata fm = new FileMetadata(); + DataFile df = new DataFile(); + fm.setDatasetVersion(dsv); + DataTable dt = new DataTable(); + dt.setOriginalFileName("filename"+id+".txt"); + df.setId(id); + df.setDescription("Desc"+id); + df.setRestricted(false); 
+ df.setFilesize(100 + id); + df.setChecksumType(DataFile.ChecksumType.MD5); + df.setChecksumValue("value"+id); + df.setDataTable(dt); + df.setOwner(dsv.getDataset()); + df.getFileMetadatas().add(fm); + df.setPreviousDataFileId(prevId); + fm.setId(id); + fm.setDataFile(df); + fm.setLabel("Label"+id); + fm.setDirectoryLabel("/myFilePath/"); + fm.setDescription("Desc"+id); + dsv.getFileMetadatas().add(fm); + return fm; + } +} diff --git a/src/test/java/edu/harvard/iq/dataverse/DatasetVersionTest.java b/src/test/java/edu/harvard/iq/dataverse/DatasetVersionTest.java index 4cd6c4dfaa7..b36d8907472 100644 --- a/src/test/java/edu/harvard/iq/dataverse/DatasetVersionTest.java +++ b/src/test/java/edu/harvard/iq/dataverse/DatasetVersionTest.java @@ -80,7 +80,7 @@ public void testIsInReview() { DatasetVersion nonDraft = new DatasetVersion(); nonDraft.setVersionState(DatasetVersion.VersionState.RELEASED); - assertEquals(false, nonDraft.isInReview()); + assertFalse(nonDraft.isInReview()); ds.addLock(null); assertFalse(nonDraft.isInReview()); diff --git a/src/test/java/edu/harvard/iq/dataverse/api/AdminIT.java b/src/test/java/edu/harvard/iq/dataverse/api/AdminIT.java index 6d7dd2eae29..94aece95861 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/AdminIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/AdminIT.java @@ -16,6 +16,8 @@ import java.util.HashMap; import java.util.List; +import jakarta.json.Json; +import jakarta.json.JsonArray; import org.junit.jupiter.api.Disabled; import org.junit.jupiter.api.Test; import org.junit.jupiter.api.BeforeAll; @@ -26,13 +28,11 @@ import java.util.Map; import java.util.UUID; -import java.util.logging.Level; import java.util.logging.Logger; import static jakarta.ws.rs.core.Response.Status.*; +import static org.hamcrest.CoreMatchers.*; import static org.junit.jupiter.api.Assertions.assertEquals; -import static org.hamcrest.CoreMatchers.equalTo; -import static org.hamcrest.CoreMatchers.notNullValue; import static 
org.junit.jupiter.api.Assertions.assertTrue; public class AdminIT { @@ -901,6 +901,50 @@ public void testDownloadTmpFile() throws IOException { .body("message", equalTo("Path must begin with '/tmp' but after normalization was '/etc/passwd'.")); } + @Test + public void testFindMissingFiles() { + Response createUserResponse = UtilIT.createRandomUser(); + createUserResponse.then().assertThat().statusCode(OK.getStatusCode()); + String username = UtilIT.getUsernameFromResponse(createUserResponse); + String apiToken = UtilIT.getApiTokenFromResponse(createUserResponse); + UtilIT.setSuperuserStatus(username, true); + + String dataverseAlias = ":root"; + Response createDatasetResponse = UtilIT.createRandomDatasetViaNativeApi(dataverseAlias, apiToken); + createDatasetResponse.prettyPrint(); + createDatasetResponse.then().assertThat().statusCode(CREATED.getStatusCode()); + int datasetId = JsonPath.from(createDatasetResponse.body().asString()).getInt("data.id"); + String datasetPersistentId = JsonPath.from(createDatasetResponse.body().asString()).getString("data.persistentId"); + + // Upload file + Response uploadResponse = UtilIT.uploadRandomFile(datasetPersistentId, apiToken); + uploadResponse.then().assertThat().statusCode(CREATED.getStatusCode()); + + // Audit files + Response resp = UtilIT.auditFiles(apiToken, null, 100L, null); + resp.prettyPrint(); + JsonArray emptyArray = Json.createArrayBuilder().build(); + resp.then().assertThat() + .statusCode(OK.getStatusCode()) + .body("data.lastId", equalTo(100)); + + // Audit files with invalid parameters + resp = UtilIT.auditFiles(apiToken, 100L, 0L, null); + resp.prettyPrint(); + resp.then().assertThat() + .statusCode(BAD_REQUEST.getStatusCode()) + .body("status", equalTo("ERROR")) + .body("message", equalTo("Invalid Parameters: lastId must be equal to or greater than firstId")); + + // Audit files with list of dataset identifiers parameter + resp = UtilIT.auditFiles(apiToken, 1L, null, "bad/id, " + datasetPersistentId); + 
resp.prettyPrint(); + resp.then().assertThat() + .statusCode(OK.getStatusCode()) + .body("data.failures[0].datasetIdentifier", equalTo("bad/id")) + .body("data.failures[0].reason", equalTo("Not Found")); + } + private String createTestNonSuperuserApiToken() { Response createUserResponse = UtilIT.createRandomUser(); createUserResponse.then().assertThat().statusCode(OK.getStatusCode()); diff --git a/src/test/java/edu/harvard/iq/dataverse/api/DatasetFieldServiceApiTest.java b/src/test/java/edu/harvard/iq/dataverse/api/DatasetFieldServiceApiTest.java index ca99960f240..5f00d34b276 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/DatasetFieldServiceApiTest.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/DatasetFieldServiceApiTest.java @@ -1,15 +1,61 @@ package edu.harvard.iq.dataverse.api; +import edu.harvard.iq.dataverse.ControlledVocabularyValueServiceBean; +import edu.harvard.iq.dataverse.DatasetFieldServiceBean; +import edu.harvard.iq.dataverse.DataverseServiceBean; +import edu.harvard.iq.dataverse.MetadataBlockServiceBean; +import edu.harvard.iq.dataverse.actionlogging.ActionLogServiceBean; import edu.harvard.iq.dataverse.util.BundleUtil; +import jakarta.json.Json; +import jakarta.json.JsonObject; +import jakarta.json.JsonReader; +import jakarta.ws.rs.core.Response; +import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; +import org.mockito.Mock; +import org.mockito.junit.jupiter.MockitoExtension; +import java.io.File; +import java.io.StringReader; +import java.nio.file.Path; +import java.nio.file.Paths; import java.util.ArrayList; import java.util.List; +import static org.assertj.core.api.Assertions.assertThat; import static org.junit.jupiter.api.Assertions.assertEquals; +@ExtendWith(MockitoExtension.class) public class DatasetFieldServiceApiTest { + @Mock + private ActionLogServiceBean actionLogSvc; + + @Mock + private MetadataBlockServiceBean metadataBlockService; + + @Mock + 
private DataverseServiceBean dataverseService; + + @Mock + private DatasetFieldServiceBean datasetFieldService; + + @Mock + private ControlledVocabularyValueServiceBean controlledVocabularyValueService; + + private DatasetFieldServiceApi api; + + @BeforeEach + public void setup(){ + api = new DatasetFieldServiceApi(); + api.actionLogSvc = actionLogSvc; + api.metadataBlockService = metadataBlockService; + api.dataverseService = dataverseService; + api.datasetFieldService = datasetFieldService; + api.controlledVocabularyValueService = controlledVocabularyValueService; + } + @Test public void testArrayIndexOutOfBoundMessageBundle() { List arguments = new ArrayList<>(); @@ -59,4 +105,41 @@ public void testGetGeneralErrorMessage() { message ); } + + @Test + public void testGetGeneralErrorMessageEmptyHeader() { + DatasetFieldServiceApi api = new DatasetFieldServiceApi(); + String message = api.getGeneralErrorMessage(null, 5, "some error"); + assertEquals( + "Error parsing metadata block in unknown part, line #5: some error", + message + ); + } + + @Test + public void testLoadDatasetFieldsWhitespaceTrimming() { + + Path resourceDirectory = Paths.get("src/test/resources/tsv/whitespace-test.tsv"); + File testfile = new File(resourceDirectory.toFile().getAbsolutePath()); + JsonReader jsonReader; + try (Response response = api.loadDatasetFields(testfile)) { + assertEquals(200, response.getStatus()); + jsonReader = Json.createReader(new StringReader(response.getEntity().toString())); + } + JsonObject jsonObject = jsonReader.readObject(); + + final List metadataNames = jsonObject.getJsonObject("data").getJsonArray("added") + .getValuesAs(e -> e.asJsonObject().getString("name")); + assertThat(metadataNames).contains("whitespaceDemo") + .contains("whitespaceDemoOne") + .contains("whitespaceDemoTwo") + .contains("whitespaceDemoThree") + .contains("CV1") + .contains("CV2") + .contains("CV3"); + assertThat(metadataNames).doesNotContain(" whitespaceDemo") + 
.doesNotContain("whitespaceDemoOne ") + .doesNotContain("CV1 ") + .doesNotContain(" CV2"); + } } diff --git a/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java b/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java index 93f1024ae7a..34afbb404f0 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java @@ -1676,7 +1676,7 @@ public void testPrivateUrl() { List assignments = with(roleAssignments.body().asString()).param("member", "member").getJsonObject("data.findAll { data -> data._roleAlias == member }"); assertEquals(1, assignments.size()); PrivateUrlUser privateUrlUser = new PrivateUrlUser(datasetId); - assertEquals("Private URL Enabled", privateUrlUser.getDisplayInfo().getTitle()); + assertEquals("Preview URL Enabled", privateUrlUser.getDisplayInfo().getTitle()); List assigneeShouldExistForPrivateUrlUser = with(roleAssignments.body().asString()).param("assigneeString", privateUrlUser.getIdentifier()).getJsonObject("data.findAll { data -> data.assignee == assigneeString }"); logger.info(assigneeShouldExistForPrivateUrlUser + " found for " + privateUrlUser.getIdentifier()); assertEquals(1, assigneeShouldExistForPrivateUrlUser.size()); @@ -1757,7 +1757,7 @@ public void testPrivateUrl() { Response privateUrlRoleAssignmentShouldBeGoneAfterDraftDeleted = UtilIT.getRoleAssignmentsOnDataset(datasetId.toString(), null, apiToken); privateUrlRoleAssignmentShouldBeGoneAfterDraftDeleted.prettyPrint(); - assertEquals(false, privateUrlRoleAssignmentShouldBeGoneAfterDraftDeleted.body().asString().contains(privateUrlUser.getIdentifier())); + assertFalse(privateUrlRoleAssignmentShouldBeGoneAfterDraftDeleted.body().asString().contains(privateUrlUser.getIdentifier())); String newTitleAgain = "I am changing the title again"; Response draftCreatedAgainPostPub = UtilIT.updateDatasetTitleViaSword(dataset1PersistentId, newTitleAgain, apiToken); @@ -4242,7 +4242,7 @@ public void 
testCitationDate() throws IOException { .statusCode(OK.getStatusCode()) .body("data.message", is(expectedCitation)); - Response exportDatasetAsDublinCore = UtilIT.exportDataset(datasetPid, "oai_dc", apiToken); + Response exportDatasetAsDublinCore = UtilIT.exportDataset(datasetPid, "oai_dc", apiToken, true); exportDatasetAsDublinCore.prettyPrint(); exportDatasetAsDublinCore.then().assertThat() .body("oai_dc.type", equalTo("Dataset")) @@ -4259,7 +4259,7 @@ public void testCitationDate() throws IOException { rexport.then().assertThat().statusCode(OK.getStatusCode()); String todayDate = LocalDate.now().format(DateTimeFormatter.ofPattern("yyyy-MM-dd")); - Response exportPostClear = UtilIT.exportDataset(datasetPid, "oai_dc", apiToken); + Response exportPostClear = UtilIT.exportDataset(datasetPid, "oai_dc", apiToken, true); exportPostClear.prettyPrint(); exportPostClear.then().assertThat() .body("oai_dc.type", equalTo("Dataset")) @@ -5168,4 +5168,134 @@ public void testGetCanDownloadAtLeastOneFile() { Response getUserPermissionsOnDatasetInvalidIdResponse = UtilIT.getCanDownloadAtLeastOneFile("testInvalidId", DS_VERSION_LATEST, secondUserApiToken); getUserPermissionsOnDatasetInvalidIdResponse.then().assertThat().statusCode(BAD_REQUEST.getStatusCode()); } + + @Test + public void testCompareDatasetVersionsAPI() throws InterruptedException { + + Response createUser = UtilIT.createRandomUser(); + assertEquals(200, createUser.getStatusCode()); + String username = UtilIT.getUsernameFromResponse(createUser); + String apiToken = UtilIT.getApiTokenFromResponse(createUser); + Response makeSuperUser = UtilIT.makeSuperUser(username); + assertEquals(200, makeSuperUser.getStatusCode()); + + Response createDataverseResponse = UtilIT.createRandomDataverse(apiToken); + String dataverseAlias = UtilIT.getAliasFromResponse(createDataverseResponse); + + Response createDatasetResponse = UtilIT.createRandomDatasetViaNativeApi(dataverseAlias, apiToken); + Integer datasetId = 
JsonPath.from(createDatasetResponse.body().asString()).getInt("data.id"); + + Response getDatasetJsonBeforePublishing = UtilIT.nativeGet(datasetId, apiToken); + String protocol = JsonPath.from(getDatasetJsonBeforePublishing.getBody().asString()).getString("data.protocol"); + String authority = JsonPath.from(getDatasetJsonBeforePublishing.getBody().asString()).getString("data.authority"); + String identifier = JsonPath.from(getDatasetJsonBeforePublishing.getBody().asString()).getString("data.identifier"); + String datasetPersistentId = protocol + ":" + authority + "/" + identifier; + // used for all added files + JsonObjectBuilder json = Json.createObjectBuilder() + .add("description", "my description") + .add("directoryLabel", "/data/subdir1/") + .add("categories", Json.createArrayBuilder() + .add("Data") + ); + JsonObject jsonObj = json.build(); + String pathToFile = "src/main/webapp/resources/images/dataverse-icon-1200.png"; + Response uploadResponse = UtilIT.uploadFileViaNative(String.valueOf(datasetId), pathToFile, jsonObj, apiToken); + uploadResponse.prettyPrint(); + uploadResponse.then().assertThat() + .statusCode(OK.getStatusCode()); + Integer modifyFileId = UtilIT.getDataFileIdFromResponse(uploadResponse); + pathToFile = "src/main/webapp/resources/images/dataverseproject_logo.jpg"; + uploadResponse = UtilIT.uploadFileViaNative(String.valueOf(datasetId), pathToFile, jsonObj, apiToken); + uploadResponse.then().assertThat() + .statusCode(OK.getStatusCode()); + Integer deleteFileId = UtilIT.getDataFileIdFromResponse(uploadResponse); + + pathToFile = "src/main/webapp/resources/images/fav/favicon-16x16.png"; + uploadResponse = UtilIT.uploadFileViaNative(String.valueOf(datasetId), pathToFile, jsonObj, apiToken); + uploadResponse.then().assertThat() + .statusCode(OK.getStatusCode()); + Integer replaceFileId = UtilIT.getDataFileIdFromResponse(uploadResponse); + + Response publishDataverse = UtilIT.publishDataverseViaSword(dataverseAlias, apiToken); + 
assertEquals(200, publishDataverse.getStatusCode()); + + Response publishDataset = UtilIT.publishDatasetViaNativeApi(datasetPersistentId, "major", apiToken); + assertEquals(200, publishDataset.getStatusCode()); + + // post publish update to create DRAFT version + String pathToJsonFilePostPub = "doc/sphinx-guides/source/_static/api/dataset-add-metadata-after-pub.json"; + Response addDataToPublishedVersion = UtilIT.addDatasetMetadataViaNative(datasetPersistentId, pathToJsonFilePostPub, apiToken); + addDataToPublishedVersion.then().assertThat().statusCode(OK.getStatusCode()); + + // Test adding a file + pathToFile = "src/test/resources/tab/test.tab"; + Response uploadTabularFileResponse = UtilIT.uploadFileViaNative(Integer.toString(datasetId), pathToFile, jsonObj, apiToken); + uploadTabularFileResponse.prettyPrint(); + uploadTabularFileResponse.then().assertThat() + .statusCode(OK.getStatusCode()); + Integer addedFileId = UtilIT.getDataFileIdFromResponse(uploadTabularFileResponse); + + // Ensure tabular file is ingested + sleep(2000); + + String tabularTagName = "Survey"; + Response setFileTabularTagsResponse = UtilIT.setFileTabularTags(String.valueOf(addedFileId), apiToken, List.of(tabularTagName)); + setFileTabularTagsResponse.prettyPrint(); + setFileTabularTagsResponse.then().assertThat() + .statusCode(OK.getStatusCode()); + + // Test removing a file + uploadResponse = UtilIT.deleteFile(deleteFileId, apiToken); + uploadResponse.prettyPrint(); + uploadResponse.then().assertThat() + .statusCode(NO_CONTENT.getStatusCode()); + + // Test Replacing a file + Response replaceResponse = UtilIT.replaceFile(String.valueOf(replaceFileId), "src/main/webapp/resources/images/fav/favicon-32x32.png", jsonObj, apiToken); + replaceResponse.prettyPrint(); + replaceResponse.then().assertThat() + .statusCode(OK.getStatusCode()); + + // Test modify by restricting the file + Response restrictResponse = UtilIT.restrictFile(modifyFileId.toString(), true, apiToken); + 
restrictResponse.prettyPrint(); + restrictResponse.then().assertThat() + .statusCode(OK.getStatusCode()); + + // Also test a terms of access change + String jsonLDTerms = "{\"https://dataverse.org/schema/core#fileTermsOfAccess\":{\"https://dataverse.org/schema/core#dataAccessPlace\":\"Somewhere\"}}"; + Response updateTerms = UtilIT.updateDatasetJsonLDMetadata(datasetId, apiToken, jsonLDTerms, true); + updateTerms.then().assertThat() + .statusCode(OK.getStatusCode()); + + Response compareResponse = UtilIT.compareDatasetVersions(datasetPersistentId, ":latest-published", ":draft", apiToken); + compareResponse.prettyPrint(); + compareResponse.then().assertThat() + .body("data.oldVersion.versionNumber", CoreMatchers.equalTo("1.0")) + .body("data.newVersion.versionNumber", CoreMatchers.equalTo("DRAFT")) + .body("data.metadataChanges[0].blockName", CoreMatchers.equalTo("Citation Metadata")) + .body("data.metadataChanges[0].changed[0].fieldName", CoreMatchers.equalTo("Author")) + .body("data.metadataChanges[0].changed[0].oldValue", CoreMatchers.containsString("Finch, Fiona; (Birds Inc.)")) + .body("data.metadataChanges[1].blockName", CoreMatchers.equalTo("Life Sciences Metadata")) + .body("data.metadataChanges[1].changed[0].fieldName", CoreMatchers.equalTo("Design Type")) + .body("data.metadataChanges[1].changed[0].oldValue", CoreMatchers.containsString("")) + .body("data.metadataChanges[1].changed[0].newValue", CoreMatchers.containsString("Parallel Group Design; Nested Case Control Design")) + .body("data.filesAdded[0].fileName", CoreMatchers.equalTo("test.tab")) + .body("data.filesAdded[0].filePath", CoreMatchers.equalTo("data/subdir1")) + .body("data.filesAdded[0].description", CoreMatchers.equalTo("my description")) + .body("data.filesAdded[0].tags[0]", CoreMatchers.equalTo("Survey")) + .body("data.filesRemoved[0].fileName", CoreMatchers.equalTo("dataverseproject_logo.jpg")) + .body("data.fileChanges[0].fileName", CoreMatchers.equalTo("dataverse-icon-1200.png")) + 
.body("data.fileChanges[0].changed[0].newValue", CoreMatchers.equalTo("true")) + .body("data.filesReplaced[0].oldFile.fileName", CoreMatchers.equalTo("favicon-16x16.png")) + .body("data.filesReplaced[0].newFile.fileName", CoreMatchers.equalTo("favicon-32x32.png")) + .body("data.TermsOfAccess", CoreMatchers.notNullValue()) + .statusCode(OK.getStatusCode()); + + compareResponse = UtilIT.compareDatasetVersions(datasetPersistentId, ":draft", ":latest-published", apiToken); + compareResponse.prettyPrint(); + compareResponse.then().assertThat() + .body("message", CoreMatchers.equalTo(BundleUtil.getStringFromBundle("dataset.version.compare.incorrect.order"))) + .statusCode(BAD_REQUEST.getStatusCode()); + } } diff --git a/src/test/java/edu/harvard/iq/dataverse/api/DataversesIT.java b/src/test/java/edu/harvard/iq/dataverse/api/DataversesIT.java index 01c02900158..76bb515beb2 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/DataversesIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/DataversesIT.java @@ -135,14 +135,16 @@ public void testDataverseCategory() { public void testMinimalDataverse() throws FileNotFoundException { Response createUser = UtilIT.createRandomUser(); createUser.prettyPrint(); - String username = UtilIT.getUsernameFromResponse(createUser); String apiToken = UtilIT.getApiTokenFromResponse(createUser); JsonObject dvJson; FileReader reader = new FileReader("doc/sphinx-guides/source/_static/api/dataverse-minimal.json"); dvJson = Json.createReader(reader).readObject(); Response create = UtilIT.createDataverse(dvJson, apiToken); create.prettyPrint(); - create.then().assertThat().statusCode(CREATED.getStatusCode()); + create.then().assertThat() + .body("data.isMetadataBlockRoot", equalTo(false)) + .body("data.isFacetRoot", equalTo(false)) + .statusCode(CREATED.getStatusCode()); Response deleteDataverse = UtilIT.deleteDataverse("science", apiToken); deleteDataverse.prettyPrint(); deleteDataverse.then().assertThat().statusCode(OK.getStatusCode()); 
@@ -1377,6 +1379,48 @@ public void testUpdateDataverse() { Response getDataverseResponse = UtilIT.listDataverseFacets(oldDataverseAlias, apiToken); getDataverseResponse.then().assertThat().statusCode(NOT_FOUND.getStatusCode()); + // Update the dataverse without setting metadata blocks, facets, or input levels + updateDataverseResponse = UtilIT.updateDataverse( + newAlias, + newAlias, + newName, + newAffiliation, + newDataverseType, + newContactEmails, + null, + null, + null, + apiToken + ); + updateDataverseResponse.then().assertThat().statusCode(OK.getStatusCode()); + + // Assert that the metadata blocks are inherited from the parent + listMetadataBlocksResponse = UtilIT.listMetadataBlocks(newAlias, false, false, apiToken); + listMetadataBlocksResponse + .then().assertThat() + .statusCode(OK.getStatusCode()) + .body("data.size()", equalTo(1)) + .body("data[0].name", equalTo("citation")); + + // Assert that the facets are inherited from the parent + String[] rootFacetIds = new String[]{"authorName", "subject", "keywordValue", "dateOfDeposit"}; + listDataverseFacetsResponse = UtilIT.listDataverseFacets(newAlias, apiToken); + String actualFacetName1 = listDataverseFacetsResponse.then().extract().path("data[0]"); + String actualFacetName2 = listDataverseFacetsResponse.then().extract().path("data[1]"); + String actualFacetName3 = listDataverseFacetsResponse.then().extract().path("data[2]"); + String actualFacetName4 = listDataverseFacetsResponse.then().extract().path("data[3]"); + assertThat(rootFacetIds, hasItemInArray(actualFacetName1)); + assertThat(rootFacetIds, hasItemInArray(actualFacetName2)); + assertThat(rootFacetIds, hasItemInArray(actualFacetName3)); + assertThat(rootFacetIds, hasItemInArray(actualFacetName4)); + + // Assert that the dataverse should not have any input level + listDataverseInputLevelsResponse = UtilIT.listDataverseInputLevels(newAlias, apiToken); + listDataverseInputLevelsResponse + .then().assertThat() + .statusCode(OK.getStatusCode()) + 
.body("data.size()", equalTo(0)); + // Should return error when the dataverse to edit does not exist updateDataverseResponse = UtilIT.updateDataverse( "unexistingDataverseAlias", diff --git a/src/test/java/edu/harvard/iq/dataverse/api/FilesIT.java b/src/test/java/edu/harvard/iq/dataverse/api/FilesIT.java index e3c26284d55..98107eca33a 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/FilesIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/FilesIT.java @@ -2275,7 +2275,6 @@ public void testDeleteFile() { Response v1 = UtilIT.getDatasetVersion(datasetPid, "1.0", apiToken); v1.prettyPrint(); v1.then().assertThat() - .body("data.files[0].dataFile.filename", equalTo("cc0.png")) .statusCode(OK.getStatusCode()); Map v1files1 = with(v1.body().asString()).param("fileToFind", "cc0.png") @@ -2290,7 +2289,6 @@ public void testDeleteFile() { Response postv1draft2 = UtilIT.getDatasetVersion(datasetPid, DS_VERSION_DRAFT, apiToken); postv1draft2.prettyPrint(); postv1draft2.then().assertThat() - .body("data.files[0].dataFile.filename", equalTo("orcid_16x16.png")) .statusCode(OK.getStatusCode()); Map v1files2 = with(postv1draft2.body().asString()).param("fileToFind", "orcid_16x16.png") diff --git a/src/test/java/edu/harvard/iq/dataverse/api/InfoIT.java b/src/test/java/edu/harvard/iq/dataverse/api/InfoIT.java index 5e436dd0e98..b198d2769a0 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/InfoIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/InfoIT.java @@ -6,11 +6,14 @@ import org.junit.jupiter.api.AfterAll; import org.junit.jupiter.api.BeforeAll; import org.junit.jupiter.api.Test; -import static jakarta.ws.rs.core.Response.Status.BAD_REQUEST; import static jakarta.ws.rs.core.Response.Status.NOT_FOUND; import static jakarta.ws.rs.core.Response.Status.OK; +import java.io.IOException; +import java.nio.charset.StandardCharsets; +import java.nio.file.Paths; import static org.hamcrest.CoreMatchers.equalTo; import static org.hamcrest.CoreMatchers.notNullValue; 
+import org.skyscreamer.jsonassert.JSONAssert; public class InfoIT { @@ -81,6 +84,22 @@ public void testGetZipDownloadLimit() { .body("data", notNullValue()); } + @Test + public void testGetExportFormats() throws IOException { + Response response = given().urlEncodingEnabled(false) + .get("/api/info/exportFormats"); + response.prettyPrint(); + response.then().assertThat().statusCode(OK.getStatusCode()); + + String actual = response.getBody().asString(); + String expected = + java.nio.file.Files.readString( + Paths.get("src/test/resources/json/export-formats.json"), + StandardCharsets.UTF_8); + JSONAssert.assertEquals(expected, actual, true); + + } + private void testSettingEndpoint(SettingsServiceBean.Key settingKey, String testSettingValue) { String endpoint = "/api/info/settings/" + settingKey; diff --git a/src/test/java/edu/harvard/iq/dataverse/api/SavedSearchIT.java b/src/test/java/edu/harvard/iq/dataverse/api/SavedSearchIT.java index 90357596c25..08ebec31cd6 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/SavedSearchIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/SavedSearchIT.java @@ -22,7 +22,7 @@ public class SavedSearchIT { @BeforeAll public static void setUpClass() { - + RestAssured.baseURI = UtilIT.getRestAssuredBaseUri(); } @AfterAll @@ -53,81 +53,55 @@ public void testSavedSearches() { Integer datasetId2 = UtilIT.getDatasetIdFromResponse(createDatasetResponse2); // missing body - Response resp = RestAssured.given() - .contentType("application/json") - .post("/api/admin/savedsearches"); + Response resp = UtilIT.setSavedSearch(); resp.prettyPrint(); resp.then().assertThat() .statusCode(INTERNAL_SERVER_ERROR.getStatusCode()); // creatorId null - resp = RestAssured.given() - .body(createSavedSearchJson("*", null, dataverseId, "subject_ss:Medicine, Health and Life Sciences")) - .contentType("application/json") - .post("/api/admin/savedsearches"); + resp = UtilIT.setSavedSearch(createSavedSearchJson("*", null, dataverseId, 
"subject_ss:Medicine, Health and Life Sciences")); resp.prettyPrint(); resp.then().assertThat() .statusCode(BAD_REQUEST.getStatusCode()); // creatorId string - resp = RestAssured.given() - .body(createSavedSearchJson("*", "1", dataverseId.toString(), "subject_ss:Medicine, Health and Life Sciences")) - .contentType("application/json") - .post("/api/admin/savedsearches"); + resp = UtilIT.setSavedSearch(createSavedSearchJson("*", "1", dataverseId.toString(), "subject_ss:Medicine, Health and Life Sciences")); resp.prettyPrint(); resp.then().assertThat() .statusCode(BAD_REQUEST.getStatusCode()); // creatorId not found - resp = RestAssured.given() - .body(createSavedSearchJson("*", 9999, dataverseId, "subject_ss:Medicine, Health and Life Sciences")) - .contentType("application/json") - .post("/api/admin/savedsearches"); + resp = UtilIT.setSavedSearch(createSavedSearchJson("*", 9999, dataverseId, "subject_ss:Medicine, Health and Life Sciences")); resp.prettyPrint(); resp.then().assertThat() .statusCode(NOT_FOUND.getStatusCode()); // definitionPointId null - resp = RestAssured.given() - .body(createSavedSearchJson("*", 1, null, "subject_ss:Medicine, Health and Life Sciences")) - .contentType("application/json") - .post("/api/admin/savedsearches"); + resp = UtilIT.setSavedSearch(createSavedSearchJson("*", 1, null, "subject_ss:Medicine, Health and Life Sciences")); resp.prettyPrint(); resp.then().assertThat() .statusCode(BAD_REQUEST.getStatusCode()); // definitionPointId string - resp = RestAssured.given() - .body(createSavedSearchJson("*", "1", "9999", "subject_ss:Medicine, Health and Life Sciences")) - .contentType("application/json") - .post("/api/admin/savedsearches"); + resp = UtilIT.setSavedSearch(createSavedSearchJson("*", "1", "9999", "subject_ss:Medicine, Health and Life Sciences")); resp.prettyPrint(); resp.then().assertThat() .statusCode(BAD_REQUEST.getStatusCode()); // definitionPointId not found - resp = RestAssured.given() - .body(createSavedSearchJson("*", 1, 
9999, "subject_ss:Medicine, Health and Life Sciences")) - .contentType("application/json") - .post("/api/admin/savedsearches"); + resp = UtilIT.setSavedSearch(createSavedSearchJson("*", 1, 9999, "subject_ss:Medicine, Health and Life Sciences")); resp.prettyPrint(); resp.then().assertThat() .statusCode(NOT_FOUND.getStatusCode()); // missing filter - resp = RestAssured.given() - .body(createSavedSearchJson("*", 1, dataverseId)) - .contentType("application/json") - .post("/api/admin/savedsearches"); + resp = UtilIT.setSavedSearch(createSavedSearchJson("*", 1, dataverseId)); resp.prettyPrint(); resp.then().assertThat() .statusCode(OK.getStatusCode()); // create a saved search as superuser : OK - resp = RestAssured.given() - .body(createSavedSearchJson("*", 1, dataverseId, "subject_ss:Medicine, Health and Life Sciences")) - .contentType("application/json") - .post("/api/admin/savedsearches"); + resp = UtilIT.setSavedSearch(createSavedSearchJson("*", 1, dataverseId, "subject_ss:Medicine, Health and Life Sciences")); resp.prettyPrint(); resp.then().assertThat() .statusCode(OK.getStatusCode()); @@ -136,8 +110,7 @@ public void testSavedSearches() { Integer createdSavedSearchId = path.getInt("data.id"); // get list as non superuser : OK - Response getListReponse = RestAssured.given() - .get("/api/admin/savedsearches/list"); + Response getListReponse = UtilIT.getSavedSearchList(); getListReponse.prettyPrint(); getListReponse.then().assertThat() .statusCode(OK.getStatusCode()); @@ -146,22 +119,19 @@ public void testSavedSearches() { List listBeforeDelete = path2.getList("data.savedSearches"); // makelinks/all as non superuser : OK - Response makelinksAll = RestAssured.given() - .put("/api/admin/savedsearches/makelinks/all"); + Response makelinksAll = UtilIT.setSavedSearchMakelinksAll(); makelinksAll.prettyPrint(); makelinksAll.then().assertThat() .statusCode(OK.getStatusCode()); //delete a saved search as non superuser : OK - Response deleteReponse = RestAssured.given() - 
.delete("/api/admin/savedsearches/" + createdSavedSearchId); + Response deleteReponse = UtilIT.deleteSavedSearchById(createdSavedSearchId); deleteReponse.prettyPrint(); deleteReponse.then().assertThat() .statusCode(OK.getStatusCode()); // check list count minus 1 - getListReponse = RestAssured.given() - .get("/api/admin/savedsearches/list"); + getListReponse = UtilIT.getSavedSearchList(); getListReponse.prettyPrint(); JsonPath path3 = JsonPath.from(getListReponse.body().asString()); List listAfterDelete = path3.getList("data.savedSearches"); diff --git a/src/test/java/edu/harvard/iq/dataverse/api/SwordIT.java b/src/test/java/edu/harvard/iq/dataverse/api/SwordIT.java index 518431bfa2d..709908ac6eb 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/SwordIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/SwordIT.java @@ -850,7 +850,7 @@ public void testDeleteFiles() { String citation = atomEntryDraftV2.body().xmlPath().getString("entry.bibliographicCitation"); logger.info("citation (should contain 'DRAFT'): " + citation); boolean draftStringFoundInCitation = citation.matches(".*DRAFT.*"); - assertEquals(true, draftStringFoundInCitation); + assertTrue(draftStringFoundInCitation); List oneFileLeftInV2Draft = statement3.getBody().xmlPath().getList("feed.entry.id"); logger.info("Number of files remaining in this post version 1 draft:" + oneFileLeftInV2Draft.size()); diff --git a/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java b/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java index 502f1ecb0a8..1930610532a 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java @@ -241,6 +241,22 @@ public static Response clearThumbnailFailureFlag(long fileId) { return response; } + public static Response auditFiles(String apiToken, Long firstId, Long lastId, String csvList) { + String params = ""; + if (firstId != null) { + params = "?firstId="+ firstId; + } + if (lastId != null) { + params = 
params + (params.isEmpty() ? "?" : "&") + "lastId="+ lastId; + } + if (csvList != null) { + params = params + (params.isEmpty() ? "?" : "&") + "datasetIdentifierList="+ csvList; + } + return given() + .header(API_TOKEN_HTTP_HEADER, apiToken) + .get("/api/admin/datafiles/auditFiles" + params); + } + private static String getAuthenticatedUserAsJsonString(String persistentUserId, String firstName, String lastName, String authenticationProviderId, String identifier) { JsonObjectBuilder builder = Json.createObjectBuilder(); builder.add("authenticationProviderId", authenticationProviderId); @@ -325,7 +341,14 @@ static Integer getDatasetIdFromResponse(Response createDatasetResponse) { logger.info("Id found in create dataset response: " + datasetId); return datasetId; } - + + static Integer getDataFileIdFromResponse(Response uploadDataFileResponse) { + JsonPath dataFile = JsonPath.from(uploadDataFileResponse.body().asString()); + int dataFileId = dataFile.getInt("data.files[0].dataFile.id"); + logger.info("Id found in upload DataFile response: " + dataFileId); + return dataFileId; + } + static Integer getSearchCountFromResponse(Response searchResponse) { JsonPath createdDataset = JsonPath.from(searchResponse.body().asString()); int searchCount = createdDataset.getInt("data.total_count"); @@ -1608,7 +1631,16 @@ static Response getDatasetVersion(String persistentId, String versionNumber, Str + persistentId + (excludeFiles ? 
"&excludeFiles=true" : "")); } - + static Response compareDatasetVersions(String persistentId, String versionNumber1, String versionNumber2, String apiToken) { + return given() + .header(API_TOKEN_HTTP_HEADER, apiToken) + .get("/api/datasets/:persistentId/versions/" + + versionNumber1 + + "/compare/" + + versionNumber2 + + "?persistentId=" + + persistentId); + } static Response getDatasetWithOwners(String persistentId, String apiToken, boolean returnOwners) { return given() .header(API_TOKEN_HTTP_HEADER, apiToken) @@ -2161,19 +2193,22 @@ static Response uploadProvFreeForm(String idOrPersistentId, JsonObject jsonObjec // return requestSpecification.delete("/api/files/" + idInPath + "/prov-freeform" + optionalQueryParam); // } static Response exportDataset(String datasetPersistentId, String exporter) { - return exportDataset(datasetPersistentId, exporter, null); + return exportDataset(datasetPersistentId, exporter, null, false); } - static Response exportDataset(String datasetPersistentId, String exporter, String apiToken) { -// http://localhost:8080/api/datasets/export?exporter=dataverse_json&persistentId=doi%3A10.5072/FK2/W6WIMQ + return exportDataset(datasetPersistentId, exporter, apiToken, false); + } + static Response exportDataset(String datasetPersistentId, String exporter, String apiToken, boolean wait) { + // Wait for the Async call to finish to get the updated data + if (wait) { + sleepForReexport(datasetPersistentId, apiToken, 10); + } RequestSpecification requestSpecification = given(); if (apiToken != null) { requestSpecification = given() .header(UtilIT.API_TOKEN_HTTP_HEADER, apiToken); } return requestSpecification - // .header(API_TOKEN_HTTP_HEADER, apiToken) - // .get("/api/datasets/:persistentId/export" + "?persistentId=" + datasetPersistentId + "&exporter=" + exporter); .get("/api/datasets/export" + "?persistentId=" + datasetPersistentId + "&exporter=" + exporter); } @@ -4131,8 +4166,37 @@ static Response setDatasetStorageDriver(Integer datasetId, 
String driverLabel, S .body(driverLabel) .put("/api/datasets/" + datasetId + "/storageDriver"); } - - + + /** GET on /api/admin/savedsearches/list */ + static Response getSavedSearchList() { + return given().get("/api/admin/savedsearches/list"); + } + + /** POST on /api/admin/savedsearches without body */ + static Response setSavedSearch() { + return given() + .contentType("application/json") + .post("/api/admin/savedsearches"); + } + + /** POST on /api/admin/savedsearches with body */ + static Response setSavedSearch(String body) { + return given() + .body(body) + .contentType("application/json") + .post("/api/admin/savedsearches"); + } + + /** PUT on /api/admin/savedsearches/makelinks/all */ + static Response setSavedSearchMakelinksAll() { + return given().put("/api/admin/savedsearches/makelinks/all"); + } + + /** DELETE on /api/admin/savedsearches/{id} with identifier */ + static Response deleteSavedSearchById(Integer id) { + return given().delete("/api/admin/savedsearches/" + id); + } + //Globus Store related - not currently used static Response getDatasetGlobusUploadParameters(Integer datasetId, String locale, String apiToken) { diff --git a/src/test/java/edu/harvard/iq/dataverse/api/imports/ImportGenericServiceBeanTest.java b/src/test/java/edu/harvard/iq/dataverse/api/imports/ImportGenericServiceBeanTest.java index 44739f3f62a..acf5d970358 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/imports/ImportGenericServiceBeanTest.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/imports/ImportGenericServiceBeanTest.java @@ -1,6 +1,13 @@ package edu.harvard.iq.dataverse.api.imports; import edu.harvard.iq.dataverse.api.dto.DatasetDTO; +import edu.harvard.iq.dataverse.api.dto.DatasetVersionDTO; + +import org.apache.commons.io.FileUtils; +import com.google.gson.Gson; +import java.io.File; +import java.io.IOException; + import org.junit.jupiter.api.Test; import org.junit.jupiter.api.extension.ExtendWith; import org.mockito.InjectMocks; @@ -8,6 +15,8 @@ import 
static org.junit.jupiter.api.Assertions.assertEquals; import static org.junit.jupiter.api.Assertions.assertNull; +import java.nio.charset.StandardCharsets; + @ExtendWith(MockitoExtension.class) public class ImportGenericServiceBeanTest { @@ -15,7 +24,47 @@ public class ImportGenericServiceBeanTest { private ImportGenericServiceBean importGenericService; @Test - public void testReassignIdentifierAsGlobalId() { + void testIdentifierHarvestableWithOtherID() throws IOException { + // "otherIdValue" containing the value : doi:10.7910/DVN/TJCLKP + File file = new File("src/test/resources/json/importGenericWithOtherId.json"); + String text = FileUtils.readFileToString(file, StandardCharsets.UTF_8); + DatasetVersionDTO dto = new Gson().fromJson(text, DatasetVersionDTO.class); + + assertEquals("doi:10.7910/DVN/TJCLKP", importGenericService.selectIdentifier(dto, "https://doi.org/10.7910/DVN/TJCLKP")); + // junk or null + assertEquals("doi:10.7910/DVN/TJCLKP", importGenericService.selectIdentifier(dto, "junk")); + assertEquals("doi:10.7910/DVN/TJCLKP", importGenericService.selectIdentifier(dto, null)); + assertEquals("doi:10.7910/DVN/TJCLKP", importGenericService.selectIdentifier(dto, "http://www.example.com")); + assertEquals("doi:10.7910/DVN/TJCLKP", importGenericService.selectIdentifier(dto, "https://dataverse.org")); + } + + @Test + void testIdentifierHarvestableWithoutOtherID() throws IOException { + // Does not contain data of type "otherIdValue" + File file = new File("src/test/resources/json/importGenericWithoutOtherId.json"); + String text = FileUtils.readFileToString(file, StandardCharsets.UTF_8); + DatasetVersionDTO dto = new Gson().fromJson(text, DatasetVersionDTO.class); + + // non-URL + assertEquals("doi:10.7910/DVN/TJCLKP", importGenericService.selectIdentifier(dto, "doi:10.7910/DVN/TJCLKP")); + assertEquals("hdl:10.7910/DVN/TJCLKP", importGenericService.selectIdentifier(dto, "hdl:10.7910/DVN/TJCLKP")); + // HTTPS + 
assertEquals("https://doi.org/10.7910/DVN/TJCLKP", importGenericService.selectIdentifier(dto, "https://doi.org/10.7910/DVN/TJCLKP")); + assertEquals("https://dx.doi.org/10.7910/DVN/TJCLKP", importGenericService.selectIdentifier(dto, "https://dx.doi.org/10.7910/DVN/TJCLKP")); + assertEquals("https://hdl.handle.net/10.7910/DVN/TJCLKP", importGenericService.selectIdentifier(dto, "https://hdl.handle.net/10.7910/DVN/TJCLKP")); + // HTTP (no S) + assertEquals("http://doi.org/10.7910/DVN/TJCLKP", importGenericService.selectIdentifier(dto, "http://doi.org/10.7910/DVN/TJCLKP")); + assertEquals("http://dx.doi.org/10.7910/DVN/TJCLKP", importGenericService.selectIdentifier(dto, "http://dx.doi.org/10.7910/DVN/TJCLKP")); + assertEquals("http://hdl.handle.net/10.7910/DVN/TJCLKP", importGenericService.selectIdentifier(dto, "http://hdl.handle.net/10.7910/DVN/TJCLKP")); + // junk or null + assertNull(importGenericService.selectIdentifier(dto, "junk")); + assertNull(importGenericService.selectIdentifier(dto, null)); + assertNull(importGenericService.selectIdentifier(dto, "http://www.example.com")); + assertNull(importGenericService.selectIdentifier(dto, "https://dataverse.org")); + } + + @Test + void testReassignIdentifierAsGlobalId() { // non-URL assertEquals("doi:10.7910/DVN/TJCLKP", importGenericService.reassignIdentifierAsGlobalId("doi:10.7910/DVN/TJCLKP", new DatasetDTO())); assertEquals("hdl:10.7910/DVN/TJCLKP", importGenericService.reassignIdentifierAsGlobalId("hdl:10.7910/DVN/TJCLKP", new DatasetDTO())); @@ -29,6 +78,8 @@ public void testReassignIdentifierAsGlobalId() { assertEquals("hdl:10.7910/DVN/TJCLKP", importGenericService.reassignIdentifierAsGlobalId("http://hdl.handle.net/10.7910/DVN/TJCLKP", new DatasetDTO())); // junk assertNull(importGenericService.reassignIdentifierAsGlobalId("junk", new DatasetDTO())); + assertNull(importGenericService.reassignIdentifierAsGlobalId("http://www.example.com", new DatasetDTO())); + 
assertNull(importGenericService.reassignIdentifierAsGlobalId("https://dataverse.org", new DatasetDTO())); } } diff --git a/src/test/java/edu/harvard/iq/dataverse/authorization/users/AuthenticatedUserTest.java b/src/test/java/edu/harvard/iq/dataverse/authorization/users/AuthenticatedUserTest.java index 7bd802b3b02..bd3bfcc1a60 100644 --- a/src/test/java/edu/harvard/iq/dataverse/authorization/users/AuthenticatedUserTest.java +++ b/src/test/java/edu/harvard/iq/dataverse/authorization/users/AuthenticatedUserTest.java @@ -378,20 +378,20 @@ public void testMutingInJson() { public void testHasEmailMuted() { testUser.setMutedEmails(mutedTypes); System.out.println("hasEmailMuted"); - assertEquals(true, testUser.hasEmailMuted(Type.ASSIGNROLE)); - assertEquals(true, testUser.hasEmailMuted(Type.REVOKEROLE)); - assertEquals(false, testUser.hasEmailMuted(Type.CREATEDV)); - assertEquals(false, testUser.hasEmailMuted(null)); + assertTrue(testUser.hasEmailMuted(Type.ASSIGNROLE)); + assertTrue(testUser.hasEmailMuted(Type.REVOKEROLE)); + assertFalse(testUser.hasEmailMuted(Type.CREATEDV)); + assertFalse(testUser.hasEmailMuted(null)); } @Test public void testHasNotificationsMutedMuted() { testUser.setMutedNotifications(mutedTypes); System.out.println("hasNotificationMuted"); - assertEquals(true, testUser.hasNotificationMuted(Type.ASSIGNROLE)); - assertEquals(true, testUser.hasNotificationMuted(Type.REVOKEROLE)); - assertEquals(false, testUser.hasNotificationMuted(Type.CREATEDV)); - assertEquals(false, testUser.hasNotificationMuted(null)); + assertTrue(testUser.hasNotificationMuted(Type.ASSIGNROLE)); + assertTrue(testUser.hasNotificationMuted(Type.REVOKEROLE)); + assertFalse(testUser.hasNotificationMuted(Type.CREATEDV)); + assertFalse(testUser.hasNotificationMuted(null)); } @Test diff --git a/src/test/java/edu/harvard/iq/dataverse/authorization/users/PrivateUrlUserTest.java b/src/test/java/edu/harvard/iq/dataverse/authorization/users/PrivateUrlUserTest.java index 
a8dda2f6a7e..d3c5cdca470 100644 --- a/src/test/java/edu/harvard/iq/dataverse/authorization/users/PrivateUrlUserTest.java +++ b/src/test/java/edu/harvard/iq/dataverse/authorization/users/PrivateUrlUserTest.java @@ -38,7 +38,7 @@ void getIdentifier() { @Test void testGetDisplayInfo() { RoleAssigneeDisplayInfo displayInfo = privateUrlUser.getDisplayInfo(); - assertEquals("Private URL Enabled", displayInfo.getTitle()); + assertEquals("Preview URL Enabled", displayInfo.getTitle()); assertNull(displayInfo.getEmailAddress()); } } diff --git a/src/test/java/edu/harvard/iq/dataverse/dataaccess/FileAccessIOTest.java b/src/test/java/edu/harvard/iq/dataverse/dataaccess/FileAccessIOTest.java index 552d76b74e8..ea5cc4b66a8 100644 --- a/src/test/java/edu/harvard/iq/dataverse/dataaccess/FileAccessIOTest.java +++ b/src/test/java/edu/harvard/iq/dataverse/dataaccess/FileAccessIOTest.java @@ -89,20 +89,20 @@ public void tearDownClass() throws IOException { */ @Test public void testOpen() throws IOException { - assertEquals(false, datasetAccess.canRead()); - assertEquals(false, datasetAccess.canWrite()); + assertFalse(datasetAccess.canRead()); + assertFalse(datasetAccess.canWrite()); datasetAccess.open(DataAccessOption.READ_ACCESS); - assertEquals(true, datasetAccess.canRead()); - assertEquals(false, datasetAccess.canWrite()); + assertTrue(datasetAccess.canRead()); + assertFalse(datasetAccess.canWrite()); datasetAccess.open(DataAccessOption.WRITE_ACCESS); - assertEquals(false, datasetAccess.canRead()); - assertEquals(true, datasetAccess.canWrite()); + assertFalse(datasetAccess.canRead()); + assertTrue(datasetAccess.canWrite()); dataFileAccess.open(DataAccessOption.READ_ACCESS); - assertEquals(true, dataFileAccess.canRead()); - assertEquals(false, dataFileAccess.canWrite()); + assertTrue(dataFileAccess.canRead()); + assertFalse(dataFileAccess.canWrite()); } /** @@ -133,7 +133,7 @@ public void testOpenAuxChannel() throws IOException { */ @Test public void testIsAuxObjectCached() throws 
IOException { - assertEquals(true, datasetAccess.isAuxObjectCached("Dataset")); + assertTrue(datasetAccess.isAuxObjectCached("Dataset")); } /** diff --git a/src/test/java/edu/harvard/iq/dataverse/dataaccess/StorageIOTest.java b/src/test/java/edu/harvard/iq/dataverse/dataaccess/StorageIOTest.java index 84a241b90f6..3aab66dc63b 100644 --- a/src/test/java/edu/harvard/iq/dataverse/dataaccess/StorageIOTest.java +++ b/src/test/java/edu/harvard/iq/dataverse/dataaccess/StorageIOTest.java @@ -189,48 +189,48 @@ public void testResponseHeaders() { @Test public void testFileLocation() { - assertEquals(true, instance.isLocalFile()); + assertTrue(instance.isLocalFile()); instance.setIsLocalFile(false); - assertEquals(false, instance.isLocalFile()); + assertFalse(instance.isLocalFile()); - assertEquals(false, instance.isRemoteAccess()); + assertFalse(instance.isRemoteAccess()); instance.setIsRemoteAccess(true); - assertEquals(true, instance.isRemoteAccess()); + assertTrue(instance.isRemoteAccess()); } @Test public void testHttpAccess() { - assertEquals(false, instance.isHttpAccess()); + assertFalse(instance.isHttpAccess()); instance.setIsHttpAccess(true); - assertEquals(true, instance.isHttpAccess()); + assertTrue(instance.isHttpAccess()); }*/ @Test public void testDownloadSupported() { - assertEquals(true, instance.isDownloadSupported()); + assertTrue(instance.isDownloadSupported()); instance.setIsDownloadSupported(false); - assertEquals(false, instance.isDownloadSupported()); + assertFalse(instance.isDownloadSupported()); } @Test public void testSubsetSupported() { - assertEquals(false, instance.isSubsetSupported()); + assertFalse(instance.isSubsetSupported()); instance.setIsSubsetSupported(true); - assertEquals(true, instance.isSubsetSupported()); + assertTrue(instance.isSubsetSupported()); } @Test public void testZippedStream() { - assertEquals(false, instance.isZippedStream()); + assertFalse(instance.isZippedStream()); instance.setIsZippedStream(true); - assertEquals(true, 
instance.isZippedStream()); + assertTrue(instance.isZippedStream()); } @Test public void testNoVarHeader() { - assertEquals(false, instance.noVarHeader()); + assertFalse(instance.noVarHeader()); instance.setNoVarHeader(true); - assertEquals(true, instance.noVarHeader()); + assertTrue(instance.noVarHeader()); } @Test diff --git a/src/test/java/edu/harvard/iq/dataverse/dataaccess/SwiftAccessIOTest.java b/src/test/java/edu/harvard/iq/dataverse/dataaccess/SwiftAccessIOTest.java index 942e4329384..27e0ac758e0 100644 --- a/src/test/java/edu/harvard/iq/dataverse/dataaccess/SwiftAccessIOTest.java +++ b/src/test/java/edu/harvard/iq/dataverse/dataaccess/SwiftAccessIOTest.java @@ -51,14 +51,14 @@ public void setUpClass() throws IOException { */ @Test public void testPerms() throws IOException { - assertEquals(false, datasetAccess.canRead()); - assertEquals(false, datasetAccess.canWrite()); + assertFalse(datasetAccess.canRead()); + assertFalse(datasetAccess.canWrite()); } @Test public void testIsExpiryExpired() { long currentTime = 1502221467; - assertEquals(false, swiftAccess.isExpiryExpired(60, 1502281, currentTime)); + assertFalse(swiftAccess.isExpiryExpired(60, 1502281, currentTime)); } @Test diff --git a/src/test/java/edu/harvard/iq/dataverse/datacapturemodule/DataCaptureModuleUtilTest.java b/src/test/java/edu/harvard/iq/dataverse/datacapturemodule/DataCaptureModuleUtilTest.java index eb19f22df63..148d34dc5f7 100644 --- a/src/test/java/edu/harvard/iq/dataverse/datacapturemodule/DataCaptureModuleUtilTest.java +++ b/src/test/java/edu/harvard/iq/dataverse/datacapturemodule/DataCaptureModuleUtilTest.java @@ -18,6 +18,7 @@ import org.apache.http.message.BasicStatusLine; import static org.junit.jupiter.api.Assertions.assertEquals; import static org.junit.jupiter.api.Assertions.assertTrue; +import static org.junit.jupiter.api.Assertions.assertFalse; import org.junit.jupiter.api.Test; public class DataCaptureModuleUtilTest { @@ -25,13 +26,13 @@ public class 
DataCaptureModuleUtilTest { @Test public void testRsyncSupportEnabled() { System.out.println("rsyncSupportEnabled"); - assertEquals(false, DataCaptureModuleUtil.rsyncSupportEnabled(null)); - assertEquals(true, DataCaptureModuleUtil.rsyncSupportEnabled("dcm/rsync+ssh")); + assertFalse(DataCaptureModuleUtil.rsyncSupportEnabled(null)); + assertTrue(DataCaptureModuleUtil.rsyncSupportEnabled("dcm/rsync+ssh")); // Comma sepratated lists of upload methods are supported. - assertEquals(false, DataCaptureModuleUtil.rsyncSupportEnabled("native/http:dcm/rsync+ssh")); - assertEquals(true, DataCaptureModuleUtil.rsyncSupportEnabled("native/http,dcm/rsync+ssh")); - assertEquals(false, DataCaptureModuleUtil.rsyncSupportEnabled("native/http")); - assertEquals(false, DataCaptureModuleUtil.rsyncSupportEnabled("junk")); + assertFalse(DataCaptureModuleUtil.rsyncSupportEnabled("native/http:dcm/rsync+ssh")); + assertTrue(DataCaptureModuleUtil.rsyncSupportEnabled("native/http,dcm/rsync+ssh")); + assertFalse(DataCaptureModuleUtil.rsyncSupportEnabled("native/http")); + assertFalse(DataCaptureModuleUtil.rsyncSupportEnabled("junk")); } @Test diff --git a/src/test/java/edu/harvard/iq/dataverse/dataset/DatasetUtilTest.java b/src/test/java/edu/harvard/iq/dataverse/dataset/DatasetUtilTest.java index 8eed2a33c5a..2db8851c48a 100644 --- a/src/test/java/edu/harvard/iq/dataverse/dataset/DatasetUtilTest.java +++ b/src/test/java/edu/harvard/iq/dataverse/dataset/DatasetUtilTest.java @@ -70,8 +70,8 @@ public void testGetThumbnailRestricted() { */ @Test public void testDeleteDatasetLogo() { - assertEquals(false, DatasetUtil.deleteDatasetLogo(null)); - assertEquals(false, DatasetUtil.deleteDatasetLogo(new Dataset())); + assertFalse(DatasetUtil.deleteDatasetLogo(null)); + assertFalse(DatasetUtil.deleteDatasetLogo(new Dataset())); } /** @@ -106,7 +106,7 @@ public void testGetThumbnailAsInputStream() { @Test public void testIsDatasetLogoPresent() { Dataset dataset = MocksFactory.makeDataset(); - 
assertEquals(false, DatasetUtil.isDatasetLogoPresent(dataset, ImageThumbConverter.DEFAULT_CARDIMAGE_SIZE)); + assertFalse(DatasetUtil.isDatasetLogoPresent(dataset, ImageThumbConverter.DEFAULT_CARDIMAGE_SIZE)); } @Test diff --git a/src/test/java/edu/harvard/iq/dataverse/datavariable/VariableMetadataDDIParserTest.java b/src/test/java/edu/harvard/iq/dataverse/datavariable/VariableMetadataDDIParserTest.java index bfb9134cfca..475b4c1cff5 100644 --- a/src/test/java/edu/harvard/iq/dataverse/datavariable/VariableMetadataDDIParserTest.java +++ b/src/test/java/edu/harvard/iq/dataverse/datavariable/VariableMetadataDDIParserTest.java @@ -66,16 +66,16 @@ void variableTest(Map vmMap) { assertEquals(vm.getLiteralquestion(), "This is a literal question."); assertEquals(vm.getNotes(), "These are notes.\nA lot of them."); assertEquals(vm.getUniverse(),"Our universe"); - assertEquals(false, vm.isIsweightvar()); - assertEquals(false, vm.isWeighted()); + assertFalse(vm.isIsweightvar()); + assertFalse(vm.isWeighted()); testCategoriesVar1(vm); vm = vmMap.get(1169L); assertNotNull(vm); - assertEquals(false, vm.isIsweightvar()); - assertEquals(true, vm.isWeighted()); + assertFalse(vm.isIsweightvar()); + assertTrue(vm.isWeighted()); assertEquals(vm.getLabel(), "age_rollup" ); assertEquals(vm.getInterviewinstruction(), null); @@ -90,8 +90,8 @@ void variableTest(Map vmMap) { vm = vmMap.get(1168L); assertNotNull(vm); - assertEquals(true, vm.isIsweightvar()); - assertEquals(false, vm.isWeighted()); + assertTrue(vm.isIsweightvar()); + assertFalse(vm.isWeighted()); assertEquals(vm.getLabel(), "weight" ); assertEquals(vm.getInterviewinstruction(), null); assertEquals(vm.getLiteralquestion(), "Literal question for weight"); diff --git a/src/test/java/edu/harvard/iq/dataverse/engine/command/impl/CreatePrivateUrlCommandTest.java b/src/test/java/edu/harvard/iq/dataverse/engine/command/impl/CreatePrivateUrlCommandTest.java index 508eac46cb4..0ba29f74774 100644 --- 
a/src/test/java/edu/harvard/iq/dataverse/engine/command/impl/CreatePrivateUrlCommandTest.java +++ b/src/test/java/edu/harvard/iq/dataverse/engine/command/impl/CreatePrivateUrlCommandTest.java @@ -18,7 +18,9 @@ import edu.harvard.iq.dataverse.search.IndexServiceBean; import edu.harvard.iq.dataverse.search.SolrIndexServiceBean; import edu.harvard.iq.dataverse.util.SystemConfig; +import java.sql.Timestamp; import java.util.ArrayList; +import java.util.Date; import java.util.List; import java.util.concurrent.Future; @@ -171,9 +173,9 @@ public void testCreatePrivateUrlSuccessfully() throws CommandException { assertEquals(expectedUser.getIdentifier(), privateUrl.getRoleAssignment().getAssigneeIdentifier()); assertEquals(expectedUser.isSuperuser(), false); assertEquals(expectedUser.isAuthenticated(), false); - assertEquals(expectedUser.getDisplayInfo().getTitle(), "Private URL Enabled"); + assertEquals(expectedUser.getDisplayInfo().getTitle(), "Preview URL Enabled"); assertNotNull(privateUrl.getToken()); - assertEquals("https://dataverse.example.edu/privateurl.xhtml?token=" + privateUrl.getToken(), privateUrl.getLink()); + assertEquals("https://dataverse.example.edu/previewurl.xhtml?token=" + privateUrl.getToken(), privateUrl.getLink()); } @Test @@ -188,22 +190,24 @@ public void testCreateAnonymizedAccessPrivateUrlSuccessfully() throws CommandExc assertEquals(expectedUser.getIdentifier(), privateUrl.getRoleAssignment().getAssigneeIdentifier()); assertEquals(expectedUser.isSuperuser(), false); assertEquals(expectedUser.isAuthenticated(), false); - assertEquals(expectedUser.getDisplayInfo().getTitle(), "Private URL Enabled"); + assertEquals(expectedUser.getDisplayInfo().getTitle(), "Preview URL Enabled"); assertNotNull(privateUrl.getToken()); assertTrue(privateUrl.isAnonymizedAccess()); - assertEquals("https://dataverse.example.edu/privateurl.xhtml?token=" + privateUrl.getToken(), privateUrl.getLink()); + assertEquals("https://dataverse.example.edu/previewurl.xhtml?token=" 
+ privateUrl.getToken(), privateUrl.getLink()); } @Test - public void testAttemptCreateAnonymizedAccessPrivateUrlOnReleased() { + public void testAttemptCreateAnonymizedAccessPrivateUrlOnReleased() throws CommandException { dataset = new Dataset(); List versions = new ArrayList<>(); + dataset.setPublicationDate(new Timestamp(new Date().getTime())); DatasetVersion datasetVersion = new DatasetVersion(); datasetVersion.setVersionState(DatasetVersion.VersionState.RELEASED); DatasetVersion datasetVersion2 = new DatasetVersion(); - - versions.add(datasetVersion); + datasetVersion2.setVersionState(DatasetVersion.VersionState.DRAFT); + versions.add(datasetVersion2); + versions.add(datasetVersion); dataset.setVersions(versions); dataset.setId(versionIsReleased); PrivateUrl privateUrl = null; @@ -211,6 +215,7 @@ public void testAttemptCreateAnonymizedAccessPrivateUrlOnReleased() { privateUrl = testEngine.submit(new CreatePrivateUrlCommand(null, dataset, true)); assertTrue(false); } catch (CommandException ex) { + } assertNull(privateUrl); } diff --git a/src/test/java/edu/harvard/iq/dataverse/export/SchemaDotOrgExporterTest.java b/src/test/java/edu/harvard/iq/dataverse/export/SchemaDotOrgExporterTest.java index 9850e9d80e9..2121aa4d9f9 100644 --- a/src/test/java/edu/harvard/iq/dataverse/export/SchemaDotOrgExporterTest.java +++ b/src/test/java/edu/harvard/iq/dataverse/export/SchemaDotOrgExporterTest.java @@ -253,7 +253,7 @@ public void testGetDisplayName() { */ @Test public void testIsXMLFormat() { - assertEquals(false, schemaDotOrgExporter instanceof XMLExporter); + assertFalse(schemaDotOrgExporter instanceof XMLExporter); } /** @@ -261,7 +261,7 @@ public void testIsXMLFormat() { */ @Test public void testIsHarvestable() { - assertEquals(false, schemaDotOrgExporter.isHarvestable()); + assertFalse(schemaDotOrgExporter.isHarvestable()); } /** @@ -269,7 +269,7 @@ public void testIsHarvestable() { */ @Test public void testIsAvailableToUsers() { - assertEquals(true, 
schemaDotOrgExporter.isAvailableToUsers()); + assertTrue(schemaDotOrgExporter.isAvailableToUsers()); } /** diff --git a/src/test/java/edu/harvard/iq/dataverse/ingest/IngestUtilTest.java b/src/test/java/edu/harvard/iq/dataverse/ingest/IngestUtilTest.java index 4dfedf5aa17..955070a662a 100644 --- a/src/test/java/edu/harvard/iq/dataverse/ingest/IngestUtilTest.java +++ b/src/test/java/edu/harvard/iq/dataverse/ingest/IngestUtilTest.java @@ -112,8 +112,8 @@ public void testCheckForDuplicateFileNamesNoDirectories() throws Exception { } // check filenames are unique and altered - assertEquals(true, file1NameAltered); - assertEquals(true, file2NameAltered); + assertTrue(file1NameAltered); + assertTrue(file2NameAltered); // try to add data files with "-1" duplicates and see if it gets incremented to "-2" IngestUtil.checkForDuplicateFileNamesFinal(datasetVersion, dataFileList, null); @@ -128,8 +128,8 @@ public void testCheckForDuplicateFileNamesNoDirectories() throws Exception { } // check filenames are unique and altered - assertEquals(true, file1NameAltered); - assertEquals(true, file2NameAltered); + assertTrue(file1NameAltered); + assertTrue(file2NameAltered); } @Test @@ -218,8 +218,8 @@ public void testCheckForDuplicateFileNamesWithEmptyDirectoryLabels() throws Exce } // check filenames are unique and altered - assertEquals(true, file1NameAltered); - assertEquals(true, file2NameAltered); + assertTrue(file1NameAltered); + assertTrue(file2NameAltered); // try to add data files with "-1" duplicates and see if it gets incremented to "-2" IngestUtil.checkForDuplicateFileNamesFinal(datasetVersion, dataFileList, null); @@ -234,8 +234,8 @@ public void testCheckForDuplicateFileNamesWithEmptyDirectoryLabels() throws Exce } // check filenames are unique and altered - assertEquals(true, file1NameAltered); - assertEquals(true, file2NameAltered); + assertTrue(file1NameAltered); + assertTrue(file2NameAltered); } @Test @@ -347,9 +347,9 @@ public void 
testCheckForDuplicateFileNamesWithDirectories() throws Exception { } // check filenames are unique - assertEquals(true, file1NameAltered); - assertEquals(true, file2NameAltered); - assertEquals(false, file3NameAltered); + assertTrue(file1NameAltered); + assertTrue(file2NameAltered); + assertFalse(file3NameAltered); // add duplicate file in root datasetVersion.getFileMetadatas().add(fmd3); @@ -371,9 +371,9 @@ public void testCheckForDuplicateFileNamesWithDirectories() throws Exception { } // check filenames are unique - assertEquals(true, file1NameAltered); - assertEquals(true, file2NameAltered); - assertEquals(true, file3NameAltered); + assertTrue(file1NameAltered); + assertTrue(file2NameAltered); + assertTrue(file3NameAltered); } @Test @@ -457,7 +457,7 @@ public void testCheckForDuplicateFileNamesTabular() throws Exception { } // check filename is altered since tabular and will change to .tab after ingest - assertEquals(true, file2NameAltered); + assertTrue(file2NameAltered); } @@ -553,8 +553,8 @@ public void testCheckForDuplicateFileNamesWhenReplacing() throws Exception { } // check filenames are unique and unaltered - assertEquals(true, file1NameAltered); - assertEquals(false, file2NameAltered); + assertTrue(file1NameAltered); + assertFalse(file2NameAltered); } @Test @@ -657,7 +657,7 @@ public void testRecalculateDatasetVersionUNF() { DataTable dataTable = new DataTable(); dataTable.setUnf("unfOnDataTable"); datafile1.setDataTable(dataTable); - assertEquals(true, datafile1.isTabularData()); + assertTrue(datafile1.isTabularData()); FileMetadata fmd1 = new FileMetadata(); fmd1.setId(1L); @@ -692,7 +692,7 @@ public void testGetUnfValuesOfFiles() { @Test public void testshouldHaveUnf() { - assertEquals(false, IngestUtil.shouldHaveUnf(null)); + assertFalse(IngestUtil.shouldHaveUnf(null)); } @Test diff --git a/src/test/java/edu/harvard/iq/dataverse/mydata/PagerTest.java b/src/test/java/edu/harvard/iq/dataverse/mydata/PagerTest.java index a0ac22f99f3..a2729ce7514 
100644 --- a/src/test/java/edu/harvard/iq/dataverse/mydata/PagerTest.java +++ b/src/test/java/edu/harvard/iq/dataverse/mydata/PagerTest.java @@ -2,6 +2,8 @@ import static org.junit.jupiter.api.Assertions.assertArrayEquals; import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertTrue; +import static org.junit.jupiter.api.Assertions.assertFalse; import static org.junit.jupiter.api.Assertions.assertThrows; import java.util.Arrays; @@ -171,13 +173,13 @@ public void testBasics() { pager1 = new Pager(102, 10, 1); msgt("Test: 102 results, 10 per page, page 1"); - assertEquals(true, pager1.isPagerNecessary()); + assertTrue(pager1.isPagerNecessary()); assertEquals(102, pager1.getNumResults()); assertEquals(1, pager1.getPreviousPageNumber()); assertEquals(2, pager1.getNextPageNumber()); - assertEquals(false, pager1.hasPreviousPageNumber()); - assertEquals(true, pager1.hasNextPageNumber()); + assertFalse(pager1.hasPreviousPageNumber()); + assertTrue(pager1.hasNextPageNumber()); msg("page list: " + Arrays.toString(pager1.getPageNumberList())); //assertEquals(new int[]{1, 2, 3, 4, 5}, pager1.getPageNumberList()); @@ -232,13 +234,13 @@ public void testNoResults() { System.out.println("getNumResults"); Pager pager1 = new Pager(0, 10, 1); - assertEquals(false, pager1.isPagerNecessary()); + assertFalse(pager1.isPagerNecessary()); assertEquals(0, pager1.getNumResults()); assertEquals(0, pager1.getPreviousPageNumber()); assertEquals(0, pager1.getNextPageNumber()); - assertEquals(false, pager1.hasPreviousPageNumber()); - assertEquals(false, pager1.hasNextPageNumber()); + assertFalse(pager1.hasPreviousPageNumber()); + assertFalse(pager1.hasNextPageNumber()); msgt("page list: " + Arrays.toString(pager1.getPageNumberList())); //assertEquals(null, pager1.getPageNumberList()); diff --git a/src/test/java/edu/harvard/iq/dataverse/pidproviders/PidUtilTest.java b/src/test/java/edu/harvard/iq/dataverse/pidproviders/PidUtilTest.java 
index ecf18e6b1ca..bacb231b4d5 100644 --- a/src/test/java/edu/harvard/iq/dataverse/pidproviders/PidUtilTest.java +++ b/src/test/java/edu/harvard/iq/dataverse/pidproviders/PidUtilTest.java @@ -99,7 +99,7 @@ @JvmSetting(key = JvmSettings.PID_PROVIDER_LABEL, value = "FAKE 1", varArgs = "fake1") @JvmSetting(key = JvmSettings.PID_PROVIDER_TYPE, value = FakeDOIProvider.TYPE, varArgs = "fake1") @JvmSetting(key = JvmSettings.PID_PROVIDER_AUTHORITY, value = "10.5074", varArgs = "fake1") -@JvmSetting(key = JvmSettings.PID_PROVIDER_SHOULDER, value = "FK", varArgs = "fake1") +@JvmSetting(key = JvmSettings.PID_PROVIDER_SHOULDER, value = "fk", varArgs = "fake1") @JvmSetting(key = JvmSettings.PID_PROVIDER_MANAGED_LIST, value = "doi:10.5073/FK3ABCDEF", varArgs ="fake1") //HANDLE 1 @@ -315,6 +315,13 @@ public void testUnmanagedParsing() throws IOException { GlobalId pid6 = PidUtil.parseAsGlobalID(pid6String); assertEquals(pid6String, pid6.asString()); assertEquals(UnmanagedPermaLinkPidProvider.ID, pid6.getProviderId()); + + //Lowercase test for unmanaged DOIs + String pid7String = "doi:10.5281/zenodo.6381129"; + GlobalId pid7 = PidUtil.parseAsGlobalID(pid7String); + assertEquals(UnmanagedDOIProvider.ID, pid7.getProviderId()); + assertEquals(pid7String.toUpperCase().replace("DOI", "doi"), pid7.asString()); + } @@ -353,15 +360,15 @@ public void testExcludedSetParsing() throws IOException { @Test public void testManagedSetParsing() throws IOException { - String pid1String = "doi:10.5073/FK3ABCDEF"; + String pid1String = "doi:10.5073/fk3ABCDEF"; GlobalId pid2 = PidUtil.parseAsGlobalID(pid1String); - assertEquals(pid1String, pid2.asString()); + assertEquals(pid1String.toUpperCase().replace("DOI", "doi"), pid2.asString()); assertEquals("fake1", pid2.getProviderId()); assertEquals("https://doi.org/" + pid2.getAuthority() + PidUtil.getPidProvider(pid2.getProviderId()).getSeparator() + pid2.getIdentifier(),pid2.asURL()); assertEquals("10.5073", pid2.getAuthority()); 
assertEquals(AbstractDOIProvider.DOI_PROTOCOL, pid2.getProtocol()); GlobalId pid3 = PidUtil.parseAsGlobalID(pid2.asURL()); - assertEquals(pid1String, pid3.asString()); + assertEquals(pid1String.toUpperCase().replace("DOI", "doi"), pid3.asString()); assertEquals("fake1", pid3.getProviderId()); assertFalse(PidUtil.getPidProvider(pid3.getProviderId()).canCreatePidsLike(pid3)); diff --git a/src/test/java/edu/harvard/iq/dataverse/pidproviders/doi/datacite/XmlMetadataTemplateTest.java b/src/test/java/edu/harvard/iq/dataverse/pidproviders/doi/datacite/XmlMetadataTemplateTest.java index c03146904de..2bd6818821d 100644 --- a/src/test/java/edu/harvard/iq/dataverse/pidproviders/doi/datacite/XmlMetadataTemplateTest.java +++ b/src/test/java/edu/harvard/iq/dataverse/pidproviders/doi/datacite/XmlMetadataTemplateTest.java @@ -1,15 +1,21 @@ package edu.harvard.iq.dataverse.pidproviders.doi.datacite; +import edu.harvard.iq.dataverse.ControlledVocabularyValue; +import edu.harvard.iq.dataverse.DataCitation; import edu.harvard.iq.dataverse.Dataset; import edu.harvard.iq.dataverse.DatasetAuthor; import edu.harvard.iq.dataverse.DatasetField; +import edu.harvard.iq.dataverse.DatasetFieldCompoundValue; import edu.harvard.iq.dataverse.DatasetFieldConstant; import edu.harvard.iq.dataverse.DatasetFieldType; +import edu.harvard.iq.dataverse.DatasetFieldValue; import edu.harvard.iq.dataverse.DatasetFieldType.FieldType; import edu.harvard.iq.dataverse.DatasetVersion; import edu.harvard.iq.dataverse.DatasetVersion.VersionState; +import edu.harvard.iq.dataverse.Dataverse; import edu.harvard.iq.dataverse.DataverseServiceBean; import edu.harvard.iq.dataverse.GlobalId; +import edu.harvard.iq.dataverse.MetadataBlock; import edu.harvard.iq.dataverse.TermsOfUseAndAccess; import edu.harvard.iq.dataverse.branding.BrandingUtil; import edu.harvard.iq.dataverse.dataset.DatasetType; @@ -20,16 +26,30 @@ import edu.harvard.iq.dataverse.settings.JvmSettings; import 
edu.harvard.iq.dataverse.settings.SettingsServiceBean; import edu.harvard.iq.dataverse.util.SystemConfig; +import edu.harvard.iq.dataverse.util.json.CompoundVocabularyException; +import edu.harvard.iq.dataverse.util.json.ControlledVocabularyException; +import edu.harvard.iq.dataverse.util.json.JsonParseException; +import edu.harvard.iq.dataverse.util.json.JsonParser; +import edu.harvard.iq.dataverse.util.json.JsonUtil; import edu.harvard.iq.dataverse.util.testing.JvmSetting; import edu.harvard.iq.dataverse.util.testing.LocalJvmSettings; import edu.harvard.iq.dataverse.util.xml.XmlValidator; +import jakarta.json.JsonArray; +import jakarta.json.JsonObject; +import jakarta.json.JsonString; +import java.io.File; import java.io.IOException; import java.io.StringReader; import java.net.URL; +import java.nio.file.Files; +import java.nio.file.Paths; import java.util.ArrayList; +import java.util.HashMap; +import java.util.LinkedList; import java.util.List; import java.util.Map; +import java.util.Set; import javax.xml.transform.stream.StreamSource; @@ -73,6 +93,8 @@ public static void setupMocks() { } /** + * A minimal example to assure that the XMLMetadataTemplate generates output + * consistent with the DataCite XML v4.5 schema. 
*/ @Test public void testDataCiteXMLCreation() throws IOException { @@ -106,7 +128,7 @@ public void testDataCiteXMLCreation() throws IOException { doiMetadata.setAuthors(authors); doiMetadata.setPublisher("Dataverse"); XmlMetadataTemplate template = new XmlMetadataTemplate(doiMetadata); - + Dataset d = new Dataset(); GlobalId doi = new GlobalId("doi", "10.5072", "FK2/ABCDEF", null, null, null); d.setGlobalId(doi); @@ -135,15 +157,291 @@ public void testDataCiteXMLCreation() throws IOException { d.setDatasetType(dType); String xml = template.generateXML(d); - System.out.println("Output is " + xml); + System.out.println("Output from minimal example is " + xml); try { StreamSource source = new StreamSource(new StringReader(xml)); source.setSystemId("DataCite XML for test dataset"); - assertTrue(XmlValidator.validateXmlSchema(source, new URL("https://schema.datacite.org/meta/kernel-4/metadata.xsd"))); + assertTrue(XmlValidator.validateXmlSchema(source, + new URL("https://schema.datacite.org/meta/kernel-4/metadata.xsd"))); } catch (SAXException e) { System.out.println("Invalid schema: " + e.getMessage()); } - + + } + + /** + * This tests a more complete example based off of the dataset-all-defaults + * file, again checking for conformance of the result with the DataCite XML v4.5 + * schema. 
+ */ + @Test + public void testDataCiteXMLCreationAllFields() throws IOException { + Dataverse collection = new Dataverse(); + collection.setCitationDatasetFieldTypes(new ArrayList<>()); + Dataset d = new Dataset(); + d.setOwner(collection); + DatasetVersion dv = new DatasetVersion(); + TermsOfUseAndAccess toa = new TermsOfUseAndAccess(); + toa.setTermsOfUse("Some terms"); + dv.setTermsOfUseAndAccess(toa); + dv.setDataset(d); + DatasetFieldType primitiveDSFType = new DatasetFieldType(DatasetFieldConstant.title, + DatasetFieldType.FieldType.TEXT, false); + DatasetField testDatasetField = new DatasetField(); + + dv.setVersionState(VersionState.DRAFT); + + testDatasetField.setDatasetVersion(dv); + + File datasetVersionJson = new File("src/test/java/edu/harvard/iq/dataverse/export/dataset-all-defaults.txt"); + String datasetVersionAsJson = new String(Files.readAllBytes(Paths.get(datasetVersionJson.getAbsolutePath()))); + JsonObject datasetJson = JsonUtil.getJsonObject(datasetVersionAsJson); + + GlobalId doi = new GlobalId("doi", datasetJson.getString("authority"), datasetJson.getString("identifier"), + null, null, null); + d.setGlobalId(doi); + + List fields = assertDoesNotThrow(() -> XmlMetadataTemplateTest + .parseMetadataBlocks(datasetJson.getJsonObject("datasetVersion").getJsonObject("metadataBlocks"))); + dv.setDatasetFields(fields); + + ArrayList dsvs = new ArrayList<>(); + dsvs.add(0, dv); + d.setVersions(dsvs); + DatasetType dType = new DatasetType(); + dType.setName(DatasetType.DATASET_TYPE_DATASET); + d.setDatasetType(dType); + String xml = DOIDataCiteRegisterService.getMetadataFromDvObject(dv.getDataset().getGlobalId().asString(), + new DataCitation(dv).getDataCiteMetadata(), dv.getDataset()); + System.out.println("Output from dataset-all-defaults is " + xml); + try { + StreamSource source = new StreamSource(new StringReader(xml)); + source.setSystemId("DataCite XML for test dataset"); + assertTrue(XmlValidator.validateXmlSchema(source, + new 
URL("https://schema.datacite.org/meta/kernel-4/metadata.xsd"))); + } catch (SAXException e) { + System.out.println("Invalid schema: " + e.getMessage()); + } + + } + + /** + * Mock Utility Methods - These methods support importing DatasetFields from the + * Dataverse JSON export format. They assume that any DatasetFieldType + * referenced exists, that any Controlled Vocabulary value exists, etc. which + * avoids having to do database lookups or read metadatablock tsv files. They + * are derived from the JsonParser methods of the same names with any db + * references and DatasetFieldType-related error checking removed. + */ + public static List parseMetadataBlocks(JsonObject json) throws JsonParseException { + + Map existingTypes = new HashMap<>(); + + Set keys = json.keySet(); + List fields = new LinkedList<>(); + + for (String blockName : keys) { + MetadataBlock block = new MetadataBlock(); + block.setName(blockName); + JsonObject blockJson = json.getJsonObject(blockName); + JsonArray fieldsJson = blockJson.getJsonArray("fields"); + fields.addAll(parseFieldsFromArray(fieldsJson, true, block, existingTypes)); + } + return fields; + } + + private static List parseFieldsFromArray(JsonArray fieldsArray, Boolean testType, MetadataBlock block, + Map existingTypes) throws JsonParseException { + List fields = new LinkedList<>(); + for (JsonObject fieldJson : fieldsArray.getValuesAs(JsonObject.class)) { + + DatasetField field = parseField(fieldJson, testType, block, existingTypes); + if (field != null) { + fields.add(field); + } + + } + return fields; + + } + + public static DatasetField parseField(JsonObject json, Boolean testType, MetadataBlock block, + Map existingTypes) throws JsonParseException { + if (json == null) { + return null; + } + + DatasetField ret = new DatasetField(); + String fieldName = json.getString("typeName", ""); + String typeClass = json.getString("typeClass", ""); + if (!existingTypes.containsKey(fieldName)) { + boolean multiple = 
json.getBoolean("multiple"); + DatasetFieldType fieldType = new DatasetFieldType(); + fieldType.setName(fieldName); + fieldType.setAllowMultiples(multiple); + fieldType.setAllowControlledVocabulary(typeClass.equals("controlledVocabulary")); + fieldType.setFieldType(FieldType.TEXT); + fieldType.setMetadataBlock(block); + fieldType.setChildDatasetFieldTypes(new ArrayList<>()); + existingTypes.put(fieldName, fieldType); + } + DatasetFieldType type = existingTypes.get(fieldName); + ret.setDatasetFieldType(type); + + if (typeClass.equals("compound")) { + parseCompoundValue(ret, type, json, testType, block, existingTypes); + } else if (type.isControlledVocabulary()) { + parseControlledVocabularyValue(ret, type, json); + } else { + parsePrimitiveValue(ret, type, json); + } + + return ret; + } + + public static void parseCompoundValue(DatasetField dsf, DatasetFieldType compoundType, JsonObject json, + Boolean testType, MetadataBlock block, Map existingTypes) + throws JsonParseException { + List vocabExceptions = new ArrayList<>(); + List vals = new LinkedList<>(); + if (compoundType.isAllowMultiples()) { + int order = 0; + try { + json.getJsonArray("value").getValuesAs(JsonObject.class); + } catch (ClassCastException cce) { + throw new JsonParseException("Invalid values submitted for " + compoundType.getName() + + ". 
It should be an array of values."); + } + for (JsonObject obj : json.getJsonArray("value").getValuesAs(JsonObject.class)) { + DatasetFieldCompoundValue cv = new DatasetFieldCompoundValue(); + List fields = new LinkedList<>(); + for (String fieldName : obj.keySet()) { + JsonObject childFieldJson = obj.getJsonObject(fieldName); + DatasetField f = null; + try { + f = parseField(childFieldJson, testType, block, existingTypes); + } catch (ControlledVocabularyException ex) { + vocabExceptions.add(ex); + } + + if (f != null) { + f.setParentDatasetFieldCompoundValue(cv); + fields.add(f); + } + } + if (!fields.isEmpty()) { + cv.setChildDatasetFields(fields); + cv.setDisplayOrder(order); + vals.add(cv); + } + order++; + } + + } else { + + DatasetFieldCompoundValue cv = new DatasetFieldCompoundValue(); + List fields = new LinkedList<>(); + JsonObject value = json.getJsonObject("value"); + for (String key : value.keySet()) { + JsonObject childFieldJson = value.getJsonObject(key); + DatasetField f = null; + try { + f = parseField(childFieldJson, testType, block, existingTypes); + } catch (ControlledVocabularyException ex) { + vocabExceptions.add(ex); + } + if (f != null) { + f.setParentDatasetFieldCompoundValue(cv); + fields.add(f); + } + } + if (!fields.isEmpty()) { + cv.setChildDatasetFields(fields); + vals.add(cv); + } + + } + if (!vocabExceptions.isEmpty()) { + throw new CompoundVocabularyException("Invalid controlled vocabulary in compound field ", vocabExceptions, + vals); + } + + for (DatasetFieldCompoundValue dsfcv : vals) { + dsfcv.setParentDatasetField(dsf); + } + dsf.setDatasetFieldCompoundValues(vals); + } + + public static void parsePrimitiveValue(DatasetField dsf, DatasetFieldType dft, JsonObject json) + throws JsonParseException { + List vals = new LinkedList<>(); + if (dft.isAllowMultiples()) { + try { + json.getJsonArray("value").getValuesAs(JsonObject.class); + } catch (ClassCastException cce) { + throw new JsonParseException( + "Invalid values submitted for " 
+ dft.getName() + ". It should be an array of values."); + } + for (JsonString val : json.getJsonArray("value").getValuesAs(JsonString.class)) { + DatasetFieldValue datasetFieldValue = new DatasetFieldValue(dsf); + datasetFieldValue.setDisplayOrder(vals.size() - 1); + datasetFieldValue.setValue(val.getString().trim()); + vals.add(datasetFieldValue); + } + + } else { + try { + json.getString("value"); + } catch (ClassCastException cce) { + throw new JsonParseException( + "Invalid value submitted for " + dft.getName() + ". It should be a single value."); + } + DatasetFieldValue datasetFieldValue = new DatasetFieldValue(); + datasetFieldValue.setValue(json.getString("value", "").trim()); + datasetFieldValue.setDatasetField(dsf); + vals.add(datasetFieldValue); + } + + dsf.setDatasetFieldValues(vals); + } + + public static void parseControlledVocabularyValue(DatasetField dsf, DatasetFieldType cvvType, JsonObject json) + throws JsonParseException { + List vals = new LinkedList<>(); + try { + if (cvvType.isAllowMultiples()) { + try { + json.getJsonArray("value").getValuesAs(JsonObject.class); + } catch (ClassCastException cce) { + throw new JsonParseException( + "Invalid values submitted for " + cvvType.getName() + ". It should be an array of values."); + } + for (JsonString strVal : json.getJsonArray("value").getValuesAs(JsonString.class)) { + String strValue = strVal.getString(); + ControlledVocabularyValue cvv = new ControlledVocabularyValue(); + cvv.setDatasetFieldType(cvvType); + cvv.setStrValue(strVal.getString()); + vals.add(cvv); + } + + } else { + try { + json.getString("value"); + } catch (ClassCastException cce) { + throw new JsonParseException( + "Invalid value submitted for " + cvvType.getName() + ". 
It should be a single value."); + } + String strValue = json.getString("value", ""); + ControlledVocabularyValue cvv = new ControlledVocabularyValue(); + cvv.setDatasetFieldType(cvvType); + cvv.setStrValue(strValue); + vals.add(cvv); + } + } catch (ClassCastException cce) { + throw new JsonParseException("Invalid values submitted for " + cvvType.getName()); + } + + dsf.setControlledVocabularyValues(vals); } } diff --git a/src/test/java/edu/harvard/iq/dataverse/privateurl/PrivateUrlUtilTest.java b/src/test/java/edu/harvard/iq/dataverse/privateurl/PrivateUrlUtilTest.java index da94b288bee..f06be37578d 100644 --- a/src/test/java/edu/harvard/iq/dataverse/privateurl/PrivateUrlUtilTest.java +++ b/src/test/java/edu/harvard/iq/dataverse/privateurl/PrivateUrlUtilTest.java @@ -277,7 +277,7 @@ public void testGetPrivateUrlFromRoleAssignmentSuccess() { PrivateUrl privateUrl = PrivateUrlUtil.getPrivateUrlFromRoleAssignment(ra, dataverseSiteUrl); assertNotNull(privateUrl); assertEquals(new Long(42), privateUrl.getDataset().getId()); - assertEquals("https://dataverse.example.edu/privateurl.xhtml?token=cd71e9d7-73a7-4ec8-b890-3d00499e8693", privateUrl.getLink()); + assertEquals("https://dataverse.example.edu/previewurl.xhtml?token=cd71e9d7-73a7-4ec8-b890-3d00499e8693", privateUrl.getLink()); } @Test diff --git a/src/test/java/edu/harvard/iq/dataverse/search/IndexServiceBeanTest.java b/src/test/java/edu/harvard/iq/dataverse/search/IndexServiceBeanTest.java index 124ce19369c..8e24c546556 100644 --- a/src/test/java/edu/harvard/iq/dataverse/search/IndexServiceBeanTest.java +++ b/src/test/java/edu/harvard/iq/dataverse/search/IndexServiceBeanTest.java @@ -53,6 +53,7 @@ public void setUp() { indexService.dataverseService = Mockito.mock(DataverseServiceBean.class); indexService.datasetFieldService = Mockito.mock(DatasetFieldServiceBean.class); indexService.datasetVersionService = Mockito.mock(DatasetVersionServiceBean.class); + indexService.datasetVersionFilesServiceBean = 
Mockito.mock(DatasetVersionFilesServiceBean.class); BrandingUtil.injectServices(indexService.dataverseService, indexService.settingsService); Mockito.when(indexService.dataverseService.findRootDataverse()).thenReturn(dataverse); diff --git a/src/test/java/edu/harvard/iq/dataverse/util/json/JsonPrinterTest.java b/src/test/java/edu/harvard/iq/dataverse/util/json/JsonPrinterTest.java index 11da71e1980..7ec8e0b25f3 100644 --- a/src/test/java/edu/harvard/iq/dataverse/util/json/JsonPrinterTest.java +++ b/src/test/java/edu/harvard/iq/dataverse/util/json/JsonPrinterTest.java @@ -133,7 +133,7 @@ public void testJson_PrivateUrl() { assertNotNull(job); JsonObject jsonObject = job.build(); assertEquals("e1d53cf6-794a-457a-9709-7c07629a8267", jsonObject.getString("token")); - assertEquals("https://dataverse.example.edu/privateurl.xhtml?token=e1d53cf6-794a-457a-9709-7c07629a8267", jsonObject.getString("link")); + assertEquals("https://dataverse.example.edu/previewurl.xhtml?token=e1d53cf6-794a-457a-9709-7c07629a8267", jsonObject.getString("link")); assertEquals("e1d53cf6-794a-457a-9709-7c07629a8267", jsonObject.getJsonObject("roleAssignment").getString("privateUrlToken")); assertEquals(PrivateUrlUser.PREFIX + "42", jsonObject.getJsonObject("roleAssignment").getString("assignee")); } @@ -290,7 +290,7 @@ public void testDataversePrinter() { assertEquals("42 Inc.", jsonObject.getString("affiliation")); assertEquals(0, jsonObject.getJsonArray("dataverseContacts").getJsonObject(0).getInt("displayOrder")); assertEquals("dv42@mailinator.com", jsonObject.getJsonArray("dataverseContacts").getJsonObject(0).getString("contactEmail")); - assertEquals(false, jsonObject.getBoolean("permissionRoot")); + assertFalse(jsonObject.getBoolean("permissionRoot")); assertEquals("Description for Dataverse 42.", jsonObject.getString("description")); assertEquals("UNCATEGORIZED", jsonObject.getString("dataverseType")); } diff --git a/src/test/resources/json/export-formats.json 
b/src/test/resources/json/export-formats.json new file mode 100644 index 00000000000..b4dc0168629 --- /dev/null +++ b/src/test/resources/json/export-formats.json @@ -0,0 +1,83 @@ +{ + "status": "OK", + "data": { + "OAI_ORE": { + "displayName": "OAI_ORE", + "mediaType": "application/json", + "isHarvestable": false, + "isVisibleInUserInterface": true + }, + "Datacite": { + "displayName": "DataCite", + "mediaType": "application/xml", + "isHarvestable": true, + "isVisibleInUserInterface": true, + "XMLNameSpace": "http://datacite.org/schema/kernel-4", + "XMLSchemaLocation": "http://datacite.org/schema/kernel-4 http://schema.datacite.org/meta/kernel-4.5/metadata.xsd", + "XMLSchemaVersion": "4.5" + }, + "oai_dc": { + "displayName": "Dublin Core", + "mediaType": "application/xml", + "isHarvestable": true, + "isVisibleInUserInterface": false, + "XMLNameSpace": "http://www.openarchives.org/OAI/2.0/oai_dc/", + "XMLSchemaLocation": "http://www.openarchives.org/OAI/2.0/oai_dc.xsd", + "XMLSchemaVersion": "2.0" + }, + "oai_datacite": { + "displayName": "OpenAIRE", + "mediaType": "application/xml", + "isHarvestable": true, + "isVisibleInUserInterface": true, + "XMLNameSpace": "http://datacite.org/schema/kernel-4", + "XMLSchemaLocation": "http://schema.datacite.org/meta/kernel-4.1/metadata.xsd", + "XMLSchemaVersion": "4.1" + }, + "schema.org": { + "displayName": "Schema.org JSON-LD", + "mediaType": "application/json", + "isHarvestable": false, + "isVisibleInUserInterface": true + }, + "ddi": { + "displayName": "DDI Codebook v2", + "mediaType": "application/xml", + "isHarvestable": false, + "isVisibleInUserInterface": true, + "XMLNameSpace": "ddi:codebook:2_5", + "XMLSchemaLocation": "https://ddialliance.org/Specification/DDI-Codebook/2.5/XMLSchema/codebook.xsd", + "XMLSchemaVersion": "2.5" + }, + "dcterms": { + "displayName": "Dublin Core", + "mediaType": "application/xml", + "isHarvestable": false, + "isVisibleInUserInterface": true, + "XMLNameSpace": "http://purl.org/dc/terms/", 
+ "XMLSchemaLocation": "http://dublincore.org/schemas/xmls/qdc/dcterms.xsd", + "XMLSchemaVersion": "2.0" + }, + "html": { + "displayName": "DDI HTML Codebook", + "mediaType": "text/html", + "isHarvestable": false, + "isVisibleInUserInterface": true + }, + "dataverse_json": { + "displayName": "JSON", + "mediaType": "application/json", + "isHarvestable": true, + "isVisibleInUserInterface": true + }, + "oai_ddi": { + "displayName": "DDI Codebook v2", + "mediaType": "application/xml", + "isHarvestable": true, + "isVisibleInUserInterface": false, + "XMLNameSpace": "ddi:codebook:2_5", + "XMLSchemaLocation": "https://ddialliance.org/Specification/DDI-Codebook/2.5/XMLSchema/codebook.xsd", + "XMLSchemaVersion": "2.5" + } + } +} diff --git a/src/test/resources/json/importGenericWithOtherId.json b/src/test/resources/json/importGenericWithOtherId.json new file mode 100644 index 00000000000..af9241393e9 --- /dev/null +++ b/src/test/resources/json/importGenericWithOtherId.json @@ -0,0 +1,307 @@ +{ + "UNF": "UNF", + "createTime": "2014-11-12 12:17:55 -05", + "distributionDate": "Distribution Date", + "id": 2, + "lastUpdateTime": "2014-11-12 12:20:32 -05", + "metadataBlocks": { + "astrophysics": { + "displayName": "Astronomy and Astrophysics Metadata", + "fields": [ + { + "multiple": true, + "typeClass": "controlledVocabulary", + "typeName": "astroType", + "value": [ + "Image", + "Mosaic", + "EventList" + ] + } + ] + }, + "citation": { + "displayName": "Citation Metadata", + "fields": [ + { + "multiple": false, + "typeClass": "primitive", + "typeName": "title", + "value": "My Dataset" + }, + { + "multiple": true, + "typeClass": "compound", + "typeName": "author", + "value": [ + { + "authorAffiliation": { + "multiple": false, + "typeClass": "primitive", + "typeName": "authorAffiliation", + "value": "Top" + }, + "authorIdentifier": { + "multiple": false, + "typeClass": "primitive", + "typeName": "authorIdentifier", + "value": "ellenid" + }, + "authorIdentifierScheme": { + 
"multiple": false, + "typeClass": "controlledVocabulary", + "typeName": "authorIdentifierScheme", + "value": "ORCID" + }, + "authorName": { + "multiple": false, + "typeClass": "primitive", + "typeName": "authorName", + "value": "Privileged, Pete" + } + }, + { + "authorAffiliation": { + "multiple": false, + "typeClass": "primitive", + "typeName": "authorAffiliation", + "value": "Bottom" + }, + "authorIdentifier": { + "multiple": false, + "typeClass": "primitive", + "typeName": "authorIdentifier", + "value": "audreyId" + }, + "authorIdentifierScheme": { + "multiple": false, + "typeClass": "controlledVocabulary", + "typeName": "authorIdentifierScheme", + "value": "DAISY" + }, + "authorName": { + "multiple": false, + "typeClass": "primitive", + "typeName": "authorName", + "value": "Awesome, Audrey" + } + } + ] + }, + { + "multiple": true, + "typeClass": "primitive", + "typeName": "datasetContact", + "value": [ + "pete@malinator.com" + ] + }, + { + "multiple": false, + "typeClass": "primitive", + "typeName": "dsDescription", + "value": "Here is my description" + }, + { + "multiple": true, + "typeClass": "controlledVocabulary", + "typeName": "subject", + "value": [ + "Arts and Humanities", + "Astronomy and Astrophysics", + "Business and Management" + ] + }, + { + "multiple": true, + "typeClass": "primitive", + "typeName": "keyword", + "value": [ + "keyword1", + "keyword2" + ] + }, + { + "multiple": true, + "typeClass": "compound", + "typeName": "otherId", + "value": [ + { + "otherIdAgency": { + "multiple": false, + "typeClass": "primitive", + "typeName": "otherIdAgency", + "value": "my agency" + }, + "otherIdValue": { + "multiple": false, + "typeClass": "primitive", + "typeName": "otherIdValue", + "value": "otherId" + } + }, + { + "otherIdAgency": { + "multiple": false, + "typeClass": "primitive", + "typeName": "otherIdAgency", + "value": "another agency" + }, + "otherIdValue": { + "multiple": false, + "typeClass": "primitive", + "typeName": "otherIdValue", + "value": 
"otherId2" + } + }, + { + "otherIdAgency": { + "multiple": false, + "typeClass": "primitive", + "typeName": "otherIdAgency", + "value": "another agency" + }, + "otherIdValue": { + "multiple": false, + "typeClass": "primitive", + "typeName": "otherIdValue", + "value": "doi:10.7910/DVN/TJCLKP" + } + } + ] + }, + { + "multiple": false, + "typeClass": "primitive", + "typeName": "depositor", + "value": "Ellen K" + }, + { + "multiple": false, + "typeClass": "primitive", + "typeName": "dateOfDeposit", + "value": "2014-11-12" + } + ] + }, + "geospatial": { + "displayName": "Geospatial Metadata", + "fields": [ + { + "multiple": true, + "typeClass": "compound", + "typeName": "geographicCoverage", + "value": [ + { + "city": { + "multiple": false, + "typeClass": "primitive", + "typeName": "city", + "value": "Arlington" + }, + "country": { + "multiple": false, + "typeClass": "controlledVocabulary", + "typeName": "country", + "value": "United States" + }, + "state": { + "multiple": false, + "typeClass": "primitive", + "typeName": "state", + "value": "MA" + } + }, + { + "city": { + "multiple": false, + "typeClass": "primitive", + "typeName": "city", + "value": "beachcity" + }, + "country": { + "multiple": false, + "typeClass": "controlledVocabulary", + "typeName": "country", + "value": "Aruba" + }, + "state": { + "multiple": false, + "typeClass": "primitive", + "typeName": "state", + "value": "beach" + } + } + ] + }, + { + "multiple": false, + "typeClass": "compound", + "typeName": "geographicBoundingBox", + "value": + { + "eastLongitude": { + "multiple": false, + "typeClass": "primitive", + "typeName": "eastLongitude", + "value": "23" + }, + "northLatitude": { + "multiple": false, + "typeClass": "primitive", + "typeName": "northLatitude", + "value": "786" + }, + "southLatitude": { + "multiple": false, + "typeClass": "primitive", + "typeName": "southLatitude", + "value": "34" + }, + "westLongitude": { + "multiple": false, + "typeClass": "primitive", + "typeName": "westLongitude", 
+ "value": "45" + } + } + + } + ] + }, + "socialscience": { + "displayName": "Social Science and Humanities Metadata", + "fields": [ + { + "multiple": true, + "typeClass": "compound", + "typeName": "software", + "value": [ + { + "softwareName": { + "multiple": false, + "typeClass": "primitive", + "typeName": "softwareName", + "value": "softwareName" + }, + "softwareVersion": { + "multiple": false, + "typeClass": "primitive", + "typeName": "softwareVersion", + "value": "software version" + } + } + ] + }, + { + "multiple": false, + "typeClass": "primitive", + "typeName": "unitOfAnalysis", + "value": "unit of analysis" + } + ] + } + }, + "productionDate": "Production Date", + "versionState": "DRAFT" + } diff --git a/src/test/resources/json/importGenericWithoutOtherId.json b/src/test/resources/json/importGenericWithoutOtherId.json new file mode 100644 index 00000000000..ceb2263c2cf --- /dev/null +++ b/src/test/resources/json/importGenericWithoutOtherId.json @@ -0,0 +1,258 @@ +{ + "UNF": "UNF", + "createTime": "2014-11-12 12:17:55 -05", + "distributionDate": "Distribution Date", + "id": 2, + "lastUpdateTime": "2014-11-12 12:20:32 -05", + "metadataBlocks": { + "astrophysics": { + "displayName": "Astronomy and Astrophysics Metadata", + "fields": [ + { + "multiple": true, + "typeClass": "controlledVocabulary", + "typeName": "astroType", + "value": [ + "Image", + "Mosaic", + "EventList" + ] + } + ] + }, + "citation": { + "displayName": "Citation Metadata", + "fields": [ + { + "multiple": false, + "typeClass": "primitive", + "typeName": "title", + "value": "My Dataset" + }, + { + "multiple": true, + "typeClass": "compound", + "typeName": "author", + "value": [ + { + "authorAffiliation": { + "multiple": false, + "typeClass": "primitive", + "typeName": "authorAffiliation", + "value": "Top" + }, + "authorIdentifier": { + "multiple": false, + "typeClass": "primitive", + "typeName": "authorIdentifier", + "value": "ellenid" + }, + "authorIdentifierScheme": { + "multiple": false, + 
"typeClass": "controlledVocabulary", + "typeName": "authorIdentifierScheme", + "value": "ORCID" + }, + "authorName": { + "multiple": false, + "typeClass": "primitive", + "typeName": "authorName", + "value": "Privileged, Pete" + } + }, + { + "authorAffiliation": { + "multiple": false, + "typeClass": "primitive", + "typeName": "authorAffiliation", + "value": "Bottom" + }, + "authorIdentifier": { + "multiple": false, + "typeClass": "primitive", + "typeName": "authorIdentifier", + "value": "audreyId" + }, + "authorIdentifierScheme": { + "multiple": false, + "typeClass": "controlledVocabulary", + "typeName": "authorIdentifierScheme", + "value": "DAISY" + }, + "authorName": { + "multiple": false, + "typeClass": "primitive", + "typeName": "authorName", + "value": "Awesome, Audrey" + } + } + ] + }, + { + "multiple": true, + "typeClass": "primitive", + "typeName": "datasetContact", + "value": [ + "pete@malinator.com" + ] + }, + { + "multiple": false, + "typeClass": "primitive", + "typeName": "dsDescription", + "value": "Here is my description" + }, + { + "multiple": true, + "typeClass": "controlledVocabulary", + "typeName": "subject", + "value": [ + "Arts and Humanities", + "Astronomy and Astrophysics", + "Business and Management" + ] + }, + { + "multiple": true, + "typeClass": "primitive", + "typeName": "keyword", + "value": [ + "keyword1", + "keyword2" + ] + }, + { + "multiple": false, + "typeClass": "primitive", + "typeName": "depositor", + "value": "Ellen K" + }, + { + "multiple": false, + "typeClass": "primitive", + "typeName": "dateOfDeposit", + "value": "2014-11-12" + } + ] + }, + "geospatial": { + "displayName": "Geospatial Metadata", + "fields": [ + { + "multiple": true, + "typeClass": "compound", + "typeName": "geographicCoverage", + "value": [ + { + "city": { + "multiple": false, + "typeClass": "primitive", + "typeName": "city", + "value": "Arlington" + }, + "country": { + "multiple": false, + "typeClass": "controlledVocabulary", + "typeName": "country", + 
"value": "United States" + }, + "state": { + "multiple": false, + "typeClass": "primitive", + "typeName": "state", + "value": "MA" + } + }, + { + "city": { + "multiple": false, + "typeClass": "primitive", + "typeName": "city", + "value": "beachcity" + }, + "country": { + "multiple": false, + "typeClass": "controlledVocabulary", + "typeName": "country", + "value": "Aruba" + }, + "state": { + "multiple": false, + "typeClass": "primitive", + "typeName": "state", + "value": "beach" + } + } + ] + }, + { + "multiple": false, + "typeClass": "compound", + "typeName": "geographicBoundingBox", + "value": + { + "eastLongitude": { + "multiple": false, + "typeClass": "primitive", + "typeName": "eastLongitude", + "value": "23" + }, + "northLatitude": { + "multiple": false, + "typeClass": "primitive", + "typeName": "northLatitude", + "value": "786" + }, + "southLatitude": { + "multiple": false, + "typeClass": "primitive", + "typeName": "southLatitude", + "value": "34" + }, + "westLongitude": { + "multiple": false, + "typeClass": "primitive", + "typeName": "westLongitude", + "value": "45" + } + } + + } + ] + }, + "socialscience": { + "displayName": "Social Science and Humanities Metadata", + "fields": [ + { + "multiple": true, + "typeClass": "compound", + "typeName": "software", + "value": [ + { + "softwareName": { + "multiple": false, + "typeClass": "primitive", + "typeName": "softwareName", + "value": "softwareName" + }, + "softwareVersion": { + "multiple": false, + "typeClass": "primitive", + "typeName": "softwareVersion", + "value": "software version" + } + } + ] + }, + { + "multiple": false, + "typeClass": "primitive", + "typeName": "unitOfAnalysis", + "value": "unit of analysis" + } + ] + } + }, + "productionDate": "Production Date", + "versionState": "DRAFT" + } diff --git a/src/test/resources/tsv/whitespace-test.tsv b/src/test/resources/tsv/whitespace-test.tsv new file mode 100644 index 00000000000..5485c948825 --- /dev/null +++ b/src/test/resources/tsv/whitespace-test.tsv 
@@ -0,0 +1,10 @@ +#metadataBlock name dataverseAlias displayName + whitespaceDemo Whitespace Demo +#datasetField name title description watermark fieldType displayOrder displayFormat advancedSearchField allowControlledVocabulary allowmultiples facetable displayoncreate required parent metadatablock_id + whitespaceDemoOne One Trailing Space text 0 TRUE TRUE TRUE FALSE TRUE FALSE whitespaceDemo + whitespaceDemoTwo Two Leading Space text 1 TRUE TRUE TRUE FALSE TRUE FALSE whitespaceDemo + whitespaceDemoThree Three CV with errors text 2 TRUE TRUE TRUE FALSE TRUE FALSE whitespaceDemo +#controlledVocabulary DatasetField Value identifier displayOrder + whitespaceDemoThree CV1 0 + whitespaceDemoThree CV2 1 + whitespaceDemoThree CV3 2