diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml new file mode 100644 index 000000000000..f617c929f230 --- /dev/null +++ b/.pre-commit-config.yaml @@ -0,0 +1,9 @@ +repos: +- repo: local + hooks: + - id: mvn-checkstyle + verbose: true + entry: python scripts/pre-commit/checkstyle.py + name: Runs maven checkstyle + language: python + files: \.(java)$ \ No newline at end of file diff --git a/dspace-api/src/main/java/org/dspace/content/clarin/ClarinUserMetadataServiceImpl.java b/dspace-api/src/main/java/org/dspace/content/clarin/ClarinUserMetadataServiceImpl.java index 4c3c3450b136..8704fc45a417 100644 --- a/dspace-api/src/main/java/org/dspace/content/clarin/ClarinUserMetadataServiceImpl.java +++ b/dspace-api/src/main/java/org/dspace/content/clarin/ClarinUserMetadataServiceImpl.java @@ -10,6 +10,8 @@ import java.sql.SQLException; import java.util.List; import java.util.Objects; +import java.util.UUID; +import java.util.stream.Collectors; import org.apache.commons.lang.NullArgumentException; import org.dspace.authorize.AuthorizeException; @@ -76,4 +78,44 @@ public void delete(Context context, ClarinUserMetadata clarinUserMetadata) throw } clarinUserMetadataDAO.delete(context, clarinUserMetadata); } + + @Override + public List findByUserRegistrationAndBitstream(Context context, Integer userRegUUID, + UUID bitstreamUUID, boolean lastTransaction) + throws SQLException { + if (lastTransaction) { + return getLastTransactionUserMetadata(clarinUserMetadataDAO.findByUserRegistrationAndBitstream(context, + userRegUUID, bitstreamUUID)); + } + return clarinUserMetadataDAO.findByUserRegistrationAndBitstream(context, userRegUUID, bitstreamUUID); + } + + private List getLastTransactionUserMetadata(List userMetadataList) { + Integer latestTransactionId = getIdOfLastTransaction(userMetadataList); + if (latestTransactionId == null) { + return userMetadataList; + } + + List filteredUserMetadata = null; + // Filter all user metadata by the last transaction + try { + 
filteredUserMetadata = userMetadataList.stream() + .filter(clarinUserMetadata -> clarinUserMetadata.getTransaction().getID() + .equals(latestTransactionId)) + .collect(Collectors.toList()); + } catch (Exception e) { + log.error("Error filtering user metadata by the last transaction", e); + } + return filteredUserMetadata; + } + + private Integer getIdOfLastTransaction(List userMetadataList) { + // userMetadataList is filtered by the last transaction - first element is the last transaction + try { + return userMetadataList.get(0).getTransaction().getID(); + } catch (IndexOutOfBoundsException e) { + log.error("No transaction found for the user metadata"); + return null; + } + } } diff --git a/dspace-api/src/main/java/org/dspace/content/dao/clarin/ClarinUserMetadataDAO.java b/dspace-api/src/main/java/org/dspace/content/dao/clarin/ClarinUserMetadataDAO.java index db18c605a4a7..c25b77435d11 100644 --- a/dspace-api/src/main/java/org/dspace/content/dao/clarin/ClarinUserMetadataDAO.java +++ b/dspace-api/src/main/java/org/dspace/content/dao/clarin/ClarinUserMetadataDAO.java @@ -7,8 +7,15 @@ */ package org.dspace.content.dao.clarin; +import java.sql.SQLException; +import java.util.List; +import java.util.UUID; + import org.dspace.content.clarin.ClarinUserMetadata; +import org.dspace.core.Context; import org.dspace.core.GenericDAO; public interface ClarinUserMetadataDAO extends GenericDAO { + List findByUserRegistrationAndBitstream(Context context, Integer userRegUUID, + UUID bitstreamUUID) throws SQLException; } diff --git a/dspace-api/src/main/java/org/dspace/content/dao/impl/clarin/ClarinUserMetadataDAOImpl.java b/dspace-api/src/main/java/org/dspace/content/dao/impl/clarin/ClarinUserMetadataDAOImpl.java index d0d45c11df0f..74fb5cee2ea6 100644 --- a/dspace-api/src/main/java/org/dspace/content/dao/impl/clarin/ClarinUserMetadataDAOImpl.java +++ b/dspace-api/src/main/java/org/dspace/content/dao/impl/clarin/ClarinUserMetadataDAOImpl.java @@ -7,9 +7,15 @@ */ package 
org.dspace.content.dao.impl.clarin; +import java.sql.SQLException; +import java.util.List; +import java.util.UUID; +import javax.persistence.Query; + import org.dspace.content.clarin.ClarinUserMetadata; import org.dspace.content.dao.clarin.ClarinUserMetadataDAO; import org.dspace.core.AbstractHibernateDAO; +import org.dspace.core.Context; public class ClarinUserMetadataDAOImpl extends AbstractHibernateDAO implements ClarinUserMetadataDAO { @@ -17,4 +23,22 @@ public class ClarinUserMetadataDAOImpl extends AbstractHibernateDAO findByUserRegistrationAndBitstream(Context context, Integer userRegUUID, + UUID bitstreamUUID) throws SQLException { + Query query = createQuery(context, "SELECT cum FROM ClarinUserMetadata as cum " + + "JOIN cum.eperson as ur " + + "JOIN cum.transaction as clrua " + + "JOIN clrua.licenseResourceMapping as map " + + "WHERE ur.id = :userRegUUID " + + "AND map.bitstream.id = :bitstreamUUID " + + "ORDER BY clrua.id DESC"); + + query.setParameter("userRegUUID", userRegUUID); + query.setParameter("bitstreamUUID", bitstreamUUID); + query.setHint("org.hibernate.cacheable", Boolean.TRUE); + + return list(query); + } } diff --git a/dspace-api/src/main/java/org/dspace/content/service/clarin/ClarinUserMetadataService.java b/dspace-api/src/main/java/org/dspace/content/service/clarin/ClarinUserMetadataService.java index 12a4cb5ba01a..3ea93d398f05 100644 --- a/dspace-api/src/main/java/org/dspace/content/service/clarin/ClarinUserMetadataService.java +++ b/dspace-api/src/main/java/org/dspace/content/service/clarin/ClarinUserMetadataService.java @@ -9,6 +9,7 @@ import java.sql.SQLException; import java.util.List; +import java.util.UUID; import org.dspace.authorize.AuthorizeException; import org.dspace.content.clarin.ClarinUserMetadata; @@ -22,4 +23,8 @@ public interface ClarinUserMetadataService { List findAll(Context context) throws SQLException; void update(Context context, ClarinUserMetadata clarinUserMetadata) throws SQLException; void delete(Context 
context, ClarinUserMetadata clarinUserMetadata) throws SQLException, AuthorizeException; + + List findByUserRegistrationAndBitstream(Context context, Integer userRegUUID, + UUID bitstreamUUID, boolean lastTransaction) + throws SQLException; } diff --git a/dspace-api/src/main/java/org/dspace/storage/bitstore/SyncS3BitStoreService.java b/dspace-api/src/main/java/org/dspace/storage/bitstore/SyncS3BitStoreService.java index cae46a512a56..ff1e2f86740d 100644 --- a/dspace-api/src/main/java/org/dspace/storage/bitstore/SyncS3BitStoreService.java +++ b/dspace-api/src/main/java/org/dspace/storage/bitstore/SyncS3BitStoreService.java @@ -8,15 +8,30 @@ package org.dspace.storage.bitstore; import java.io.File; +import java.io.FileInputStream; +import java.io.FileOutputStream; import java.io.IOException; import java.io.InputStream; +import java.io.OutputStream; +import java.security.DigestInputStream; +import java.security.MessageDigest; +import java.security.NoSuchAlgorithmException; +import java.util.ArrayList; +import java.util.List; import com.amazonaws.AmazonClientException; +import com.amazonaws.services.s3.model.CompleteMultipartUploadRequest; +import com.amazonaws.services.s3.model.InitiateMultipartUploadRequest; +import com.amazonaws.services.s3.model.PartETag; +import com.amazonaws.services.s3.model.UploadPartRequest; +import com.amazonaws.services.s3.model.UploadPartResult; import com.amazonaws.services.s3.transfer.Upload; -import org.apache.commons.io.FileUtils; +import org.apache.commons.io.IOUtils; +import org.apache.commons.lang3.StringUtils; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.dspace.content.Bitstream; +import org.dspace.core.Utils; import org.dspace.services.ConfigurationService; import org.springframework.beans.factory.annotation.Autowired; @@ -33,6 +48,16 @@ public class SyncS3BitStoreService extends S3BitStoreService { private static final Logger log = LogManager.getLogger(SyncS3BitStoreService.class); 
private boolean syncEnabled = false; + /** + * The uploading file is divided into parts and each part is uploaded separately. The size of the part is 50 MB. + */ + private static final long UPLOAD_FILE_PART_SIZE = 50 * 1024 * 1024; // 50 MB + + /** + * Upload large file by parts - check the checksum of every part + */ + private boolean uploadByParts = false; + @Autowired(required = true) DSBitStoreService dsBitStoreService; @@ -43,9 +68,29 @@ public SyncS3BitStoreService() { super(); } + /** + * Define syncEnabled and uploadByParts in the constructor - these values won't be overridden by the configuration + * + * @param syncEnabled if true, the file will be uploaded to the local assetstore + * @param uploadByParts if true, the file will be uploaded by parts + */ + public SyncS3BitStoreService(boolean syncEnabled, boolean uploadByParts) { + super(); + this.syncEnabled = syncEnabled; + this.uploadByParts = uploadByParts; + } + public void init() throws IOException { super.init(); - syncEnabled = configurationService.getBooleanProperty("sync.storage.service.enabled", false); + + // The syncEnabled and uploadByParts could be set to true in the constructor, + // do not override them by the configuration in this case + if (!syncEnabled) { + syncEnabled = configurationService.getBooleanProperty("sync.storage.service.enabled", false); + } + if (!uploadByParts) { + uploadByParts = configurationService.getBooleanProperty("s3.upload.by.parts.enabled", false); + } } @Override @@ -53,29 +98,44 @@ public void put(Bitstream bitstream, InputStream in) throws IOException { String key = getFullKey(bitstream.getInternalId()); //Copy istream to temp file, and send the file, with some metadata File scratchFile = File.createTempFile(bitstream.getInternalId(), "s3bs"); - try { - FileUtils.copyInputStreamToFile(in, scratchFile); - long contentLength = scratchFile.length(); - // The ETag may or may not be and MD5 digest of the object data.
- // Therefore, we precalculate before uploading - String localChecksum = org.dspace.curate.Utils.checksum(scratchFile, CSA); + try ( + FileOutputStream fos = new FileOutputStream(scratchFile); + // Read through a digest input stream that will work out the MD5 + DigestInputStream dis = new DigestInputStream(in, MessageDigest.getInstance(CSA)); + ) { + Utils.bufferedCopy(dis, fos); + in.close(); - Upload upload = tm.upload(getBucketName(), key, scratchFile); - - upload.waitForUploadResult(); + if (uploadByParts) { + uploadByParts(key, scratchFile); + } else { + uploadFluently(key, scratchFile); + } - bitstream.setSizeBytes(contentLength); - bitstream.setChecksum(localChecksum); + bitstream.setSizeBytes(scratchFile.length()); + // we cannot use the S3 ETAG here as it could be not a MD5 in case of multipart upload (large files) or if + // the bucket is encrypted + bitstream.setChecksum(Utils.toHex(dis.getMessageDigest().digest())); bitstream.setChecksumAlgorithm(CSA); if (syncEnabled) { - // Upload file into local assetstore + // Upload file into local assetstore - use buffered copy to avoid memory issues, because of large files File localFile = dsBitStoreService.getFile(bitstream); - FileUtils.copyFile(scratchFile, localFile); + // Create a new file in the assetstore if it does not exist + createFileIfNotExist(localFile); + + // Copy content from scratch file to local assetstore file + FileInputStream fisScratchFile = new FileInputStream(scratchFile); + FileOutputStream fosLocalFile = new FileOutputStream(localFile); + Utils.bufferedCopy(fisScratchFile, fosLocalFile); + fisScratchFile.close(); } } catch (AmazonClientException | IOException | InterruptedException e) { log.error("put(" + bitstream.getInternalId() + ", is)", e); throw new IOException(e); + } catch (NoSuchAlgorithmException nsae) { + // Should never happen + log.warn("Caught NoSuchAlgorithmException", nsae); } finally { if (!scratchFile.delete()) { scratchFile.deleteOnExit(); @@ -98,4 +158,138 @@ public 
void remove(Bitstream bitstream) throws IOException { throw new IOException(e); } } + + /** + * Create a new file in the assetstore if it does not exist + * + * @param localFile + * @throws IOException + */ + private void createFileIfNotExist(File localFile) throws IOException { + if (localFile.exists()) { + return; + } + + // Create the necessary parent directories if they do not yet exist + if (!localFile.getParentFile().mkdirs()) { + throw new IOException("Assetstore synchronization error: Directories in the assetstore for the file " + + "with path" + localFile.getParent() + " were not created"); + } + if (!localFile.createNewFile()) { + throw new IOException("Assetstore synchronization error: File " + localFile.getPath() + + " was not created"); + } + } + + /** + * Upload a file fluently. The file is uploaded in a single request. + * + * @param key the bitstream's internalId + * @param scratchFile the file to upload + * @throws InterruptedException if the S3 upload is interrupted + */ + private void uploadFluently(String key, File scratchFile) throws InterruptedException { + Upload upload = tm.upload(getBucketName(), key, scratchFile); + + upload.waitForUploadResult(); + } + + /** + * Upload a file by parts. The file is divided into parts and each part is uploaded separately. + * The checksum of each part is checked. If the checksum does not match, the file is not uploaded. 
+ * + * @param key the bitstream's internalId + * @param scratchFile the file to upload + * @throws IOException if an I/O error occurs + */ + private void uploadByParts(String key, File scratchFile) throws IOException { + // Initialize MessageDigest for computing checksum + MessageDigest digest; + try { + digest = MessageDigest.getInstance("MD5"); + } catch (Exception e) { + throw new RuntimeException("MD5 algorithm not available", e); + } + + // Initiate multipart upload + InitiateMultipartUploadRequest initiateRequest = new InitiateMultipartUploadRequest(getBucketName(), key); + String uploadId = this.s3Service.initiateMultipartUpload(initiateRequest).getUploadId(); + + // Create a list to hold the ETags for individual parts + List partETags = new ArrayList<>(); + + try { + // Upload parts + File file = new File(scratchFile.getPath()); + long fileLength = file.length(); + long remainingBytes = fileLength; + int partNumber = 1; + + while (remainingBytes > 0) { + long bytesToUpload = Math.min(UPLOAD_FILE_PART_SIZE, remainingBytes); + + // Calculate the checksum for the part + String partChecksum = calculatePartChecksum(file, fileLength - remainingBytes, bytesToUpload, digest); + + UploadPartRequest uploadRequest = new UploadPartRequest() + .withBucketName(this.getBucketName()) + .withKey(key) + .withUploadId(uploadId) + .withPartNumber(partNumber) + .withFile(file) + .withFileOffset(fileLength - remainingBytes) + .withPartSize(bytesToUpload); + + // Upload the part + UploadPartResult uploadPartResponse = this.s3Service.uploadPart(uploadRequest); + + // Collect the ETag for the part + partETags.add(uploadPartResponse.getPartETag()); + + // Compare checksums - local with ETag + if (!StringUtils.equals(uploadPartResponse.getETag(), partChecksum)) { + String errorMessage = "Checksums do not match error: The locally computed checksum does " + + "not match with the ETag from the UploadPartResult. 
Local checksum: " + partChecksum + + ", ETag: " + uploadPartResponse.getETag() + ", partNumber: " + partNumber; + log.error(errorMessage); + throw new IOException(errorMessage); + } + + remainingBytes -= bytesToUpload; + partNumber++; + } + + // Complete the multipart upload + CompleteMultipartUploadRequest completeRequest = new CompleteMultipartUploadRequest(this.getBucketName(), + key, uploadId, partETags); + this.s3Service.completeMultipartUpload(completeRequest); + } catch (AmazonClientException e) { + log.error("Cannot upload the file by parts because: ", e); + } + } + + /** + * Calculate the checksum of the specified part of the file (Multipart upload) + * + * @param file the uploading file + * @param offset the offset in the file + * @param length the length of the part + * @param digest the message digest for computing the checksum + * @return the checksum of the part + * @throws IOException if an I/O error occurs + */ + public static String calculatePartChecksum(File file, long offset, long length, MessageDigest digest) + throws IOException { + try (FileInputStream fis = new FileInputStream(file); + DigestInputStream dis = new DigestInputStream(fis, digest)) { + // Skip to the specified offset + fis.skip(offset); + + // Read the specified length + IOUtils.copyLarge(dis, OutputStream.nullOutputStream(), 0, length); + + // Convert the digest to a hex string + return Utils.toHex(digest.digest()); + } + } } diff --git a/dspace-api/src/test/data/dspaceFolder/config/local.cfg b/dspace-api/src/test/data/dspaceFolder/config/local.cfg index 85f5b1234fe5..8a9decfa3318 100644 --- a/dspace-api/src/test/data/dspaceFolder/config/local.cfg +++ b/dspace-api/src/test/data/dspaceFolder/config/local.cfg @@ -220,17 +220,16 @@ featured.service.teitok.description = A web-based platform for viewing, creating ##### Shibboleth ##### # Turn off the discofeed, it is allowed by default -shibboleth.discofeed.allowed = true +shibboleth.discofeed.allowed = false # File where is 
DiscoJuiceFeed response shibboleth.discofeed.url = TEST:/org/dspace/app/rest/discofeedResponse.json - -# Configuration properties for DiscoJuice -discojuice.feeds = edugain, dfn, cesnet, surfnet2, haka, kalmar +# Test connection to the discofeed with disabled SSL certificate validation +shibboleth.discofeed.url.test.connection = https://dev-5.pc:8443/Shibboleth.sso/DiscoFeed # CRON job refresh time definition - default is refresh in every 2 hours. discojuice.refresh = 0 */2 * * * ? # Comma separated list of entityIDs; we try to guess country on these discojuice.rewriteCountries = https://idp.scc.kit.edu/idp/shibboleth, https://fedauth.london.edu/oala/metadata, https://youidlite.youid.net/idp/shibboleth, https://cavle.org/shibboleth - +disable.ssl.check.specific.requests = true ### Add user to the groups ### #attribute -> group mapping #check shibboleth attribute ATTR and put users having value ATTR_VALUE1 and ATTR_VALUE2 to GROUP1 diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/ClarinDiscoJuiceFeedsDownloadService.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/ClarinDiscoJuiceFeedsDownloadService.java index e4d5435f62bb..7fdd9a9ade54 100644 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/ClarinDiscoJuiceFeedsDownloadService.java +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/ClarinDiscoJuiceFeedsDownloadService.java @@ -7,7 +7,6 @@ */ package org.dspace.app.rest; -import static org.apache.commons.lang.StringUtils.isBlank; import static org.apache.commons.lang.StringUtils.isNotBlank; import java.io.BufferedInputStream; @@ -26,6 +25,7 @@ import java.util.Map; import java.util.Objects; import java.util.Set; +import javax.net.ssl.HttpsURLConnection; import javax.ws.rs.core.NoContentException; import com.maxmind.geoip2.DatabaseReader; @@ -34,6 +34,7 @@ import org.apache.commons.lang3.ArrayUtils; import org.apache.commons.lang3.StringUtils; import org.apache.logging.log4j.Logger; +import 
org.dspace.app.rest.utils.ClarinUtils; import org.dspace.services.ConfigurationService; import org.dspace.utils.DSpace; import org.json.simple.JSONArray; @@ -55,14 +56,15 @@ public class ClarinDiscoJuiceFeedsDownloadService implements InitializingBean { protected static Logger log = org.apache.logging.log4j.LogManager.getLogger( ClarinDiscoJuiceFeedsDownloadService.class); - private static final String DISCOJUICE_URL = "https://static.discojuice.org/feeds/"; /** - * contains entityIDs of idps we wish to set the country to something different than discojuice feeds suggests + * contains entityIDs of idps we wish to set the country to something different than discovery feeds suggests **/ private Set rewriteCountries; protected static DatabaseReader locationService; + private static boolean disableSSL = false; + @Autowired private ConfigurationService configurationService; @@ -94,84 +96,52 @@ public void afterPropertiesSet() throws Exception { } for (String country : propRewriteCountries) { - country = country.trim(); - rewriteCountries.add(country); + rewriteCountries.add(country.trim()); } + + disableSSL = configurationService.getBooleanProperty("disable.ssl.check.specific.requests", false); } public String createFeedsContent() { - log.debug("Going to create feeds content."); - String[] feedsConfig = configurationService.getArrayProperty("discojuice.feeds"); - String shibbolethDiscoFeedUrl = configurationService.getProperty("shibboleth.discofeed.url"); + log.debug("Starting to create feeds content."); - if (StringUtils.isEmpty(shibbolethDiscoFeedUrl)) { - throw new RuntimeException("Cannot load the property `shibboleth.discofeed.url` from the configuration " + - "file, maybe it is not set in the configuration file"); - } + String shibbolethDiscoFeedUrl = configurationService.getProperty("shibboleth.discofeed.url"); - if (ArrayUtils.isEmpty(feedsConfig)) { - throw new RuntimeException("Cannot load the property `discojuice.feeds` from the configuration " + + if
(StringUtils.isBlank(shibbolethDiscoFeedUrl)) { + throw new IllegalStateException( + "Cannot load the property `shibboleth.discofeed.url` from the configuration " + "file, maybe it is not set in the configuration file"); } - String old_value = System.getProperty("jsse.enableSNIExtension"); + String origSniVal = System.getProperty("jsse.enableSNIExtension"); System.setProperty("jsse.enableSNIExtension", "false"); + try { - final Map shibDiscoEntities = toMap(shrink( - ClarinDiscoJuiceFeedsDownloadService.downloadJSON(shibbolethDiscoFeedUrl))); - - //true is the default http://docs.oracle.com/javase/8/docs/technotes/guides/security/jsse/JSSERefGuide.html - old_value = (old_value == null) ? "true" : old_value; - System.setProperty("jsse.enableSNIExtension", old_value); - - String feedsContent = ""; - Set processedEntities = new HashSet<>(); - //loop through disco cdn feeds - for (String feed : feedsConfig) { - Map feedMap = toMap( - ClarinDiscoJuiceFeedsDownloadService.downloadJSON(DISCOJUICE_URL + feed.trim())); - //loop through entities in one feed - for (Map.Entry entry: feedMap.entrySet()) { - String entityID = entry.getKey(); - JSONObject cdnEntity = entry.getValue(); - //keep only entities from shibboleth, add only once, but copy geo, icon, country - if (shibDiscoEntities.containsKey(entityID) && !processedEntities.contains(entityID)) { - JSONObject geo = (JSONObject) cdnEntity.get("geo"); - String icon = (String) cdnEntity.get("icon"); - String country = (String) cdnEntity.get("country"); - JSONObject shibEntity = shibDiscoEntities.get(entityID); - if (geo != null) { - shibEntity.put("geo", geo); - } - if (icon != null) { - shibEntity.put("icon", icon); - } - if (country != null) { - shibEntity.put("country", country); - } - processedEntities.add(entityID); + final Map shibDiscoEntities = toMap(shrink( + ClarinDiscoJuiceFeedsDownloadService.downloadJSON(shibbolethDiscoFeedUrl))); + + // iterate through the entities to update countries as needed + 
shibDiscoEntities.forEach((entityId, shibEntity) -> { + if (rewriteCountries.contains(entityId) || StringUtils.isBlank((String) shibEntity.get("country"))) { + String oldCountry = (String) shibEntity.remove("country"); + String newCountry = guessCountry(shibEntity); + shibEntity.put("country", newCountry); + log.debug("Changed country for {} from {} to {}", entityId, oldCountry, newCountry); } - } - } + }); - //loop through shib entities, we show these... - for (JSONObject shibEntity : shibDiscoEntities.values()) { - //rewrite or guess countries - if (rewriteCountries.contains(shibEntity.get("entityID")) || isBlank((String)shibEntity.get("country"))) { - String old_country = (String)shibEntity.remove("country"); - String new_country = guessCountry(shibEntity); - shibEntity.put("country", new_country); - log.debug(String.format("For %s changed country from %s to %s", shibEntity.get("entityID"), - old_country, new_country)); + if (shibDiscoEntities.isEmpty()) { + return null; } - } - if (shibDiscoEntities.isEmpty()) { - return null; - } else { JSONArray ret = new JSONArray(); ret.addAll(shibDiscoEntities.values()); return ret.toJSONString(); + + } finally { + // true is the default http://docs.oracle.com/javase/8/docs/technotes/guides/security/jsse/JSSERefGuide.html + origSniVal = (origSniVal == null) ? "true" : origSniVal; + System.setProperty("jsse.enableSNIExtension", origSniVal); } } @@ -242,7 +212,7 @@ private static List getValues(JSONArray array) { /** * Open Connection for the test file or URL defined in the cfg. */ - private static URLConnection openURLConnection(String url) throws IOException { + public static URLConnection openURLConnection(String url) throws IOException { // If is not test. 
if (!StringUtils.startsWith(url,"TEST:")) { return new URL(url).openConnection(); @@ -265,6 +235,10 @@ private static JSONArray downloadJSON(String url) { URLConnection conn = openURLConnection(url); conn.setConnectTimeout(5000); conn.setReadTimeout(10000); + // Disable SSL certificate validation + if (disableSSL && conn instanceof HttpsURLConnection) { + ClarinUtils.disableCertificateValidation((HttpsURLConnection) conn); + } //Caution does not follow redirects, and even if you set it to http->https is not possible Object obj = parser.parse(new InputStreamReader(conn.getInputStream())); return (JSONArray) obj; diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/ClarinDiscoJuiceFeedsUpdateScheduler.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/ClarinDiscoJuiceFeedsUpdateScheduler.java index 1e95adbb5c92..b86bf65ee9d6 100644 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/ClarinDiscoJuiceFeedsUpdateScheduler.java +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/ClarinDiscoJuiceFeedsUpdateScheduler.java @@ -49,17 +49,18 @@ public void afterPropertiesSet() throws Exception { */ @Scheduled(cron = "${discojuice.refresh:-}") public void cronJobSch() { - boolean isAllowed = configurationService.getBooleanProperty("shibboleth.discofeed.allowed", true); + // 2024/02 - unless explicitly turned on, do not use discofeed + boolean isAllowed = configurationService.getBooleanProperty("shibboleth.discofeed.allowed", false); if (!isAllowed) { return; } - log.debug("CRON Job - going to download the discojuice feeds."); + log.debug("CRON Job - going to download the discovery feeds."); String newFeedsContent = clarinDiscoJuiceFeedsDownloadService.createFeedsContent(); if (isNotBlank(newFeedsContent)) { feedsContent = newFeedsContent; } else { - log.error("Failed to obtain discojuice feeds!"); + log.error("Failed to obtain additional discovery feeds!"); } } diff --git 
a/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/ClarinUserMetadataRestController.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/ClarinUserMetadataRestController.java index e726c600ce80..44036147c7f4 100644 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/ClarinUserMetadataRestController.java +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/ClarinUserMetadataRestController.java @@ -238,8 +238,8 @@ public List processSignedInUser(Context context, EPerson cur currentUser.getID() + " is null."); } - // Copy current user_metadata records into a list and append it by a new user metadata. - List newClarinUserMetadataList = new ArrayList<>(clarinUserRegistration.getUserMetadata()); + // List of the new user metadata - passed from the request + List clarinUserMetadataList = new ArrayList<>(); // Create user metadata records from request for (ClarinUserMetadataRest clarinUserMetadataRest : clarinUserMetadataRestList) { @@ -249,20 +249,20 @@ public List processSignedInUser(Context context, EPerson cur clarinUserMetadata.setEperson(clarinUserRegistration); clarinUserMetadataService.update(context, clarinUserMetadata); // Add userMetadata to the list of the new user metadata - newClarinUserMetadataList.add(clarinUserMetadata); + clarinUserMetadataList.add(clarinUserMetadata); } // Process clrua with the new clarin user metadata ClarinLicenseResourceUserAllowance clrua = - this.createClrua(context, clarinLicenseResourceMapping, newClarinUserMetadataList, downloadToken, + this.createClrua(context, clarinLicenseResourceMapping, clarinUserMetadataList, downloadToken, clarinUserRegistration); // Add Clarin License Resource Allowance to the user metadata records - for (ClarinUserMetadata clarinUserMetadata : newClarinUserMetadataList) { + for (ClarinUserMetadata clarinUserMetadata : clarinUserMetadataList) { clarinUserMetadata.setTransaction(clrua); clarinUserMetadataService.update(context, 
clarinUserMetadata); } - return newClarinUserMetadataList; + return clarinUserMetadataList; } private ClarinLicenseResourceUserAllowance createClrua(Context context, diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/ClarinUserMetadataRestRepository.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/ClarinUserMetadataRestRepository.java index acb9fef41e22..3d384427448c 100644 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/ClarinUserMetadataRestRepository.java +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/ClarinUserMetadataRestRepository.java @@ -10,7 +10,10 @@ import java.sql.SQLException; import java.util.List; import java.util.Objects; +import java.util.UUID; +import org.dspace.app.rest.Parameter; +import org.dspace.app.rest.SearchRestMethod; import org.dspace.app.rest.model.ClarinUserMetadataRest; import org.dspace.content.clarin.ClarinUserMetadata; import org.dspace.content.service.clarin.ClarinUserMetadataService; @@ -53,6 +56,19 @@ public Page findAll(Context context, Pageable pageable) } } + @SearchRestMethod(name = "byUserRegistrationAndBitstream") + public Page findByUserRegistrationAndBitstream( + @Parameter(value = "userRegUUID", required = true) Integer userRegId, + @Parameter(value = "bitstreamUUID", required = true) UUID bitstreamUUID, + Pageable pageable) throws SQLException { + Context context = obtainContext(); + + List clarinUserMetadataList = + clarinUserMetadataService.findByUserRegistrationAndBitstream(context, userRegId, bitstreamUUID, true); + + return converter.toRestPage(clarinUserMetadataList, pageable, utils.obtainProjection()); + } + @Override public Class getDomainClass() { return ClarinUserMetadataRest.class; diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/utils/ClarinUtils.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/utils/ClarinUtils.java new file mode 100644 index 000000000000..2a93f5793205 --- 
/dev/null +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/utils/ClarinUtils.java @@ -0,0 +1,63 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.app.rest.utils; + +import java.security.KeyManagementException; +import java.security.NoSuchAlgorithmException; +import java.security.SecureRandom; +import javax.net.ssl.HttpsURLConnection; +import javax.net.ssl.SSLContext; +import javax.net.ssl.TrustManager; +import javax.net.ssl.X509TrustManager; + +import org.springframework.stereotype.Component; + +/** + * Collection of utility methods for clarin customized operations + * + * @author Milan Majchrak (dspace at dataquest.sk) + */ +@Component +public class ClarinUtils { + + private ClarinUtils() { + } + + /** + * Disables SSL certificate validation for the given connection + * + * @param connection + */ + public static void disableCertificateValidation(HttpsURLConnection connection) { + try { + // Create a TrustManager that trusts all certificates + TrustManager[] trustAllCerts = { new X509TrustManager() { + public java.security.cert.X509Certificate[] getAcceptedIssuers() { + return null; + } + + public void checkClientTrusted(java.security.cert.X509Certificate[] certs, String authType) { + } + + public void checkServerTrusted(java.security.cert.X509Certificate[] certs, String authType) { + } } + }; + + // Install the TrustManager + SSLContext sslContext = SSLContext.getInstance("SSL"); + sslContext.init(null, trustAllCerts, new SecureRandom()); + connection.setSSLSocketFactory(sslContext.getSocketFactory()); + + // Set a HostnameVerifier that accepts all hostnames + connection.setHostnameVerifier((hostname, session) -> true); + + } catch (NoSuchAlgorithmException | KeyManagementException e) { + throw new RuntimeException("Error disabling SSL certificate 
validation", e); + } + } +} diff --git a/dspace-server-webapp/src/test/java/org/dspace/app/rest/ClarinDiscoJuiceFeedsControllerIT.java b/dspace-server-webapp/src/test/java/org/dspace/app/rest/ClarinDiscoJuiceFeedsControllerIT.java index 52cccb0d777c..0075011fd0bf 100644 --- a/dspace-server-webapp/src/test/java/org/dspace/app/rest/ClarinDiscoJuiceFeedsControllerIT.java +++ b/dspace-server-webapp/src/test/java/org/dspace/app/rest/ClarinDiscoJuiceFeedsControllerIT.java @@ -7,33 +7,85 @@ */ package org.dspace.app.rest; +import static org.dspace.app.rest.ClarinDiscoJuiceFeedsDownloadService.openURLConnection; import static org.dspace.app.rest.repository.ClarinDiscoJuiceFeedsController.APPLICATION_JAVASCRIPT_UTF8; +import static org.junit.Assert.assertNotNull; import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.get; import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.content; import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.status; +import java.io.IOException; +import java.io.InputStreamReader; +import java.net.URL; +import java.net.URLConnection; +import javax.net.ssl.HttpsURLConnection; + +import org.apache.commons.lang3.StringUtils; import org.dspace.app.rest.test.AbstractControllerIntegrationTest; +import org.dspace.app.rest.utils.ClarinUtils; import org.dspace.services.ConfigurationService; +import org.json.simple.parser.JSONParser; +import org.json.simple.parser.ParseException; +import org.junit.Ignore; import org.junit.Test; import org.springframework.beans.factory.annotation.Autowired; + /** * Test class for the controller ClarinDiscoJuiceFeedsController * - * @author Milan Majchrak (milan.majchrak at dataquest.sk) + * @author Milan Majchrak (dspace at dataquest.sk) */ public class ClarinDiscoJuiceFeedsControllerIT extends AbstractControllerIntegrationTest { @Autowired ConfigurationService configurationService; + @Autowired + ClarinDiscoJuiceFeedsUpdateScheduler 
clarinDiscoJuiceFeedsUpdateScheduler; + + // Just to make sure that the DiscoFeed URL is accessible. + @Ignore + @Test + public void testDiscoFeedURL() throws Exception { + String discoFeedURL = configurationService.getProperty("shibboleth.discofeed.url.test.connection"); + if (StringUtils.isBlank(discoFeedURL)) { + throw new RuntimeException("The DiscoFeed testing URL is not set in the configuration. Setup the " + + "shibboleth.discofeed.url.test.connection property in the configuration."); + } + + boolean disableSSL = configurationService.getBooleanProperty("disable.ssl.check.specific.requests", false); + JSONParser parser = new JSONParser(); + try { + URL url = new URL(discoFeedURL); + URLConnection conn = openURLConnection(String.valueOf(url)); + conn.setConnectTimeout(5000); + conn.setReadTimeout(10000); + + // Disable SSL certificate validation + if (disableSSL && conn instanceof HttpsURLConnection) { + ClarinUtils.disableCertificateValidation((HttpsURLConnection) conn); + } + + Object obj = parser.parse(new InputStreamReader(conn.getInputStream())); + assertNotNull(obj); + } catch (IOException | ParseException e) { + throw new RuntimeException("Error while reading the DiscoFeed URL: " + discoFeedURL, e); + } + } + @Test public void getDiscoFeeds() throws Exception { String authTokenAdmin = getAuthToken(eperson.getEmail(), password); + String configKey = "shibboleth.discofeed.allowed"; + boolean origVal = configurationService.getBooleanProperty(configKey); + configurationService.setProperty(configKey, true); + clarinDiscoJuiceFeedsUpdateScheduler.afterPropertiesSet(); + // Expected response created from the test file: `discofeedResponse.json` // Wrapped to the `callback` string = `dj_md_1` - String responseString = "dj_md_1([{\"country\":\"CZ\",\"keywords\":[\"Identity Provider for employees and " + + String expStr = "dj_md_1([{\"country\":\"CZ\",\"keywords\":[\"Identity Provider for employees and " + "readers of the Archiepiscopal Gymnasium in Kromeriz - 
Library\",\"Identity Provider pro zamstnance " + "a tene knihovny Arcibiskupskho gymnzia v Kromi\",\"Arcibiskupsk gymnzium v Kromi - " + "Knihovna\"],\"entityID\":\"https:\\/\\/agkm.cz\\/idp\\/shibboleth\",\"title\":\"Archiepiscopal " + @@ -47,12 +99,13 @@ public void getDiscoFeeds() throws Exception { "\"Studijn a vdeck knihovna v Hradci Krlov\"],\"entityID\":\"https:\\/\\/aleph.svkhk.cz\\" + "/idp\\/shibboleth\",\"title\":\"The Research Library in Hradec Krlov\"}])"; - // Load bitstream from the item. // Request with callback getClient(authTokenAdmin).perform(get("/api/discojuice/feeds?callback=dj_md_1")) .andExpect(status().isOk()) .andExpect(content().contentType(APPLICATION_JAVASCRIPT_UTF8)) - .andExpect(content().string(responseString)); + .andExpect(content().string(expStr)); + + configurationService.setProperty(configKey, origVal); } } diff --git a/dspace-server-webapp/src/test/java/org/dspace/app/rest/ClarinUserMetadataRestControllerIT.java b/dspace-server-webapp/src/test/java/org/dspace/app/rest/ClarinUserMetadataRestControllerIT.java index 6eba735a2dfb..592363ba4270 100644 --- a/dspace-server-webapp/src/test/java/org/dspace/app/rest/ClarinUserMetadataRestControllerIT.java +++ b/dspace-server-webapp/src/test/java/org/dspace/app/rest/ClarinUserMetadataRestControllerIT.java @@ -9,6 +9,7 @@ import static org.dspace.app.rest.repository.ClarinLicenseRestRepository.OPERATION_PATH_LICENSE_RESOURCE; import static org.dspace.app.rest.repository.ClarinUserMetadataRestController.CHECK_EMAIL_RESPONSE_CONTENT; +import static org.hamcrest.MatcherAssert.assertThat; import static org.hamcrest.Matchers.is; import static org.hamcrest.Matchers.not; import static org.hamcrest.Matchers.notNullValue; @@ -45,9 +46,12 @@ import org.dspace.content.WorkspaceItem; import org.dspace.content.clarin.ClarinLicense; import org.dspace.content.clarin.ClarinLicenseLabel; +import org.dspace.content.clarin.ClarinLicenseResourceUserAllowance; +import 
org.dspace.content.clarin.ClarinUserMetadata; import org.dspace.content.clarin.ClarinUserRegistration; import org.dspace.content.service.clarin.ClarinLicenseLabelService; import org.dspace.content.service.clarin.ClarinLicenseService; +import org.dspace.content.service.clarin.ClarinUserMetadataService; import org.junit.Test; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.http.MediaType; @@ -58,10 +62,14 @@ public class ClarinUserMetadataRestControllerIT extends AbstractControllerIntegr ClarinLicenseService clarinLicenseService; @Autowired ClarinLicenseLabelService clarinLicenseLabelService; + @Autowired + ClarinUserMetadataService clarinUserMetadataService; WorkspaceItem witem; + WorkspaceItem witem2; ClarinLicense clarinLicense; Bitstream bitstream; + Bitstream bitstream2; // Attach ClarinLicense to the Bitstream private void prepareEnvironment(String requiredInfo, Integer confirmation) throws Exception { @@ -73,7 +81,8 @@ private void prepareEnvironment(String requiredInfo, Integer confirmation) throw // 1. Create WI with uploaded file context.turnOffAuthorisationSystem(); - witem = createWorkspaceItemWithFile(); + witem = this.createWorkspaceItemWithFile(false); + witem2 = this.createWorkspaceItemWithFile(true); List replaceOperations = new ArrayList(); String clarinLicenseName = "Test Clarin License"; @@ -96,16 +105,23 @@ private void prepareEnvironment(String requiredInfo, Integer confirmation) throw .content(updateBody) .contentType(MediaType.APPLICATION_JSON)) .andExpect(status().isOk()); + getClient(tokenAdmin).perform(patch("/api/submission/workspaceitems/" + witem2.getID()) + .content(updateBody) + .contentType(MediaType.APPLICATION_JSON)) + .andExpect(status().isOk()); // 4. 
Check if the Clarin License name was added to the Item's metadata `dc.rights` getClient(tokenAdmin).perform(get("/api/submission/workspaceitems/" + witem.getID())) .andExpect(status().isOk()) .andExpect(jsonPath("$._embedded.item.metadata['dc.rights'][0].value", is(clarinLicenseName))); + getClient(tokenAdmin).perform(get("/api/submission/workspaceitems/" + witem2.getID())) + .andExpect(status().isOk()) + .andExpect(jsonPath("$._embedded.item.metadata['dc.rights'][0].value", is(clarinLicenseName))); // 5. Check if the Clarin License was attached to the Bitstream getClient(tokenAdmin).perform(get("/api/core/clarinlicenses/" + clarinLicense.getID())) .andExpect(status().isOk()) - .andExpect(jsonPath("$.bitstreams", is(1))); + .andExpect(jsonPath("$.bitstreams", is(2))); } @Test @@ -408,7 +424,137 @@ public void authorizedUserWithoutMetadata_shouldDownloadToken() throws Exception .andExpect(jsonPath("$.page.totalElements", is(1))); } - private WorkspaceItem createWorkspaceItemWithFile() { + @Test + public void shouldNotCreateDuplicateUserMetadataBasedOnHistory() throws Exception { + // Prepare environment with Clarin License, resource mapping, allowance, user registration and user metadata + // then try to download the same bitstream again and the user metadata should not be created based on history + this.prepareEnvironment("NAME,ADDRESS", 0); + context.turnOffAuthorisationSystem(); + ClarinUserRegistration clarinUserRegistration = ClarinUserRegistrationBuilder + .createClarinUserRegistration(context).withEPersonID(admin.getID()).build(); + context.restoreAuthSystemState(); + + ObjectMapper mapper = new ObjectMapper(); + ClarinUserMetadataRest clarinUserMetadata1 = new ClarinUserMetadataRest(); + clarinUserMetadata1.setMetadataKey("NAME"); + clarinUserMetadata1.setMetadataValue("Test"); + + ClarinUserMetadataRest clarinUserMetadata2 = new ClarinUserMetadataRest(); + clarinUserMetadata2.setMetadataKey("ADDRESS"); + clarinUserMetadata2.setMetadataValue("Test2"); + + 
List clarinUserMetadataRestList = new ArrayList<>(); + clarinUserMetadataRestList.add(clarinUserMetadata1); + clarinUserMetadataRestList.add(clarinUserMetadata2); + + String adminToken = getAuthToken(admin.getEmail(), password); + + // There should exist record in the UserRegistration table + getClient(adminToken).perform(get("/api/core/clarinuserregistrations") + .contentType(contentType)) + .andExpect(status().isOk()) + .andExpect(jsonPath("$.page.totalElements", is(1))); + + // Manage UserMetadata and get token + getClient(adminToken).perform(post("/api/core/clarinusermetadata/manage?bitstreamUUID=" + bitstream.getID()) + .content(mapper.writeValueAsBytes(clarinUserMetadataRestList.toArray())) + .contentType(MediaType.APPLICATION_JSON)) + .andExpect(status().isOk()) + .andExpect(jsonPath("$", notNullValue())) + .andExpect(jsonPath("$", not(CHECK_EMAIL_RESPONSE_CONTENT))); + + // Get created CLRUA + getClient(adminToken).perform(get("/api/core/clarinlruallowances") + .contentType(contentType)) + .andExpect(status().isOk()) + .andExpect(jsonPath("$.page.totalElements", is(1))); + + + // Get created User Metadata - there should be 2 records + getClient(adminToken).perform(get("/api/core/clarinusermetadata") + .contentType(contentType)) + .andExpect(status().isOk()) + .andExpect(jsonPath("$.page.totalElements", is(2))); + + // Second download + + // Manage UserMetadata and get token + getClient(adminToken).perform(post("/api/core/clarinusermetadata/manage?bitstreamUUID=" + bitstream2.getID()) + .content(mapper.writeValueAsBytes(clarinUserMetadataRestList.toArray())) + .contentType(MediaType.APPLICATION_JSON)) + .andExpect(status().isOk()) + .andExpect(jsonPath("$", notNullValue())) + .andExpect(jsonPath("$", not(CHECK_EMAIL_RESPONSE_CONTENT))); + + // Get created two CLRUA + getClient(adminToken).perform(get("/api/core/clarinlruallowances") + .contentType(contentType)) + .andExpect(status().isOk()) + .andExpect(jsonPath("$.page.totalElements", is(2))); + + // Get 
created User Metadata - there should be 4 records + getClient(adminToken).perform(get("/api/core/clarinusermetadata") + .contentType(contentType)) + .andExpect(status().isOk()) + .andExpect(jsonPath("$.page.totalElements", is(4))); + + // The User Metadata should not have updated transaction ID after a new download - test for fixed issue + List allUserMetadata = clarinUserMetadataService.findAll(context); + ClarinLicenseResourceUserAllowance clrua1 = allUserMetadata.get(0).getTransaction(); + ClarinLicenseResourceUserAllowance clrua2 = allUserMetadata.get(3).getTransaction(); + assertThat(clrua1.getID(), not(clrua2.getID())); + + // Check that the user registration for test data full user has been created + // Test /api/core/clarinusermetadatas search by userRegistrationAndBitstream endpoint + getClient(adminToken).perform(get("/api/core/clarinusermetadata/search/byUserRegistrationAndBitstream") + .param("userRegUUID", String.valueOf(clarinUserRegistration.getID())) + .param("bitstreamUUID", String.valueOf(bitstream2.getID())) + .contentType(contentType)) + .andExpect(status().isOk()) + .andExpect(jsonPath("$.page.totalElements", is(2))); + + // Download again the second bitstream and the user metadata should be returned only from the last transaction + + // Create a new User Metadata + ClarinUserMetadataRest clarinUserMetadata3 = new ClarinUserMetadataRest(); + clarinUserMetadata3.setMetadataKey("NAME"); + clarinUserMetadata3.setMetadataValue("New Test"); + + ClarinUserMetadataRest clarinUserMetadata4 = new ClarinUserMetadataRest(); + clarinUserMetadata4.setMetadataKey("ADDRESS"); + clarinUserMetadata4.setMetadataValue("New Test"); + + List newUserMetadataRestList = new ArrayList<>(); + newUserMetadataRestList.add(clarinUserMetadata3); + newUserMetadataRestList.add(clarinUserMetadata4); + + // Manage UserMetadata and get token + getClient(adminToken).perform(post("/api/core/clarinusermetadata/manage?bitstreamUUID=" + bitstream2.getID()) + 
.content(mapper.writeValueAsBytes(newUserMetadataRestList.toArray())) + .contentType(MediaType.APPLICATION_JSON)) + .andExpect(status().isOk()) + .andExpect(jsonPath("$", notNullValue())) + .andExpect(jsonPath("$", not(CHECK_EMAIL_RESPONSE_CONTENT))); + + // Get created two CLRUA + getClient(adminToken).perform(get("/api/core/clarinlruallowances") + .contentType(contentType)) + .andExpect(status().isOk()) + .andExpect(jsonPath("$.page.totalElements", is(3))); + + // Get created User Metadata from the new transaction - there should be 2 records + getClient(adminToken).perform(get("/api/core/clarinusermetadata/search/byUserRegistrationAndBitstream") + .param("userRegUUID", String.valueOf(clarinUserRegistration.getID())) + .param("bitstreamUUID", String.valueOf(bitstream2.getID())) + .contentType(contentType)) + .andExpect(status().isOk()) + .andExpect(jsonPath("$.page.totalElements", is(2))); + + // Delete all created user metadata - clean test environment + ClarinUserMetadataBuilder.deleteClarinUserMetadata(clarinUserRegistration.getID()); + } + + private WorkspaceItem createWorkspaceItemWithFile(boolean secondBitstream) { parentCommunity = CommunityBuilder.createCommunity(context) .withName("Parent Community") .build(); @@ -428,7 +574,11 @@ private WorkspaceItem createWorkspaceItemWithFile() { .withFulltext("simple-article.pdf", "/local/path/simple-article.pdf", pdf) .build(); - bitstream = witem.getItem().getBundles().get(0).getBitstreams().get(0); + if (secondBitstream) { + this.bitstream2 = witem.getItem().getBundles().get(0).getBitstreams().get(0); + } else { + this.bitstream = witem.getItem().getBundles().get(0).getBitstreams().get(0); + } return witem; } diff --git a/dspace/config/clarin-dspace.cfg b/dspace/config/clarin-dspace.cfg index c3223c48dc83..be485ad54184 100644 --- a/dspace/config/clarin-dspace.cfg +++ b/dspace/config/clarin-dspace.cfg @@ -135,17 +135,17 @@ featured.service.teitok.description = A web-based platform for viewing, creating ##### 
Shibboleth ##### # Turn off the discofeed, it is allowed by default -# shibboleth.discofeed.allowed = false +shibboleth.discofeed.allowed = false # File where is DiscoJuiceFeed response shibboleth.discofeed.url = https://lindat.mff.cuni.cz/Shibboleth.sso/DiscoFeed -# Configuration properties for DiscoJuice -discojuice.feeds = edugain, dfn, cesnet, surfnet2, haka, kalmar # CRON job refresh time definition - default is refresh in every 2 hours. discojuice.refresh = 0 0 */2 * * ? # Comma separated list of entityIDs; we try to guess country on these discojuice.rewriteCountries = https://idp.scc.kit.edu/idp/shibboleth, https://fedauth.london.edu/oala/metadata, https://youidlite.youid.net/idp/shibboleth, https://cavle.org/shibboleth +# Disable SSL check for specific requests e.g. discofeed. SSL check is enabled by default. +disable.ssl.check.specific.requests = false ##### Matomo statistics ##### # Auth token @@ -246,6 +246,8 @@ file.preview.enabled = false ### Storage service ### # Synchronization is NOT enabled by default sync.storage.service.enabled = true +# Upload large file by parts - check the checksum of every part +s3.upload.by.parts.enabled = true ### The build version is stored in the specific file ### diff --git a/scripts/pre-commit/checkstyle.py b/scripts/pre-commit/checkstyle.py new file mode 100644 index 000000000000..3d7c8623da52 --- /dev/null +++ b/scripts/pre-commit/checkstyle.py @@ -0,0 +1,23 @@ +import sys +import logging +import subprocess + +_logger = logging.getLogger() +logging.basicConfig(format='%(message)s', level=logging.DEBUG) + + +if __name__ == '__main__': + files = [x for x in sys.argv[1:] if x.lower().endswith('java')] + _logger.info(f'Found [{len(files)}] files from [{len(sys.argv) - 1}] input files') + + cmd = "mvn checkstyle:check" + + try: + with subprocess.Popen(cmd, shell=True, stdout=subprocess.PIPE, text=True) as process: + for line in process.stdout: + print(line, end='') + except Exception as e: + _logger.critical(f'Error: 
{repr(e)}, ret code: {getattr(e, "returncode", "n/a")}') + + # for filename in files: + # pass