Fix merge conflict artifacts
Signed-off-by: nscuro <[email protected]>
nscuro committed Sep 20, 2024
1 parent 4d43d5b commit caef8f5
Showing 7 changed files with 105 additions and 103 deletions.
@@ -59,7 +59,7 @@ ON CONFLICT ("TOKEN")
@SqlUpdate("""
DELETE
FROM "BOM_UPLOAD"
WHERE "UPLOADED_AT" < (NOW() - :duration)
WHERE NOW() - "UPLOADED_AT" > :duration
""")
int deleteAllUploadsForRetentionDuration(@Bind Duration duration);

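A minimal usage sketch for orientation, assuming a JDBI SQL-object DAO named BomUploadDao that declares the @SqlUpdate method above; everything beyond deleteAllUploadsForRetentionDuration and the PT3H retention default is an assumption:

import org.jdbi.v3.core.Jdbi;
import org.jdbi.v3.postgres.PostgresPlugin;
import org.jdbi.v3.sqlobject.SqlObjectPlugin;

import java.time.Duration;

public class BomUploadRetentionExample {

    public static void main(String[] args) {
        // PostgresPlugin lets JDBI bind java.time.Duration as a Postgres interval.
        final Jdbi jdbi = Jdbi.create("jdbc:postgresql://localhost:5432/dtrack", "dtrack", "dtrack")
                .installPlugin(new SqlObjectPlugin())
                .installPlugin(new PostgresPlugin());

        // Mirrors the default of bom.upload.storage.retention.duration (PT3H).
        final Duration retention = Duration.parse("PT3H");

        final int deleted = jdbi.withExtension(BomUploadDao.class,
                dao -> dao.deleteAllUploadsForRetentionDuration(retention));
        System.out.println("Deleted " + deleted + " expired BOM uploads");
    }
}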
102 changes: 51 additions & 51 deletions src/main/java/org/dependencytrack/resources/v1/BomResource.java
@@ -20,6 +20,7 @@

import alpine.Config;
import alpine.common.logging.Logger;
+import alpine.model.ConfigProperty;
import alpine.notification.Notification;
import alpine.notification.NotificationLevel;
import alpine.server.auth.PermissionRequired;
@@ -90,6 +91,7 @@
import java.util.Base64;
import java.util.List;
import java.util.Set;
+import java.util.UUID;

import static java.util.function.Predicate.not;
import static org.dependencytrack.common.ConfigKey.BOM_UPLOAD_STORAGE_COMPRESSION_LEVEL;
@@ -502,16 +504,16 @@ private Response process(QueryManager qm, Project project, List<FormDataBodyPart
}

private void validateAndStoreBom(final QueryManager qm, final UUID token, final byte[] bomBytes, final Project project) throws IOException {
-validate(bomBytes, project);
+validate(qm, bomBytes, project);

final int compressionLevel = Config.getInstance().getPropertyAsInt(BOM_UPLOAD_STORAGE_COMPRESSION_LEVEL);
try (final BomUploadStorage storageProvider = PluginManager.getInstance().getExtension(BomUploadStorage.class)) {
storageProvider.storeBomCompressed(token, bomBytes, compressionLevel);
}
}
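
Only storeBomCompressed is visible in this commit. As a hedged sketch, the extension contract being closed by the try-with-resources above might look roughly like this; getBomCompressed and deleteBom are assumed counterparts, not confirmed by the diff:

import java.io.Closeable;
import java.io.IOException;
import java.util.UUID;

public interface BomUploadStorage extends Closeable {

    // Visible in this commit: store the BOM compressed at the given level, keyed by upload token.
    void storeBomCompressed(UUID token, byte[] bomBytes, int compressionLevel) throws IOException;

    // Assumed counterpart: fetch the compressed BOM for a token, or null if absent.
    byte[] getBomCompressed(UUID token) throws IOException;

    // Assumed counterpart: drop the stored BOM once processing succeeds.
    boolean deleteBom(UUID token) throws IOException;
}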

-static void validate(final byte[] bomBytes, final Project project) {
-if (!shouldValidate(project)) {
+static void validate(final QueryManager qm, final byte[] bomBytes, final Project project) {
+if (!shouldValidate(qm, project)) {
return;
}

@@ -554,59 +556,57 @@ private static void dispatchBomValidationFailedNotification(Project project, Str
.subject(new BomValidationFailed(project, bom, errors)));
}

-private static boolean shouldValidate(final Project project) {
-try (final var qm = new QueryManager()) {
+private static boolean shouldValidate(final QueryManager qm, final Project project) {

[Codacy Static Code Analysis check warning on line 559 of src/main/java/org/dependencytrack/resources/v1/BomResource.java: The method 'shouldValidate(QueryManager, Project)' has an NPath complexity of 216, current threshold is 200]

-final ConfigProperty validationModeProperty = qm.getConfigProperty(
-BOM_VALIDATION_MODE.getGroupName(),
-BOM_VALIDATION_MODE.getPropertyName()
-);
+final ConfigProperty validationModeProperty = qm.getConfigProperty(
+BOM_VALIDATION_MODE.getGroupName(),
+BOM_VALIDATION_MODE.getPropertyName()
+);

-var validationMode = BomValidationMode.valueOf(BOM_VALIDATION_MODE.getDefaultPropertyValue());
-try {
-validationMode = BomValidationMode.valueOf(validationModeProperty.getPropertyValue());
-} catch (RuntimeException e) {
-LOGGER.warn("""
-No BOM validation mode configured, or configured value is invalid; \
-Assuming default mode %s""".formatted(validationMode), e);
-}
+var validationMode = BomValidationMode.valueOf(BOM_VALIDATION_MODE.getDefaultPropertyValue());
+try {
+validationMode = BomValidationMode.valueOf(validationModeProperty.getPropertyValue());
+} catch (RuntimeException e) {
+LOGGER.warn("""
+No BOM validation mode configured, or configured value is invalid; \
+Assuming default mode %s""".formatted(validationMode), e);
+}

-if (validationMode == BomValidationMode.ENABLED) {
-LOGGER.debug("Validating BOM because validation is enabled globally");
-return true;
-} else if (validationMode == BomValidationMode.DISABLED) {
-LOGGER.debug("Not validating BOM because validation is disabled globally");
-return false;
-}
+if (validationMode == BomValidationMode.ENABLED) {
+LOGGER.debug("Validating BOM because validation is enabled globally");
+return true;
+} else if (validationMode == BomValidationMode.DISABLED) {
+LOGGER.debug("Not validating BOM because validation is disabled globally");
+return false;
+}

-// Other modes depend on tags. Does the project even have tags?
-if (project.getTags() == null || project.getTags().isEmpty()) {
-return validationMode == BomValidationMode.DISABLED_FOR_TAGS;
-}
+// Other modes depend on tags. Does the project even have tags?
+if (project.getTags() == null || project.getTags().isEmpty()) {
+return validationMode == BomValidationMode.DISABLED_FOR_TAGS;
+}

-final ConfigPropertyConstants tagsPropertyConstant = validationMode == BomValidationMode.ENABLED_FOR_TAGS
-? BOM_VALIDATION_TAGS_INCLUSIVE
-: BOM_VALIDATION_TAGS_EXCLUSIVE;
-final ConfigProperty tagsProperty = qm.getConfigProperty(
-tagsPropertyConstant.getGroupName(),
-tagsPropertyConstant.getPropertyName()
-);
+final ConfigPropertyConstants tagsPropertyConstant = validationMode == BomValidationMode.ENABLED_FOR_TAGS
+? BOM_VALIDATION_TAGS_INCLUSIVE
+: BOM_VALIDATION_TAGS_EXCLUSIVE;
+final ConfigProperty tagsProperty = qm.getConfigProperty(
+tagsPropertyConstant.getGroupName(),
+tagsPropertyConstant.getPropertyName()
+);

-final Set<String> validationModeTags;
-try {
-final JsonReader jsonParser = Json.createReader(new StringReader(tagsProperty.getPropertyValue()));
-final JsonArray jsonArray = jsonParser.readArray();
-validationModeTags = Set.copyOf(jsonArray.getValuesAs(JsonString::getString));
-} catch (RuntimeException e) {
-LOGGER.warn("Tags of property %s:%s could not be parsed as JSON array"
-.formatted(tagsPropertyConstant.getGroupName(), tagsPropertyConstant.getPropertyName()), e);
-return validationMode == BomValidationMode.DISABLED_FOR_TAGS;
-}
+final Set<String> validationModeTags;
+try {
+final JsonReader jsonParser = Json.createReader(new StringReader(tagsProperty.getPropertyValue()));
+final JsonArray jsonArray = jsonParser.readArray();
+validationModeTags = Set.copyOf(jsonArray.getValuesAs(JsonString::getString));
+} catch (RuntimeException e) {
+LOGGER.warn("Tags of property %s:%s could not be parsed as JSON array"
+.formatted(tagsPropertyConstant.getGroupName(), tagsPropertyConstant.getPropertyName()), e);
+return validationMode == BomValidationMode.DISABLED_FOR_TAGS;
+}

-final boolean doTagsMatch = project.getTags().stream()
-.map(org.dependencytrack.model.Tag::getName)
-.anyMatch(validationModeTags::contains);
-return (validationMode == BomValidationMode.ENABLED_FOR_TAGS && doTagsMatch)
-|| (validationMode == BomValidationMode.DISABLED_FOR_TAGS && !doTagsMatch);
+final boolean doTagsMatch = project.getTags().stream()
+.map(org.dependencytrack.model.Tag::getName)
+.anyMatch(validationModeTags::contains);
+return (validationMode == BomValidationMode.ENABLED_FOR_TAGS && doTagsMatch)
+|| (validationMode == BomValidationMode.DISABLED_FOR_TAGS && !doTagsMatch);
-}
}
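
To make the four modes concrete, a self-contained example of the same decision rule; the tag values and the inline JSON are illustrative, and the parsing mirrors the jakarta.json calls above:

import jakarta.json.Json;
import jakarta.json.JsonString;

import java.io.StringReader;
import java.util.Set;

public class ValidationModeExample {

    enum BomValidationMode { ENABLED, DISABLED, ENABLED_FOR_TAGS, DISABLED_FOR_TAGS }

    public static void main(String[] args) {
        // Property value as stored, e.g. for BOM_VALIDATION_TAGS_INCLUSIVE: a JSON array of tag names.
        final Set<String> validationModeTags = Set.copyOf(
                Json.createReader(new StringReader("[\"internal\",\"legacy\"]"))
                        .readArray()
                        .getValuesAs(JsonString::getString));

        final Set<String> projectTags = Set.of("internal", "team-a");
        final boolean doTagsMatch = projectTags.stream().anyMatch(validationModeTags::contains);

        final BomValidationMode mode = BomValidationMode.ENABLED_FOR_TAGS;
        final boolean shouldValidate = (mode == BomValidationMode.ENABLED_FOR_TAGS && doTagsMatch)
                || (mode == BomValidationMode.DISABLED_FOR_TAGS && !doTagsMatch);
        System.out.println(shouldValidate); // true: "internal" is in the inclusive tag list
    }
}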
@@ -260,7 +260,7 @@ private Response process(QueryManager qm, Project project, String encodedVexData
return Response.status(Response.Status.FORBIDDEN).entity("Access to the specified project is forbidden").build();
}
final byte[] decoded = Base64.getDecoder().decode(encodedVexData);
-BomResource.validate(decoded, project);
+BomResource.validate(qm, decoded, project);
final VexUploadEvent vexUploadEvent = new VexUploadEvent(project.getUuid(), decoded);
Event.dispatch(vexUploadEvent);
return Response.ok(Collections.singletonMap("token", vexUploadEvent.getChainIdentifier())).build();
@@ -281,7 +281,7 @@ private Response process(QueryManager qm, Project project, List<FormDataBodyPart
}
try (InputStream in = bodyPartEntity.getInputStream()) {
final byte[] content = IOUtils.toByteArray(new BOMInputStream((in)));
-BomResource.validate(content, project);
+BomResource.validate(qm, content, project);
final VexUploadEvent vexUploadEvent = new VexUploadEvent(project.getUuid(), content);
Event.dispatch(vexUploadEvent);
return Response.ok(Collections.singletonMap("token", vexUploadEvent.getChainIdentifier())).build();
@@ -64,16 +64,15 @@ public int priority() {
public void init(final ConfigRegistry configRegistry) {
directoryPath = configRegistry.getOptionalValue(CONFIG_DIRECTORY)
.map(Paths::get)
-.orElseGet(() -> {
-final Path path = Config.getInstance().getDataDirectorty().toPath().resolve("bom-uploads");
-try {
-return Files.createDirectories(path);
-} catch (IOException e) {
-throw new IllegalStateException("""
-Failed to create directory for BOM upload storage at %s\
-""".formatted(path), e);
-}
-});
+.orElseGet(() -> Config.getInstance().getDataDirectorty().toPath().resolve("bom-uploads"));
+
+try {
+Files.createDirectories(directoryPath);
+} catch (IOException e) {
+throw new IllegalStateException("""
+Failed to create directory for BOM upload storage at %s\
+""".formatted(directoryPath), e);
+}

final boolean canRead = directoryPath.toFile().canRead();
final boolean canWrite = directoryPath.toFile().canWrite();
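The behavioral point of this refactor: previously the directory was only created on the fallback path, so an explicitly configured directory that did not yet exist would fail later. Directory creation now happens after resolution in both cases. A condensed, standalone sketch of the new flow, with the helper name assumed:

import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.Optional;

public class StorageDirExample {

    // Assumed helper mirroring the new init logic: resolve first, then create
    // the directory regardless of whether it was configured or defaulted.
    static Path resolveAndCreate(Optional<String> configured, Path dataDir) throws IOException {
        final Path dir = configured.map(Paths::get)
                .orElseGet(() -> dataDir.resolve("bom-uploads"));
        return Files.createDirectories(dir); // no-op if it already exists
    }

    public static void main(String[] args) throws IOException {
        final Path dataDir = Paths.get(System.getProperty("java.io.tmpdir"));
        System.out.println(resolveAndCreate(Optional.empty(), dataDir));
        System.out.println(resolveAndCreate(Optional.of(dataDir.resolve("custom").toString()), dataDir));
    }
}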
58 changes: 29 additions & 29 deletions src/main/resources/application.properties
@@ -703,44 +703,44 @@ dt.kafka.topic.prefix=
# @category: Kafka
# @type: integer
# @required
-alpine.kafka.processor.bom.upload.max.concurrency=-1
+kafka.processor.bom.upload.max.concurrency=-1

# @category: Kafka
# @type: enum
# @valid-values: [key, partition, unordered]
# @required
-alpine.kafka.processor.bom.upload.processing.order=key
+kafka.processor.bom.upload.processing.order=key

# @category: Kafka
# @type: integer
# @required
-alpine.kafka.processor.bom.upload.retry.initial.delay.ms=3000
+kafka.processor.bom.upload.retry.initial.delay.ms=3000

# @category: Kafka
# @type: integer
# @required
-alpine.kafka.processor.bom.upload.retry.multiplier=2
+kafka.processor.bom.upload.retry.multiplier=2

# @category: Kafka
# @type: double
# @required
-alpine.kafka.processor.bom.upload.retry.randomization.factor=0.3
+kafka.processor.bom.upload.retry.randomization.factor=0.3

# @category: Kafka
# @type: integer
# @required
-alpine.kafka.processor.bom.upload.retry.max.delay.ms=180000
+kafka.processor.bom.upload.retry.max.delay.ms=180000

# @category: Kafka
# @type: string
# @required
-alpine.kafka.processor.bom.upload.consumer.group.id=dtrack-apiserver-processor
+kafka.processor.bom.upload.consumer.group.id=dtrack-apiserver-processor

# @category: Kafka
# @type: enum
# @valid-values: [earliest, latest, none]
# @required
-alpine.kafka.processor.bom.upload.consumer.auto.offset.reset=earliest
+kafka.processor.bom.upload.consumer.auto.offset.reset=earliest

# Defines the maximum number of threads to process records from the `dtrack.vulnerability` topic.
# <br/><br/>
@@ -789,16 +789,16 @@ kafka.processor.vuln.mirror.consumer.group.id=dtrack-apiserver-processor
# @required
kafka.processor.vuln.mirror.consumer.auto.offset.reset=earliest

-# @category: Kafka
-# @type: integer
-# @required
-kafka.processor.epss.mirror.max.concurrency=-1

+# Defines the maximum number of threads to process records from the `dtrack.epss` topic.
+# <br/><br/>
+# A value of `-1` will cause the concurrency to match the number of partitions in the input topic.
+# Parallelization is based on record keys. Concurrency can thus be higher than the number of partitions.
+#
+# @category: Kafka
+# @type: integer
+# @required
+kafka.processor.epss.mirror.max.concurrency=-1

# @category: Kafka
# @type: enum
# @valid-values: [key, partition, unordered]
@@ -841,16 +841,16 @@ kafka.processor.epss.mirror.consumer.auto.offset.reset=earliest
# @required
kafka.processor.epss.mirror.max.batch.size=500

-# @category: Kafka
-# @type: integer
-# @required
-kafka.processor.repo.meta.analysis.result.max.concurrency=-1

+# Defines the maximum number of threads to process records from the `dtrack.repo-meta-analysis.result` topic.
+# <br/><br/>
+# A value of `-1` will cause the concurrency to match the number of partitions in the input topic.
+# Parallelization is based on record keys. Concurrency can thus be higher than the number of partitions.
+#
+# @category: Kafka
+# @type: integer
+# @required
+kafka.processor.repo.meta.analysis.result.max.concurrency=-1

# @category: Kafka
# @type: enum
# @valid-values: [key, partition, unordered]
@@ -888,16 +888,16 @@ kafka.processor.repo.meta.analysis.result.consumer.group.id=dtrack-apiserver-pro
# @required
kafka.processor.repo.meta.analysis.result.consumer.auto.offset.reset=earliest

-# @category: Kafka
-# @type: integer
-# @required
-kafka.processor.vuln.scan.result.max.concurrency=-1

+# Defines the maximum number of threads to process records from the `dtrack.vuln-analysis.result` topic.
+# <br/><br/>
+# A value of `-1` will cause the concurrency to match the number of partitions in the input topic.
+# Parallelization is based on record keys. Concurrency can thus be higher than the number of partitions.
+#
+# @category: Kafka
+# @type: integer
+# @required
+kafka.processor.vuln.scan.result.max.concurrency=-1

# @category: Kafka
# @type: enum
# @valid-values: [key, partition, unordered]
@@ -1614,10 +1614,10 @@ task.workflow.maintenance.lock.min.duration=PT1M
# @required
bom.upload.storage.compression.level=3

-# Defines for how long uploaded BOMs will be retained.
+# Defines for how long uploaded BOM files will be retained.
# <br/><br/>
-# Uploaded BOMs will be deleted from storage after successful processing.
-# A BOM upload will remain in storage if either deleting the BOM, or processing it failed.
+# Uploaded BOM files will be deleted from storage after successful processing.
+# A BOM upload will remain in storage if either deleting the BOM file, or processing it failed.
# Retention is thus only relevant for BOMs that either:
# <ul>
# <li>failed to be processed, or</li>
@@ -1633,8 +1633,8 @@ bom.upload.storage.compression.level=3
bom.upload.storage.retention.duration=PT3H
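
The retention value is an ISO-8601 duration and maps directly onto java.time.Duration, e.g.:

import java.time.Duration;

public class RetentionDurationExample {
    public static void main(String[] args) {
        final Duration retention = Duration.parse("PT3H"); // the default above: 3 hours
        System.out.println(retention.toMinutes()); // prints 180
    }
}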

# Defines the BOM upload storage extension to use.
-# When null, an enabled extension will be chosen based on its priority.
-# It is recommended to explicitly configure a extension for predictable behavior.
+# When not set, an enabled extension will be chosen based on its priority.
+# It is recommended to explicitly configure an extension for predictable behavior.
# <br/><br/>
# <ul>
# <li>
@@ -1678,7 +1678,7 @@ bom.upload.storage.extension.local.enabled=false
# Has no effect unless bom.upload.storage.extension.local.enabled is `true`.
#
# @category: Storage
-# @default: ${java.io.tmpdir}/bom-uploads
+# @default: ${alpine.data.directory}/bom-uploads
# @type: string
# bom.upload.storage.extension.local.directory=

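As a toy illustration of the key-ordered processing model described in the `max.concurrency` comments above (a concept sketch, not the actual parallel-consumer API): records sharing a key stay ordered on one worker, while distinct keys can fan out beyond the partition count.

public class KeyOrderedConcurrencyExample {
    public static void main(String[] args) {
        final int maxConcurrency = 12; // e.g. the resolved value when max.concurrency is not -1
        final String[] keys = {"pkg:maven/acme/[email protected]", "pkg:maven/acme/[email protected]", "pkg:maven/other/[email protected]"};
        for (String key : keys) {
            // Same key -> same worker index, preserving per-key ordering; usable
            // parallelism is bounded by distinct keys, not by partitions.
            final int worker = Math.floorMod(key.hashCode(), maxConcurrency);
            System.out.println(key + " -> worker " + worker);
        }
    }
}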
@@ -1406,15 +1406,15 @@ public void informIssue3957Test() throws Exception {

@Test
public void informIssue3936Test() throws Exception{

final Project project = qm.createProject("Acme Example", null, "1.0", null, null, null, true, false);
qm.persist(project);
List<String> boms = new ArrayList<>(Arrays.asList("bom-issue3936-authors.json", "bom-issue3936-author.json", "bom-issue3936-both.json"));
int i=0;
for(String bom : boms){
-final var bomUploadEvent = new BomUploadEvent(qm.detach(Project.class, project.getId()), createTempBomFile(bom));
-qm.createWorkflowSteps(bomUploadEvent.getChainIdentifier());
-new BomUploadProcessingTask().inform(bomUploadEvent);
+final var token = UUID.randomUUID();
+final BomUploadedEvent bomUploadedEvent = createEvent(token, project, bom);
+qm.createWorkflowSteps(token);
+new BomUploadProcessor().process(aConsumerRecord(project.getUuid(), bomUploadedEvent).build());
assertBomProcessedNotification();
qm.getPersistenceManager().evictAll();
assertThat(qm.getAllComponents(project)).isNotEmpty();
@@ -1479,9 +1479,10 @@ public void informWithExistingDuplicateComponentPropertiesAndBomWithDuplicateCom
]
}
""".getBytes(StandardCharsets.UTF_8);
-final var bomUploadEvent = new BomUploadEvent(qm.detach(Project.class, project.getId()), createTempBomFile(bomBytes));
-qm.createWorkflowSteps(bomUploadEvent.getChainIdentifier());
-new BomUploadProcessingTask().inform(bomUploadEvent);
+final var token = UUID.randomUUID();
+final BomUploadedEvent bomUploadedEvent = createEvent(token, project, bomBytes);
+qm.createWorkflowSteps(token);
+new BomUploadProcessor().process(aConsumerRecord(project.getUuid(), bomUploadedEvent).build());
assertBomProcessedNotification();

qm.getPersistenceManager().evictAll();
@@ -1523,9 +1523,10 @@ public void informWithEmptyComponentAndServiceNameTest() throws Exception {
}
""".getBytes(StandardCharsets.UTF_8);

-final var bomUploadEvent = new BomUploadEvent(qm.detach(Project.class, project.getId()), createTempBomFile(bomBytes));
-qm.createWorkflowSteps(bomUploadEvent.getChainIdentifier());
-new BomUploadProcessingTask().inform(bomUploadEvent);
+final var token = UUID.randomUUID();
+final BomUploadedEvent bomUploadedEvent = createEvent(token, project, bomBytes);
+qm.createWorkflowSteps(token);
+new BomUploadProcessor().process(aConsumerRecord(project.getUuid(), bomUploadedEvent).build());
assertBomProcessedNotification();

qm.getPersistenceManager().evictAll();