
Commit 83581dd: PR review fixes

mehab committed Oct 4, 2023
Signed-off-by: mehab <[email protected]>
1 parent: f1adf93

Showing 17 changed files with 87 additions and 72 deletions.
----------------------------------------
@@ -2,6 +2,7 @@

import alpine.event.framework.Event;
import org.dependencytrack.model.Component;
import org.hyades.proto.repometaanalysis.v1.FetchMeta;

/**
* Defines an {@link Event} triggered when requesting a component to be analyzed for meta information.
@@ -12,7 +13,6 @@
* @param fetchLatestVersion Whether to fetch latest version meta information for a component.
*/
public record ComponentRepositoryMetaAnalysisEvent(String purlCoordinates, Boolean internal,
boolean fetchIntegrityData,
boolean fetchLatestVersion) implements Event {
FetchMeta fetchIntegrityData, FetchMeta fetchLatestVersion) implements Event {

}
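
For context, not part of the diff: a minimal sketch of how a caller would construct the updated event, assuming the record signature above; the purl and flag values below are made-up placeholders.

// Illustrative only: request integrity data but no latest-version lookup.
final var event = new ComponentRepositoryMetaAnalysisEvent(
        "pkg:maven/com.example/[email protected]", // purlCoordinates (hypothetical)
        false,                                // internal
        FetchMeta.FETCH_INTEGRITY_DATA,       // fetchIntegrityData
        FetchMeta.FETCH_UNSPECIFIED);         // fetchLatestVersion
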
----------------------------------------
@@ -10,6 +10,7 @@
import org.dependencytrack.model.IntegrityMetaComponent;
import org.dependencytrack.persistence.QueryManager;
import org.dependencytrack.util.LockProvider;
import org.hyades.proto.repometaanalysis.v1.FetchMeta;

import javax.servlet.ServletContextEvent;
import javax.servlet.ServletContextListener;
@@ -88,7 +89,7 @@ private void updateIntegrityMetaForPurls(QueryManager qm, List<String> purls) {
private void dispatchPurls(QueryManager qm, List<String> purls) {
for (final var purl : purls) {
ComponentProjection componentProjection = qm.getComponentByPurl(purl);
kafkaEventDispatcher.dispatchAsync(new ComponentRepositoryMetaAnalysisEvent(componentProjection.purlCoordinates, componentProjection.internal, true, false));
kafkaEventDispatcher.dispatchAsync(new ComponentRepositoryMetaAnalysisEvent(componentProjection.purlCoordinates, componentProjection.internal, FetchMeta.FETCH_INTEGRITY_DATA, FetchMeta.FETCH_UNSPECIFIED));
}
}

----------------------------------------
@@ -58,6 +58,7 @@ static KafkaEvent<String, AnalysisCommand> convert(final ComponentRepositoryMeta

final var analysisCommand = AnalysisCommand.newBuilder()
.setComponent(componentBuilder)
.setFetchLatestVersion(event.fetchLatestVersion())
.setFetchIntegrityData(event.fetchIntegrityData())
.setFetchLatestVersion(event.fetchLatestVersion())
.build();
----------------------------------------
@@ -10,7 +10,7 @@

public abstract class AbstractMetaHandler implements Handler {

ComponentProjectionWithPurl componentProjection;
ComponentProjection componentProjection;
QueryManager queryManager;
KafkaEventDispatcher kafkaEventDispatcher;
boolean fetchLatestVersion;
----------------------------------------
@@ -0,0 +1,4 @@
package org.dependencytrack.event.kafka.componentmeta;

public record ComponentProjection(String purlCoordinates, Boolean internal, String purl) {
}

----------------------------------------

This file was deleted.

----------------------------------------
@@ -7,7 +7,7 @@

public class HandlerFactory {

public static Handler createHandler(ComponentProjectionWithPurl componentProjection, QueryManager queryManager, KafkaEventDispatcher kafkaEventDispatcher, boolean fetchLatestVersion) throws MalformedPackageURLException {
public static Handler createHandler(ComponentProjection componentProjection, QueryManager queryManager, KafkaEventDispatcher kafkaEventDispatcher, boolean fetchLatestVersion) throws MalformedPackageURLException {
PackageURL packageURL = new PackageURL(componentProjection.purl());
boolean result = RepoMetaConstants.SUPPORTED_PACKAGE_URLS_FOR_INTEGRITY_CHECK.contains(packageURL.getType());
if (result) {
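
For context, not part of the diff: a rough sketch of how the factory is used, mirroring the call sites added in ComponentResource in this commit; projection fields, qm, and kafkaEventDispatcher are assumed placeholders.

// Sketch only: createHandler picks SupportedMetaHandler or UnSupportedMetaHandler based on the purl type.
ComponentProjection projection = new ComponentProjection(purlCoordinates, internal, purl); // placeholder values
try {
    Handler handler = HandlerFactory.createHandler(projection, qm, kafkaEventDispatcher, true); // true = also fetch latest version
    IntegrityMetaComponent meta = handler.handle(); // may be null when the purl type is unsupported
} catch (MalformedPackageURLException e) {
    // purl could not be parsed; no meta-analysis event is dispatched
}
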
----------------------------------------
@@ -5,6 +5,7 @@
import org.dependencytrack.model.FetchStatus;
import org.dependencytrack.model.IntegrityMetaComponent;
import org.dependencytrack.persistence.QueryManager;
import org.hyades.proto.repometaanalysis.v1.FetchMeta;

import java.time.Instant;
import java.util.Date;
@@ -13,7 +14,7 @@

public class SupportedMetaHandler extends AbstractMetaHandler {

public SupportedMetaHandler(ComponentProjectionWithPurl componentProjection, QueryManager queryManager, KafkaEventDispatcher kafkaEventDispatcher, boolean fetchLatestVersion) {
public SupportedMetaHandler(ComponentProjection componentProjection, QueryManager queryManager, KafkaEventDispatcher kafkaEventDispatcher, boolean fetchLatestVersion) {
this.componentProjection = componentProjection;
this.kafkaEventDispatcher = kafkaEventDispatcher;
this.queryManager = queryManager;
@@ -22,22 +23,27 @@ public SupportedMetaHandler(ComponentProjectionWithPurl componentProjection, Que

@Override
public IntegrityMetaComponent handle() {
IntegrityMetaComponent integrityMetaComponent = queryManager.getIntegrityMetaComponent(componentProjection.purl());
if (integrityMetaComponent != null) {
if (integrityMetaComponent.getStatus() == null || (integrityMetaComponent.getStatus() == FetchStatus.IN_PROGRESS && Date.from(Instant.now()).getTime() - integrityMetaComponent.getLastFetch().getTime() > TIME_SPAN)) {
integrityMetaComponent.setLastFetch(Date.from(Instant.now()));
IntegrityMetaComponent integrityMetaComponent1 = queryManager.updateIntegrityMetaComponent(integrityMetaComponent);
kafkaEventDispatcher.dispatchAsync(new ComponentRepositoryMetaAnalysisEvent(componentProjection.purlCoordinates(), componentProjection.internal(), true, fetchLatestVersion));
return integrityMetaComponent1;
} else {
kafkaEventDispatcher.dispatchAsync(new ComponentRepositoryMetaAnalysisEvent(componentProjection.purlCoordinates(), componentProjection.internal(), false, fetchLatestVersion));
return integrityMetaComponent;
}
FetchMeta fetchMeta;
if (fetchLatestVersion) {
fetchMeta = FetchMeta.FETCH_LATEST_VERSION;
} else {
fetchMeta = FetchMeta.FETCH_UNSPECIFIED;
}
IntegrityMetaComponent integrityMetaComponent = queryManager.getIntegrityMetaComponent(componentProjection.purl());
if (integrityMetaComponent == null) {
IntegrityMetaComponent integrityMetaComponent1 = queryManager.createIntegrityMetaComponent(createIntegrityMetaComponent(componentProjection.purl()));
kafkaEventDispatcher.dispatchAsync(new ComponentRepositoryMetaAnalysisEvent(componentProjection.purlCoordinates(), componentProjection.internal(), true, fetchLatestVersion));
kafkaEventDispatcher.dispatchAsync(new ComponentRepositoryMetaAnalysisEvent(componentProjection.purlCoordinates(), componentProjection.internal(), FetchMeta.FETCH_INTEGRITY_DATA, fetchMeta));
return integrityMetaComponent1;
}
if (integrityMetaComponent.getStatus() == null || (integrityMetaComponent.getStatus() == FetchStatus.IN_PROGRESS && Date.from(Instant.now()).getTime() - integrityMetaComponent.getLastFetch().getTime() > TIME_SPAN)) {
integrityMetaComponent.setLastFetch(Date.from(Instant.now()));
IntegrityMetaComponent integrityMetaComponent1 = queryManager.updateIntegrityMetaComponent(integrityMetaComponent);
kafkaEventDispatcher.dispatchAsync(new ComponentRepositoryMetaAnalysisEvent(componentProjection.purlCoordinates(), componentProjection.internal(), FetchMeta.FETCH_INTEGRITY_DATA, fetchMeta));
return integrityMetaComponent1;
} else {
kafkaEventDispatcher.dispatchAsync(new ComponentRepositoryMetaAnalysisEvent(componentProjection.purlCoordinates(), componentProjection.internal(), FetchMeta.FETCH_UNSPECIFIED, fetchMeta));
return integrityMetaComponent;
}
}

}
----------------------------------------
@@ -4,10 +4,11 @@
import org.dependencytrack.event.kafka.KafkaEventDispatcher;
import org.dependencytrack.model.IntegrityMetaComponent;
import org.dependencytrack.persistence.QueryManager;
import org.hyades.proto.repometaanalysis.v1.FetchMeta;

public class UnSupportedMetaHandler extends AbstractMetaHandler {

public UnSupportedMetaHandler(ComponentProjectionWithPurl componentProjection, QueryManager queryManager, KafkaEventDispatcher kafkaEventDispatcher, boolean fetchLatestVersion) {
public UnSupportedMetaHandler(ComponentProjection componentProjection, QueryManager queryManager, KafkaEventDispatcher kafkaEventDispatcher, boolean fetchLatestVersion) {
this.componentProjection = componentProjection;
this.kafkaEventDispatcher = kafkaEventDispatcher;
this.queryManager = queryManager;
@@ -16,7 +17,11 @@ public UnSupportedMetaHandler(ComponentProjectionWithPurl componentProjection, Q

@Override
public IntegrityMetaComponent handle() {
kafkaEventDispatcher.dispatchAsync(new ComponentRepositoryMetaAnalysisEvent(componentProjection.purlCoordinates(), componentProjection.internal(), false, fetchLatestVersion));
if (fetchLatestVersion) {
kafkaEventDispatcher.dispatchAsync(new ComponentRepositoryMetaAnalysisEvent(componentProjection.purlCoordinates(), componentProjection.internal(), FetchMeta.FETCH_UNSPECIFIED, FetchMeta.FETCH_LATEST_VERSION));
} else {
kafkaEventDispatcher.dispatchAsync(new ComponentRepositoryMetaAnalysisEvent(componentProjection.purlCoordinates(), componentProjection.internal(), FetchMeta.FETCH_UNSPECIFIED, FetchMeta.FETCH_UNSPECIFIED));
}
return null;
}
}
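
Not part of the diff: since the two branches above differ only in the last argument, an equivalent, more compact form (a sketch against the same record signature) would be:

FetchMeta latestVersionFetch = fetchLatestVersion ? FetchMeta.FETCH_LATEST_VERSION : FetchMeta.FETCH_UNSPECIFIED;
kafkaEventDispatcher.dispatchAsync(new ComponentRepositoryMetaAnalysisEvent(
        componentProjection.purlCoordinates(), componentProjection.internal(),
        FetchMeta.FETCH_UNSPECIFIED, latestVersionFetch));
return null;
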
----------------------------------------
@@ -37,7 +37,7 @@
import org.dependencytrack.event.ComponentVulnerabilityAnalysisEvent;
import org.dependencytrack.event.InternalComponentIdentificationEvent;
import org.dependencytrack.event.kafka.KafkaEventDispatcher;
import org.dependencytrack.event.kafka.componentmeta.ComponentProjectionWithPurl;
import org.dependencytrack.event.kafka.componentmeta.ComponentProjection;
import org.dependencytrack.event.kafka.componentmeta.Handler;
import org.dependencytrack.event.kafka.componentmeta.HandlerFactory;
import org.dependencytrack.model.Component;
@@ -320,8 +320,8 @@ public Response createComponent(@PathParam("uuid") String uuid, Component jsonCo
component.setNotes(StringUtils.trimToNull(jsonComponent.getNotes()));

component = qm.createComponent(component, true);
ComponentProjectionWithPurl componentProjection =
new ComponentProjectionWithPurl(component.getPurlCoordinates().toString(),
ComponentProjection componentProjection =
new ComponentProjection(component.getPurlCoordinates().toString(),
component.isInternal(), component.getPurl().toString());
try {
Handler repoMetaHandler = HandlerFactory.createHandler(componentProjection, qm, kafkaEventDispatcher, true);
@@ -415,8 +415,8 @@ public Response updateComponent(Component jsonComponent) {
component.setNotes(StringUtils.trimToNull(jsonComponent.getNotes()));

component = qm.updateComponent(component, true);
ComponentProjectionWithPurl componentProjection =
new ComponentProjectionWithPurl(component.getPurlCoordinates().toString(),
ComponentProjection componentProjection =
new ComponentProjection(component.getPurlCoordinates().toString(),
component.isInternal(), component.getPurl().toString());
try {

----------------------------------------
@@ -61,6 +61,7 @@
import org.dependencytrack.notification.vo.BomProcessingFailed;
import org.dependencytrack.persistence.FlushHelper;
import org.dependencytrack.persistence.QueryManager;
import org.hyades.proto.repometaanalysis.v1.FetchMeta;
import org.json.JSONArray;

import javax.jdo.PersistenceManager;
@@ -325,7 +326,7 @@ private void processBom(final Context ctx, final File bomFile) throws BomConsump
if (result) {
event = collectRepoMetaAnalysisEvents(component, qm);
} else {
event = new ComponentRepositoryMetaAnalysisEvent(component.getPurlCoordinates().toString(), component.isInternal(), false, true);
event = new ComponentRepositoryMetaAnalysisEvent(component.getPurlCoordinates().toString(), component.isInternal(), FetchMeta.FETCH_UNSPECIFIED, FetchMeta.FETCH_LATEST_VERSION);
}
repoMetaAnalysisEvents.add(event);
vulnAnalysisEvents.add(new ComponentVulnerabilityAnalysisEvent(
@@ -977,17 +978,16 @@ public String toString() {

private ComponentRepositoryMetaAnalysisEvent collectRepoMetaAnalysisEvents(Component component, QueryManager qm) {
IntegrityMetaComponent integrityMetaComponent = qm.getIntegrityMetaComponent(component.getPurl().toString());
if (integrityMetaComponent != null) {
if (integrityMetaComponent.getStatus() == null || (integrityMetaComponent.getStatus() == FetchStatus.IN_PROGRESS && (Date.from(Instant.now()).getTime() - integrityMetaComponent.getLastFetch().getTime()) > TIME_SPAN)) {
integrityMetaComponent.setLastFetch(Date.from(Instant.now()));
qm.getPersistenceManager().makePersistent(integrityMetaComponent);
return new ComponentRepositoryMetaAnalysisEvent(component.getPurlCoordinates().toString(), component.isInternal(), true, true);
} else {
return new ComponentRepositoryMetaAnalysisEvent(component.getPurlCoordinates().toString(), component.isInternal(), false, true);
}
} else {
if (integrityMetaComponent == null) {
qm.getPersistenceManager().makePersistent(AbstractMetaHandler.createIntegrityMetaComponent(component.getPurl().toString()));
return new ComponentRepositoryMetaAnalysisEvent(component.getPurlCoordinates().toString(), component.isInternal(), true, true);
return new ComponentRepositoryMetaAnalysisEvent(component.getPurlCoordinates().toString(), component.isInternal(), FetchMeta.FETCH_INTEGRITY_DATA, FetchMeta.FETCH_LATEST_VERSION);
}
if (integrityMetaComponent.getStatus() == null || (integrityMetaComponent.getStatus() == FetchStatus.IN_PROGRESS && (Date.from(Instant.now()).getTime() - integrityMetaComponent.getLastFetch().getTime()) > TIME_SPAN)) {
integrityMetaComponent.setLastFetch(Date.from(Instant.now()));
qm.getPersistenceManager().makePersistent(integrityMetaComponent);
return new ComponentRepositoryMetaAnalysisEvent(component.getPurlCoordinates().toString(), component.isInternal(), FetchMeta.FETCH_INTEGRITY_DATA, FetchMeta.FETCH_LATEST_VERSION);
} else {
return new ComponentRepositoryMetaAnalysisEvent(component.getPurlCoordinates().toString(), component.isInternal(), FetchMeta.FETCH_UNSPECIFIED, FetchMeta.FETCH_LATEST_VERSION);
}
}

----------------------------------------
@@ -21,20 +21,18 @@
import alpine.common.logging.Logger;
import alpine.event.framework.Event;
import alpine.event.framework.Subscriber;
import com.github.packageurl.MalformedPackageURLException;
import net.javacrumbs.shedlock.core.LockConfiguration;
import net.javacrumbs.shedlock.core.LockExtender;
import net.javacrumbs.shedlock.core.LockingTaskExecutor;
import org.dependencytrack.event.ComponentRepositoryMetaAnalysisEvent;
import org.dependencytrack.event.PortfolioRepositoryMetaAnalysisEvent;
import org.dependencytrack.event.ProjectRepositoryMetaAnalysisEvent;
import org.dependencytrack.event.kafka.KafkaEventDispatcher;
import org.dependencytrack.event.kafka.componentmeta.ComponentProjectionWithPurl;
import org.dependencytrack.event.kafka.componentmeta.Handler;
import org.dependencytrack.event.kafka.componentmeta.HandlerFactory;
import org.dependencytrack.model.Component;
import org.dependencytrack.model.Project;
import org.dependencytrack.persistence.QueryManager;
import org.dependencytrack.util.LockProvider;
import org.hyades.proto.repometaanalysis.v1.FetchMeta;

import javax.jdo.PersistenceManager;
import javax.jdo.Query;
@@ -99,7 +97,7 @@ private void processProject(final UUID projectUuid) throws Exception {
List<ComponentProjection> components = fetchNextComponentsPage(pm, project, offset);
while (!components.isEmpty()) {
// latest version information needs to be fetched for the project, since this is triggered either by a fresh BOM upload or by an individual project re-analysis
dispatchComponents(components, qm);
dispatchComponents(components);

offset += components.size();
components = fetchNextComponentsPage(pm, project, offset);
@@ -126,7 +124,7 @@ private void processPortfolio() throws Exception {
LockExtender.extendActiveLock(Duration.ofMinutes(5).plus(lockConfiguration.getLockAtLeastFor()), lockConfiguration.getLockAtLeastFor());
}
// latest version information does not need to be fetched for the project, since portfolio analysis is a scheduled event
dispatchComponents(components, qm);
dispatchComponents(components);

offset += components.size();
components = fetchNextComponentsPage(pm, null, offset);
@@ -136,14 +134,9 @@
LOGGER.info("All components in portfolio submitted for repository meta analysis");
}

private void dispatchComponents(final List<ComponentProjection> components, QueryManager queryManager) {
private void dispatchComponents(final List<ComponentProjection> components) {
for (final var component : components) {
try {
Handler repoMetaHandler = HandlerFactory.createHandler(new ComponentProjectionWithPurl(component.purlCoordinates(), component.internal(), component.purlCoordinates()), queryManager, kafkaEventDispatcher, true);
repoMetaHandler.handle();
} catch (MalformedPackageURLException ex) {
LOGGER.warn("Unable to determine package url type for this purl %s".formatted(component.purlCoordinates()), ex);
}
kafkaEventDispatcher.dispatchAsync(new ComponentRepositoryMetaAnalysisEvent(component.purlCoordinates(), component.internal(), FetchMeta.FETCH_LATEST_VERSION, FetchMeta.FETCH_UNSPECIFIED));
}
}

@@ -165,7 +158,7 @@ private List<ComponentProjection> fetchNextComponentsPage(final PersistenceManag
}
}

public record ComponentProjection(String purlCoordinates, Boolean internal) {
private record ComponentProjection(String purlCoordinates, Boolean internal) {
}

}
----------------------------------------
@@ -11,8 +11,15 @@ option java_package = "org.hyades.proto.repometaanalysis.v1";
message AnalysisCommand {
// The component that shall be analyzed.
Component component = 1;
bool fetch_integrity_data = 2;
bool fetch_latest_version = 3;
FetchMeta fetch_integrity_data = 2;
FetchMeta fetch_latest_version = 3;

}

enum FetchMeta {
  FETCH_UNSPECIFIED = 0;
  FETCH_INTEGRITY_DATA = 1;
  FETCH_LATEST_VERSION = 2;
}

GitHub Actions / Buf check failures on lines 20-22 in src/main/proto/org/hyades/repometaanalysis/v1/repo_meta_analysis.proto:
- Enum value name "FETCH_UNSPECIFIED" should be prefixed with "FETCH_META_".
- Enum value name "FETCH_INTEGRITY_DATA" should be prefixed with "FETCH_META_".
- Enum value name "FETCH_LATEST_VERSION" should be prefixed with "FETCH_META_".

message AnalysisResult {
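
Not part of the diff: the Buf failures listed above come from its ENUM_VALUE_PREFIX lint rule, which expects enum values to carry the enum name as a prefix; renaming the values to FETCH_META_UNSPECIFIED, FETCH_META_INTEGRITY_DATA and FETCH_META_LATEST_VERSION (and updating the Java call sites accordingly) would be one way to satisfy it.
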
(Remaining file diffs not loaded.)
