Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

added integrity analysis event on apiserver #339

Merged
merged 13 commits into from
Oct 11, 2023
Original file line number Diff line number Diff line change
@@ -1,21 +1,17 @@
package org.dependencytrack.event;

import alpine.event.framework.Event;
import com.github.packageurl.PackageURL;
import org.dependencytrack.model.Component;

import java.util.Optional;
import org.hyades.proto.repometaanalysis.v1.FetchMeta;

/**
* Defines an {@link Event} triggered when requesting a component to be analyzed for meta information.
*
* @param purlCoordinates The package URL coordinates of the {@link Component} to analyze
* @param internal Whether the {@link Component} is internal
* @param fetchMeta Whether component hash data or component metadata needs to be fetched from an external API
*/
public record ComponentRepositoryMetaAnalysisEvent(String purlCoordinates, Boolean internal) implements Event {

public ComponentRepositoryMetaAnalysisEvent(final Component component) {
this(Optional.ofNullable(component.getPurlCoordinates()).map(PackageURL::canonicalize).orElse(null), component.isInternal());
}
public record ComponentRepositoryMetaAnalysisEvent(String purlCoordinates, Boolean internal,
FetchMeta fetchMeta) implements Event {

}
Original file line number Diff line number Diff line change
Expand Up @@ -10,6 +10,7 @@
import org.dependencytrack.model.IntegrityMetaComponent;
import org.dependencytrack.persistence.QueryManager;
import org.dependencytrack.util.LockProvider;
import org.hyades.proto.repometaanalysis.v1.FetchMeta;

import javax.servlet.ServletContextEvent;
import javax.servlet.ServletContextListener;
Expand Down Expand Up @@ -67,7 +68,7 @@ private void batchProcessPurls(QueryManager qm) {
List<String> purls = qm.fetchNextPurlsPage(offset);
while (!purls.isEmpty()) {
long cumulativeProcessingTime = System.currentTimeMillis() - startTime;
if(isLockToBeExtended(cumulativeProcessingTime, INTEGRITY_META_INITIALIZER_TASK_LOCK)) {
if (isLockToBeExtended(cumulativeProcessingTime, INTEGRITY_META_INITIALIZER_TASK_LOCK)) {
LockExtender.extendActiveLock(Duration.ofMinutes(5).plus(lockConfiguration.getLockAtLeastFor()), lockConfiguration.getLockAtLeastFor());
}
dispatchPurls(qm, purls);
Expand All @@ -88,7 +89,7 @@ private void updateIntegrityMetaForPurls(QueryManager qm, List<String> purls) {
private void dispatchPurls(QueryManager qm, List<String> purls) {
for (final var purl : purls) {
ComponentProjection componentProjection = qm.getComponentByPurl(purl);
kafkaEventDispatcher.dispatchAsync(new ComponentRepositoryMetaAnalysisEvent(componentProjection.purlCoordinates, componentProjection.internal));
kafkaEventDispatcher.dispatchAsync(new ComponentRepositoryMetaAnalysisEvent(purl, componentProjection.internal, FetchMeta.FETCH_META_INTEGRITY_DATA));
}
}

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -48,7 +48,7 @@ static KafkaEvent<ScanKey, ScanCommand> convert(final ComponentVulnerabilityAnal
}

static KafkaEvent<String, AnalysisCommand> convert(final ComponentRepositoryMetaAnalysisEvent event) {
if (event == null || event.purlCoordinates()== null) {
if (event == null || event.purlCoordinates() == null) {
return null;
}

Expand All @@ -58,6 +58,7 @@ static KafkaEvent<String, AnalysisCommand> convert(final ComponentRepositoryMeta

final var analysisCommand = AnalysisCommand.newBuilder()
.setComponent(componentBuilder)
.setFetchMeta(event.fetchMeta())
.build();

return new KafkaEvent<>(KafkaTopics.REPO_META_ANALYSIS_COMMAND, event.purlCoordinates(), analysisCommand, null);
Expand Down
Original file line number Diff line number Diff line change
@@ -0,0 +1,28 @@
package org.dependencytrack.event.kafka.componentmeta;

import org.dependencytrack.event.kafka.KafkaEventDispatcher;
import org.dependencytrack.model.FetchStatus;
import org.dependencytrack.model.IntegrityMetaComponent;
import org.dependencytrack.persistence.QueryManager;
import org.hyades.proto.repometaanalysis.v1.FetchMeta;

import java.time.Instant;
import java.util.Date;

/**
 * Base class for {@link Handler} implementations that process component meta
 * analysis requests. Holds the collaborators every concrete handler needs;
 * subclasses are expected to populate these fields in their constructors.
 */
public abstract class AbstractMetaHandler implements Handler {

    // Explicitly protected: these are shared state for concrete handlers
    // (SupportedMetaHandler / UnSupportedMetaHandler), not package internals.
    protected ComponentProjection componentProjection;
    protected QueryManager queryManager;
    protected KafkaEventDispatcher kafkaEventDispatcher;
    protected FetchMeta fetchMeta;

    /**
     * Builds a fresh {@link IntegrityMetaComponent} for the given package URL,
     * marked {@link FetchStatus#IN_PROGRESS} with the fetch timestamp set to now.
     *
     * @param purl canonical package URL string identifying the component
     * @return a new, not-yet-persisted {@link IntegrityMetaComponent}
     */
    public static IntegrityMetaComponent createIntegrityMetaComponent(String purl) {
        IntegrityMetaComponent integrityMetaComponent = new IntegrityMetaComponent();
        integrityMetaComponent.setStatus(FetchStatus.IN_PROGRESS);
        integrityMetaComponent.setPurl(purl);
        integrityMetaComponent.setLastFetch(Date.from(Instant.now()));
        return integrityMetaComponent;
    }

}
Original file line number Diff line number Diff line change
@@ -0,0 +1,6 @@
package org.dependencytrack.event.kafka.componentmeta;

import com.github.packageurl.PackageURL;

/**
 * Lightweight projection of a {@code Component} used for meta analysis dispatch.
 *
 * @param purlCoordinates package URL coordinates string of the component (may differ from {@code purl} — TODO confirm against caller)
 * @param internal        whether the component is internal to the organization
 * @param purl            parsed package URL of the component
 */
public record ComponentProjection(String purlCoordinates, Boolean internal, PackageURL purl) {
}
Original file line number Diff line number Diff line change
@@ -0,0 +1,8 @@
package org.dependencytrack.event.kafka.componentmeta;

import com.github.packageurl.MalformedPackageURLException;
import org.dependencytrack.model.IntegrityMetaComponent;

/**
 * Strategy for handling a component meta analysis request.
 */
public interface Handler {
    /**
     * Processes the component this handler was created for, dispatching any
     * required analysis events.
     *
     * @return the persisted {@link IntegrityMetaComponent}, or {@code null} when
     *         no integrity metadata is tracked for the component
     * @throws MalformedPackageURLException if the component's package URL cannot be parsed
     */
    IntegrityMetaComponent handle() throws MalformedPackageURLException;
}
Original file line number Diff line number Diff line change
@@ -0,0 +1,18 @@
package org.dependencytrack.event.kafka.componentmeta;

import com.github.packageurl.MalformedPackageURLException;
import org.dependencytrack.event.kafka.KafkaEventDispatcher;
import org.dependencytrack.persistence.QueryManager;
import org.hyades.proto.repometaanalysis.v1.FetchMeta;

/**
 * Factory that selects the appropriate {@link Handler} for a component based on
 * whether its package type supports integrity checking.
 */
public class HandlerFactory {

    private HandlerFactory() {
        // Utility class; not meant to be instantiated.
    }

    /**
     * Creates a handler for the given component.
     *
     * @param componentProjection  projection of the component to analyze
     * @param queryManager         persistence access
     * @param kafkaEventDispatcher dispatcher for analysis events
     * @param fetchMeta            what metadata to fetch (used only for supported package types;
     *                             unsupported types always fall back to latest-version lookup)
     * @return a {@link SupportedMetaHandler} when the package type supports integrity
     *         checks, otherwise an {@link UnSupportedMetaHandler}
     * @throws MalformedPackageURLException declared for handler construction symmetry
     */
    public static Handler createHandler(ComponentProjection componentProjection, QueryManager queryManager, KafkaEventDispatcher kafkaEventDispatcher, FetchMeta fetchMeta) throws MalformedPackageURLException {
        boolean integrityCheckSupported = RepoMetaConstants.SUPPORTED_PACKAGE_URLS_FOR_INTEGRITY_CHECK.contains(componentProjection.purl().getType());
        if (integrityCheckSupported) {
            return new SupportedMetaHandler(componentProjection, queryManager, kafkaEventDispatcher, fetchMeta);
        } else {
            return new UnSupportedMetaHandler(componentProjection, queryManager, kafkaEventDispatcher, FetchMeta.FETCH_META_LATEST_VERSION);
        }
    }
}
Original file line number Diff line number Diff line change
@@ -0,0 +1,9 @@
package org.dependencytrack.event.kafka.componentmeta;

import java.util.List;

/**
 * Constants shared by the component meta analysis handlers.
 */
public final class RepoMetaConstants {

    private RepoMetaConstants() {
        // Constants holder; not meant to be instantiated.
    }

    /** Re-fetch window in milliseconds: one hour. */
    public static final long TIME_SPAN = 60 * 60 * 1000L;

    /** Package URL types for which integrity (hash) checking is supported. */
    public static final List<String> SUPPORTED_PACKAGE_URLS_FOR_INTEGRITY_CHECK = List.of("maven", "npm", "pypi");
}
Original file line number Diff line number Diff line change
@@ -0,0 +1,45 @@
package org.dependencytrack.event.kafka.componentmeta;

import com.github.packageurl.MalformedPackageURLException;
import com.github.packageurl.PackageURL;
import org.dependencytrack.event.ComponentRepositoryMetaAnalysisEvent;
import org.dependencytrack.event.kafka.KafkaEventDispatcher;
import org.dependencytrack.model.FetchStatus;
import org.dependencytrack.model.IntegrityMetaComponent;
import org.dependencytrack.persistence.QueryManager;
import org.hyades.proto.repometaanalysis.v1.FetchMeta;

import java.time.Instant;
import java.util.Date;

import static org.dependencytrack.event.kafka.componentmeta.RepoMetaConstants.TIME_SPAN;

/**
 * {@link Handler} for components whose package type supports integrity checking.
 * Ensures an {@link IntegrityMetaComponent} row exists (creating or refreshing it
 * as needed) and always dispatches a repository meta analysis event.
 */
public class SupportedMetaHandler extends AbstractMetaHandler {

    public SupportedMetaHandler(ComponentProjection componentProjection, QueryManager queryManager, KafkaEventDispatcher kafkaEventDispatcher, FetchMeta fetchMeta) {
        this.componentProjection = componentProjection;
        this.kafkaEventDispatcher = kafkaEventDispatcher;
        this.queryManager = queryManager;
        this.fetchMeta = fetchMeta;
    }

    /**
     * Creates the integrity meta record if absent, or refreshes its fetch timestamp
     * when the previous fetch never completed (status missing, or IN_PROGRESS for
     * longer than {@link RepoMetaConstants#TIME_SPAN}); otherwise leaves the
     * existing record untouched. In every case an analysis event is dispatched.
     *
     * @return the persisted {@link IntegrityMetaComponent} for this component
     */
    @Override
    public IntegrityMetaComponent handle() throws MalformedPackageURLException {
        final String purl = componentProjection.purl().toString();
        IntegrityMetaComponent persistentIntegrityMetaComponent = queryManager.getIntegrityMetaComponent(purl);

        final IntegrityMetaComponent result;
        if (persistentIntegrityMetaComponent == null) {
            // First time we see this purl: create a fresh IN_PROGRESS record.
            result = queryManager.createIntegrityMetaComponent(createIntegrityMetaComponent(purl));
        } else if (persistentIntegrityMetaComponent.getStatus() == null
                || (persistentIntegrityMetaComponent.getStatus() == FetchStatus.IN_PROGRESS
                    && Date.from(Instant.now()).getTime() - persistentIntegrityMetaComponent.getLastFetch().getTime() > TIME_SPAN)) {
            // A previous fetch stalled; bump the timestamp and retry.
            persistentIntegrityMetaComponent.setLastFetch(Date.from(Instant.now()));
            result = queryManager.updateIntegrityMetaComponent(persistentIntegrityMetaComponent);
        } else {
            // Fetch already completed (or recently started); keep the record as-is.
            result = persistentIntegrityMetaComponent;
        }

        // All branches dispatch the same event — hoisted out of the conditional.
        kafkaEventDispatcher.dispatchAsync(new ComponentRepositoryMetaAnalysisEvent(componentProjection.purl().canonicalize(), componentProjection.internal(), fetchMeta));
        return result;
    }

}
Original file line number Diff line number Diff line change
@@ -0,0 +1,25 @@
package org.dependencytrack.event.kafka.componentmeta;

import com.github.packageurl.MalformedPackageURLException;
import com.github.packageurl.PackageURL;
import org.dependencytrack.event.ComponentRepositoryMetaAnalysisEvent;
import org.dependencytrack.event.kafka.KafkaEventDispatcher;
import org.dependencytrack.model.IntegrityMetaComponent;
import org.dependencytrack.persistence.QueryManager;
import org.hyades.proto.repometaanalysis.v1.FetchMeta;

/**
 * {@link Handler} for components whose package type does not support integrity
 * checking. No integrity meta record is created; the analysis event is simply
 * dispatched with the configured fetch mode.
 */
public class UnSupportedMetaHandler extends AbstractMetaHandler {

    public UnSupportedMetaHandler(ComponentProjection componentProjection, QueryManager queryManager, KafkaEventDispatcher kafkaEventDispatcher, FetchMeta fetchMeta) {
        this.componentProjection = componentProjection;
        this.kafkaEventDispatcher = kafkaEventDispatcher;
        this.queryManager = queryManager;
        this.fetchMeta = fetchMeta;
    }

    /**
     * Dispatches a repository meta analysis event for the component.
     *
     * @return always {@code null} — no integrity metadata is tracked for
     *         unsupported package types
     * @throws MalformedPackageURLException if the purl coordinates cannot be parsed
     */
    @Override
    public IntegrityMetaComponent handle() throws MalformedPackageURLException {
        final String canonicalPurl = new PackageURL(componentProjection.purlCoordinates()).canonicalize();
        final var event = new ComponentRepositoryMetaAnalysisEvent(canonicalPurl, componentProjection.internal(), fetchMeta);
        kafkaEventDispatcher.dispatchAsync(event);
        return null;
    }
}
Original file line number Diff line number Diff line change
Expand Up @@ -80,6 +80,7 @@ private void synchronizeMetaInformationForComponent(final QueryManager queryMana
trx.begin();
pm.makePersistent(repositoryMetaComponentResult);
trx.commit();
break; // this means that transaction was successful and we do not need to retry
}
} catch (JDODataStoreException e) {
// TODO: DataNucleus doesn't map constraint violation exceptions very well,
Expand All @@ -99,8 +100,9 @@ private void synchronizeMetaInformationForComponent(final QueryManager queryMana
}
}
//synchronize integrity meta information if available
IntegrityMetaComponent res = synchronizeIntegrityMetaResult(record, queryManager, purl);
if (res == null) {
if (result.hasIntegrityMeta()) {
synchronizeIntegrityMetaResult(record, queryManager, purl);
} else {
LOGGER.debug("Incoming result for component with purl %s does not include component integrity info".formatted(purl));
}
}
Expand Down Expand Up @@ -146,41 +148,39 @@ private RepositoryMetaComponent createRepositoryMetaResult(Record<String, Analys
}
}

private IntegrityMetaComponent synchronizeIntegrityMetaResult(final Record<String, AnalysisResult> incomingAnalysisResultRecord, QueryManager queryManager, PackageURL purl) {
private void synchronizeIntegrityMetaResult(final Record<String, AnalysisResult> incomingAnalysisResultRecord, QueryManager queryManager, PackageURL purl) {
final AnalysisResult result = incomingAnalysisResultRecord.value();

IntegrityMetaComponent persistentIntegrityMetaComponent = queryManager.getIntegrityMetaComponent(purl.toString());
if (persistentIntegrityMetaComponent == null) {
persistentIntegrityMetaComponent = new IntegrityMetaComponent();
}

if (persistentIntegrityMetaComponent.getStatus().equals(FetchStatus.PROCESSED)) {
LOGGER.warn("""
Received hash information for %s that has already been processed; Discarding
""".formatted(purl));
return null;
return;
}
if (result.hasIntegrityMeta()) {
if (result.getIntegrityMeta().hasMd5() || result.getIntegrityMeta().hasSha1() || result.getIntegrityMeta().hasSha256()
|| result.getIntegrityMeta().hasSha512() || result.getIntegrityMeta().hasCurrentVersionLastModified()) {
Optional.ofNullable(result.getIntegrityMeta().getMd5()).ifPresent(persistentIntegrityMetaComponent::setMd5);
Optional.ofNullable(result.getIntegrityMeta().getSha1()).ifPresent(persistentIntegrityMetaComponent::setSha1);
Optional.ofNullable(result.getIntegrityMeta().getSha256()).ifPresent(persistentIntegrityMetaComponent::setSha256);
Optional.ofNullable(result.getIntegrityMeta().getSha512()).ifPresent(persistentIntegrityMetaComponent::setSha512);
persistentIntegrityMetaComponent.setPurl(result.getComponent().getPurl());
persistentIntegrityMetaComponent.setRepositoryUrl(result.getIntegrityMeta().getMetaSourceUrl());
persistentIntegrityMetaComponent.setPublishedAt(result.getIntegrityMeta().hasCurrentVersionLastModified() ? new Date(result.getIntegrityMeta().getCurrentVersionLastModified().getSeconds() * 1000) : null);
persistentIntegrityMetaComponent.setStatus(FetchStatus.PROCESSED);
} else {
persistentIntegrityMetaComponent.setMd5(null);
persistentIntegrityMetaComponent.setSha256(null);
persistentIntegrityMetaComponent.setSha1(null);
persistentIntegrityMetaComponent.setSha512(null);
persistentIntegrityMetaComponent.setPurl(purl.toString());
persistentIntegrityMetaComponent.setRepositoryUrl(result.getIntegrityMeta().getMetaSourceUrl());
persistentIntegrityMetaComponent.setStatus(FetchStatus.NOT_AVAILABLE);
}
return queryManager.updateIntegrityMetaComponent(persistentIntegrityMetaComponent);
if (result.getIntegrityMeta().hasMd5() || result.getIntegrityMeta().hasSha1() || result.getIntegrityMeta().hasSha256()
|| result.getIntegrityMeta().hasSha512() || result.getIntegrityMeta().hasCurrentVersionLastModified()) {
Optional.ofNullable(result.getIntegrityMeta().getMd5()).ifPresent(persistentIntegrityMetaComponent::setMd5);
Optional.ofNullable(result.getIntegrityMeta().getSha1()).ifPresent(persistentIntegrityMetaComponent::setSha1);
Optional.ofNullable(result.getIntegrityMeta().getSha256()).ifPresent(persistentIntegrityMetaComponent::setSha256);
Optional.ofNullable(result.getIntegrityMeta().getSha512()).ifPresent(persistentIntegrityMetaComponent::setSha512);
persistentIntegrityMetaComponent.setPurl(result.getComponent().getPurl());
persistentIntegrityMetaComponent.setRepositoryUrl(result.getIntegrityMeta().getMetaSourceUrl());
persistentIntegrityMetaComponent.setPublishedAt(result.getIntegrityMeta().hasCurrentVersionLastModified() ? new Date(result.getIntegrityMeta().getCurrentVersionLastModified().getSeconds() * 1000) : null);
persistentIntegrityMetaComponent.setStatus(FetchStatus.PROCESSED);
} else {
return null;
persistentIntegrityMetaComponent.setMd5(null);
persistentIntegrityMetaComponent.setSha256(null);
persistentIntegrityMetaComponent.setSha1(null);
persistentIntegrityMetaComponent.setSha512(null);
persistentIntegrityMetaComponent.setPurl(purl.toString());
persistentIntegrityMetaComponent.setRepositoryUrl(result.getIntegrityMeta().getMetaSourceUrl());
persistentIntegrityMetaComponent.setStatus(FetchStatus.NOT_AVAILABLE);
}
queryManager.updateIntegrityMetaComponent(persistentIntegrityMetaComponent);
}

}
3 changes: 2 additions & 1 deletion src/main/java/org/dependencytrack/model/FetchStatus.java
Original file line number Diff line number Diff line change
@@ -1,8 +1,9 @@
package org.dependencytrack.model;

public enum FetchStatus {
//request processed successfully
PROCESSED,
TIMED_OUT,
//fetching information for this component is in progress
IN_PROGRESS,
NOT_AVAILABLE
}
Original file line number Diff line number Diff line change
Expand Up @@ -18,8 +18,10 @@
*/
package org.dependencytrack.model;

import alpine.server.json.TrimmedStringDeserializer;
import com.fasterxml.jackson.annotation.JsonIgnore;
import com.fasterxml.jackson.annotation.JsonInclude;
import com.fasterxml.jackson.databind.annotation.JsonDeserialize;

import javax.jdo.annotations.Column;
import javax.jdo.annotations.Extension;
Expand All @@ -31,6 +33,7 @@
import javax.jdo.annotations.Unique;
import javax.validation.constraints.NotNull;
import javax.validation.constraints.Pattern;
import javax.validation.constraints.Size;
import java.io.Serializable;
import java.util.Date;

Expand Down Expand Up @@ -67,13 +70,16 @@ public void setSha512(String sha512) {
}

@Persistent
@Column(name = "SHA_512", jdbcType = "VARCHAR", length = 128)
@Column(name = "SHA512", jdbcType = "VARCHAR", length = 128)
@Pattern(regexp = "^[0-9a-fA-F]{128}$", message = "The SHA-512 hash must be a valid 128 character HEX number")
private String sha512;

@Persistent
@Column(name = "PURL", allowsNull = "false")
@Column(name = "PURL", allowsNull = "false", jdbcType = "VARCHAR", length = 1024)
@Index(name = "PURL_IDX")
@Size(max = 1024)
@com.github.packageurl.validator.PackageURL
@JsonDeserialize(using = TrimmedStringDeserializer.class)
@Unique
@NotNull
private String purl;
Expand Down
Loading