feat: stop data flow when policy is not valid anymore

ndr-brt committed Sep 28, 2023
1 parent 101203a commit 8b37684

Showing 41 changed files with 687 additions and 387 deletions.
@@ -26,6 +26,7 @@
import java.util.ArrayList;
import java.util.Comparator;
import java.util.List;
import java.util.function.Function;

import static java.lang.String.format;
import static org.eclipse.edc.spi.response.ResponseStatus.FATAL_ERROR;
@@ -51,18 +52,28 @@ public void register(int priority, DataFlowController controller) {
@Override
public @NotNull StatusResult<DataFlowResponse> initiate(TransferProcess transferProcess, Policy policy) {
try {
return controllers.stream()
.sorted(Comparator.comparingInt(a -> -a.priority))
.map(PrioritizedDataFlowController::controller)
.filter(controller -> controller.canHandle(transferProcess))
.findFirst()
.map(controller -> controller.initiateFlow(transferProcess, policy))
.orElseGet(() -> StatusResult.failure(FATAL_ERROR, controllerNotFound(transferProcess.getId())));
return chooseControllerAndApply(transferProcess, controller -> controller.initiateFlow(transferProcess, policy));
} catch (Exception e) {
return StatusResult.failure(FATAL_ERROR, runtimeException(transferProcess.getId(), e.getLocalizedMessage()));
}
}

@Override
public @NotNull StatusResult<Void> terminate(TransferProcess transferProcess) {
return chooseControllerAndApply(transferProcess, controller -> controller.terminate(transferProcess));
}

@NotNull
private <T> StatusResult<T> chooseControllerAndApply(TransferProcess transferProcess, Function<DataFlowController, StatusResult<T>> function) {
return controllers.stream()
.sorted(Comparator.comparingInt(a -> -a.priority))
.map(PrioritizedDataFlowController::controller)
.filter(controller -> controller.canHandle(transferProcess))
.findFirst()
.map(function)
.orElseGet(() -> StatusResult.failure(FATAL_ERROR, controllerNotFound(transferProcess.getId())));
}

private String runtimeException(String id, String message) {
return format("Unable to process transfer %s. Data flow controller throws an exception: %s", id, message);
}
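A brief usage sketch (not part of this diff) of the refactored selection path: both initiate() and terminate() now pick the highest-priority registered controller whose canHandle() accepts the transfer process. The arguments below are assumed to be built elsewhere.

// Usage sketch only; manager, controllers, transferProcess and policy are assumed to exist.
void usageSketch(DataFlowManagerImpl manager, DataFlowController fallback, DataFlowController preferred,
                 TransferProcess transferProcess, Policy policy) {
    manager.register(1, fallback);
    manager.register(10, preferred); // chosen first, as long as it can handle the process
    // Both calls resolve the controller through the same chooseControllerAndApply(...) path:
    var initiated = manager.initiate(transferProcess, policy); // StatusResult<DataFlowResponse>
    var terminated = manager.terminate(transferProcess);       // StatusResult<Void>
}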

@@ -25,6 +25,7 @@
import org.junit.jupiter.api.Test;

import static org.assertj.core.api.Assertions.assertThat;
import static org.eclipse.edc.junit.assertions.AbstractResultAssert.assertThat;
import static org.eclipse.edc.spi.response.ResponseStatus.FATAL_ERROR;
import static org.mockito.ArgumentMatchers.any;
import static org.mockito.Mockito.mock;
@@ -37,7 +38,7 @@ class DataFlowManagerImplTest {
private final DataFlowManagerImpl manager = new DataFlowManagerImpl();

@Test
void should_initiate_flow_on_correct_controller() {
void initiate_shouldInitiateFlowOnCorrectController() {
var controller = mock(DataFlowController.class);
var dataRequest = DataRequest.Builder.newInstance().destinationType("test-dest-type").build();
var policy = Policy.Builder.newInstance().build();
@@ -54,7 +55,7 @@ void should_initiate_flow_on_correct_controller() {
}

@Test
void should_return_fatal_error_if_no_controller_can_handle_the_request() {
void initiate_shouldReturnFatalError_whenNoControllerCanHandleTheRequest() {
var controller = mock(DataFlowController.class);
var dataRequest = DataRequest.Builder.newInstance().destinationType("test-dest-type").build();
var dataAddress = DataAddress.Builder.newInstance().type("test-type").build();
@@ -71,7 +72,7 @@ void should_return_fatal_error_if_no_controller_can_handle_the_request() {
}

@Test
void should_catch_exceptions_and_return_fatal_error() {
void initiate_shouldCatchExceptionsAndReturnFatalError() {
var controller = mock(DataFlowController.class);
var dataRequest = DataRequest.Builder.newInstance().destinationType("test-dest-type").build();
var dataAddress = DataAddress.Builder.newInstance().type("test-type").build();
@@ -91,7 +92,7 @@ void should_catch_exceptions_and_return_fatal_error() {
}

@Test
void shouldChooseHighestPriorityController() {
void initiate_shouldChooseHighestPriorityController() {
var highPriority = createDataFlowController();
var lowPriority = createDataFlowController();
manager.register(1, lowPriority);
@@ -103,6 +104,23 @@ void shouldChooseHighestPriorityController() {
verifyNoInteractions(lowPriority);
}

@Test
void terminate_shouldChooseControllerAndTerminate() {
var controller = mock(DataFlowController.class);
var dataRequest = DataRequest.Builder.newInstance().destinationType("test-dest-type").build();
var dataAddress = DataAddress.Builder.newInstance().type("test-type").build();
var transferProcess = TransferProcess.Builder.newInstance().dataRequest(dataRequest).contentDataAddress(dataAddress).build();

when(controller.canHandle(any())).thenReturn(true);
when(controller.terminate(any())).thenReturn(StatusResult.success());
manager.register(controller);

var result = manager.terminate(transferProcess);

assertThat(result).isSucceeded();
verify(controller).terminate(transferProcess);
}

private DataFlowController createDataFlowController() {
var dataFlowController = mock(DataFlowController.class);
when(dataFlowController.canHandle(any())).thenReturn(true);

@@ -17,7 +17,6 @@
import org.eclipse.edc.connector.api.client.spi.transferprocess.TransferProcessApiClient;
import org.eclipse.edc.connector.dataplane.framework.manager.DataPlaneManagerImpl;
import org.eclipse.edc.connector.dataplane.framework.pipeline.PipelineServiceImpl;
import org.eclipse.edc.connector.dataplane.framework.pipeline.PipelineServiceTransferServiceImpl;
import org.eclipse.edc.connector.dataplane.framework.registry.TransferServiceRegistryImpl;
import org.eclipse.edc.connector.dataplane.framework.registry.TransferServiceSelectionStrategy;
import org.eclipse.edc.connector.dataplane.spi.manager.DataPlaneManager;
@@ -100,10 +99,9 @@ public void initialize(ServiceExtensionContext context) {
var pipelineService = new PipelineServiceImpl(monitor);
pipelineService.registerFactory(new OutputStreamDataSinkFactory()); // Added by default to support synchronous data transfer, i.e. pull data
context.registerService(PipelineService.class, pipelineService);
var transferService = new PipelineServiceTransferServiceImpl(pipelineService);

var transferServiceRegistry = new TransferServiceRegistryImpl(transferServiceSelectionStrategy);
transferServiceRegistry.registerTransferService(transferService);
transferServiceRegistry.registerTransferService(pipelineService);
context.registerService(TransferServiceRegistry.class, transferServiceRegistry);

var numThreads = context.getSetting(TRANSFER_THREADS, DEFAULT_TRANSFER_THREADS);

@@ -26,6 +26,7 @@
import org.eclipse.edc.connector.dataplane.spi.store.DataPlaneStore;
import org.eclipse.edc.spi.entity.StatefulEntity;
import org.eclipse.edc.spi.query.Criterion;
import org.eclipse.edc.spi.response.StatusResult;
import org.eclipse.edc.spi.result.Result;
import org.eclipse.edc.spi.types.domain.transfer.DataFlowRequest;
import org.eclipse.edc.statemachine.Processor;
@@ -42,6 +43,7 @@
import static org.eclipse.edc.connector.dataplane.spi.DataFlowStates.FAILED;
import static org.eclipse.edc.connector.dataplane.spi.DataFlowStates.RECEIVED;
import static org.eclipse.edc.spi.persistence.StateEntityStore.hasState;
import static org.eclipse.edc.spi.response.ResponseStatus.FATAL_ERROR;

/**
* Default data manager implementation.
@@ -100,6 +102,29 @@ public DataFlowStates transferState(String processId) {
.map(DataFlowStates::from).orElse(null);
}

@Override
public StatusResult<Void> terminate(String dataFlowId) {
var result = store.findByIdAndLease(dataFlowId);
if (result.succeeded()) {
var dataFlow = result.getContent();
var transferService = transferServiceRegistry.resolveTransferService(dataFlow.toRequest());

if (transferService == null) {
return StatusResult.failure(FATAL_ERROR, "TransferService cannot be resolved for DataFlow %s".formatted(dataFlowId));
}

var terminateResult = transferService.terminate(dataFlow);
if (terminateResult.failed()) {
return StatusResult.failure(FATAL_ERROR, "DataFlow %s cannot be terminated: %s".formatted(dataFlowId, terminateResult.getFailureDetail()));
}
dataFlow.transitToCompleted();
store.save(dataFlow);
return StatusResult.success();
} else {
return StatusResult.from(result).map(it -> null);
}
}
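A hedged sketch of a possible caller (not part of this commit): once the policy behind a transfer is no longer considered valid, the flow can be stopped by id through the new terminate() entry point. The helper name and injected collaborators below are illustrative only.

// Illustrative helper, not from this commit; dataPlaneManager and monitor are assumed to be injected.
StatusResult<Void> stopFlow(DataPlaneManager dataPlaneManager, Monitor monitor, String dataFlowId) {
    var result = dataPlaneManager.terminate(dataFlowId);
    if (result.failed()) {
        // fails e.g. when the flow cannot be found/leased or no TransferService resolves it
        monitor.warning("Could not terminate data flow %s: %s".formatted(dataFlowId, result.getFailureDetail()));
    }
    return result;
}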

private boolean processReceived(DataFlow dataFlow) {
var request = dataFlow.toRequest();
var transferService = transferServiceRegistry.resolveTransferService(request);

@@ -15,6 +15,7 @@
package org.eclipse.edc.connector.dataplane.framework.pipeline;

import io.opentelemetry.instrumentation.annotations.WithSpan;
import org.eclipse.edc.connector.dataplane.spi.DataFlow;
import org.eclipse.edc.connector.dataplane.spi.pipeline.DataSink;
import org.eclipse.edc.connector.dataplane.spi.pipeline.DataSinkFactory;
import org.eclipse.edc.connector.dataplane.spi.pipeline.DataSource;
@@ -28,7 +29,9 @@
import org.jetbrains.annotations.Nullable;

import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.concurrent.CompletableFuture;

import static java.lang.String.format;
@@ -40,6 +43,7 @@
public class PipelineServiceImpl implements PipelineService {
private final List<DataSourceFactory> sourceFactories = new ArrayList<>();
private final List<DataSinkFactory> sinkFactories = new ArrayList<>();
private final Map<String, DataSource> sources = new HashMap<>();
private final Monitor monitor;

public PipelineServiceImpl(Monitor monitor) {
@@ -91,17 +95,8 @@ public CompletableFuture<StreamResult<Void>> transfer(DataFlowRequest request) {
return noSinkFactory(request);
}
var source = sourceFactory.createSource(request);
var sink = sinkFactory.createSink(request);
monitor.debug(() -> format("Transferring from %s to %s.", request.getSourceDataAddress().getType(), request.getDestinationDataAddress().getType()));
return sink.transfer(source);
}
sources.put(request.getProcessId(), source);

@Override
public CompletableFuture<StreamResult<Void>> transfer(DataSource source, DataFlowRequest request) {
var sinkFactory = getSinkFactory(request);
if (sinkFactory == null) {
return noSinkFactory(request);
}
var sink = sinkFactory.createSink(request);
monitor.debug(() -> format("Transferring from %s to %s.", request.getSourceDataAddress().getType(), request.getDestinationDataAddress().getType()));
return sink.transfer(source);
@@ -114,10 +109,28 @@ public CompletableFuture<StreamResult<Void>> transfer(DataSink sink, DataFlowRequest request) {
return noSourceFactory(request);
}
var source = sourceFactory.createSource(request);
sources.put(request.getProcessId(), source);

monitor.debug(() -> format("Transferring from %s to %s.", request.getSourceDataAddress().getType(), request.getDestinationDataAddress().getType()));
return sink.transfer(source);
}

@Override
public StreamResult<Void> terminate(DataFlow dataFlow) {
var source = sources.get(dataFlow.getId());
if (source == null) {
return StreamResult.notFound();
} else {
try {
source.close();
sources.remove(dataFlow.getId());
return StreamResult.success();
} catch (Exception e) {
return StreamResult.error("Cannot terminate DataFlow %s: %s".formatted(dataFlow.getId(), e.getMessage()));
}
}
}
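A short usage sketch (not part of this diff) of the new source tracking: transfer() registers the opened DataSource under the request's process id, and terminate() closes and removes it for the matching DataFlow. The method below and its parameters are illustrative.

// Usage sketch only; request and dataFlow are assumed to refer to the same process id.
void startThenStop(PipelineService pipelineService, DataFlowRequest request, DataFlow dataFlow) {
    var transferFuture = pipelineService.transfer(request); // registers the source under request.getProcessId()
    // ... later, e.g. once the policy is no longer valid:
    var terminateResult = pipelineService.terminate(dataFlow); // closes and removes the registered source
    // terminateResult is StreamResult.notFound() when no source was registered for this flow id
}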

@Override
public void registerFactory(DataSourceFactory factory) {
sourceFactories.add(factory);

This file was deleted.