fix(artifacts): Automated triggers with artifact constraints are broken if you have 2 or more of the same type (backport #4579) (#4586)

Co-authored-by: Cameron Motevasselani <[email protected]>
Co-authored-by: Nemesis Osorio <[email protected]>
fix(artifacts): Automated triggers with artifact constraints are broken if you have 2 or more of the same type (#4579)
fix(artifacts): resolving git conflicts from #4579 for release-1.30.x (#4589)
3 people authored Nov 7, 2023
1 parent 1fc563b commit 4740c16
Showing 4 changed files with 159 additions and 94 deletions.
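
For context, a minimal sketch of the failure mode this commit removes. The artifact names and types below are hypothetical; only the kork builder calls that appear in the diff itself are assumed. The deleted getBoundInlineArtifact re-matched a stage's already-evaluated inline artifact against the trigger's artifacts, which misbehaves once a trigger carries two or more artifacts of the same type; the fix returns the evaluated inline artifact directly.

import com.netflix.spinnaker.kork.artifacts.model.Artifact;
import com.netflix.spinnaker.kork.artifacts.model.ExpectedArtifact;
import java.util.List;

class InlineArtifactRebindSketch {
  public static void main(String[] args) {
    // Hypothetical trigger carrying two artifacts of the same type.
    List<Artifact> triggerArtifacts = List.of(
        Artifact.builder().type("docker/image").name("gcr.io/project/app").build(),
        Artifact.builder().type("docker/image").name("gcr.io/project/sidecar").build());

    // A stage-inlined artifact, already evaluated by the expression processor.
    Artifact inline =
        Artifact.builder().type("docker/image").name("gcr.io/project/app").build();

    // Removed behavior: wrap the inline artifact as a match pattern and
    // re-resolve it against the trigger artifacts, where both "docker/image"
    // entries are candidates for the match.
    ExpectedArtifact expected = ExpectedArtifact.builder().matchArtifact(inline).build();

    // Fixed behavior: the evaluated inline artifact is used as-is.
    Artifact bound = inline;
    System.out.println(bound.getName() + " (" + expected.getMatchArtifact().getType() + ")");
  }
}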
ArtifactUtils.java
@@ -28,7 +28,6 @@
 import com.netflix.spinnaker.kork.annotations.NonnullByDefault;
 import com.netflix.spinnaker.kork.artifacts.model.Artifact;
 import com.netflix.spinnaker.kork.artifacts.model.ExpectedArtifact;
-import com.netflix.spinnaker.kork.web.exceptions.InvalidRequestException;
 import com.netflix.spinnaker.orca.api.pipeline.models.PipelineExecution;
 import com.netflix.spinnaker.orca.api.pipeline.models.StageExecution;
 import com.netflix.spinnaker.orca.pipeline.model.StageContext;
@@ -49,7 +48,6 @@
 import javax.annotation.CheckReturnValue;
 import javax.annotation.Nonnull;
 import javax.annotation.Nullable;
-import org.apache.commons.lang3.ObjectUtils;
 import org.apache.commons.lang3.StringUtils;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
@@ -141,31 +139,7 @@ private List<Artifact> getAllArtifacts(
         contextParameterProcessor.process(
             boundArtifactMap, contextParameterProcessor.buildExecutionContext(stage), true);
 
-    Artifact evaluatedArtifact =
-        objectMapper.convertValue(evaluatedBoundArtifactMap, Artifact.class);
-    return getBoundInlineArtifact(evaluatedArtifact, stage.getExecution())
-        .orElse(evaluatedArtifact);
-  }
-
-  private Optional<Artifact> getBoundInlineArtifact(
-      @Nullable Artifact artifact, PipelineExecution execution) {
-    if (ObjectUtils.anyNull(
-        artifact, execution.getTrigger(), execution.getTrigger().getArtifacts())) {
-      return Optional.empty();
-    }
-    try {
-      ExpectedArtifact expectedArtifact =
-          ExpectedArtifact.builder().matchArtifact(artifact).build();
-      return ArtifactResolver.getInstance(execution.getTrigger().getArtifacts(), true)
-          .resolveExpectedArtifacts(List.of(expectedArtifact))
-          .getResolvedExpectedArtifacts()
-          .stream()
-          .findFirst()
-          .flatMap(this::getBoundArtifact);
-    } catch (InvalidRequestException e) {
-      log.debug("Could not match inline artifact with trigger bound artifacts", e);
-      return Optional.empty();
-    }
+    return objectMapper.convertValue(evaluatedBoundArtifactMap, Artifact.class);
   }
 
   public @Nullable Artifact getBoundArtifactForId(StageExecution stage, @Nullable String id) {
ArtifactUtilsSpec.groovy
@@ -81,29 +81,6 @@ class ArtifactUtilsSpec extends Specification {
     artifact.name == 'build/libs/my-jar-100.jar'
   }
 
-  def "should bind stage-inlined artifacts to trigger artifacts"() {
-    setup:
-    def execution = pipeline {
-      stage {
-        name = "upstream stage"
-        type = "stage1"
-        refId = "1"
-      }
-    }
-
-    execution.trigger = new DefaultTrigger('manual')
-    execution.trigger.artifacts.add(Artifact.builder().type('http/file').name('build/libs/my-jar-100.jar').build())
-
-    when:
-    def artifact = makeArtifactUtils().getBoundArtifactForStage(execution.stages[0], null, Artifact.builder()
-      .type('http/file')
-      .name('build/libs/my-jar-\\d+.jar')
-      .build())
-
-    then:
-    artifact.name == 'build/libs/my-jar-100.jar'
-  }
-
   def "should find upstream artifacts in small pipeline"() {
     when:
     def desired = execution.getStages().find { it.name == "desired" }
@@ -515,6 +492,120 @@ class ArtifactUtilsSpec extends Specification {
     initialArtifacts == finalArtifacts
   }
 
+  def "resolve expected artifact using default artifact"() {
+    given:
+    def matchArtifact = Artifact
+      .builder()
+      .name("my-artifact")
+      .artifactAccount("embedded-artifact")
+      .type("embedded/base64")
+      .build()
+    def defaultArtifact = Artifact
+      .builder()
+      .name("default-artifact")
+      .artifactAccount("embedded-artifact")
+      .type("embedded/base64")
+      .reference("bmVtZXNpcwo=")
+      .build()
+    def expectedArtifact = ExpectedArtifact
+      .builder()
+      .matchArtifact(matchArtifact)
+      .defaultArtifact(defaultArtifact)
+      .useDefaultArtifact(true)
+      .build()
+
+    def pipeline = [
+      id               : "01HE3GXEJX05143Y7JSGTRRB40",
+      trigger          : [
+        type: "manual",
+        // not passing artifacts in trigger
+      ],
+      expectedArtifacts: [expectedArtifact],
+    ]
+    def artifactUtils = makeArtifactUtils()
+
+    when:
+    artifactUtils.resolveArtifacts(pipeline)
+    List<ExpectedArtifact> resolvedArtifacts = objectMapper.convertValue(
+      pipeline.trigger.resolvedExpectedArtifacts,
+      new TypeReference<List<ExpectedArtifact>>() {}
+    )
+
+    then:
+    pipeline.trigger.artifacts.size() == 1
+    pipeline.trigger.expectedArtifacts.size() == 1
+    pipeline.trigger.resolvedExpectedArtifacts.size() == 1
+    resolvedArtifacts*.getBoundArtifact() == [defaultArtifact]
+  }
+
+  def "resolve expected artifact using prior artifact"() {
+    given:
+    def artifactName = "my-artifact-name"
+    def priorArtifact = Artifact
+      .builder()
+      .name(artifactName)
+      .artifactAccount("embedded-artifact")
+      .type("embedded/base64")
+      .reference("b3NvcmlvCg==")
+      .build()
+
+    def pipelineId = "01HE3GXEJX05143Y7JSGTRRB41"
+    def priorExecution = pipeline {
+      id:
+      pipelineId
+      status:
+      ExecutionStatus.SUCCEEDED
+      stage {
+        refId = "1"
+        outputs.artifacts = [priorArtifact]
+      }
+    }
+
+    ExecutionRepository.ExecutionCriteria criteria = new ExecutionRepository.ExecutionCriteria();
+    criteria.setPageSize(1);
+    criteria.setSortType(ExecutionRepository.ExecutionComparator.START_TIME_OR_ID);
+
+    def executionRepositoryMock = Mock(ExecutionRepository) {
+      retrievePipelinesForPipelineConfigId(pipelineId, criteria) >> Observable.just(priorExecution)
+    }
+
+    def matchArtifact = Artifact
+      .builder()
+      .name(artifactName)
+      .artifactAccount("embedded-artifact")
+      .type("embedded/base64")
+      .build()
+    def expectedArtifact = ExpectedArtifact
+      .builder()
+      .matchArtifact(matchArtifact)
+      .usePriorArtifact(true)
+      .build()
+
+    def pipeline = [
+      id               : pipelineId,
+      trigger          : [
+        type: "manual",
+        // not passing artifacts in trigger
+      ],
+      expectedArtifacts: [expectedArtifact],
+    ]
+
+    def artifactUtils = makeArtifactUtilsWithStub(executionRepositoryMock)
+
+    when:
+    artifactUtils.resolveArtifacts(pipeline)
+    List<ExpectedArtifact> resolvedArtifacts = objectMapper.convertValue(
+      pipeline.trigger.resolvedExpectedArtifacts,
+      new TypeReference<List<ExpectedArtifact>>() {}
+    )
+
+    then:
+    pipeline.trigger.artifacts.size() == 1
+    pipeline.trigger.expectedArtifacts.size() == 1
+    pipeline.trigger.resolvedExpectedArtifacts.size() == 1
+    resolvedArtifacts*.getBoundArtifact() == [priorArtifact]
+  }
+
   private List<Artifact> extractTriggerArtifacts(Map<String, Object> trigger) {
     return objectMapper.convertValue(trigger.artifacts, new TypeReference<List<Artifact>>(){});
   }
DependentPipelineStarter.groovy
@@ -87,15 +87,6 @@ class DependentPipelineStarter implements ApplicationContextAware {
       it.expectedArtifactIds ?: []
     }
 
-    // we are following a similar approach as triggers above
-    // expectedArtifacts can be used in triggers and stages
-    // for now we identified DeployManifestStage
-    // in ResolveDeploySourceManifestTask using ManifestEvaluator.getRequiredArtifacts
-    def requiredArtifactIds = pipelineConfig.get("stages", []).collectMany {
-      it.requiredArtifactIds ?: []
-    }
-    expectedArtifactIds.addAll(requiredArtifactIds)
-
     pipelineConfig.trigger = [
       type    : "pipeline",
       user    : authenticationDetails?.user ?: user ?: "[anonymous]",
DependentPipelineStarterSpec.groovy
@@ -21,6 +21,7 @@ import com.fasterxml.jackson.databind.ObjectMapper
 import com.netflix.spectator.api.NoopRegistry
 import com.netflix.spinnaker.kork.artifacts.model.Artifact
 import com.netflix.spinnaker.kork.artifacts.model.ExpectedArtifact
+import com.netflix.spinnaker.kork.web.exceptions.InvalidRequestException
 import com.netflix.spinnaker.orca.api.pipeline.models.PipelineExecution
 import com.netflix.spinnaker.orca.api.pipeline.models.StageExecution
 import com.netflix.spinnaker.orca.api.pipeline.ExecutionPreprocessor
@@ -577,41 +578,48 @@ class DependentPipelineStarterSpec extends Specification {
     result.trigger.artifacts.findAll { it.name == "gcr.io/project/image" }.version.containsAll(["42", "1337"])
   }
 
-  def "should find expected artifacts when pipeline has requiredArtifactIds and triggered by pipeline stage"() {
+  def "should fail pipeline when parent pipeline does not provide expected artifacts"() {
     given:
-    def requiredArtifactId = "docker-artifact-id"
-    def expectedImage = Artifact.builder().type("docker/image").name("docker.io/org/image").build()
-    ArrayList<ExpectedArtifact> expectedArtifacts = [
-      ExpectedArtifact.builder().id(requiredArtifactId).matchArtifact(expectedImage).build()
-    ]
+    def artifact = Artifact.builder().type("embedded/base64").name("baked-manifest").build()
+    def expectedArtifactId = "826018cd-e278-4493-a6a5-4b0a0166a843"
+    def expectedArtifact = ExpectedArtifact
+      .builder()
+      .id(expectedArtifactId)
+      .matchArtifact(artifact)
+      .build()
 
+    def parentPipeline = pipeline {
+      name = "my-parent-pipeline"
+      authentication = new PipelineExecution.AuthenticationDetails("username", "account1")
+      pipelineConfigId = "fe0b3537-3101-46a1-8e08-ab57cf65a207"
+      stage {
+        id = "my-stage-1"
+        refId = "1"
+        // not passing artifacts
+      }
+    }
+
     def triggeredPipelineConfig = [
       name             : "triggered-by-stage",
       id               : "triggered-id",
       stages           : [
         [
-          name               : "Deploy (Manifest)",
-          type               : "deployManifest",
-          requiredArtifactIds: [requiredArtifactId]
+          name: "My Stage",
+          type: "bakeManifest",
         ]
       ],
+      expectedArtifacts: [
+        expectedArtifact
+      ],
+      triggers         : [
+        [
+          type               : "pipeline",
+          pipeline           : parentPipeline.pipelineConfigId,
+          expectedArtifactIds: [expectedArtifactId]
+        ]
+      ],
-      expectedArtifacts: expectedArtifacts,
-      triggers         : [],
     ]
 
-    Artifact testArtifact = Artifact.builder().type("docker/image").name("docker.io/org/image").version("alpine").build()
-
-    def parentPipeline = pipeline {
-      name = "parent-pipeline"
-      authentication = new PipelineExecution.AuthenticationDetails("username", "account1")
-      pipelineConfigId = "f837d603-bcc8-41c4-8ebc-bf0b23f59108"
-      stage {
-        id = "stage1"
-        refId = "1"
-        outputs = [artifacts: [testArtifact]]
-      }
-    }
-
     def executionLauncher = Mock(ExecutionLauncher)
     def applicationContext = new StaticApplicationContext()
     applicationContext.beanFactory.registerSingleton("pipelineLauncher", executionLauncher)
@@ -625,31 +633,32 @@
     )
 
     and:
-    executionLauncher.start(*_) >> { _, p ->
+    def error
+    executionLauncher.fail(_, _, _) >> { PIPELINE, processedPipeline, artifactError ->
+      error = artifactError
       return pipeline {
-        name = p.name
-        id = p.name
-        trigger = mapper.convertValue(p.trigger, Trigger)
+        name = processedPipeline.name
+        id = processedPipeline.name
+        trigger = mapper.convertValue(processedPipeline.trigger, Trigger)
       }
     }
-    artifactUtils.getArtifactsForPipelineId(*_) >> {
-      return new ArrayList<Artifact>();
-    }
 
     when:
     def result = dependentPipelineStarter.trigger(
       triggeredPipelineConfig,
       null,
       parentPipeline,
       [:],
-      "stage1",
+      "my-stage-1",
       buildAuthenticatedUser("username", [])
     )
 
     then:
-    result.trigger.artifacts.size() == 1
-    result.trigger.artifacts*.name.contains(testArtifact.name)
-    result.trigger.artifacts.findAll { it.name == "docker.io/org/image" }.version.containsAll(["alpine"])
+    1 * artifactUtils.resolveArtifacts(_)
+    error != null
+    error instanceof InvalidRequestException
+    error.message == "Unmatched expected artifact " + expectedArtifact + " could not be resolved."
+    result.trigger.artifacts.size() == 0
   }
 
   def "should resolve expressions in trigger"() {
