Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

chore(dependencies): Upgrade Spring Boot to 2.2.1 #3333

Merged
merged 9 commits into from
Dec 17, 2019
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
4 changes: 2 additions & 2 deletions gradle.properties
Original file line number Diff line number Diff line change
Expand Up @@ -2,6 +2,6 @@
fiatVersion=1.9.2
enablePublishing=false
spinnakerGradleVersion=7.0.1
korkVersion=6.22.2
korkVersion=7.2.1
org.gradle.parallel=true
keikoVersion=2.14.2
keikoVersion=3.0.0
Original file line number Diff line number Diff line change
Expand Up @@ -18,6 +18,7 @@ package com.netflix.spinnaker.orca.pipeline.model

import com.fasterxml.jackson.annotation.JsonAnyGetter
import com.fasterxml.jackson.annotation.JsonAnySetter
import com.fasterxml.jackson.annotation.JsonProperty
import com.fasterxml.jackson.databind.annotation.JsonDeserialize
import com.netflix.spinnaker.kork.artifacts.model.Artifact
import com.netflix.spinnaker.kork.artifacts.model.ExpectedArtifact
Expand All @@ -31,8 +32,11 @@ interface Trigger {
val parameters: Map<String, Any>
val artifacts: List<Artifact>
val notifications: List<Map<String, Any>>
@get:JsonProperty("rebake")
var isRebake: Boolean
@get:JsonProperty("dryRun")
var isDryRun: Boolean
@get:JsonProperty("strategy")
var isStrategy: Boolean
var resolvedExpectedArtifacts: List<ExpectedArtifact>
@set:JsonAnySetter
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -21,6 +21,7 @@
import static java.util.stream.Collectors.toList;
import static org.apache.commons.lang3.StringUtils.isEmpty;

import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.core.type.TypeReference;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.netflix.spinnaker.kork.artifacts.model.Artifact;
Expand Down Expand Up @@ -228,14 +229,14 @@ public ArtifactResolver(

public void resolveArtifacts(@Nonnull Map pipeline) {
Map<String, Object> trigger = (Map<String, Object>) pipeline.get("trigger");

List<?> originalExpectedArtifacts =
Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

could you describe what problem you ran into here and why all the extra code was necessary to work around it - not sure I totally understand the justification just from looking at it

Copy link
Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

This was also committed in its own commit to help document the change: 688e1f9 It should explain the problem at the method level.
Feel free to read these issues/PRs to better grasp the overall context: https://docs.google.com/spreadsheets/d/112ul54xZGcN5pgKfd4hJlcstDGue-0LWghyNQ8lqBEc
A better fix would require a lot more time/effort, although probably not too much for someone with deep knowledge of the codebase.

Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

👍

Optional.ofNullable((List<?>) pipeline.get("expectedArtifacts")).orElse(emptyList());

List<ExpectedArtifact> expectedArtifacts =
Optional.ofNullable((List<?>) pipeline.get("expectedArtifacts"))
.map(
list ->
list.stream()
.map(it -> objectMapper.convertValue(it, ExpectedArtifact.class))
.collect(toList()))
.orElse(emptyList());
originalExpectedArtifacts.stream()
.map(it -> objectMapper.convertValue(it, ExpectedArtifact.class))
.collect(toList());

List<Artifact> receivedArtifactsFromPipeline =
Optional.ofNullable((List<?>) pipeline.get("receivedArtifacts"))
Expand Down Expand Up @@ -288,12 +289,41 @@ public void resolveArtifacts(@Nonnull Map pipeline) {
objectMapper.readValue(
objectMapper.writeValueAsString(expectedArtifacts),
List.class)); // Add the actual expectedArtifacts we included in the ids.

updateExpectedArtifacts(originalExpectedArtifacts, expectedArtifacts);
} catch (IOException e) {
throw new ArtifactResolutionException(
"Failed to store artifacts in trigger: " + e.getMessage(), e);
}
}

/**
 * Copies each resolved {@code boundArtifact} from {@code updatedExpectedArtifacts} back onto the
 * corresponding entry of {@code originalExpectedArtifacts}, matching entries by artifact id.
 *
 * <p>The original list may contain either {@link ExpectedArtifact} instances or generic
 * {@code Map} representations (e.g. when the pipeline was deserialized without a target type), so
 * both shapes are handled in place.
 *
 * <p>NOTE(review): assumes every original entry's id is present in
 * {@code updatedExpectedArtifacts} — {@code findExpectedArtifactById} throws otherwise.
 *
 * @throws JsonProcessingException if serializing a bound artifact to JSON fails
 */
private void updateExpectedArtifacts(
    List<?> originalExpectedArtifacts, List<ExpectedArtifact> updatedExpectedArtifacts)
    throws JsonProcessingException {

  for (Object artifact : originalExpectedArtifacts) {
    if (artifact instanceof ExpectedArtifact) {
      // Typed entry: mutate the POJO directly.
      ExpectedArtifact ea = (ExpectedArtifact) artifact;
      ea.setBoundArtifact(
          findExpectedArtifactById(updatedExpectedArtifacts, ea.getId()).getBoundArtifact());
    } else {
      // Map-shaped entry: store the bound artifact as a Map so the pipeline stays
      // serialization-consistent. The JSON round-trip converts Artifact -> Map.
      Map<String, Object> ea = (Map<String, Object>) artifact;
      ea.put(
          "boundArtifact",
          objectMapper.readValue(
              objectMapper.writeValueAsString(
                  findExpectedArtifactById(updatedExpectedArtifacts, (String) ea.get("id"))
                      .getBoundArtifact()),
              Map.class));
    }
  }
}

/**
 * Returns the expected artifact in {@code expectedArtifacts} whose id equals {@code id}.
 *
 * @param expectedArtifacts the artifacts to search
 * @param id the id to match (compared via equals; a null id never matches)
 * @return the first matching artifact
 * @throws IllegalStateException if no artifact with the given id exists — replaces the
 *     context-free NoSuchElementException that an unchecked Optional.get() would raise
 */
private ExpectedArtifact findExpectedArtifactById(
    List<ExpectedArtifact> expectedArtifacts, String id) {
  return expectedArtifacts.stream()
      .filter(it -> id.equals(it.getId()))
      .findFirst()
      .orElseThrow(
          () -> new IllegalStateException("No expected artifact found with id: " + id));
}

private List<Artifact> getPriorArtifacts(final Map<String, Object> pipeline) {
// set pageSize to a single record to avoid hydrating all of the stored Executions for
// the pipeline, since getArtifactsForPipelineId only uses the most recent Execution from the
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -462,10 +462,10 @@ class ArtifactResolverSpec extends Specification {
bound.findAll({ a -> expectedBound.contains(a) }).size() == bound.size()
}

def "resolveArtifacts sets the bound artifact on an expected artifact"() {
def "resolveArtifacts sets the bound artifact on an expected artifact when the expectedArtifact is ExpectedArtifact"() {
given:
def matchArtifact = Artifact.builder().type("docker/.*").build()
def expectedArtifact = ExpectedArtifact.builder().matchArtifact(matchArtifact).build()
def expectedArtifact = ExpectedArtifact.builder().matchArtifact(matchArtifact).id("id1").build()
def receivedArtifact = Artifact.builder().name("my-artifact").type("docker/image").build()
def pipeline = [
id: "abc",
Expand All @@ -483,10 +483,31 @@ class ArtifactResolverSpec extends Specification {
pipeline.expectedArtifacts[0].boundArtifact == receivedArtifact
}

// Exercises the Map-shaped path of artifact resolution: expectedArtifacts and
// receivedArtifacts are supplied as plain Maps (the shape produced by generic JSON
// deserialization) rather than typed objects, and the resolver must still set
// boundArtifact in place on the original Map entry.
def "resolveArtifacts sets the bound artifact on an expected artifact when the expectedArtifact is Map<String, Object>"() {
  given:
  def matchArtifact = Artifact.builder().type("docker/.*").build()
  // toMap converts the builder output into the Map representation under test.
  def expectedArtifact = toMap(ExpectedArtifact.builder().matchArtifact(matchArtifact).id("id1").build())
  def receivedArtifact = toMap(Artifact.builder().name("my-artifact").type("docker/image").build())
  def pipeline = [
    id: "abc",
    trigger: [:],
    expectedArtifacts: [expectedArtifact],
    receivedArtifacts: [receivedArtifact],
  ]
  def artifactResolver = makeArtifactResolver()

  when:
  artifactResolver.resolveArtifacts(pipeline)

  then:
  // The original Map entry is mutated in place with the matched received artifact.
  pipeline.expectedArtifacts.size() == 1
  pipeline.expectedArtifacts[0].boundArtifact == receivedArtifact
}

def "resolveArtifacts adds received artifacts to the trigger, skipping duplicates"() {
given:
def matchArtifact = Artifact.builder().name("my-pipeline-artifact").type("docker/.*").build()
def expectedArtifact = ExpectedArtifact.builder().matchArtifact(matchArtifact).build()
def expectedArtifact = ExpectedArtifact.builder().matchArtifact(matchArtifact).id("id1").build()
def receivedArtifact = Artifact.builder().name("my-pipeline-artifact").type("docker/image").build()
def triggerArtifact = Artifact.builder().name("my-trigger-artifact").type("docker/image").build()
def bothArtifact = Artifact.builder().name("my-both-artifact").type("docker/image").build()
Expand All @@ -512,7 +533,7 @@ class ArtifactResolverSpec extends Specification {
def "resolveArtifacts is idempotent"() {
given:
def matchArtifact = Artifact.builder().name("my-pipeline-artifact").type("docker/.*").build()
def expectedArtifact = ExpectedArtifact.builder().matchArtifact(matchArtifact).build()
def expectedArtifact = ExpectedArtifact.builder().matchArtifact(matchArtifact).id("id1").build()
def receivedArtifact = Artifact.builder().name("my-pipeline-artifact").type("docker/image").build()
def triggerArtifact = Artifact.builder().name("my-trigger-artifact").type("docker/image").build()
def bothArtifact = Artifact.builder().name("my-both-artifact").type("docker/image").build()
Expand All @@ -539,4 +560,8 @@ class ArtifactResolverSpec extends Specification {
/** Deserializes the trigger's `artifacts` entry into a typed list of Artifact objects. */
private List<Artifact> extractTriggerArtifacts(Map<String, Object> trigger) {
  def rawArtifacts = trigger.artifacts
  return objectMapper.convertValue(rawArtifacts, new TypeReference<List<Artifact>>() {})
}

/** Converts any value to its Map representation via Jackson, mirroring JSON deserialization. */
private Map<String, Object> toMap(Object value) {
  objectMapper.convertValue(value, Map.class)
}
}
Original file line number Diff line number Diff line change
Expand Up @@ -161,7 +161,7 @@ class ContextParameterProcessorSpec extends Specification {
result.test == source.test
summary[escapedExpression].size() == 1
summary[escapedExpression][0].level as String == ExpressionEvaluationSummary.Result.Level.ERROR.name()
summary[escapedExpression][0].exceptionType == SpelEvaluationException
summary[escapedExpression][0].exceptionType == SpelEvaluationException.typeName

where:
testCase | desc
Expand All @@ -184,7 +184,7 @@ class ContextParameterProcessorSpec extends Specification {
result.test == source.test
summary[escapedExpression].size() == 1
summary[escapedExpression][0].level as String == ExpressionEvaluationSummary.Result.Level.ERROR.name()
summary[escapedExpression][0].exceptionType == SpelEvaluationException
summary[escapedExpression][0].exceptionType == SpelEvaluationException.typeName

where:
testCase | desc
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -175,6 +175,7 @@ class DependentPipelineStarterSpec extends Specification {
name : "triggered",
id : "triggered",
expectedArtifacts: [[
id: "id1",
matchArtifact: [
kind: "gcs",
name: "gs://test/file.yaml",
Expand Down Expand Up @@ -234,6 +235,7 @@ class DependentPipelineStarterSpec extends Specification {
name : "triggered",
id : "triggered",
expectedArtifacts: [[
id: "id1",
matchArtifact: [
kind: "gcs",
name: "gs://test/file.yaml",
Expand Down Expand Up @@ -303,6 +305,7 @@ class DependentPipelineStarterSpec extends Specification {
name : "triggered",
id : "triggered",
expectedArtifacts: [[
id: "id1",
matchArtifact: [
kind: "gcs",
name: "gs://test/file.yaml",
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -17,7 +17,6 @@

import com.fasterxml.jackson.core.type.TypeReference;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.netflix.spinnaker.kork.artifacts.model.ExpectedArtifact;
import com.netflix.spinnaker.orca.front50.PipelineModelMutator;
import com.netflix.spinnaker.orca.pipelinetemplate.exceptions.TemplateLoaderException;
import com.netflix.spinnaker.orca.pipelinetemplate.loader.TemplateLoader;
Expand All @@ -29,6 +28,7 @@
import com.netflix.spinnaker.orca.pipelinetemplate.v1schema.render.RenderContext;
import com.netflix.spinnaker.orca.pipelinetemplate.v1schema.render.RenderUtil;
import com.netflix.spinnaker.orca.pipelinetemplate.v1schema.render.Renderer;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.stream.Collectors;
Expand Down Expand Up @@ -128,7 +128,6 @@ private void applyConfigurationsFromTemplate(
}
}

@SuppressWarnings("unchecked")
private void applyConfigurations(
PipelineConfiguration configuration, Map<String, Object> pipeline) {
if (configuration.getConcurrentExecutions() != null) {
Expand Down Expand Up @@ -165,7 +164,7 @@ private void applyConfigurations(
TemplateMerge.mergeDistinct(
pipelineTemplateObjectMapper.convertValue(
pipeline.get("expectedArtifacts"),
new TypeReference<List<ExpectedArtifact>>() {}),
new TypeReference<List<HashMap<String, Object>>>() {}),
Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

what was the reason for removing the ExpectedArtifact here?

Copy link
Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

I've committed this change in its own commit within the PR so it'd be documented: 27ed129
The test added in the commit would fail without this code change.

Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Thanks!

configuration.getExpectedArtifacts()));
}
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -123,8 +123,7 @@ public List<V2StageDefinition> getStages() {
return Collections.emptyList();
}
ObjectMapper oj = new ObjectMapper();
TypeReference v2StageDefTypeRef = new TypeReference<List<V2StageDefinition>>() {};
return oj.convertValue(pipelineStages, v2StageDefTypeRef);
return oj.convertValue(pipelineStages, new TypeReference<List<V2StageDefinition>>() {});
}

public void setStages(List<V2StageDefinition> stages) {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -67,6 +67,44 @@ class TemplatedPipelineModelMutatorSpec extends Specification {
0 * subject.applyConfigurationsFromTemplate(_, _, pipeline)
}

// Verifies that expectedArtifacts declared in the pipeline configuration are merged with
// those supplied by the loaded template when 'expectedArtifacts' appears in
// configuration.inherit — the resulting pipeline should contain both entries.
def "should merge expectedArtifacts when configured to inherit them"() {
  given:
  def pipeline = [
    config: [
      schema: '1',
      pipeline: [
        template: [
          source: 'static-template'
        ]
      ],
      configuration: [
        // Opt in to inheriting the template's expectedArtifacts.
        inherit: ['expectedArtifacts'],
        expectedArtifacts: [
          [
            id: 'artifact1'
          ] as NamedHashMap
        ]
      ]
    ]
  ]

  when:
  subject.mutate(pipeline)

  then:
  // Stubbed template contributes a second, distinct expected artifact.
  1 * templateLoader.load(_) >> { [new PipelineTemplate(
    schema: '1',
    configuration: new Configuration(
      expectedArtifacts: [
        [
          id: 'artifact2'
        ] as NamedHashMap
      ]
    )
  )]}
  // Both the configured and the inherited artifact survive the merge.
  pipeline.expectedArtifacts.size() == 2
}

def "should apply configurations from template if template is statically sourced"() {
given:
def pipeline = [
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -40,7 +40,7 @@ import org.springframework.context.annotation.Configuration
import org.springframework.context.annotation.Primary
import redis.clients.jedis.Jedis
import redis.clients.jedis.JedisCluster
import redis.clients.util.Pool
import redis.clients.jedis.util.Pool
import java.time.Clock
import java.util.Optional

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -40,7 +40,7 @@ import redis.clients.jedis.HostAndPort
import redis.clients.jedis.Jedis
import redis.clients.jedis.JedisCluster
import redis.clients.jedis.Protocol
import redis.clients.util.Pool
import redis.clients.jedis.util.Pool
import java.net.URI
import java.time.Clock
import java.time.Duration
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -21,7 +21,7 @@ import com.fasterxml.jackson.module.kotlin.readValue
import com.netflix.spinnaker.orca.q.pending.PendingExecutionService
import com.netflix.spinnaker.q.Message
import redis.clients.jedis.Jedis
import redis.clients.util.Pool
import redis.clients.jedis.util.Pool

class RedisPendingExecutionService(
private val pool: Pool<Jedis>,
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -33,7 +33,7 @@ import org.springframework.context.annotation.Bean
import org.springframework.context.annotation.Configuration
import org.springframework.test.context.junit4.SpringRunner
import redis.clients.jedis.Jedis
import redis.clients.util.Pool
import redis.clients.jedis.util.Pool

@Configuration
class RedisTestConfig {
Expand Down
3 changes: 0 additions & 3 deletions orca-redis/orca-redis.gradle
Original file line number Diff line number Diff line change
Expand Up @@ -2,9 +2,6 @@ apply from: "$rootDir/gradle/kotlin.gradle"
apply from: "$rootDir/gradle/spock.gradle"

dependencies {
api("redis.clients:jedis:2.10.2") {
force = true
}
api("com.netflix.spinnaker.kork:kork-jedis")

implementation(project(":orca-core"))
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -17,7 +17,7 @@

import java.net.URI;
import redis.clients.jedis.Protocol;
import redis.clients.util.JedisURIHelper;
import redis.clients.jedis.util.JedisURIHelper;

public class RedisConnectionInfo {
public boolean hasPassword() {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -2,8 +2,10 @@

import org.jetbrains.annotations.NotNull;
import redis.clients.jedis.JedisCluster;
import redis.clients.jedis.params.SetParams;

public class RedisClusterNotificationClusterLock implements NotificationClusterLock {

private final JedisCluster cluster;

public RedisClusterNotificationClusterLock(JedisCluster cluster) {
Expand All @@ -13,6 +15,10 @@ public RedisClusterNotificationClusterLock(JedisCluster cluster) {
@Override
public boolean tryAcquireLock(@NotNull String notificationType, long lockTimeoutSeconds) {
String key = "lock:" + notificationType;
return "OK".equals(cluster.set(key, "\uD83D\uDD12", "NX", "EX", lockTimeoutSeconds));
// assuming lockTimeoutSeconds will be < 2147483647
return "OK"
.equals(
cluster.set(
key, "\uD83D\uDD12", SetParams.setParams().nx().ex((int) lockTimeoutSeconds)));
}
}
Original file line number Diff line number Diff line change
Expand Up @@ -18,6 +18,7 @@
import com.netflix.spinnaker.kork.jedis.RedisClientDelegate;
import com.netflix.spinnaker.kork.jedis.RedisClientSelector;
import javax.annotation.Nonnull;
import redis.clients.jedis.params.SetParams;

public class RedisNotificationClusterLock implements NotificationClusterLock {

Expand All @@ -32,7 +33,14 @@ public boolean tryAcquireLock(@Nonnull String notificationType, long lockTimeout
String key = "lock:" + notificationType;
return redisClientDelegate.withCommandsClient(
client -> {
return "OK".equals(client.set(key, "\uD83D\uDD12", "NX", "EX", lockTimeoutSeconds));
return "OK"
.equals(
client
// assuming lockTimeoutSeconds will be < 2147483647
.set(
key,
"\uD83D\uDD12",
SetParams.setParams().nx().ex((int) lockTimeoutSeconds)));
});
}
}
Loading