This repository was archived by the owner on Oct 2, 2025. It is now read-only.

Commit 91de7c3

fix(artifacts): Automated triggers with artifact constraints are broken if you have 2 or more of the same type (backport #4579) (#4587)
* fix(artifacts): Automated triggers with artifact constraints are broken if you have 2 or more of the same type (#4579)

  * refactor(artifacts): partially reverting #4397
  * refactor(artifacts): reverted #4489
  * refactor(artifacts): reverted #4526
  * test(artifacts): added test when parent pipeline does not provide expected artifacts
  * test(artifacts): resolve artifacts for default and prior artifacts

  ---------

  Co-authored-by: Cameron Motevasselani <[email protected]>

  (cherry picked from commit 645059d)

  # Conflicts:
  #   orca-core/src/test/groovy/com/netflix/spinnaker/orca/pipeline/util/ArtifactUtilsSpec.groovy

* fix(artifacts): resolving git conflicts from #4579 for release-1.31.x (#4590)

---------

Co-authored-by: Nemesis Osorio <[email protected]>
1 parent 6ad5cd0 commit 91de7c3
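The failure described in the title involves a pipeline whose automated trigger declares artifact constraints on two or more artifacts of the same type. The Groovy sketch below is a hedged illustration of such a configuration, not part of this commit: the names, IDs, and values are invented, and only the Artifact/ExpectedArtifact builders that appear in the diffs below are assumed.

// Illustrative sketch only -- not from this commit. Names, IDs, and values are invented.
import com.netflix.spinnaker.kork.artifacts.model.Artifact
import com.netflix.spinnaker.kork.artifacts.model.ExpectedArtifact

// Two artifact constraints of the same type ("docker/image"), distinguished only by name.
def firstImage  = Artifact.builder().type("docker/image").name("gcr.io/project/image").build()
def secondImage = Artifact.builder().type("docker/image").name("gcr.io/project/other").build()

def pipelineConfig = [
  name             : "downstream-pipeline",
  id               : "downstream-id",
  expectedArtifacts: [
    ExpectedArtifact.builder().id("constraint-1").matchArtifact(firstImage).build(),
    ExpectedArtifact.builder().id("constraint-2").matchArtifact(secondImage).build(),
  ],
  triggers         : [
    [
      type               : "pipeline",
      pipeline           : "parent-pipeline-config-id",
      // The automated trigger is constrained on both same-type artifacts;
      // this is the combination the fix targets.
      expectedArtifactIds: ["constraint-1", "constraint-2"],
    ]
  ],
]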

File tree

  orca-core/src/main/java/com/netflix/spinnaker/orca/pipeline/util/ArtifactUtils.java
  orca-core/src/test/groovy/com/netflix/spinnaker/orca/pipeline/util/ArtifactUtilsSpec.groovy
  orca-front50/src/main/groovy/com/netflix/spinnaker/orca/front50/DependentPipelineStarter.groovy
  orca-front50/src/test/groovy/com/netflix/spinnaker/orca/front50/DependentPipelineStarterSpec.groovy

4 files changed: +159 −94 lines changed

orca-core/src/main/java/com/netflix/spinnaker/orca/pipeline/util/ArtifactUtils.java

Lines changed: 1 addition & 27 deletions
@@ -28,7 +28,6 @@
 import com.netflix.spinnaker.kork.annotations.NonnullByDefault;
 import com.netflix.spinnaker.kork.artifacts.model.Artifact;
 import com.netflix.spinnaker.kork.artifacts.model.ExpectedArtifact;
-import com.netflix.spinnaker.kork.web.exceptions.InvalidRequestException;
 import com.netflix.spinnaker.orca.api.pipeline.models.PipelineExecution;
 import com.netflix.spinnaker.orca.api.pipeline.models.StageExecution;
 import com.netflix.spinnaker.orca.pipeline.model.StageContext;
@@ -49,7 +48,6 @@
 import javax.annotation.CheckReturnValue;
 import javax.annotation.Nonnull;
 import javax.annotation.Nullable;
-import org.apache.commons.lang3.ObjectUtils;
 import org.apache.commons.lang3.StringUtils;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
@@ -141,31 +139,7 @@ private List<Artifact> getAllArtifacts(
         contextParameterProcessor.process(
             boundArtifactMap, contextParameterProcessor.buildExecutionContext(stage), true);

-    Artifact evaluatedArtifact =
-        objectMapper.convertValue(evaluatedBoundArtifactMap, Artifact.class);
-    return getBoundInlineArtifact(evaluatedArtifact, stage.getExecution())
-        .orElse(evaluatedArtifact);
-  }
-
-  private Optional<Artifact> getBoundInlineArtifact(
-      @Nullable Artifact artifact, PipelineExecution execution) {
-    if (ObjectUtils.anyNull(
-        artifact, execution.getTrigger(), execution.getTrigger().getArtifacts())) {
-      return Optional.empty();
-    }
-    try {
-      ExpectedArtifact expectedArtifact =
-          ExpectedArtifact.builder().matchArtifact(artifact).build();
-      return ArtifactResolver.getInstance(execution.getTrigger().getArtifacts(), true)
-          .resolveExpectedArtifacts(List.of(expectedArtifact))
-          .getResolvedExpectedArtifacts()
-          .stream()
-          .findFirst()
-          .flatMap(this::getBoundArtifact);
-    } catch (InvalidRequestException e) {
-      log.debug("Could not match inline artifact with trigger bound artifacts", e);
-      return Optional.empty();
-    }
+    return objectMapper.convertValue(evaluatedBoundArtifactMap, Artifact.class);
   }

   public @Nullable Artifact getBoundArtifactForId(StageExecution stage, @Nullable String id) {
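Read alongside the spec deleted in the next file, the practical effect of this revert appears to be that an inline artifact passed to getBoundArtifactForStage is evaluated and returned as-is rather than re-matched against the trigger's artifacts. The Groovy sketch below is a minimal, hedged illustration of that restored behavior, assuming the helpers from ArtifactUtilsSpec (pipeline {}, DefaultTrigger, makeArtifactUtils()); it is not code from the commit.

// Illustrative sketch only -- assumes the spec helpers from ArtifactUtilsSpec.groovy.
def execution = pipeline {
  stage {
    name = "upstream stage"
    type = "stage1"
    refId = "1"
  }
}
execution.trigger = new DefaultTrigger('manual')
execution.trigger.artifacts.add(Artifact.builder().type('http/file').name('build/libs/my-jar-100.jar').build())

def inline = Artifact.builder()
  .type('http/file')
  .name('build/libs/my-jar-\\d+.jar')
  .build()

def bound = makeArtifactUtils().getBoundArtifactForStage(execution.stages[0], null, inline)
// After this revert the inline artifact comes back unchanged:
// bound.name == 'build/libs/my-jar-\\d+.jar' (it is no longer bound to 'build/libs/my-jar-100.jar').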

orca-core/src/test/groovy/com/netflix/spinnaker/orca/pipeline/util/ArtifactUtilsSpec.groovy

Lines changed: 114 additions & 23 deletions
@@ -81,29 +81,6 @@ class ArtifactUtilsSpec extends Specification {
     artifact.name == 'build/libs/my-jar-100.jar'
   }

-  def "should bind stage-inlined artifacts to trigger artifacts"() {
-    setup:
-    def execution = pipeline {
-      stage {
-        name = "upstream stage"
-        type = "stage1"
-        refId = "1"
-      }
-    }
-
-    execution.trigger = new DefaultTrigger('manual')
-    execution.trigger.artifacts.add(Artifact.builder().type('http/file').name('build/libs/my-jar-100.jar').build())
-
-    when:
-    def artifact = makeArtifactUtils().getBoundArtifactForStage(execution.stages[0], null, Artifact.builder()
-      .type('http/file')
-      .name('build/libs/my-jar-\\d+.jar')
-      .build())
-
-    then:
-    artifact.name == 'build/libs/my-jar-100.jar'
-  }
-
   def "should find upstream artifacts in small pipeline"() {
     when:
     def desired = execution.getStages().find { it.name == "desired" }
@@ -515,6 +492,120 @@
     initialArtifacts == finalArtifacts
   }

+  def "resolve expected artifact using default artifact"() {
+    given:
+    def matchArtifact = Artifact
+      .builder()
+      .name("my-artifact")
+      .artifactAccount("embedded-artifact")
+      .type("embedded/base64")
+      .build()
+    def defaultArtifact = Artifact
+      .builder()
+      .name("default-artifact")
+      .artifactAccount("embedded-artifact")
+      .type("embedded/base64")
+      .reference("bmVtZXNpcwo=")
+      .build()
+    def expectedArtifact = ExpectedArtifact
+      .builder()
+      .matchArtifact(matchArtifact)
+      .defaultArtifact(defaultArtifact)
+      .useDefaultArtifact(true)
+      .build()
+
+    def pipeline = [
+      id               : "01HE3GXEJX05143Y7JSGTRRB40",
+      trigger          : [
+        type: "manual",
+        // not passing artifacts in trigger
+      ],
+      expectedArtifacts: [expectedArtifact],
+    ]
+    def artifactUtils = makeArtifactUtils()
+
+    when:
+    artifactUtils.resolveArtifacts(pipeline)
+    List<ExpectedArtifact> resolvedArtifacts = objectMapper.convertValue(
+      pipeline.trigger.resolvedExpectedArtifacts,
+      new TypeReference<List<ExpectedArtifact>>() {}
+    )
+
+    then:
+    pipeline.trigger.artifacts.size() == 1
+    pipeline.trigger.expectedArtifacts.size() == 1
+    pipeline.trigger.resolvedExpectedArtifacts.size() == 1
+    resolvedArtifacts*.getBoundArtifact() == [defaultArtifact]
+  }
+
+  def "resolve expected artifact using prior artifact"() {
+    given:
+    def artifactName = "my-artifact-name"
+    def priorArtifact = Artifact
+      .builder()
+      .name(artifactName)
+      .artifactAccount("embedded-artifact")
+      .type("embedded/base64")
+      .reference("b3NvcmlvCg==")
+      .build()
+
+    def pipelineId = "01HE3GXEJX05143Y7JSGTRRB41"
+    def priorExecution = pipeline {
+      id:
+        pipelineId
+      status:
+        ExecutionStatus.SUCCEEDED
+      stage {
+        refId = "1"
+        outputs.artifacts = [priorArtifact]
+      }
+    }
+
+    ExecutionRepository.ExecutionCriteria criteria = new ExecutionRepository.ExecutionCriteria();
+    criteria.setPageSize(1);
+    criteria.setSortType(ExecutionRepository.ExecutionComparator.START_TIME_OR_ID);
+
+    def executionRepositoryMock = Mock(ExecutionRepository) {
+      retrievePipelinesForPipelineConfigId(pipelineId, criteria) >> Observable.just(priorExecution)
+    }
+
+    def matchArtifact = Artifact
+      .builder()
+      .name(artifactName)
+      .artifactAccount("embedded-artifact")
+      .type("embedded/base64")
+      .build()
+    def expectedArtifact = ExpectedArtifact
+      .builder()
+      .matchArtifact(matchArtifact)
+      .usePriorArtifact(true)
+      .build()
+
+    def pipeline = [
+      id               : pipelineId,
+      trigger          : [
+        type: "manual",
+        // not passing artifacts in trigger
+      ],
+      expectedArtifacts: [expectedArtifact],
+    ]
+
+    def artifactUtils = makeArtifactUtilsWithStub(executionRepositoryMock)
+
+    when:
+    artifactUtils.resolveArtifacts(pipeline)
+    List<ExpectedArtifact> resolvedArtifacts = objectMapper.convertValue(
+      pipeline.trigger.resolvedExpectedArtifacts,
+      new TypeReference<List<ExpectedArtifact>>() {}
+    )
+
+    then:
+    pipeline.trigger.artifacts.size() == 1
+    pipeline.trigger.expectedArtifacts.size() == 1
+    pipeline.trigger.resolvedExpectedArtifacts.size() == 1
+    resolvedArtifacts*.getBoundArtifact() == [priorArtifact]
+  }
+
   private List<Artifact> extractTriggerArtifacts(Map<String, Object> trigger) {
     return objectMapper.convertValue(trigger.artifacts, new TypeReference<List<Artifact>>(){});
   }

orca-front50/src/main/groovy/com/netflix/spinnaker/orca/front50/DependentPipelineStarter.groovy

Lines changed: 0 additions & 9 deletions
@@ -87,15 +87,6 @@ class DependentPipelineStarter implements ApplicationContextAware {
       it.expectedArtifactIds ?: []
     }

-    // we are following a similar approach as triggers above
-    // expectedArtifacts can be used in triggers and stages
-    // for now we identified DeployManifestStage
-    // in ResolveDeploySourceManifestTask using ManifestEvaluator.getRequiredArtifacts
-    def requiredArtifactIds = pipelineConfig.get("stages", []).collectMany {
-      it.requiredArtifactIds ?: []
-    }
-    expectedArtifactIds.addAll(requiredArtifactIds)
-
     pipelineConfig.trigger = [
       type : "pipeline",
       user : authenticationDetails?.user ?: user ?: "[anonymous]",
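The block removed above merged stage-level requiredArtifactIds into the trigger-derived expected-artifact IDs; with the revert, only the IDs collected from the pipeline's triggers remain. The Groovy snippet below is a simplified illustration of the removed merge on an invented config, not the real method (which also filters the triggers before collecting IDs).

// Illustrative sketch only -- shows what the reverted block used to do, on an invented config.
def pipelineConfig = [
  triggers: [[type: "pipeline", expectedArtifactIds: ["trigger-constraint"]]],
  stages  : [[type: "deployManifest", requiredArtifactIds: ["stage-constraint"]]],
]

// Kept: expected-artifact IDs declared on the pipeline's triggers.
def expectedArtifactIds = pipelineConfig.get("triggers", []).collectMany {
  it.expectedArtifactIds ?: []
}

// Removed by this commit: stage-level requiredArtifactIds are no longer merged in.
def requiredArtifactIds = pipelineConfig.get("stages", []).collectMany {
  it.requiredArtifactIds ?: []
}
expectedArtifactIds.addAll(requiredArtifactIds)

assert expectedArtifactIds == ["trigger-constraint", "stage-constraint"]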

orca-front50/src/test/groovy/com/netflix/spinnaker/orca/front50/DependentPipelineStarterSpec.groovy

Lines changed: 44 additions & 35 deletions
@@ -21,6 +21,7 @@ import com.fasterxml.jackson.databind.ObjectMapper
 import com.netflix.spectator.api.NoopRegistry
 import com.netflix.spinnaker.kork.artifacts.model.Artifact
 import com.netflix.spinnaker.kork.artifacts.model.ExpectedArtifact
+import com.netflix.spinnaker.kork.web.exceptions.InvalidRequestException
 import com.netflix.spinnaker.orca.api.pipeline.models.PipelineExecution
 import com.netflix.spinnaker.orca.api.pipeline.models.StageExecution
 import com.netflix.spinnaker.orca.api.pipeline.ExecutionPreprocessor
@@ -577,41 +578,48 @@
     result.trigger.artifacts.findAll { it.name == "gcr.io/project/image" }.version.containsAll(["42", "1337"])
   }

-  def "should find expected artifacts when pipeline has requiredArtifactIds and triggered by pipeline stage"() {
+  def "should fail pipeline when parent pipeline does not provide expected artifacts"() {
     given:
-    def requiredArtifactId = "docker-artifact-id"
-    def expectedImage = Artifact.builder().type("docker/image").name("docker.io/org/image").build()
-    ArrayList<ExpectedArtifact> expectedArtifacts = [
-      ExpectedArtifact.builder().id(requiredArtifactId).matchArtifact(expectedImage).build()
-    ]
+    def artifact = Artifact.builder().type("embedded/base64").name("baked-manifest").build()
+    def expectedArtifactId = "826018cd-e278-4493-a6a5-4b0a0166a843"
+    def expectedArtifact = ExpectedArtifact
+      .builder()
+      .id(expectedArtifactId)
+      .matchArtifact(artifact)
+      .build()
+
+    def parentPipeline = pipeline {
+      name = "my-parent-pipeline"
+      authentication = new PipelineExecution.AuthenticationDetails("username", "account1")
+      pipelineConfigId = "fe0b3537-3101-46a1-8e08-ab57cf65a207"
+      stage {
+        id = "my-stage-1"
+        refId = "1"
+        // not passing artifacts
+      }
+    }

     def triggeredPipelineConfig = [
       name             : "triggered-by-stage",
       id               : "triggered-id",
       stages           : [
         [
-          name               : "Deploy (Manifest)",
-          type               : "deployManifest",
-          requiredArtifactIds: [requiredArtifactId]
+          name: "My Stage",
+          type: "bakeManifest",
+        ]
+      ],
+      expectedArtifacts: [
+        expectedArtifact
+      ],
+      triggers         : [
+        [
+          type               : "pipeline",
+          pipeline           : parentPipeline.pipelineConfigId,
+          expectedArtifactIds: [expectedArtifactId]
         ]
       ],
-      expectedArtifacts: expectedArtifacts,
-      triggers         : [],
     ]

-    Artifact testArtifact = Artifact.builder().type("docker/image").name("docker.io/org/image").version("alpine").build()
-
-    def parentPipeline = pipeline {
-      name = "parent-pipeline"
-      authentication = new PipelineExecution.AuthenticationDetails("username", "account1")
-      pipelineConfigId = "f837d603-bcc8-41c4-8ebc-bf0b23f59108"
-      stage {
-        id = "stage1"
-        refId = "1"
-        outputs = [artifacts: [testArtifact]]
-      }
-    }
-
     def executionLauncher = Mock(ExecutionLauncher)
     def applicationContext = new StaticApplicationContext()
     applicationContext.beanFactory.registerSingleton("pipelineLauncher", executionLauncher)
@@ -625,31 +633,32 @@
     )

     and:
-    executionLauncher.start(*_) >> { _, p ->
+    def error
+    executionLauncher.fail(_, _, _) >> { PIPELINE, processedPipeline, artifactError ->
+      error = artifactError
       return pipeline {
-        name = p.name
-        id = p.name
-        trigger = mapper.convertValue(p.trigger, Trigger)
+        name = processedPipeline.name
+        id = processedPipeline.name
+        trigger = mapper.convertValue(processedPipeline.trigger, Trigger)
       }
     }
-    artifactUtils.getArtifactsForPipelineId(*_) >> {
-      return new ArrayList<Artifact>();
-    }

     when:
     def result = dependentPipelineStarter.trigger(
       triggeredPipelineConfig,
       null,
       parentPipeline,
       [:],
-      "stage1",
+      "my-stage-1",
       buildAuthenticatedUser("username", [])
     )

     then:
-    result.trigger.artifacts.size() == 1
-    result.trigger.artifacts*.name.contains(testArtifact.name)
-    result.trigger.artifacts.findAll { it.name == "docker.io/org/image" }.version.containsAll(["alpine"])
+    1 * artifactUtils.resolveArtifacts(_)
+    error != null
+    error instanceof InvalidRequestException
+    error.message == "Unmatched expected artifact " + expectedArtifact + " could not be resolved."
+    result.trigger.artifacts.size() == 0
   }

   def "should resolve expressions in trigger"() {

0 commit comments