-
Notifications
You must be signed in to change notification settings - Fork 28
New issue
Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.
By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.
Already on GitHub? Sign in to your account
feat: Azure Pipelines #160
Changes from all commits
File filter
Filter by extension
Conversations
Jump to
Diff view
Diff view
There are no files selected for viewing
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,129 @@ | ||
package models | ||
|
||
import ( | ||
"fmt" | ||
"gopkg.in/yaml.v3" | ||
) | ||
|
||
// https://learn.microsoft.com/en-us/azure/devops/pipelines/yaml-schema/pipeline?view=azure-pipelines | ||
// AzurePipeline is the root of an Azure Pipelines YAML document.
// https://learn.microsoft.com/en-us/azure/devops/pipelines/yaml-schema/pipeline?view=azure-pipelines
//
// Only json tags are declared; yaml.v3 matches YAML keys to the
// lowercased field names ("stages", "pr"), so no yaml tags are needed.
type AzurePipeline struct {
	// Path is filled in by the loader, not by YAML decoding (yaml:"-").
	Path string `json:"path" yaml:"-"`

	Stages []AzureStage `json:"stages"`
	Pr     AzurePr      `json:"pr"`
}
|
||
func (o AzurePipeline) IsValid() bool { | ||
return len(o.Stages) > 0 && len(o.Stages[0].Jobs) > 0 | ||
} | ||
|
||
// https://learn.microsoft.com/en-us/azure/devops/pipelines/yaml-schema/stages-stage?view=azure-pipelines | ||
type AzureStage struct { | ||
Stage string `json:"stage"` | ||
Jobs []AzureJob `json:"jobs"` | ||
} | ||
|
||
// https://learn.microsoft.com/en-us/azure/devops/pipelines/yaml-schema/jobs-job?view=azure-pipelines | ||
type AzureJob struct { | ||
Job string `json:"job"` | ||
Steps []AzureStep `json:"steps"` | ||
} | ||
|
||
// https://learn.microsoft.com/en-us/azure/devops/pipelines/yaml-schema/steps?view=azure-pipelines | ||
type AzureStep struct { | ||
Task string `json:"task,omitempty"` | ||
Script string `json:"script,omitempty"` | ||
Powershell string `json:"powershell,omitempty"` | ||
Pwsh string `json:"pwsh,omitempty"` | ||
Bash string `json:"bash,omitempty"` | ||
Checkout string `json:"checkout,omitempty"` | ||
|
||
Lines map[string]int `json:"lines" yaml:"-"` | ||
} | ||
|
||
// https://learn.microsoft.com/en-us/azure/devops/pipelines/yaml-schema/pr?view=azure-pipelines | ||
type AzurePr struct { | ||
Disabled bool `json:"disabled" yaml:"-"` | ||
|
||
Branches *AzureIncludeExclude `json:"branches"` | ||
Paths *AzureIncludeExclude `json:"paths"` | ||
Tags *AzureIncludeExclude `json:"tags"` | ||
Drafts bool `json:"drafts"` | ||
} | ||
|
||
// AzureIncludeExclude is the common include/exclude filter shape used by
// the branches, paths, and tags sections of a pr trigger.
type AzureIncludeExclude struct {
	Include StringList `json:"include"`
	Exclude StringList `json:"exclude"`
}
|
||
func (o *AzurePipeline) UnmarshalYAML(node *yaml.Node) error { | ||
type pipeline AzurePipeline | ||
var p pipeline | ||
if err := node.Decode(&p); err != nil { | ||
return err | ||
} | ||
|
||
if len(p.Stages) == 0 { | ||
stage := AzureStage{} | ||
if err := node.Decode(&stage); err != nil { | ||
return err | ||
} | ||
|
||
if len(stage.Jobs) == 0 { | ||
job := AzureJob{} | ||
if err := node.Decode(&job); err != nil { | ||
return err | ||
} | ||
|
||
stage.Jobs = append(stage.Jobs, job) | ||
} | ||
|
||
p.Stages = append(p.Stages, stage) | ||
} | ||
|
||
*o = AzurePipeline(p) | ||
return nil | ||
} | ||
|
||
func (o *AzurePr) UnmarshalYAML(node *yaml.Node) error { | ||
o.Drafts = true | ||
|
||
switch node.Kind { | ||
case yaml.ScalarNode: | ||
if node.Value == "none" { | ||
o.Disabled = true | ||
return nil | ||
} | ||
return fmt.Errorf("invalid scalar value for AzurePr: %s", node.Value) | ||
case yaml.SequenceNode: | ||
o.Branches = &AzureIncludeExclude{} | ||
return node.Decode(&o.Branches.Include) | ||
case yaml.MappingNode: | ||
type pr AzurePr | ||
return node.Decode((*pr)(o)) | ||
} | ||
|
||
return nil | ||
} | ||
|
||
func (o *AzureStep) UnmarshalYAML(node *yaml.Node) error { | ||
type step AzureStep | ||
var s step | ||
if err := node.Decode(&s); err != nil { | ||
return err | ||
} | ||
|
||
if node.Kind == yaml.MappingNode { | ||
s.Lines = map[string]int{"start": node.Line} | ||
for i := 0; i < len(node.Content); i += 2 { | ||
key := node.Content[i].Value | ||
switch key { | ||
case "task", "script", "powershell", "pwsh", "bash", "checkout": | ||
s.Lines[key] = node.Content[i+1].Line | ||
} | ||
} | ||
} | ||
|
||
*o = AzureStep(s) | ||
return nil | ||
} |
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,145 @@ | ||
package models | ||
|
||
import ( | ||
"github.com/stretchr/testify/assert" | ||
"gopkg.in/yaml.v3" | ||
"testing" | ||
) | ||
|
||
func TestAzurePipeline(t *testing.T) { | ||
lines := map[string]int{"bash": 1, "start": 1} | ||
cases := []struct { | ||
input string | ||
expected AzurePipeline | ||
error bool | ||
}{ | ||
{ | ||
input: `steps: [bash: asdf]`, | ||
expected: AzurePipeline{ | ||
Stages: []AzureStage{ | ||
{ | ||
Jobs: []AzureJob{ | ||
{ | ||
Steps: []AzureStep{ | ||
{ | ||
Bash: "asdf", | ||
Lines: lines, | ||
}, | ||
}, | ||
}, | ||
}, | ||
}, | ||
}, | ||
}, | ||
}, | ||
{ | ||
input: `stages: [{stage: build, jobs: [{job: test, steps: [bash: asdf]}]}]`, | ||
expected: AzurePipeline{ | ||
Stages: []AzureStage{ | ||
{ | ||
Stage: "build", | ||
Jobs: []AzureJob{ | ||
{ | ||
Job: "test", | ||
Steps: []AzureStep{ | ||
{ | ||
Bash: "asdf", | ||
Lines: lines, | ||
}, | ||
}, | ||
}, | ||
}, | ||
}, | ||
}, | ||
}, | ||
}, | ||
{ | ||
input: `jobs: [{job: test, steps: [bash: asdf]}]`, | ||
expected: AzurePipeline{ | ||
Stages: []AzureStage{ | ||
{ | ||
Jobs: []AzureJob{ | ||
{ | ||
Job: "test", | ||
Steps: []AzureStep{ | ||
{ | ||
Bash: "asdf", | ||
Lines: lines, | ||
}, | ||
}, | ||
}, | ||
}, | ||
}, | ||
}, | ||
}, | ||
}, | ||
} | ||
|
||
for i, c := range cases { | ||
var result AzurePipeline | ||
err := yaml.Unmarshal([]byte(c.input), &result) | ||
if c.error { | ||
assert.NotNil(t, err, i) | ||
} else { | ||
assert.Nil(t, err) | ||
assert.Equal(t, c.expected, result, i) | ||
} | ||
} | ||
} | ||
|
||
func TestAzurePr(t *testing.T) { | ||
cases := []struct { | ||
input string | ||
expected AzurePr | ||
error bool | ||
}{ | ||
{ | ||
input: `asdf`, | ||
error: true, | ||
}, | ||
{ | ||
input: `none`, | ||
expected: AzurePr{ | ||
Disabled: true, | ||
Drafts: true, | ||
}, | ||
}, | ||
{ | ||
input: `[main, dev]`, | ||
expected: AzurePr{ | ||
Branches: &AzureIncludeExclude{ | ||
Include: StringList{"main", "dev"}, | ||
}, | ||
Drafts: true, | ||
}, | ||
}, | ||
{ | ||
input: `{branches: {include: [main, dev]}}`, | ||
expected: AzurePr{ | ||
Branches: &AzureIncludeExclude{ | ||
Include: StringList{"main", "dev"}, | ||
}, | ||
Drafts: true, | ||
}, | ||
}, | ||
{ | ||
input: `{drafts: false, branches: {include: [main, dev]}}`, | ||
expected: AzurePr{ | ||
Branches: &AzureIncludeExclude{ | ||
Include: StringList{"main", "dev"}, | ||
}, | ||
}, | ||
}, | ||
} | ||
|
||
for i, c := range cases { | ||
var result AzurePr | ||
err := yaml.Unmarshal([]byte(c.input), &result) | ||
if c.error { | ||
assert.NotNil(t, err, i) | ||
} else { | ||
assert.Nil(t, err) | ||
assert.Equal(t, c.expected, result, i) | ||
} | ||
} | ||
} |
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,14 @@ | ||
package poutine.inventory | ||
|
||
import rego.v1 | ||
|
||
# build_dependencies collects a purl for every concrete (non-templated)
# task referenced by any step of any Azure pipeline in the inventory.
build_dependencies contains dep if {
	some pkg in input.packages
	some pipeline in pkg.azure_pipelines
	some stage in pipeline.stages
	some job in stage.jobs
	some step in job.steps

	# Skip templated task names like "$(var)@1"; also skips steps with no
	# task attribute (contains() is undefined on an absent field).
	not contains(step.task, "$")
	dep := sprintf("pkg:azurepipelinestask/%s", [step.task])
}
Original file line number | Diff line number | Diff line change |
---|---|---|
|
@@ -77,3 +77,27 @@ results contains poutine.finding(rule, pkg.purl, { | |
exprs := gl_injections(script) | ||
count(exprs) > 0 | ||
} | ||
|
||
# Azure Pipelines
# Macro expressions whose expansion is attacker-controllable from a PR:
# branch names and the commit message are interpolated verbatim into
# scripts, enabling command injection.
patterns.azure contains `\$\((Build\.(SourceBranchName|SourceBranch|SourceVersionMessage)|System\.PullRequest\.SourceBranch)\)`
There was a problem hiding this comment. Choose a reason for hiding this commentThe reason will be displayed to describe this comment to others. Learn more. Practically speaking it looks like only [some of these expressions are] exploitable. That being said, the other[s] may or may not be exploitable when the PR gets merged — likely; I'll need to test. But good — let's keep the regex like this.
||
|
||
# azure_injections returns the set of macro expression names (the text
# inside "$(...)") found in str that match a known-injectable pattern.
azure_injections(str) = {expr |
	match := regex.find_n(patterns.azure[_], str, -1)[_]
	# Strip the "$(" / ")" wrapper to report just the expression name.
	expr := regex.find_all_string_submatch_n(`\$\(([^\)]+)\)`, match, 1)[0][1]
}
|
||
# Emit a finding for every Azure Pipelines script-like step attribute that
# interpolates an attacker-controllable macro expression.
results contains poutine.finding(rule, pkg.purl, {
	"path": pipeline.path,
	"job": job.job,
	"step": step_id,
	"line": step.lines[attr],
	"details": sprintf("Sources: %s", [concat(" ", exprs)]),
}) if {
	# Only the shell-executing attributes can be injected; "task" is not
	# executed as an inline script.
	some attr in {"script", "powershell", "pwsh", "bash"}
	pkg := input.packages[_]
	pipeline := pkg.azure_pipelines[_]
	job := pipeline.stages[_].jobs[_]
	# step_id is the index within the job's steps, reported in the finding.
	step := job.steps[step_id]
	exprs := azure_injections(step[attr])
	count(exprs) > 0
}
There was a problem hiding this comment.
Choose a reason for hiding this comment
The reason will be displayed to describe this comment to others. Learn more.
Great. I was wondering how you would handle this polymorphic thing.