Move conflicting retention check to translator (#418)
* Add an error log when conflicting retention values are found for the same log group, and stop the agent

* use fmt message for both logs

* move duplicate retention logic to translator

* fix test

* check for null during log group name conversion

* fix test case

* refactor checkDuplicateRetention to util folder

* restore old test case

* add test case to window events

* adjust comments and fix tests

* simplify function names

* add retention key
aateeqi authored Apr 5, 2022
1 parent a23dae8 · commit 4fd52bf
Showing 7 changed files with 306 additions and 166 deletions.
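
The translator-side check itself (logUtil.ValidateLogRetentionSettings, called from the file and Windows-events collect_list translators below) is not part of the visible hunks. What follows is a minimal sketch of what it plausibly does, inferred from the removed logfile.go logic and the new test expectations; the package name, the use of translator.AddErrorMessages, and everything beyond the visible call sites are assumptions, not the actual implementation.

package util

import (
	"fmt"
	"strings"

	"github.com/aws/amazon-cloudwatch-agent/translator"
)

// ValidateLogRetentionSettings walks the translated collect_list entries and,
// per log group (case-insensitive), keeps retention_in_days on the first entry
// only. Conflicting values for the same group are reported as a translator
// error instead of panicking at agent runtime; duplicate identical values are
// reset to -1 so the retention API is only called once per group.
// Sketch only: the real signature and map keys may differ.
func ValidateLogRetentionSettings(configs []interface{}, path string) {
	retentionByGroup := make(map[string]int)
	for _, item := range configs {
		cfg, ok := item.(map[string]interface{})
		if !ok {
			continue
		}
		group, ok := cfg["log_group_name"].(string)
		if !ok || group == "" {
			continue // guard against a missing/null log group name
		}
		group = strings.ToLower(group)
		retention, ok := cfg["retention_in_days"].(int)
		if !ok || retention < 1 {
			continue // 0 or negative means unset/default
		}
		if prev, seen := retentionByGroup[group]; seen {
			if prev != retention {
				// Conflicting values: surface a translation error rather than a runtime panic
				translator.AddErrorMessages(path, fmt.Sprintf(
					"Different retention_in_days values can't be set for the same log group: %v", group))
			}
			// Duplicate (or conflicting) value already recorded: unset it here
			cfg["retention_in_days"] = -1
		} else {
			retentionByGroup[group] = retention
		}
	}
}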
26 changes: 0 additions & 26 deletions plugins/inputs/logfile/logfile.go
@@ -149,8 +149,6 @@ func (t *LogFile) FindLogSrc() []logs.LogSrc {

t.cleanUpStoppedTailerSrc()

// If a log group has differing retentionInDays values defined in multiple places, stop the agent
t.checkForDuplicateRetentionSettings()
// Create a "tailer" for each file
for i := range t.FileConfig {
fileconfig := &t.FileConfig[i]
@@ -398,30 +396,6 @@ func (t *LogFile) cleanUpStoppedTailerSrc() {
}
}

// checkForDuplicateRetentionSettings: Checks if a log group has retention set differently in multiple places and stops the agent if found
func (t *LogFile) checkForDuplicateRetentionSettings() {
configMap := make(map[string]int)
for i := range t.FileConfig {
fileConfig := &t.FileConfig[i]
logGroup := strings.ToLower(fileConfig.LogGroupName)
// if retention is 0, -1 or less it's either invalid or default
if fileConfig.RetentionInDays < 1 {
continue
}
// if the configMap[logGroup] exists, retention has been set for the same logGroup somewhere
if configMap[logGroup] != 0 {
// different retentions has been set for the same log group, panic and stop the agent
if configMap[logGroup] != fileConfig.RetentionInDays {
panic(fmt.Sprintf("error: The Log Group has differing retentionInDays values defined in two or more places. Log Group Name: %v", fileConfig.LogGroupName))
}
// The same retention for a log group has been configured in multiple places. Unset it so that the retention api is only called once
fileConfig.RetentionInDays = -1
} else {
configMap[logGroup] = fileConfig.RetentionInDays
}
}
}

// Compressed file should be skipped.
// This func is to determine whether the file is compressed or not based on the file name suffix.
func isCompressedFile(filename string) bool {
134 changes: 0 additions & 134 deletions plugins/inputs/logfile/logfile_test.go
@@ -1080,137 +1080,3 @@ func TestGenerateLogGroupName(t *testing.T) {
logGroupName,
expectLogGroup))
}

func TestCheckForDuplicateRetentionSettingsPanics(t *testing.T) {
tt := NewLogFile()
logGroupName := "DuplicateLogGroupName"
tt.FileConfig = []FileConfig{{
FilePath: "SampleFilePath",
FromBeginning: true,
LogGroupName: logGroupName,
RetentionInDays: 1,
},
{
FilePath: "SampleFilePath",
FromBeginning: true,
LogGroupName: logGroupName,
RetentionInDays: 3,
},
{
FilePath: "SampleFilePath",
FromBeginning: true,
LogGroupName: logGroupName,
RetentionInDays: 3,
},
}
assert.Panics(t, func() { tt.checkForDuplicateRetentionSettings() }, "Did not panic after finding duplicate log group")
}

func TestCheckForDuplicateRetentionSettingsWithDefaultRetention(t *testing.T) {
tt := NewLogFile()
logGroupName := "DuplicateLogGroupName"
tt.FileConfig = []FileConfig{{
FilePath: "SampleFilePath",
FromBeginning: true,
LogGroupName: logGroupName,
RetentionInDays: -1,
},
{
FilePath: "SampleFilePath",
FromBeginning: true,
LogGroupName: logGroupName,
RetentionInDays: -1,
},
}
assert.NotPanics(t, func() { tt.checkForDuplicateRetentionSettings() }, "Panicked when no duplicate retention settings exists")
}

func TestCheckForDuplicateRetentionWithDefaultAndNonDefaultValue(t *testing.T) {
tt := NewLogFile()
logGroupName := "DuplicateLogGroupName"
tt.FileConfig = []FileConfig{{
FilePath: "SampleFilePath",
FromBeginning: true,
LogGroupName: logGroupName,
RetentionInDays: -1,
},
{
FilePath: "SampleFilePath",
FromBeginning: true,
LogGroupName: logGroupName,
RetentionInDays: 3,
},
}
assert.NotPanics(t, func() { tt.checkForDuplicateRetentionSettings() }, "Panicked when no duplicate retention settings exists")
}

func TestCheckForDuplicateRetentionSettingsDifferentLogGroups(t *testing.T) {
tt := NewLogFile()
tt.FileConfig = []FileConfig{{
FilePath: "SampleFilePath",
FromBeginning: true,
LogGroupName: "logGroupName1",
RetentionInDays: 5,
},
{
FilePath: "SampleFilePath",
FromBeginning: true,
LogGroupName: "logGroupName2",
RetentionInDays: 3,
},
}
assert.NotPanics(t, func() { tt.checkForDuplicateRetentionSettings() }, "Panicked when no duplicate retention settings exists")
}

func TestCheckDuplicateRetentionWithDefaultAndSetLogGroups(t *testing.T) {
tt := NewLogFile()
logGroupName := "DuplicateLogGroupName"
tt.FileConfig = []FileConfig{{
FilePath: "SampleFilePath",
FromBeginning: true,
LogGroupName: logGroupName,
RetentionInDays: 3,
},
{
FilePath: "SampleFilePath",
FromBeginning: true,
LogGroupName: logGroupName,
RetentionInDays: 5,
},
{
FilePath: "SampleFilePath",
FromBeginning: true,
LogGroupName: logGroupName,
RetentionInDays: -1,
},
}
assert.Panics(t, func() { tt.checkForDuplicateRetentionSettings() }, "Did not panic after finding duplicate log group")
}

func TestCheckForDuplicateRetentionSettingsWithDefault(t *testing.T) {
tt := NewLogFile()
logGroupName := "DuplicateLogGroupName"
tt.FileConfig = []FileConfig{{
FilePath: "SampleFilePath",
FromBeginning: true,
LogGroupName: logGroupName,
RetentionInDays: 5,
},
{
FilePath: "SampleFilePath",
FromBeginning: true,
LogGroupName: logGroupName,
RetentionInDays: 5,
},
{
FilePath: "SampleFilePath",
FromBeginning: true,
LogGroupName: logGroupName,
RetentionInDays: 5,
},
}
assert.NotPanics(t, func() { tt.checkForDuplicateRetentionSettings() }, "Panicked when no duplicate retention settings exists")
assert.Equal(t, tt.FileConfig[0].RetentionInDays, 5)
assert.Equal(t, tt.FileConfig[1].RetentionInDays, -1)
assert.Equal(t, tt.FileConfig[2].RetentionInDays, -1)
}
Original file line number Diff line number Diff line change
@@ -5,16 +5,16 @@ package collect_list

import (
"encoding/json"
"io/ioutil"
"path/filepath"
"sort"

"github.com/aws/amazon-cloudwatch-agent/translator"
"github.com/aws/amazon-cloudwatch-agent/translator/context"
"github.com/aws/amazon-cloudwatch-agent/translator/jsonconfig/mergeJsonRule"
"github.com/aws/amazon-cloudwatch-agent/translator/jsonconfig/mergeJsonUtil"
"github.com/aws/amazon-cloudwatch-agent/translator/translate/agent"
parent "github.com/aws/amazon-cloudwatch-agent/translator/translate/logs/logs_collected/files"
logUtil "github.com/aws/amazon-cloudwatch-agent/translator/translate/logs/util"
"io/ioutil"
"path/filepath"
"sort"
)

type Rule translator.Rule
@@ -63,7 +63,7 @@ func (f *FileConfig) ApplyRule(input interface{}) (returnKey string, returnVal i
}
res = append(res, result)
}

logUtil.ValidateLogRetentionSettings(res, GetCurPath())
outputLogConfig(res)
} else {
returnKey = ""
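
For context, the effect of the call added above can be pictured with a small hedged example (only the names visible in this hunk are used; the map keys follow the new tests further down):

// Hypothetical illustration of the call site above: two collect_list entries
// translate to maps for the same log group with the same retention, so the
// second entry's retention_in_days is reset to -1 and the retention API
// would only be invoked once for that group.
res := []interface{}{
	map[string]interface{}{"log_group_name": "test1", "retention_in_days": 3},
	map[string]interface{}{"log_group_name": "test1", "retention_in_days": 3},
}
logUtil.ValidateLogRetentionSettings(res, GetCurPath())
// res[1].(map[string]interface{})["retention_in_days"] is now -1;
// a conflicting value (e.g. 5) would instead append a translator error message.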
Original file line number Diff line number Diff line change
@@ -394,7 +394,7 @@ func TestEncoding_Invalid(t *testing.T) {
assert.Equal(t, expectVal, val)
assert.False(t, translator.IsTranslateSuccess())
assert.Equal(t, 1, len(translator.ErrorMessages))
assert.Equal(t, "Under path : /logs/logs_collected/files/collect_list/encoding | Error : Encoding xxx is an invalid value.", translator.ErrorMessages[0])
assert.Equal(t, "Under path : /logs/logs_collected/files/collect_list/encoding | Error : Encoding xxx is an invalid value.", translator.ErrorMessages[len(translator.ErrorMessages)-1])
}

func TestAutoRemoval(t *testing.T) {
@@ -611,3 +611,115 @@ func TestLogFilters(t *testing.T) {
}}
assert.Equal(t, expectVal, val)
}

func TestRetentionDifferentLogGroups(t *testing.T) {
f := new(FileConfig)
var input interface{}
e := json.Unmarshal([]byte(`{
"collect_list":[
{
"file_path":"path1",
"log_group_name":"test2",
"retention_in_days":3
},
{
"file_path":"path1",
"log_group_name":"test1",
"retention_in_days":3
}
]
}`), &input)
if e != nil {
assert.Fail(t, e.Error())
}
_, val := f.ApplyRule(input)
expectVal := []interface{}{map[string]interface{}{
"file_path": "path1",
"log_group_name": "test2",
"pipe": false,
"retention_in_days": 3,
"from_beginning": true,
}, map[string]interface{}{
"file_path": "path1",
"log_group_name": "test1",
"pipe": false,
"retention_in_days": 3,
"from_beginning": true,
}}
assert.Equal(t, expectVal, val)
}

func TestDuplicateRetention(t *testing.T) {
f := new(FileConfig)
var input interface{}
e := json.Unmarshal([]byte(`{
"collect_list":[
{
"file_path":"path1",
"log_group_name":"test1",
"retention_in_days":3
},
{
"file_path":"path1",
"log_group_name":"test1",
"retention_in_days":3
}
]
}`), &input)
if e != nil {
assert.Fail(t, e.Error())
}
_, val := f.ApplyRule(input)
expectVal := []interface{}{map[string]interface{}{
"file_path": "path1",
"log_group_name": "test1",
"pipe": false,
"retention_in_days": 3,
"from_beginning": true,
}, map[string]interface{}{
"file_path": "path1",
"log_group_name": "test1",
"pipe": false,
"retention_in_days": -1,
"from_beginning": true,
}}
assert.Equal(t, expectVal, val)
}

func TestConflictingRetention(t *testing.T) {
f := new(FileConfig)
var input interface{}
e := json.Unmarshal([]byte(`{
"collect_list":[
{
"file_path":"path1",
"log_group_name":"test1",
"retention_in_days":3
},
{
"file_path":"path1",
"log_group_name":"test1",
"retention_in_days":5
}
]
}`), &input)
if e != nil {
assert.Fail(t, e.Error())
}
_, val := f.ApplyRule(input)
expectVal := []interface{}{map[string]interface{}{
"file_path": "path1",
"log_group_name": "test1",
"pipe": false,
"retention_in_days": 3,
"from_beginning": true,
}, map[string]interface{}{
"file_path": "path1",
"log_group_name": "test1",
"pipe": false,
"retention_in_days": -1,
"from_beginning": true,
}}
assert.Equal(t, "Under path : /logs/logs_collected/files/collect_list/ | Error : Different retention_in_days values can't be set for the same log group: test1", translator.ErrorMessages[len(translator.ErrorMessages)-1])
assert.Equal(t, expectVal, val)
}
Original file line number Diff line number Diff line change
@@ -10,7 +10,9 @@ import (
"github.com/aws/amazon-cloudwatch-agent/translator/jsonconfig/mergeJsonRule"
"github.com/aws/amazon-cloudwatch-agent/translator/jsonconfig/mergeJsonUtil"
parent "github.com/aws/amazon-cloudwatch-agent/translator/translate/logs/logs_collected/windows_events"
logUtil "github.com/aws/amazon-cloudwatch-agent/translator/translate/logs/util"
"github.com/aws/amazon-cloudwatch-agent/translator/util"

)

type Rule translator.Rule
@@ -57,6 +59,7 @@ func (c *CollectList) ApplyRule(input interface{}) (returnKey string, returnVal
result = append(result, singleTransformedConfig)
}
}
logUtil.ValidateLogRetentionSettings(result, GetCurPath())
return EventConfigTomlKey, result
}
