diff --git a/CHANGELOG.next.asciidoc b/CHANGELOG.next.asciidoc
index e8945a005949..ffeb6ae017aa 100644
--- a/CHANGELOG.next.asciidoc
+++ b/CHANGELOG.next.asciidoc
@@ -38,6 +38,12 @@ https://github.com/elastic/beats/compare/v7.0.0-alpha2...master[Check the HEAD d
 
 *Filebeat*
 
+- Fix counter for number of events published in `httpjson` input. {pull}31993[31993]
+- Fix handling of Checkpoint event for R81. {issue}32380[32380] {pull}32458[32458]
+- Fix a hang on `apt-get update` stage in packaging. {pull}32580[32580]
+- gcp-pubsub input: Restart Pub/Sub client on all errors. {issue}32550[32550] {pull}32712[32712]
+- Fix not parsing as json when `json` and `ndjson` content types have charset information in `aws-s3` input {pull}32767[32767]
+- Update `cloud.region` parsing in cloudtrail fileset. {pull}32763[32763]
 - Fix file.path field in cloudtrail fileset to use json.digestS3Object. {pull}32759[32759]
 
 *Heartbeat*
diff --git a/x-pack/filebeat/input/awss3/input_integration_test.go b/x-pack/filebeat/input/awss3/input_integration_test.go
index 32b61bfcd539..a171a2eca620 100644
--- a/x-pack/filebeat/input/awss3/input_integration_test.go
+++ b/x-pack/filebeat/input/awss3/input_integration_test.go
@@ -16,6 +16,11 @@ import (
 	"io/ioutil"
 	"os"
 	"path/filepath"
+<<<<<<< HEAD
+=======
+	"runtime"
+	"strings"
+>>>>>>> b40349ce5a (allow for json/ndjson content type with charset (#32767))
 	"testing"
 	"time"
 
@@ -86,7 +91,6 @@ file_selectors:
 -
   regex: 'events-array.json$'
   expand_event_list_from_field: Events
-  content_type: application/json
   include_s3_metadata:
     - last-modified
     - x-amz-version-id
@@ -95,7 +99,6 @@
     - Content-Type
 -
   regex: '\.(?:nd)?json(\.gz)?$'
-  content_type: application/json
 -
   regex: 'multiline.txt$'
   parsers:
@@ -115,7 +118,6 @@
 -
   regex: 'events-array.json$'
   expand_event_list_from_field: Events
-  content_type: application/json
   include_s3_metadata:
     - last-modified
     - x-amz-version-id
@@ -124,7 +126,6 @@
     - Content-Type
 -
   regex: '\.(?:nd)?json(\.gz)?$'
-  content_type: application/json
 -
   regex: 'multiline.txt$'
   parsers:
@@ -324,11 +325,26 @@ func uploadS3TestFiles(t *testing.T, region, bucket string, filenames ...string)
 			t.Fatalf("Failed to open file %q, %v", filename, err)
 		}
 
+		contentType := ""
+		if strings.HasSuffix(filename, "ndjson") || strings.HasSuffix(filename, "ndjson.gz") {
+			contentType = contentTypeNDJSON + "; charset=UTF-8"
+		} else if strings.HasSuffix(filename, "json") || strings.HasSuffix(filename, "json.gz") {
+			contentType = contentTypeJSON + "; charset=UTF-8"
+		}
+
 		// Upload the file to S3.
+<<<<<<< HEAD
 		result, err := uploader.Upload(&s3manager.UploadInput{
 			Bucket: aws.String(bucket),
 			Key:    aws.String(filepath.Base(filename)),
 			Body:   bytes.NewReader(data),
+=======
+		result, err := uploader.Upload(context.Background(), &s3.PutObjectInput{
+			Bucket:      aws.String(bucket),
+			Key:         aws.String(filepath.Base(filename)),
+			Body:        bytes.NewReader(data),
+			ContentType: aws.String(contentType),
+>>>>>>> b40349ce5a (allow for json/ndjson content type with charset (#32767))
 		})
 		if err != nil {
 			t.Fatalf("Failed to upload file %q: %v", filename, err)
diff --git a/x-pack/filebeat/input/awss3/s3_objects.go b/x-pack/filebeat/input/awss3/s3_objects.go
index 9626a84a4adc..ba34d8e516e8 100644
--- a/x-pack/filebeat/input/awss3/s3_objects.go
+++ b/x-pack/filebeat/input/awss3/s3_objects.go
@@ -151,7 +151,7 @@ func (p *s3ObjectProcessor) ProcessS3Object() error {
 
 	// Process object content stream.
 	switch {
-	case contentType == contentTypeJSON || contentType == contentTypeNDJSON:
+	case strings.HasPrefix(contentType, contentTypeJSON) || strings.HasPrefix(contentType, contentTypeNDJSON):
 		err = p.readJSON(reader)
 	default:
 		err = p.readFile(reader)
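Review note (not part of the diff): S3 can report `Content-Type` values that carry parameters, e.g. `application/json; charset=UTF-8`, so the previous exact comparison in `ProcessS3Object` missed them and such objects fell through to the plain-file reader. The final hunk switches to a prefix match. The sketch below only illustrates that difference; it is not code from the PR, and the two constant values are assumptions standing in for `contentTypeJSON` and `contentTypeNDJSON` as defined in `s3_objects.go`.

```go
package main

import (
	"fmt"
	"strings"
)

// Assumed stand-ins for the package constants in s3_objects.go.
const (
	contentTypeJSON   = "application/json"
	contentTypeNDJSON = "application/x-ndjson"
)

// isJSONContentType mirrors the new prefix-based check from the hunk above:
// it accepts the bare media type as well as any "; charset=..." variant.
func isJSONContentType(contentType string) bool {
	return strings.HasPrefix(contentType, contentTypeJSON) ||
		strings.HasPrefix(contentType, contentTypeNDJSON)
}

func main() {
	for _, ct := range []string{
		"application/json",
		"application/json; charset=UTF-8",
		"application/x-ndjson; charset=UTF-8",
		"text/plain",
	} {
		exact := ct == contentTypeJSON || ct == contentTypeNDJSON
		fmt.Printf("%-40q exact=%-5t prefix=%t\n", ct, exact, isJSONContentType(ct))
	}
}
```

A stricter alternative would be `mime.ParseMediaType`, which strips parameters and validates the value, but the prefix check keeps the change minimal. Separately, the two `input_integration_test.go` hunks still contain unresolved `<<<<<<<`/`=======`/`>>>>>>>` markers from the backport; presumably they need to be resolved in favor of this branch's existing `uploader.Upload(&s3manager.UploadInput{...})` call (that struct also exposes a `ContentType` field) while keeping the new `strings` import, otherwise the test file will not compile.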