Skip to content

Commit

Permalink
This commit does not belong to any branch on this repository, and may belong to a fork outside of the repository.
Merge remote-tracking branch 'elastic/master' into receive-grpc-config
Browse files Browse the repository at this point in the history
jalvz committed Dec 4, 2020
2 parents 3673da9 + 945e483 commit 38f43fc
Showing 16 changed files with 524 additions and 82 deletions.
Original file line number Diff line number Diff line change
@@ -0,0 +1,25 @@
{
"description": "Default enrichment for APM events",
"processors": [
{
"pipeline": {
"name": "metrics-apm.internal-0.1.0-apm_user_agent"
}
},
{
"pipeline": {
"name": "metrics-apm.internal-0.1.0-apm_user_geo"
}
},
{
"pipeline": {
"name": "metrics-apm.internal-0.1.0-apm_ingest_timestamp"
}
},
{
"pipeline": {
"name": "metrics-apm.internal-0.1.0-apm_remove_span_metadata"
}
}
]
}
Original file line number Diff line number Diff line change
@@ -1,4 +1,3 @@
title: APM internal metrics
type: metrics
dataset: apm.internal
ingest_pipeline: apm
Original file line number Diff line number Diff line change
@@ -3,22 +3,22 @@
"processors": [
{
"pipeline": {
"name": "apm_user_agent"
"name": "logs-apm.error-0.1.0-apm_user_agent"
}
},
{
"pipeline": {
"name": "apm_user_geo"
"name": "logs-apm.error-0.1.0-apm_user_geo"
}
},
{
"pipeline": {
"name": "apm_ingest_timestamp"
"name": "logs-apm.error-0.1.0-apm_ingest_timestamp"
}
},
{
"pipeline": {
"name": "apm_remove_span_metadata"
"name": "logs-apm.error-0.1.0-apm_remove_span_metadata"
}
}
]
Original file line number Diff line number Diff line change
@@ -3,22 +3,22 @@
"processors": [
{
"pipeline": {
"name": "apm_user_agent"
"name": "metrics-apm-0.1.0-apm_user_agent"
}
},
{
"pipeline": {
"name": "apm_user_geo"
"name": "metrics-apm-0.1.0-apm_user_geo"
}
},
{
"pipeline": {
"name": "apm_ingest_timestamp"
"name": "metrics-apm-0.1.0-apm_ingest_timestamp"
}
},
{
"pipeline": {
"name": "apm_remove_span_metadata"
"name": "metrics-apm-0.1.0-apm_remove_span_metadata"
}
}
]
1 change: 0 additions & 1 deletion apmpackage/apm/0.1.0/data_stream/metrics/manifest.yml
Original file line number Diff line number Diff line change
@@ -1,4 +1,3 @@
title: APM application metrics
type: metrics
dataset: apm
ingest_pipeline: apm
Original file line number Diff line number Diff line change
@@ -3,22 +3,22 @@
"processors": [
{
"pipeline": {
"name": "apm_user_agent"
"name": "profiles-apm-0.1.0-apm_user_agent"
}
},
{
"pipeline": {
"name": "apm_user_geo"
"name": "profiles-apm-0.1.0-apm_user_geo"
}
},
{
"pipeline": {
"name": "apm_ingest_timestamp"
"name": "profiles-apm-0.1.0-apm_ingest_timestamp"
}
},
{
"pipeline": {
"name": "apm_remove_span_metadata"
"name": "profiles-apm-0.1.0-apm_remove_span_metadata"
}
}
]
1 change: 0 additions & 1 deletion apmpackage/apm/0.1.0/data_stream/profiles/manifest.yml
Original file line number Diff line number Diff line change
@@ -1,4 +1,3 @@
title: APM profiles
type: metrics
dataset: apm.profiling
ingest_pipeline: apm

This file was deleted.

Original file line number Diff line number Diff line change
@@ -3,22 +3,22 @@
"processors": [
{
"pipeline": {
"name": "apm_user_agent"
"name": "traces-apm-0.1.0-apm_user_agent"
}
},
{
"pipeline": {
"name": "apm_user_geo"
"name": "traces-apm-0.1.0-apm_user_geo"
}
},
{
"pipeline": {
"name": "apm_ingest_timestamp"
"name": "traces-apm-0.1.0-apm_ingest_timestamp"
}
},
{
"pipeline": {
"name": "apm_remove_span_metadata"
"name": "traces-apm-0.1.0-apm_remove_span_metadata"
}
}
]
1 change: 0 additions & 1 deletion apmpackage/apm/0.1.0/data_stream/traces/manifest.yml
Original file line number Diff line number Diff line change
@@ -1,4 +1,3 @@
title: APM traces
type: traces
dataset: apm
ingest_pipeline: apm
87 changes: 78 additions & 9 deletions apmpackage/cmd/gen-package/genpipelines.go
Original file line number Diff line number Diff line change
@@ -19,11 +19,68 @@ package main

import (
"encoding/json"
"errors"
"io/ioutil"
"os"
"path/filepath"
)

// streamMappings maps a data stream directory name (as found under
// data_stream/) to the data stream name prefix that Fleet uses when it
// installs the corresponding ingest pipelines.
var streamMappings = map[string]string{
"logs": "logs-apm.error",
"traces": "traces-apm",
"metrics": "metrics-apm",
"internal_metrics": "metrics-apm.internal",
"profiles": "profiles-apm",
}

// PipelineDef mirrors one entry of ingest/pipeline/definition.json:
// a pipeline id plus the pipeline body to install under that id.
type PipelineDef struct {
ID string `json:"id"`
Body PipelineBody `json:"body"`
}

// PipelineBody is an Elasticsearch ingest pipeline definition:
// a description and an ordered list of processors.
type PipelineBody struct {
Description string `json:"description"`
Processors []Processor `json:"processors"`
}

// Processor is a single ingest processor. Pipeline processors are decoded
// into the typed Pipeline field; any other processor type is preserved
// verbatim in the unexported map m so it round-trips through JSON.
type Processor struct {
Pipeline *Pipeline `json:"pipeline,omitempty"`
m map[string]interface{}
}

// Pipeline is the body of a "pipeline" processor, referencing another
// ingest pipeline by name.
type Pipeline struct {
Name string `json:"name"`
}

// _Processor is an alias used to call the default JSON (un)marshaling for
// Processor without recursing into the custom methods below.
type _Processor Processor

// UnmarshalJSON decodes a processor, capturing a "pipeline" processor in
// the typed Pipeline field while stashing all remaining keys in an
// untyped map so unknown processor types survive a round trip.
func (p *Processor) UnmarshalJSON(bytes []byte) error {
	var typed _Processor
	if err := json.Unmarshal(bytes, &typed); err != nil {
		return err
	}
	*p = Processor(typed)

	raw := make(map[string]interface{})
	if err := json.Unmarshal(bytes, &raw); err != nil {
		return err
	}
	// The "pipeline" key is already held in p.Pipeline; drop the raw copy.
	delete(raw, "pipeline")
	p.m = raw
	return nil
}

// MarshalJSON encodes the processor: a pipeline processor is written from
// the typed struct; any other processor is written from the raw map
// preserved by UnmarshalJSON.
func (p *Processor) MarshalJSON() ([]byte, error) {
	if p.Pipeline == nil {
		return json.Marshal(p.m)
	}
	return json.Marshal(_Processor(*p))
}

func generatePipelines(version, dataStream string) {
pipelines, err := os.Open("ingest/pipeline/definition.json")
if err != nil {
@@ -36,29 +93,41 @@ func generatePipelines(version, dataStream string) {
panic(err)
}

var definitions = make([]map[string]interface{}, 0)
var definitions = make([]PipelineDef, 0)
err = json.Unmarshal(bytes, &definitions)
if err != nil {
panic(err)
}

os.MkdirAll(pipelinesPath(version, dataStream), 0755)

var apmPipeline PipelineBody
for _, definition := range definitions {
pipeline, ok := definition["body"]
if !ok {
continue
}
id, ok := definition["id"]
if !ok {
pipeline := definition.Body
if definition.ID == "apm" {
apmPipeline = pipeline
continue
}

out, err := json.MarshalIndent(pipeline, "", " ")
if err != nil {
panic(err)
}
fName := filepath.Join(pipelinesPath(version, dataStream), id.(string)+".json")
fName := filepath.Join(pipelinesPath(version, dataStream), definition.ID+".json")
ioutil.WriteFile(fName, out, 0644)
}

for _, p := range apmPipeline.Processors {
if p.Pipeline == nil {
// should not happen, lets panic loudly
panic(errors.New("expected pipeline processor"))
}
// name is updated to match the one generated by Fleet when it installs the pipelines
p.Pipeline.Name = streamMappings[dataStream] + "-" + version + "-" + p.Pipeline.Name
}
out, err := json.MarshalIndent(apmPipeline, "", " ")
if err != nil {
panic(err)
}
fName := filepath.Join(pipelinesPath(version, dataStream), "default.json")
ioutil.WriteFile(fName, out, 0644)
}
7 changes: 1 addition & 6 deletions apmpackage/cmd/gen-package/main.go
Original file line number Diff line number Diff line change
@@ -23,7 +23,6 @@ import (
"io/ioutil"
"log"
"os"
"path/filepath"

"github.com/elastic/apm-server/cmd"
"github.com/elastic/beats/v7/libbeat/common"
@@ -46,11 +45,6 @@ func main() {
for dataStream := range inputFields {
generatePipelines(packageVersion, dataStream)
}
// hack, remove when bugfix comes to Kibana
bad := filepath.Join(pipelinesPath(packageVersion, "logs"), "apm.json")
good := filepath.Join(pipelinesPath(packageVersion, "logs"), "default.json")
os.Rename(bad, good)

generateDocs(inputFields, packageVersion)
log.Printf("Package fields and docs generated for version %s (stack %s)", packageVersion, stackVersion.String())
}
@@ -66,6 +60,7 @@ func clear(version string) {
if f.IsDir() {
os.Remove(ecsFilePath(version, f.Name()))
os.Remove(fieldsFilePath(version, f.Name()))
os.RemoveAll(pipelinesPath(version, f.Name()))
}
}
ioutil.WriteFile(docsFilePath(version), nil, 0644)
17 changes: 17 additions & 0 deletions changelogs/7.10.asciidoc
Original file line number Diff line number Diff line change
@@ -3,8 +3,25 @@

https://github.com/elastic/apm-server/compare/7.9\...7.10[View commits]

* <<release-notes-7.10.1>>
* <<release-notes-7.10.0>>


[float]
[[release-notes-7.10.1]]
=== APM Server version 7.10.1

https://github.com/elastic/apm-server/compare/v7.10.0\...v7.10.1[View commits]

[float]
==== Added
* Upgrade Go to 1.14.12 {pull}4478[4478]

[float]
==== Bug fixes
* Add maxLen=1024 requirement to `metadata.system.container.id` {pull}4429[4429]


[float]
[[release-notes-7.10.0]]
=== APM Server version 7.10.0
312 changes: 312 additions & 0 deletions systemtest/approvals/TestRUMErrorSourcemapping.approved.json

Large diffs are not rendered by default.

75 changes: 75 additions & 0 deletions systemtest/rum_test.go
Original file line number Diff line number Diff line change
@@ -18,10 +18,14 @@
package systemtest_test

import (
"bytes"
"io"
"io/ioutil"
"mime/multipart"
"net/http"
"net/url"
"os"
"path/filepath"
"strings"
"testing"

@@ -62,3 +66,74 @@ func TestRUMXForwardedFor(t *testing.T) {
"@timestamp", "timestamp.us",
)
}

// TestRUMErrorSourcemapping uploads a sourcemap, sends a RUM error payload,
// and compares the resulting error documents against the approved golden
// file (timestamps excluded, since the server assigns them on receipt).
func TestRUMErrorSourcemapping(t *testing.T) {
systemtest.CleanupElasticsearch(t)
srv := apmservertest.NewUnstartedServer(t)
srv.Config.RUM = &apmservertest.RUMConfig{Enabled: true}
err := srv.Start()
require.NoError(t, err)

uploadSourcemap(t, srv, "../testdata/sourcemap/bundle.js.map",
"http://localhost:8000/test/e2e/../e2e/general-usecase/bundle.js.map", // bundle filepath
"apm-agent-js", // service name
"1.0.1", // service version
)
// NOTE(review): ExpectDocs presumably blocks until the sourcemap document
// is searchable, so the error below can be source-mapped — confirm.
systemtest.Elasticsearch.ExpectDocs(t, "apm-*-sourcemap", nil)

sendRUMEventsPayload(t, srv, "../testdata/intake-v2/errors_rum.ndjson")
result := systemtest.Elasticsearch.ExpectDocs(t, "apm-*-error", nil)

systemtest.ApproveEvents(
t, t.Name(), result.Hits.Hits,
// RUM timestamps are set by the server based on the time the payload is received.
"@timestamp", "timestamp.us",
)
}

// sendRUMEventsPayload POSTs an ndjson intake file to the server's RUM
// events endpoint and fails the test unless the payload is accepted
// (HTTP 202), including the response body in the failure message.
func sendRUMEventsPayload(t *testing.T, srv *apmservertest.Server, payloadFile string) {
	t.Helper()

	f, err := os.Open(payloadFile)
	require.NoError(t, err)
	defer f.Close()

	// Bug fix: the request-construction error was silently discarded
	// (req, _ := ...); a malformed URL would have caused a nil-pointer
	// panic below instead of a clear test failure.
	req, err := http.NewRequest("POST", srv.URL+"/intake/v2/rum/events", f)
	require.NoError(t, err)
	req.Header.Add("Content-Type", "application/x-ndjson")
	resp, err := http.DefaultClient.Do(req)
	require.NoError(t, err)
	defer resp.Body.Close()

	respBody, err := ioutil.ReadAll(resp.Body)
	require.NoError(t, err)
	require.Equal(t, http.StatusAccepted, resp.StatusCode, string(respBody))
}

// uploadSourcemap POSTs a sourcemap file to the server's sourcemap upload
// endpoint as a multipart form (service_name, service_version,
// bundle_filepath, sourcemap) and fails the test unless it is accepted
// (HTTP 202).
func uploadSourcemap(t *testing.T, srv *apmservertest.Server, sourcemapFile, bundleFilepath, serviceName, serviceVersion string) {
	t.Helper()

	var data bytes.Buffer
	mw := multipart.NewWriter(&data)
	require.NoError(t, mw.WriteField("service_name", serviceName))
	require.NoError(t, mw.WriteField("service_version", serviceVersion))
	require.NoError(t, mw.WriteField("bundle_filepath", bundleFilepath))

	f, err := os.Open(sourcemapFile)
	require.NoError(t, err)
	defer f.Close()
	sourcemapFileWriter, err := mw.CreateFormFile("sourcemap", filepath.Base(sourcemapFile))
	require.NoError(t, err)
	_, err = io.Copy(sourcemapFileWriter, f)
	require.NoError(t, err)
	require.NoError(t, mw.Close())

	// Bug fix: the request-construction error was silently discarded
	// (req, _ := ...); surface it so a malformed URL fails loudly rather
	// than panicking on a nil request below.
	req, err := http.NewRequest("POST", srv.URL+"/assets/v1/sourcemaps", &data)
	require.NoError(t, err)
	req.Header.Add("Content-Type", mw.FormDataContentType())
	resp, err := http.DefaultClient.Do(req)
	require.NoError(t, err)
	defer resp.Body.Close()

	respBody, err := ioutil.ReadAll(resp.Body)
	require.NoError(t, err)
	require.Equal(t, http.StatusAccepted, resp.StatusCode, string(respBody))
}
22 changes: 0 additions & 22 deletions tests/system/test_integration_sourcemap.py
Original file line number Diff line number Diff line change
@@ -62,17 +62,6 @@ def test_duplicated_sourcemap_warning(self):
self.assert_no_logged_warnings(
["WARN.*Overriding sourcemap", "WARN.*2 sourcemaps found for service"])

def test_rum_error(self):
# use an uncleaned path to test that path is cleaned in upload
path = 'http://localhost:8000/test/e2e/../e2e/general-usecase/bundle.js.map'
self.upload_sourcemap(bundle_filepath=path)
self.load_docs_with_template(self.get_error_payload_path(),
self.intake_url,
'error',
1)
self.assert_no_logged_warnings()
self.check_rum_error_sourcemap(True)

def test_backend_span(self):
# ensure source mapping is not applied to backend events
# load event for which a sourcemap would be applied when sent to rum endpoint,
@@ -164,17 +153,6 @@ def test_sourcemap_mapping_cache_usage(self):
self.assert_no_logged_warnings()
self.check_rum_error_sourcemap(True)

def test_rum_error_changed_index(self):
# use an uncleaned path to test that path is cleaned in upload
path = 'http://localhost:8000/test/e2e/../e2e/general-usecase/bundle.js.map'
self.upload_sourcemap(bundle_filepath=path)
self.load_docs_with_template(self.get_error_payload_path(),
self.intake_url,
'error',
1)
self.assert_no_logged_warnings()
self.check_rum_error_sourcemap(True)


@integration_test
class SourcemappingCacheIntegrationTest(BaseSourcemapTest):

0 comments on commit 38f43fc

Please sign in to comment.