Commit 02b4c7f

'feature/labels-cloud-ephemeral-components' of github.com:v1v/integrations into feature/labels-cloud-ephemeral-components

* 'feature/labels-cloud-ephemeral-components' of github.com:v1v/integrations: (23 commits)
  use branch
  standardise labels/tags
  Update .ci/Jenkinsfile
  [ci][terraform][aws] tags with metadata
  Bump github.com/elastic/elastic-package from 0.46.0 to 0.47.0 (elastic#3182)
  Spring boot package [Memory - data stream] (elastic#2979)
  [cisco_ise] Add Cisco ISE package (elastic#2855)
  [apache_spark][executor] Add Apache Spark package with Executor data stream (elastic#2943)
  [apache_spark][driver] Add Apache Spark package with Driver data stream (elastic#2945)
  cisco_duo: simplify grok expression for handling ports (elastic#3170)
  cisco_duo: fix handling of IP addresses with port numbers (elastic#3117)
  [sophos] Various improvements and log samples from 18.5 (elastic#3127)
  [cisco_asa] fix visualizations (elastic#3146)
  [apache_spark][application] Add Apache Spark package with Application data stream (elastic#2941)
  Fix pagination bug that skipped events when more than one page is present. (elastic#3140)
  Cis k8s name migration (elastic#3113)
  remove exported fields; these will be added to Kibana docs instead (elastic#3093)
  Exclude ecosystem as owner of subdirectories of packages (elastic#3132)
  [awsfargate] bump package version (elastic#3130)
  add first csp rule template (elastic#3081)
  ...
v1v committed Apr 25, 2022
2 parents 48cae03 + 811a011 commit 02b4c7f
Showing 303 changed files with 40,565 additions and 10,002 deletions.
4 changes: 3 additions & 1 deletion .github/CODEOWNERS
@@ -1,6 +1,6 @@
# Everything outside of packages is maintained by the ecosystem team.
* @elastic/ecosystem
-/packages/*
+/packages/

# CODEOWNERS file is checked by CI.
/.github/CODEOWNERS
@@ -35,11 +35,13 @@
/packages/cisco_duo @elastic/security-external-integrations
/packages/cisco_ftd @elastic/security-external-integrations
/packages/cisco_ios @elastic/security-external-integrations
+/packages/cisco_ise @elastic/security-external-integrations
/packages/cisco @elastic/security-external-integrations
/packages/cisco_meraki @elastic/security-external-integrations
/packages/cisco_nexus @elastic/security-external-integrations
/packages/cisco_secure_endpoint @elastic/security-external-integrations
/packages/cisco_umbrella @elastic/security-external-integrations
+/packages/cloud_security_posture @elastic/cloud-security-posture
/packages/cloudflare @elastic/security-external-integrations
/packages/cockroachdb @elastic/integrations
/packages/containerd @elastic/obs-cloudnative-monitoring
7 changes: 7 additions & 0 deletions dev/codeowners/codeowners.go
@@ -103,6 +103,13 @@ func (codeowners *githubOwners) checkSingleField(field string) error {
 		if matches || strings.HasPrefix(field, path) {
 			return fmt.Errorf("%q would remove owners for %q", field, path)
 		}
+
+		if strings.HasPrefix(path, field) {
+			_, err := filepath.Rel(field, path)
+			if err == nil {
+				return fmt.Errorf("%q would remove owners for %q", field, path)
+			}
+		}
 	}
 
 	// Excluding other files is fine.
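Taken together with the test data below, the new block rejects a CODEOWNERS entry whose directory path would shadow a more specific, already-owned path, in addition to the existing wildcard match. A rough standalone sketch of that rule (illustrative only; shadowsOwnedPath and the example paths are invented here, not part of dev/codeowners):

package main

import (
	"fmt"
	"path/filepath"
	"strings"
)

// shadowsOwnedPath sketches the rule the check enforces: a later entry such
// as "/testdata/" or "/testdata/*" must not shadow the more specific owned
// path "/testdata/devexp". (Hypothetical helper, not the real implementation.)
func shadowsOwnedPath(field, ownedPath string) bool {
	// Wildcard entries: "/testdata/*" matches direct children of /testdata/.
	if matched, _ := filepath.Match(field, ownedPath); matched {
		return true
	}
	// Directory entries: "/testdata/" is a path prefix of "/testdata/devexp",
	// so a relative path between the two can be computed.
	if strings.HasPrefix(ownedPath, field) {
		if _, err := filepath.Rel(field, ownedPath); err == nil {
			return true
		}
	}
	return false
}

func main() {
	fmt.Println(shadowsOwnedPath("/testdata/*", "/testdata/devexp"))     // true
	fmt.Println(shadowsOwnedPath("/testdata/", "/testdata/devexp"))      // true
	fmt.Println(shadowsOwnedPath("/packages/kafka", "/testdata/devexp")) // false
}

The CODEOWNERS-invalid-override and CODEOWNERS-invalid-override-wildcard fixtures below exercise exactly the first two cases.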
4 changes: 4 additions & 0 deletions dev/codeowners/codeowners_test.go
@@ -99,6 +99,10 @@ func TestReadGithubOwners(t *testing.T) {
codeownersPath: "testdata/CODEOWNERS-invalid-override",
valid: false,
},
{
codeownersPath: "testdata/CODEOWNERS-invalid-override-wildcard",
valid: false,
},
}

for _, c := range cases {
2 changes: 1 addition & 1 deletion dev/codeowners/testdata/CODEOWNERS-invalid-override
@@ -1,5 +1,5 @@
# This is not valid because there is an override that would remove owners of a directory.

/testdata/devexp @elastic/integrations @elastic/integrations-developer-experience
-/testdata/*
+/testdata/

5 changes: 5 additions & 0 deletions dev/codeowners/testdata/CODEOWNERS-invalid-override-wildcard
@@ -0,0 +1,5 @@
# This is not valid because there is an override that would remove owners of a directory.

/testdata/devexp @elastic/integrations @elastic/integrations-developer-experience
/testdata/*

1 change: 1 addition & 0 deletions dev/codeowners/testdata/CODEOWNERS-multiple-owners
@@ -1,4 +1,5 @@
# This is a valid test file with multiple owners for a path

/testdata/devexp @elastic/integrations @elastic/integrations-developer-experience
+/testdata/devexp/manifest.yml @elastic/integrations
/testdata/integration @elastic/integrations
2 changes: 1 addition & 1 deletion dev/codeowners/testdata/CODEOWNERS-valid
@@ -1,7 +1,7 @@
# This is a valid test file
* @elastic/ecosystem

-/testdata/*
+/testdata/

/testdata/devexp @elastic/integrations-developer-experience
/testdata/integration @elastic/integrations
18 changes: 9 additions & 9 deletions go.mod
@@ -4,7 +4,7 @@ go 1.17

require (
github.com/blang/semver v3.5.1+incompatible
-github.com/elastic/elastic-package v0.46.0
+github.com/elastic/elastic-package v0.47.0
github.com/elastic/package-registry v1.8.0
github.com/magefile/mage v1.13.0
github.com/pkg/errors v0.9.1
@@ -146,17 +146,17 @@ require (
gopkg.in/inf.v0 v0.9.1 // indirect
gopkg.in/warnings.v0 v0.1.2 // indirect
gopkg.in/yaml.v3 v3.0.0-20210107192922-496545a6307b // indirect
-helm.sh/helm/v3 v3.8.1 // indirect
+helm.sh/helm/v3 v3.8.2 // indirect
howett.net/plist v0.0.0-20201203080718-1454fab16a06 // indirect
-k8s.io/api v0.23.5 // indirect
-k8s.io/apiextensions-apiserver v0.23.4 // indirect
-k8s.io/apimachinery v0.23.5 // indirect
-k8s.io/cli-runtime v0.23.5 // indirect
-k8s.io/client-go v0.23.5 // indirect
-k8s.io/component-base v0.23.4 // indirect
+k8s.io/api v0.23.6 // indirect
+k8s.io/apiextensions-apiserver v0.23.5 // indirect
+k8s.io/apimachinery v0.23.6 // indirect
+k8s.io/cli-runtime v0.23.6 // indirect
+k8s.io/client-go v0.23.6 // indirect
+k8s.io/component-base v0.23.5 // indirect
k8s.io/klog/v2 v2.30.0 // indirect
k8s.io/kube-openapi v0.0.0-20211115234752-e816edb12b65 // indirect
-k8s.io/kubectl v0.23.4 // indirect
+k8s.io/kubectl v0.23.5 // indirect
k8s.io/utils v0.0.0-20211208161948-7d6a63dca704 // indirect
sigs.k8s.io/json v0.0.0-20211208200746-9f7c6b3444d2 // indirect
sigs.k8s.io/kustomize/api v0.10.1 // indirect
144 changes: 111 additions & 33 deletions go.sum

Large diffs are not rendered by default.

26 changes: 25 additions & 1 deletion packages/apache_spark/_dev/build/docs/README.md
@@ -1,4 +1,4 @@
-# Apache Spark
+# Apache Spark Integration

The Apache Spark integration collects and parses data using the Jolokia Metricbeat Module.

@@ -63,6 +63,30 @@ Follow the same set of steps for Spark Worker, Driver and Executor.

## Metrics

+### Application
+
+This is the `application` data stream.
+
+{{event "application"}}
+
+{{fields "application"}}
+
+### Driver
+
+This is the `driver` data stream.
+
+{{event "driver"}}
+
+{{fields "driver"}}
+
+### Executor
+
+This is the `executor` data stream.
+
+{{event "executor"}}
+
+{{fields "executor"}}
+
### Nodes

This is the `nodes` data stream.
15 changes: 0 additions & 15 deletions packages/apache_spark/_dev/deploy/docker/Dockerfile

This file was deleted.

44 changes: 44 additions & 0 deletions packages/apache_spark/_dev/deploy/docker/application/wordcount.py
@@ -0,0 +1,44 @@
#
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#

import sys
import signal
import time

from operator import add
from datetime import datetime

from pyspark.sql import SparkSession

if __name__ == "__main__":
    if len(sys.argv) != 3:
        print("Usage: wordcount <file>", file=sys.stderr)
        sys.exit(-1)

    spark = SparkSession.builder.master(sys.argv[2]).appName("PythonWordCount").getOrCreate()

    t_end = time.time() + 60 * 15

    # Run loop for 15 mins
    while time.time() < t_end:
        lines = spark.read.text(sys.argv[1]).rdd.map(lambda r: r[0])
        counts = lines.flatMap(lambda x: x.split(" ")).map(lambda x: (x, 1)).reduceByKey(add)
        output = counts.collect()
        for (word, count) in output:
            print("%s: %i" % (word, count))

    spark.stop()
27 changes: 23 additions & 4 deletions packages/apache_spark/_dev/deploy/docker/docker-compose.yml
@@ -1,9 +1,28 @@
-version: '2'
+version: '2.3'
 services:
   apache_spark:
     hostname: apache-spark-main
-    build:
-      context: .
-      dockerfile: Dockerfile
+    image: docker.io/bitnami/spark@sha256:cb19b1bdebc0bc9dc20ea13f2109763be6a73b357b144a01efd94902540f6d27
     ports:
       - 7777
+      - 7779
+      - 7780
+    environment:
+      - SPARK_MAIN_URL=spark://apache-spark-main:7077
+      - SPARK_WORKER_MEMORY=1024G
+      - SPARK_WORKER_CORES=8
+      - SPARK_RPC_AUTHENTICATION_ENABLED=no
+      - SPARK_RPC_ENCRYPTION_ENABLED=no
+      - SPARK_LOCAL_STORAGE_ENCRYPTION_ENABLED=no
+    volumes:
+      - ./jolokia-agent:/usr/share/java/
+      - ./application:/opt/bitnami/spark/examples/src/main/python/
+      - ./jolokia-configs:/spark/conf/
+      - ./docker-entrypoint/docker-entrypoint.sh:/opt/bitnami/scripts/spark/docker-entrypoint.sh
+    healthcheck:
+      interval: 1s
+      retries: 120
+      timeout: 120s
+      test: |-
+        curl -f -s http://localhost:7777/jolokia/version -o /dev/null
+    entrypoint: /opt/bitnami/scripts/spark/docker-entrypoint.sh /opt/bitnami/scripts/spark/run.sh
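The healthcheck above only polls the Jolokia agent's version endpoint over HTTP. A rough standalone sketch of the same probe from outside the container (assuming the master agent is reachable on localhost:7777 as in this compose file; the decoded JSON fields follow Jolokia's documented version response and are not defined by this package):

package main

import (
	"encoding/json"
	"fmt"
	"net/http"
)

func main() {
	// Same endpoint the docker-compose healthcheck curls.
	resp, err := http.Get("http://localhost:7777/jolokia/version")
	if err != nil {
		panic(err)
	}
	defer resp.Body.Close()

	// Jolokia wraps responses as {"value": {...}, "status": 200}; only the
	// fields needed to confirm the agent is up are decoded here.
	var body struct {
		Value struct {
			Agent string `json:"agent"`
		} `json:"value"`
		Status int `json:"status"`
	}
	if err := json.NewDecoder(resp.Body).Decode(&body); err != nil {
		panic(err)
	}
	fmt.Printf("jolokia agent %s responded with status %d\n", body.Value.Agent, body.Status)
}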
@@ -0,0 +1,47 @@
#!/bin/bash

echo 'export SPARK_MASTER_OPTS="$SPARK_MASTER_OPTS -javaagent:/usr/share/java/jolokia-agent.jar=config=/spark/conf/jolokia-master.properties"' >> "/opt/bitnami/spark/conf/spark-env.sh"
echo 'export SPARK_WORKER_OPTS="$SPARK_WORKER_OPTS -javaagent:/usr/share/java/jolokia-agent.jar=config=/spark/conf/jolokia-worker.properties"' >> "/opt/bitnami/spark/conf/spark-env.sh"

echo '*.sink.jmx.class=org.apache.spark.metrics.sink.JmxSink' >> "/opt/bitnami/spark/conf/metrics.properties"
echo '*.source.jvm.class=org.apache.spark.metrics.source.JvmSource' >> "/opt/bitnami/spark/conf/metrics.properties"

echo 'spark.driver.extraJavaOptions -javaagent:/usr/share/java/jolokia-agent.jar=config=/spark/conf/jolokia-driver.properties' >> "/opt/bitnami/spark/conf/spark-defaults.conf"
echo 'spark.executor.extraJavaOptions -javaagent:/usr/share/java/jolokia-agent.jar=config=/spark/conf/jolokia-executor.properties' >> "/opt/bitnami/spark/conf/spark-defaults.conf"

# shellcheck disable=SC1091

set -o errexit
set -o nounset
set -o pipefail
#set -o xtrace

# Load libraries
. /opt/bitnami/scripts/libbitnami.sh
. /opt/bitnami/scripts/libspark.sh

# Load Spark environment variables
eval "$(spark_env)"

print_welcome_page

if [ ! $EUID -eq 0 ] && [ -e "$LIBNSS_WRAPPER_PATH" ]; then
echo "spark:x:$(id -u):$(id -g):Spark:$SPARK_HOME:/bin/false" > "$NSS_WRAPPER_PASSWD"
echo "spark:x:$(id -g):" > "$NSS_WRAPPER_GROUP"
echo "LD_PRELOAD=$LIBNSS_WRAPPER_PATH" >> "$SPARK_CONFDIR/spark-env.sh"
fi

if [[ "$1" = "/opt/bitnami/scripts/spark/run.sh" ]]; then
info "** Starting Spark setup **"
/opt/bitnami/scripts/spark/setup.sh
info "** Spark setup finished! **"
fi

eval "$(spark_env)"
cd /opt/bitnami/spark/sbin
./start-worker.sh $SPARK_MAIN_URL --cores $SPARK_WORKER_CORES --memory $SPARK_WORKER_MEMORY &
cd /opt/bitnami/spark/examples/src/main/python/
/opt/bitnami/spark/bin/spark-submit wordcount.py wordcount.py $SPARK_MAIN_URL &

echo ""
exec "$@"
Binary file not shown.
@@ -1,2 +1,4 @@
-[Spark-Master]
-stats: http://127.0.0.1:7777/jolokia/read
+[Spark-Master]
+stats: http://127.0.0.1:7777/jolokia/read
+[Spark-Worker]
+stats: http://127.0.0.1:7778/jolokia/read
@@ -0,0 +1,12 @@
host=0.0.0.0
port=7779
agentContext=/jolokia
backlog=100

policyLocation=file:///spark/conf/jolokia.policy
historyMaxEntries=10
debug=false
debugMaxEntries=100
maxDepth=15
maxCollectionSize=1000
maxObjects=0
@@ -0,0 +1,12 @@
host=0.0.0.0
port=7780
agentContext=/jolokia
backlog=100

policyLocation=file:///spark/conf/jolokia.policy
historyMaxEntries=10
debug=false
debugMaxEntries=100
maxDepth=15
maxCollectionSize=1000
maxObjects=0
@@ -1,12 +1,12 @@
-host=0.0.0.0
-port=7777
-agentContext=/jolokia
-backlog=100
-
-policyLocation=file:///spark/conf/jolokia.policy
-historyMaxEntries=10
-debug=false
-debugMaxEntries=100
-maxDepth=15
-maxCollectionSize=1000
-maxObjects=0
+host=0.0.0.0
+port=7777
+agentContext=/jolokia
+backlog=100
+
+policyLocation=file:///spark/conf/jolokia.policy
+historyMaxEntries=10
+debug=false
+debugMaxEntries=100
+maxDepth=15
+maxCollectionSize=1000
+maxObjects=0
@@ -0,0 +1,12 @@
host=0.0.0.0
port=7778
agentContext=/jolokia
backlog=100

policyLocation=file:///spark/conf/jolokia.policy
historyMaxEntries=10
debug=false
debugMaxEntries=100
maxDepth=15
maxCollectionSize=1000
maxObjects=0
@@ -1,13 +1,13 @@
-<?xml version="1.0" encoding="utf-8"?>
-<restrict>
-<http>
-<method>get</method>
-<method>post</method>
-</http>
-<commands>
-<command>read</command>
-<command>list</command>
-<command>search</command>
-<command>version</command>
-</commands>
-</restrict>
+<?xml version="1.0" encoding="utf-8"?>
+<restrict>
+<http>
+<method>get</method>
+<method>post</method>
+</http>
+<commands>
+<command>read</command>
+<command>list</command>
+<command>search</command>
+<command>version</command>
+</commands>
+</restrict>