Skip to content

Commit

Permalink
Merge pull request #390 from Security-Onion-Solutions/2.4/dev
Browse files Browse the repository at this point in the history
2.4.60
  • Loading branch information
TOoSmOotH authored Mar 20, 2024
2 parents eecd305 + 12b4ebb commit 9e439b9
Show file tree
Hide file tree
Showing 98 changed files with 14,598 additions and 684 deletions.
11 changes: 8 additions & 3 deletions Dockerfile
Original file line number Diff line number Diff line change
Expand Up @@ -6,7 +6,7 @@

FROM ghcr.io/security-onion-solutions/golang:1.21.5-alpine as builder
ARG VERSION=0.0.0
RUN apk update && apk add libpcap-dev bash git musl-dev gcc npm python3 py3-pip py3-virtualenv
RUN apk update && apk add libpcap-dev bash git musl-dev gcc npm python3 py3-pip py3-virtualenv python3-dev openssl-dev linux-headers
COPY . /build
WORKDIR /build
RUN if [ "$VERSION" != "0.0.0" ]; then mkdir gitdocs && cd gitdocs && \
Expand All @@ -22,6 +22,9 @@ RUN if [ "$VERSION" != "0.0.0" ]; then mkdir gitdocs && cd gitdocs && \
RUN npm install jest jest-environment-jsdom --global
RUN ./build.sh "$VERSION"

RUN pip3 install sigma-cli pysigma-backend-elasticsearch pysigma-pipeline-windows yara-python --break-system-packages
RUN sed -i 's/#!\/usr\/bin\/python3/#!\/usr\/bin\/env python/g' /usr/bin/sigma

FROM ghcr.io/security-onion-solutions/python:3-slim

ARG UID=939
Expand All @@ -30,8 +33,8 @@ ARG VERSION=0.0.0
ARG ELASTIC_VERSION=0.0.0
ARG WAZUH_VERSION=0.0.0

RUN apt update -y
RUN apt install -y bash tzdata ca-certificates wget curl tcpdump unzip tshark
RUN apt update -y
RUN apt install -y bash tzdata ca-certificates wget curl tcpdump unzip
RUN update-ca-certificates
RUN addgroup --gid "$GID" socore
RUN adduser --disabled-password --uid "$UID" --ingroup socore --gecos '' socore
Expand All @@ -46,6 +49,8 @@ COPY --from=builder /build/LICENSE .
COPY --from=builder /build/README.md .
COPY --from=builder /build/sensoroni.json .
COPY --from=builder /build/gitdocs/_build/html ./html/docs
COPY --from=builder /usr/lib/python3.11/site-packages /usr/local/lib/python3.9/site-packages
COPY --from=builder /usr/bin/sigma /usr/bin/sigma
RUN find html/js -name "*test*.js" -delete
RUN chmod u+x scripts/*
RUN chown 939:939 scripts/*
Expand Down
26 changes: 14 additions & 12 deletions Dockerfile.kratos
Original file line number Diff line number Diff line change
Expand Up @@ -4,14 +4,17 @@
# https://securityonion.net/license; you may not use this file except in compliance with the
# Elastic License 2.0.

FROM ghcr.io/security-onion-solutions/golang:alpine AS builder
FROM ghcr.io/security-onion-solutions/golang:1.21 AS builder

ARG OWNER=ory
ARG VERSION=v1.0.0
ARG VERSION=v1.1.0

RUN addgroup --system ory; \
adduser --system ory --no-create-home --disabled-password --ingroup ory --disabled-login

RUN apt-get update && apt-get upgrade -y && apt-get install -y git gcc bash
RUN mkdir -p /var/lib/sqlite

RUN addgroup -S ory; \
adduser -S ory -G ory -D -H -s /bin/nologin
RUN apk -U --no-cache add build-base git gcc bash

RUN mkdir -p /go/src/github.com/$OWNER
WORKDIR /go/src/github.com/$OWNER
Expand All @@ -23,23 +26,22 @@ RUN git checkout $VERSION

ENV GO111MODULE on
ENV CGO_ENABLED 1
ENV CGO_CPPFLAGS -DSQLITE_DEFAULT_FILE_PERMISSIONS=0600

RUN go mod download
RUN go build -tags sqlite -a
RUN go build -tags sqlite -ldflags="-X 'github.com/ory/kratos/driver/config.Version=${VERSION}' -X 'github.com/ory/kratos/driver/config.Date=$(date -I)' -X 'github.com/ory/kratos/driver/config.Commit=$(git rev-parse --short HEAD)'"


FROM ghcr.io/security-onion-solutions/alpine:latest
FROM ghcr.io/security-onion-solutions/ubuntu:23.04

ENV DSN=sqlite:///kratos-data/db.sqlite?_fk=true

ARG UID=928
ARG GID=928
ARG OWNER=ory

RUN addgroup --gid "$GID" -S kratos; \
adduser -u "$UID" -S kratos -G kratos -D -H -s /bin/nologin
RUN apk add -U --no-cache ca-certificates

RUN [ ! -e /etc/nsswitch.conf ] && echo 'hosts: files dns' > /etc/nsswitch.conf
RUN groupadd --system kratos --gid "$GID" ; \
useradd --system kratos --no-create-home -g kratos --uid "$UID"

RUN echo "#!/bin/sh" > /start-kratos.sh
RUN echo "kratos -c /kratos-conf/kratos.yaml migrate sql -e --yes >> /kratos-log/kratos-migrate.log 2>&1" >> /start-kratos.sh
Expand Down
12 changes: 6 additions & 6 deletions agent/jobmanager.go
Original file line number Diff line number Diff line change
Expand Up @@ -101,14 +101,14 @@ func (mgr *JobManager) ProcessJob(job *model.Job) (io.ReadCloser, error) {
defer mgr.lock.RUnlock()
var reader io.ReadCloser
var err error

job.Size = 0
for _, processor := range mgr.jobProcessors {
reader, err = processor.ProcessJob(job, reader)
if err != nil {
log.WithError(err).WithFields(log.Fields{
"jobId": job.Id,
}).Error("Failed to process job; job processing aborted")
break
}
}
if err != nil && reader != nil {
// Don't fail all processors if at least one provided some data.
err = nil
}
return reader, err
}
Expand Down
37 changes: 33 additions & 4 deletions agent/jobmanager_test.go
Original file line number Diff line number Diff line change
@@ -1,3 +1,8 @@
// Copyright 2020-2023 Security Onion Solutions LLC and/or licensed to Security Onion Solutions LLC under one
// or more contributor license agreements. Licensed under the Elastic License 2.0 as shown at
// https://securityonion.net/license; you may not use this file except in compliance with the
// Elastic License 2.0.

package agent

import (
Expand Down Expand Up @@ -46,10 +51,14 @@ func (jp *idJobProcessor) GetDataEpoch() time.Time {
}

// panicProcessor is a JobProcessor that always returns an error.
type panicProcessor struct{}
// panicProcessor is a test JobProcessor whose ProcessJob always fails with a
// configurable error message, while counting how many times it was invoked.
type panicProcessor struct {
	processCount int    // number of times ProcessJob has been called
	errorString  string // message for the error returned by ProcessJob
}

// ProcessJob records the invocation and always fails with the configured
// error message. The incoming reader is passed through unchanged so earlier
// processors' output (if any) remains observable to the caller.
// Note: the stale duplicated `return reader, errors.New("panic")` line (diff
// residue) is removed here — it made the counter increment unreachable and
// ignored errorString.
func (jp *panicProcessor) ProcessJob(job *model.Job, reader io.ReadCloser) (io.ReadCloser, error) {
	jp.processCount++
	return reader, errors.New(jp.errorString)
}

func (jp *panicProcessor) CleanupJob(*model.Job) {}
Expand All @@ -64,7 +73,7 @@ func TestProcessJob(t *testing.T) {
jm := &JobManager{}

jm.AddJobProcessor(&idJobProcessor{})
jm.AddJobProcessor(&panicProcessor{})
jm.AddJobProcessor(&panicProcessor{errorString: "panic"})

// prep model
job := &model.Job{
Expand All @@ -79,7 +88,27 @@ func TestProcessJob(t *testing.T) {
assert.NoError(t, rerr)

assert.Equal(t, "101", string(data))
assert.ErrorContains(t, err, "panic")
assert.Nil(t, err)
}

// TestProcessJobContinuesIfNoDataAvailable verifies that when a processor
// fails without producing any data, the manager still runs every registered
// processor and surfaces the failure to the caller.
func TestProcessJobContinuesIfNoDataAvailable(t *testing.T) {
	manager := &JobManager{}

	// Register the same failing processor twice so its call counter
	// reflects how many processor slots were exercised.
	failing := panicProcessor{errorString: "No data available"}
	for i := 0; i < 2; i++ {
		manager.AddJobProcessor(&failing)
	}

	testJob := &model.Job{Id: 101}

	_, err := manager.ProcessJob(testJob)

	assert.Equal(t, 2, failing.processCount)
	assert.ErrorContains(t, err, "No data available")
}

func TestUpdateDataEpoch(t *testing.T) {
Expand Down
25 changes: 24 additions & 1 deletion agent/modules/importer/importer.go
Original file line number Diff line number Diff line change
Expand Up @@ -113,9 +113,32 @@ func (importer *Importer) ProcessJob(job *model.Job, reader io.ReadCloser) (io.R
}).Debug("Executed tcpdump")
if err == nil {
var file *os.File
var info os.FileInfo
file, err = os.Open(pcapOutputFilepath)
if err == nil {
reader = file
info, err = os.Stat(pcapOutputFilepath)
if err != nil {
log.WithError(err).WithFields(log.Fields{
"pcapPath": pcapOutputFilepath,
}).Error("Failed to collect output file stats")
} else {
size := int(info.Size())
log.WithFields(log.Fields{
"pcapPath": pcapOutputFilepath,
"pcapSize": size,
"jobSize": job.Size,
}).Debug("Found matching packets")
if job.Size > size {
log.Warn("Discarding Importer job output since existing job already has more content from another processor")
} else {
job.Size = size
reader = file
log.WithFields(log.Fields{
"pcapStreamErr": err,
"pcapStreamSize": size,
}).Debug("Finished processing PCAP via Importer")
}
}
}
}
}
Expand Down
24 changes: 23 additions & 1 deletion agent/modules/stenoquery/stenoquery.go
Original file line number Diff line number Diff line change
Expand Up @@ -134,9 +134,31 @@ func (steno *StenoQuery) ProcessJob(job *model.Job, reader io.ReadCloser) (io.Re
}).Debug("Executed stenoread")
if err == nil {
var file *os.File
var info os.FileInfo
file, err = os.Open(pcapFilepath)
if err == nil {
reader = file
info, err = os.Stat(pcapFilepath)
if err != nil {
log.WithError(err).WithFields(log.Fields{
"pcapPath": pcapFilepath,
}).Error("Failed to collect output file stats")
} else {
size := int(info.Size())
log.WithFields(log.Fields{
"pcapPath": pcapFilepath,
"pcapBytes": size,
}).Debug("Found matching packets")
if job.Size > size {
log.Warn("Discarding Stenographer job output since existing job already has more content from another processor")
} else {
job.Size = size
reader = file
log.WithFields(log.Fields{
"pcapStreamErr": err,
"pcapStreamSize": size,
}).Debug("Finished processing PCAP via Stenographer")
}
}
}
}
}
Expand Down
Loading

0 comments on commit 9e439b9

Please sign in to comment.