Skip to content

Commit

Permalink
Add better data capture from suricata unix streams (#45)
Browse files Browse the repository at this point in the history
  • Loading branch information
AHarmlessPyro authored Oct 21, 2022
1 parent 31a8e05 commit a992809
Show file tree
Hide file tree
Showing 6 changed files with 159 additions and 54 deletions.
14 changes: 7 additions & 7 deletions backend/src/suricata_setup/generics/scripts/suricata.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -62,7 +62,7 @@ default-log-dir: /etc/suricata-logs

# Global stats configuration
stats:
enabled: yes
enabled: no
# The interval field (in seconds) controls the interval at
# which stats are updated in the log.
interval: 8
Expand Down Expand Up @@ -153,12 +153,12 @@ outputs:

types:
- alert:
payload: yes # enable dumping payload in Base64
payload-buffer-size: 4kb # max size of payload buffer to output in eve-log
payload-printable: yes # enable dumping payload in printable (lossy) format
packet: yes # enable dumping of packet (without stream segments)
payload: no # enable dumping payload in Base64
payload-buffer-size: 1000kb # max size of payload buffer to output in eve-log
payload-printable: no # enable dumping payload in printable (lossy) format
packet: no # enable dumping of packet (without stream segments)
# metadata: yes # enable inclusion of app layer metadata with alert. Default yes
http-body: yes # Requires metadata; enable dumping of HTTP body in Base64
http-body: no # Requires metadata; enable dumping of HTTP body in Base64
http-body-printable: yes # Requires metadata; enable dumping of HTTP body in printable format

# Enable the logging of tagged packets for rules using the
Expand Down Expand Up @@ -221,7 +221,7 @@ outputs:
# to dump all HTTP headers for every HTTP request and/or response
dump-all-headers: both
payload: yes # enable dumping payload in Base64
payload-buffer-size: 4kb # max size of payload buffer to output in eve-log
payload-buffer-size: 1000kb # max size of payload buffer to output in eve-log
payload-printable: yes # enable dumping payload in printable (lossy) format
packet: yes # enable dumping of packet (without stream segments)
# metadata: yes # enable inclusion of app layer metadata with alert. Default yes
Expand Down
99 changes: 64 additions & 35 deletions ingestors/suricata/index.ts
Original file line number Diff line number Diff line change
@@ -1,13 +1,19 @@
import net from "net"
import fs from "fs"
import process from "process"
import { conns, EVENTS, ALERT } from "./interface"
import { conns, EVENTS, ALERT, RecordHolderWithTimestamp } from "./interface"
import { program } from "commander"
import { prepareResponse, compileHost, pushAlert } from "./utils"
import ndjson from "ndjson"

var server: net.Server
var connections: Record<number, net.Socket> = {}
var http_meta: Record<string, conns> = {}
var http_meta: Record<string, RecordHolderWithTimestamp<conns>> = {}
var alerts: Record<string, RecordHolderWithTimestamp<ALERT>> = {}
const msSendTimeout = 10
// Just offset by enough to not conflict with timeout
const msCleanupTimeout = 10011


var url = ""
var api_key = ""
Expand All @@ -29,40 +35,33 @@ function createServer(socket: string) {
delete connections[self]
})

// Messages are buffers. use toString
stream.on("data", function (msg) {
msg
.toString()
.split("\n")
.filter(_msg => !!_msg)
.forEach((_msg, i, a) => {
try {
const jsonmsg = JSON.parse(_msg)
if (EVENTS.HTTP === (jsonmsg["event_type"] as string)) {
compileHost(jsonmsg, http_meta)
}
if (EVENTS.ALERT === (jsonmsg["event_type"] as string)) {
// compileAlert(jsonmsg);
const alert: ALERT = jsonmsg
// Get first metadata for the given connection.
let meta = http_meta[alert.flow_id].metas.shift()
if (meta) {
let resp = prepareResponse(alert, meta)
pushAlert(resp, url, api_key)
}
}
} catch (err) {
console.log(
`/////////////////// ERROR ///////////////////`,
)
console.log(err)
console.log(
`/////////////////// MESSAGE ///////////////////`,
)
console.log(_msg)
stream.pipe(ndjson.parse())
.on('data', function (obj) {
// obj is a javascript object
try {

const jsonmsg = obj
const flow_id = jsonmsg.flow_id

if (EVENTS.HTTP === (jsonmsg["event_type"] as string)) {
compileHost(jsonmsg, http_meta)
}
})
})
if (EVENTS.ALERT === (jsonmsg["event_type"] as string)) {
alerts[flow_id] = { value: jsonmsg, timestamp: Date.now() }
}
} catch (err) {
console.log(
`/////////////////// ERROR ///////////////////`,
)
console.log(err)
console.log(
`/////////////////// MESSAGE ///////////////////`,
)
console.log(JSON.stringify(obj))
}
})


})
.listen(socket)
.on("connection", function (socket) {
Expand Down Expand Up @@ -128,6 +127,36 @@ function main() {
process.on("SIGINT", cleanup)
}

function processAlerts() {
    // Pair buffered HTTP metadata with buffered alert events that share the
    // same suricata flow_id, then forward each matched pair to the collector.
    // Runs on a short interval (msSendTimeout) because the HTTP event and the
    // alert for a flow arrive independently on the unix stream.
    Object.entries(http_meta).forEach(([flow_id, meta_holder]) => {
        // Only act when both halves (metadata + alert) are present.
        if (!(flow_id in alerts)) {
            return
        }

        // Get first metadata for the given connection. Events on a pipelined
        // connection are stored sequentially, so shift() preserves order.
        const curr_http = meta_holder.value.metas.shift()
        if (!curr_http) {
            // No metadata buffered yet for this flow — keep the alert so a
            // later HTTP event (or the periodic cleanup) can handle it.
            // (The previous code passed `undefined` to prepareResponse here.)
            return
        }

        // Consume the alert only once we know we can build a response,
        // so an alert is never dropped while its metadata is still in flight.
        const curr_alert = alerts[flow_id].value
        delete alerts[flow_id]

        const resp = prepareResponse(curr_alert, curr_http)
        pushAlert(resp, url, api_key)
    })
}

function cleanup() {
let new_meta = {}
Object.entries(http_meta).filter((([k, v]) => ((Date.now() - v.timestamp) > msCleanupTimeout))).forEach(([k, v]) => { new_meta[k] = v })
http_meta = new_meta

let new_alerts = {}
Object.entries(alerts).filter((([k, v]) => ((Date.now() - v.timestamp) > msCleanupTimeout))).forEach(([k, v]) => { new_meta[k] = v })
alerts = new_alerts
}

setInterval(processAlerts, msSendTimeout)
setInterval(cleanup, msCleanupTimeout)

process.title = "METLO"

main()
5 changes: 5 additions & 0 deletions ingestors/suricata/interface.ts
Original file line number Diff line number Diff line change
Expand Up @@ -121,3 +121,8 @@ export interface RESPONSE {
destinationPort: number
}
}

// Wrapper pairing a buffered record with the wall-clock time at which it was
// stored, so periodic cleanup can evict entries whose counterpart event
// never arrived.
export interface RecordHolderWithTimestamp<T> {
// The buffered payload (e.g. per-flow HTTP metadata or an alert event).
value: T
// Milliseconds since epoch (Date.now()) when the entry was created or last
// updated; compared against a TTL to decide eviction.
timestamp: number
}
3 changes: 2 additions & 1 deletion ingestors/suricata/package.json
Original file line number Diff line number Diff line change
Expand Up @@ -8,6 +8,7 @@
"@types/node": "^18.6.5",
"axios": "^0.27.2",
"commander": "^9.4.0",
"ndjson": "^2.0.0",
"ts-node": "^10.9.1",
"tsc": "^2.0.4",
"typescript": "^4.7.4"
Expand All @@ -20,4 +21,4 @@
"build": "tsc",
"format": "prettier --write '**/*.{ts,tsx}'"
}
}
}
26 changes: 15 additions & 11 deletions ingestors/suricata/utils.ts
Original file line number Diff line number Diff line change
@@ -1,7 +1,7 @@
import { ALERT, RESPONSE, HOST, conns } from "./interface"
import { ALERT, RESPONSE, HOST, conns, RecordHolderWithTimestamp } from "./interface"
import axios from "axios"

export function compileHost(jsonmsg: any, http_meta: Record<string, conns>) {
export function compileHost(jsonmsg: any, http_meta: Record<string, RecordHolderWithTimestamp<conns>>) {
// Get Response and Request headers from json blob
const resp_headers = jsonmsg.http.response_headers
const req_headers = jsonmsg.http.request_headers
Expand All @@ -13,24 +13,28 @@ export function compileHost(jsonmsg: any, http_meta: Record<string, conns>) {
// store flow metadata. Each connection in a pipeline comes sequentially
// Store them sequentially
if (host.flow_id in http_meta) {
http_meta[host.flow_id].metas.push({
http_meta[host.flow_id].value.metas.push({
timestamp: host.timestamp,
metadata: host,
})
http_meta[host.flow_id].timestamp = Date.now()
} else {
http_meta[host.flow_id] = {
flowId: host.flow_id,
metas: [
{
timestamp: host.timestamp,
metadata: host,
},
],
timestamp: Date.now(),
value: {
flowId: host.flow_id,
metas: [
{
timestamp: host.timestamp,
metadata: host,
},
]
},
}
}
}

export function pushAlert(resp: RESPONSE, url: string, api_key: string) {
export async function pushAlert(resp: RESPONSE, url: string, api_key: string) {
axios
.post(url, {
...resp,
Expand Down
66 changes: 66 additions & 0 deletions ingestors/suricata/yarn.lock
Original file line number Diff line number Diff line change
Expand Up @@ -121,6 +121,16 @@ form-data@^4.0.0:
combined-stream "^1.0.8"
mime-types "^2.1.12"

inherits@^2.0.3:
version "2.0.4"
resolved "https://registry.yarnpkg.com/inherits/-/inherits-2.0.4.tgz#0fa2c64f932917c3433a0ded55363aae37416b7c"
integrity sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ==

json-stringify-safe@^5.0.1:
version "5.0.1"
resolved "https://registry.yarnpkg.com/json-stringify-safe/-/json-stringify-safe-5.0.1.tgz#1296a2d58fd45f19a0f6ce01d65701e2c735b6eb"
integrity sha512-ZClg6AaYvamvYEE82d3Iyd3vSSIjQ+odgjaTzRuO3s7toCdFKczob2i0zCh7JE8kWn17yvAWhUVxvqGwUalsRA==

make-error@^1.1.1:
version "1.3.6"
resolved "https://registry.yarnpkg.com/make-error/-/make-error-1.3.6.tgz#2eb2e37ea9b67c4891f684a1394799af484cf7a2"
Expand All @@ -138,11 +148,62 @@ mime-types@^2.1.12:
dependencies:
mime-db "1.52.0"

minimist@^1.2.5:
version "1.2.7"
resolved "https://registry.yarnpkg.com/minimist/-/minimist-1.2.7.tgz#daa1c4d91f507390437c6a8bc01078e7000c4d18"
integrity sha512-bzfL1YUZsP41gmu/qjrEk0Q6i2ix/cVeAhbCbqH9u3zYutS1cLg00qhrD0M2MVdCcx4Sc0UpP2eBWo9rotpq6g==

ndjson@^2.0.0:
version "2.0.0"
resolved "https://registry.yarnpkg.com/ndjson/-/ndjson-2.0.0.tgz#320ac86f6fe53f5681897349b86ac6f43bfa3a19"
integrity sha512-nGl7LRGrzugTtaFcJMhLbpzJM6XdivmbkdlaGcrk/LXg2KL/YBC6z1g70xh0/al+oFuVFP8N8kiWRucmeEH/qQ==
dependencies:
json-stringify-safe "^5.0.1"
minimist "^1.2.5"
readable-stream "^3.6.0"
split2 "^3.0.0"
through2 "^4.0.0"

prettier@^2.7.1:
version "2.7.1"
resolved "https://registry.yarnpkg.com/prettier/-/prettier-2.7.1.tgz#e235806850d057f97bb08368a4f7d899f7760c64"
integrity sha512-ujppO+MkdPqoVINuDFDRLClm7D78qbDt0/NR+wp5FqEZOoTNAjPHWj17QRhu7geIHJfcNhRk1XVQmF8Bp3ye+g==

readable-stream@3, readable-stream@^3.0.0, readable-stream@^3.6.0:
version "3.6.0"
resolved "https://registry.yarnpkg.com/readable-stream/-/readable-stream-3.6.0.tgz#337bbda3adc0706bd3e024426a286d4b4b2c9198"
integrity sha512-BViHy7LKeTz4oNnkcLJ+lVSL6vpiFeX6/d3oSH8zCW7UxP2onchk+vTGB143xuFjHS3deTgkKoXXymXqymiIdA==
dependencies:
inherits "^2.0.3"
string_decoder "^1.1.1"
util-deprecate "^1.0.1"

safe-buffer@~5.2.0:
version "5.2.1"
resolved "https://registry.yarnpkg.com/safe-buffer/-/safe-buffer-5.2.1.tgz#1eaf9fa9bdb1fdd4ec75f58f9cdb4e6b7827eec6"
integrity sha512-rp3So07KcdmmKbGvgaNxQSJr7bGVSVk5S9Eq1F+ppbRo70+YeaDxkw5Dd8NPN+GD6bjnYm2VuPuCXmpuYvmCXQ==

split2@^3.0.0:
version "3.2.2"
resolved "https://registry.yarnpkg.com/split2/-/split2-3.2.2.tgz#bf2cf2a37d838312c249c89206fd7a17dd12365f"
integrity sha512-9NThjpgZnifTkJpzTZ7Eue85S49QwpNhZTq6GRJwObb6jnLFNGB7Qm73V5HewTROPyxD0C29xqmaI68bQtV+hg==
dependencies:
readable-stream "^3.0.0"

string_decoder@^1.1.1:
version "1.3.0"
resolved "https://registry.yarnpkg.com/string_decoder/-/string_decoder-1.3.0.tgz#42f114594a46cf1a8e30b0a84f56c78c3edac21e"
integrity sha512-hkRX8U1WjJFd8LsDJ2yQ/wWWxaopEsABU1XfkM8A+j0+85JAGppt16cr1Whg6KIbb4okU6Mql6BOj+uup/wKeA==
dependencies:
safe-buffer "~5.2.0"

through2@^4.0.0:
version "4.0.2"
resolved "https://registry.yarnpkg.com/through2/-/through2-4.0.2.tgz#a7ce3ac2a7a8b0b966c80e7c49f0484c3b239764"
integrity sha512-iOqSav00cVxEEICeD7TjLB1sueEL+81Wpzp2bY17uZjZN0pWZPuo4suZ/61VujxmqSGFfgOcNuTZ85QJwNZQpw==
dependencies:
readable-stream "3"

ts-node@^10.9.1:
version "10.9.1"
resolved "https://registry.yarnpkg.com/ts-node/-/ts-node-10.9.1.tgz#e73de9102958af9e1f0b168a6ff320e25adcff4b"
Expand Down Expand Up @@ -172,6 +233,11 @@ typescript@^4.7.4:
resolved "https://registry.yarnpkg.com/typescript/-/typescript-4.7.4.tgz#1a88596d1cf47d59507a1bcdfb5b9dfe4d488235"
integrity sha512-C0WQT0gezHuw6AdY1M2jxUO83Rjf0HP7Sk1DtXj6j1EwkQNZrHAg2XPWlq62oqEhYvONq5pkC2Y9oPljWToLmQ==

util-deprecate@^1.0.1:
version "1.0.2"
resolved "https://registry.yarnpkg.com/util-deprecate/-/util-deprecate-1.0.2.tgz#450d4dc9fa70de732762fbd2d4a28981419a0ccf"
integrity sha512-EPD5q1uXyFxJpCrLnCc1nHnq3gOa6DZBocAIiI2TaSCA7VCJ1UJDMagCzIkXNsUYfD1daK//LTEQ8xiIbrHtcw==

v8-compile-cache-lib@^3.0.1:
version "3.0.1"
resolved "https://registry.yarnpkg.com/v8-compile-cache-lib/-/v8-compile-cache-lib-3.0.1.tgz#6336e8d71965cb3d35a1bbb7868445a7c05264bf"
Expand Down

0 comments on commit a992809

Please sign in to comment.