This repository has been archived by the owner on Apr 19, 2021. It is now read-only.

Commit f11c342
Merge pull request #8 from weslambert/master
Logstash updates
dougburks authored Feb 27, 2018
2 parents 137ef54 + 4973980 commit f11c342
Showing 5 changed files with 100 additions and 8 deletions.
@@ -14,7 +14,7 @@ filter {
json => false
target => "domain_age"
}
if [domain_age] and [domain_age] !~ "No whois record"{
if [domain_age] and "No whois record" not in [domain_age] {
date {
match => [ "domain_age", "YYYY-MM-dd HH:mm:ss'; '",
"YYYY-MM-dd HH:mm:ss';'",
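For readers less familiar with Logstash conditionals, the change above swaps a regex test for a substring test. A minimal sketch contrasting the two forms (the tag names are illustrative only, not part of the commit):

filter {
  # "!~" is a regex non-match: true when [domain_age] does not match the pattern
  if [domain_age] and [domain_age] !~ "No whois record" {
    mutate { add_tag => [ "regex_form" ] }
  }
  # "not in" is a substring test: true when the literal string never occurs in [domain_age]
  if [domain_age] and "No whois record" not in [domain_age] {
    mutate { add_tag => [ "substring_form" ] }
  }
}

Both forms gate the date filter on events that carry a real whois timestamp rather than the "No whois record" placeholder; the substring form simply avoids treating the string as a regular expression.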
6 changes: 3 additions & 3 deletions configfiles/1100_preprocess_bro_conn.conf
@@ -30,9 +30,9 @@ filter {
#missed_bytes
#history field
rename => { "orig_pkts" => "original_packets" }
rename => { "orig_ip_bytes" => "original_ipbytes" }
rename => { "orig_ip_bytes" => "original_ip_bytes" }
rename => { "resp_pkts" => "respond_packets" }
rename => { "resp_ip_bytes" => "respond_ipbytes" }
rename => { "resp_ip_bytes" => "respond_ip_bytes" }
#tunnel_parents
rename => { "orig_cc" => "original_country_code" }
rename => { "resp_cc" => "respond_country_code" }
@@ -43,7 +43,7 @@ filter {
gsub => [ "message", "[\"']", "" ]
}
csv {
columns => ["timestamp","uid","source_ip","source_port","destination_ip","destination_port","protocol","service","duration","original_bytes","respond_bytes","connection_state","local_orig","local_respond","missed_bytes","history","original_packets","original_ipbytes","respond_packets","respond_ipbytes","tunnel_parents","original_country_code","respond_country_code","sensor_name"]
columns => ["timestamp","uid","source_ip","source_port","destination_ip","destination_port","protocol","service","duration","original_bytes","respond_bytes","connection_state","local_orig","local_respond","missed_bytes","history","original_packets","original_ip_bytes","respond_packets","respond_ip_bytes","tunnel_parents","original_country_code","respond_country_code","sensor_name"]

# If you use a custom delimiter, change the following value between the quotes to your delimiter. Otherwise, insert a literal <tab> between the two quotes on your Logstash system; use a text editor like nano that doesn't convert tabs to spaces.
separator => " "
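For context on the delimiter note above, a stripped-down version of that csv filter might look like the sketch below (column list abridged; the separator shown is a literal tab character, since escape sequences such as "\t" in quoted config strings are only honored when Logstash's config.support_escapes setting is enabled):

filter {
  csv {
    columns => [ "timestamp", "uid", "source_ip", "source_port", "destination_ip",
                 "destination_port", "protocol", "service", "duration" ]
    separator => "	"
  }
}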
64 changes: 63 additions & 1 deletion configfiles/6000_bro.conf
@@ -75,6 +75,42 @@ filter {
remove_field => ["virtual_host"]
}
}
if [x_originating_ip] == "-" {
mutate {
remove_field => ["x_originating_ip"]
}
}
if [basic_constraints_path_length] == "-" {
mutate {
remove_field => ["basic_constraints_path_length"]
}
}
if [data_channel_source_ip] == "-" {
mutate {
remove_field => ["data_channel_source_ip"]
}
}
if [data_channel_destination_ip] == "-" {
mutate {
remove_field => ["data_channel_destination_ip"]
}
}
if [desktop_width] == "-" {
mutate {
remove_field => ["desktop_width"]
}
}
if [desktop_height] == "-" {
mutate {
remove_field => ["desktop_height"]
}
}
if [height] == "-" {
mutate {
remove_field => ["height"]
}
}


# I renamed conn_uids to uid so that it is easy to pivot to all things tied to a connection
mutate {
@@ -98,12 +134,33 @@ filter {
replace => [ "missing_bytes", "0" ]
}
}
# If pverflow_bytes is set to "-" change it to 0 so it is an integer
# If overflow_bytes is set to "-" change it to 0 so it is an integer
if [overflow_bytes] == "-" {
mutate {
replace => [ "overflow_bytes", "0" ]
}
}
if [dcc_file_size] == "-" {
mutate {
replace => [ "dcc_file_size", "0" ]
}
}
if [authentication_attempts] == "-" {
mutate {
replace => [ "authentication_attempts", "0" ]
}
}
if [file_size] == "-" {
mutate {
replace => [ "file_size", "0" ]
}
}
if [original_ip_bytes] == "-" {
mutate {
replace => [ "original_ip_bytes", "0" ]
}
}

# I recommend changing the field types below to integers or floats so searches can do greater than or less than
# and also so math functions can be run against them
mutate {
@@ -137,6 +194,11 @@
convert => [ "total_bytes", "integer" ]
convert => [ "trans_depth", "integer" ]
convert => [ "transaction_id", "integer" ]
# convert the following boolean to text for now
convert => [ "local_respond", "string" ]
convert => [ "tc", "string" ]
convert => [ "is_orig", "string" ]
convert => [ "local_orig", "string" ]
lowercase => [ "query" ]
#remove_field => [ "timestamp" ]
}
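The blocks added above follow a common Bro normalization pattern: fields that Bro logs as "-" are either removed (IP addresses, screen dimensions) or zeroed (byte and size counters) before the type conversion further down, so Elasticsearch can map them as numbers. A generic sketch of the zero-then-convert half of the pattern, using a hypothetical field name:

filter {
  # "example_bytes" is a placeholder, not a field from the commit
  if [example_bytes] == "-" {
    mutate { replace => [ "example_bytes", "0" ] }   # neutralize Bro's unset marker
  }
  mutate {
    convert => [ "example_bytes", "integer" ]        # now safe to index as a long
  }
}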
26 changes: 24 additions & 2 deletions etc/logstash/logstash-template.json
@@ -181,6 +181,9 @@
"basic_constraints_path_length":{
"type":"long"
},
"bound_port":{
"type":"long"
},
"call_id":{
"type":"text",
"fields":{
@@ -843,7 +846,12 @@
}
},
"domain_age":{
"type":"long"
"type":"text",
"fields":{
"keyword":{
"type":"keyword"
}
}
},
"domain_name":{
"type":"text",
@@ -1882,6 +1890,9 @@
}
}
},
"original_bytes":{
"type":"long"
},
"original_country_code":{
"type":"text",
"fields":{
@@ -1890,6 +1901,9 @@
}
}
},
"original_ip_bytes":{
"type":"long"
},
"original_packets":{
"type":"long"
},
@@ -2276,6 +2290,14 @@
}
}
},
"request_port":{
"type":"text",
"fields":{
"keyword":{
"type":"keyword"
}
}
},
"request_timestamp":{
"type":"text",
"fields":{
@@ -2343,7 +2365,7 @@
"respond_bytes":{
"type":"long"
},
"respond_ipbytes":{
"respond_ip_bytes":{
"type":"long"
},
"respond_packets":{
10 changes: 9 additions & 1 deletion usr/sbin/so-elastic-start-logstash
@@ -4,12 +4,20 @@

. /etc/nsm/securityonion.conf

CUSTOM_CONF='/etc/logstash/custom'

if [ "$LOGSTASH_ENABLED" = "yes" ]; then
echo -n "so-logstash: "
if docker ps | grep -q so-logstash; then
echo "Already started!"
else
if [ "$FREQ_SERVER_ENABLED" = "yes" ]; then
if [ -d $CUSTOM_CONF ]; then
rsync $CUSTOM_CONF/*.conf /etc/logstash/conf.d >/dev/null 2>&1
rsync $CUSTOM_CONF/*.json /etc/logstash/ >/dev/null 2>&1
else
mkdir $CUSTOM_CONF
fi
if [ "$FREQ_SERVER_ENABLED" = "yes" ]; then
cp -f /etc/logstash/optional/*_postprocess_freq_analysis_*.conf /etc/logstash/conf.d/
fi
if [ "$FREQ_SERVER_ENABLED" = "no" ]; then
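The new CUSTOM_CONF logic rsyncs any *.conf files from /etc/logstash/custom into /etc/logstash/conf.d, and any *.json files into /etc/logstash/, whenever so-elastic-start-logstash brings the container up (creating the directory on first run). As a usage illustration, a local pipeline tweak could be dropped in as a file like the hypothetical one below; the file name and filter contents are examples only:

# /etc/logstash/custom/9999_custom_tag.conf (hypothetical)
filter {
  if [sensor_name] == "example-sensor" {
    mutate { add_tag => [ "custom_tagged" ] }
  }
}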
