Added a lot of notebook and Docker configuration files
logstash config
This commit is contained in:
41
dockerfiles/elk/docker-compose.yml
Normal file
41
dockerfiles/elk/docker-compose.yml
Normal file
@@ -0,0 +1,41 @@
|
||||
version: '2'
|
||||
services:
|
||||
elasticsearch:
|
||||
image:
|
||||
elasticsearch
|
||||
container_name: elasticsearch
|
||||
hostname: elasticsearch
|
||||
ports:
|
||||
- "9200:9200"
|
||||
- "9300:9300"
|
||||
volumes:
|
||||
- elastic-data:/usr/share/elasticsearch/data
|
||||
environment:
|
||||
- bootstrap.memory_lock=true
|
||||
- "ES_JAVA_OPTS=-Xms2g -Xmx2g"
|
||||
ulimits:
|
||||
memlock:
|
||||
soft: -1
|
||||
hard: -1
|
||||
|
||||
networks:
|
||||
- elk
|
||||
|
||||
kibana:
|
||||
image:
|
||||
kibana
|
||||
hostname: kibana
|
||||
ports:
|
||||
- "5601:5601"
|
||||
networks:
|
||||
- elk
|
||||
depends_on:
|
||||
- elasticsearch
|
||||
|
||||
networks:
|
||||
elk:
|
||||
driver: bridge
|
||||
|
||||
volumes:
|
||||
elastic-data:
|
||||
driver: local
|
||||
32
dockerfiles/logstash/configs/as_dns.conf
Normal file
32
dockerfiles/logstash/configs/as_dns.conf
Normal file
@@ -0,0 +1,32 @@
|
||||
input {
|
||||
# tcp {
|
||||
# port => 5000
|
||||
# type => syslog
|
||||
# }
|
||||
# udp {
|
||||
# port => 5000
|
||||
# type => syslog
|
||||
# }
|
||||
file{
|
||||
type => as_csv
|
||||
path => "/tmp/logfile"
|
||||
# path => "/data/*"
|
||||
start_position => "beginning"
|
||||
}
|
||||
}
|
||||
filter {
|
||||
csv{
|
||||
columns => ["endTime","Name","requestUrl","sourceAddress","destinationAddress","priority","Device Vendor","Device Product","Non-CEF Raw Message"]
|
||||
}
|
||||
grok { match => { "message" => ["%{DATESTAMP:datum}"] } }
|
||||
date { match => [ "datum","yy/MM/dd HH:mm:ss" ] timezone => "CET" }
|
||||
|
||||
}
|
||||
output {
|
||||
elasticsearch {
|
||||
hosts => ["elasticsearch:9200"]
|
||||
index => "arcsight"
|
||||
# index => "arcsight-%{+YYYY.MM.dd}"
|
||||
}
|
||||
# stdout { codec => rubydebug }
|
||||
}
|
||||
90
dockerfiles/logstash/configs/bwi.conf
Normal file
90
dockerfiles/logstash/configs/bwi.conf
Normal file
@@ -0,0 +1,90 @@
|
||||
input {
|
||||
# tcp {
|
||||
# port => 5000
|
||||
# type => syslog
|
||||
# }
|
||||
# udp {
|
||||
# port => 5000
|
||||
# type => syslog
|
||||
# }
|
||||
file{
|
||||
type => syslog
|
||||
path => "/data/*"
|
||||
start_position => "beginning"
|
||||
}
|
||||
}
|
||||
filter {
|
||||
if [type] == "syslog" {
|
||||
syslog_pri{}
|
||||
grok {
|
||||
match => { "message" => [
|
||||
"%{SYSLOGTIMESTAMP:syslog_timestamp} (%{SYSLOGHOST:syslog_hostname} |)%{DATA:syslog_program}(?:\[%{POSINT:syslog_pid}\])?: %{GREEDYDATA:syslog_message}",
|
||||
"%{SYSLOGTIMESTAMP:syslog_timestamp} (%{SYSLOGHOST:syslog_hostname} |)%{GREEDYDATA:syslog_message}"
|
||||
]
|
||||
}
|
||||
add_field => [ "received_at", "%{@timestamp}" ]
|
||||
add_field => [ "received_from", "%{host}" ]
|
||||
}
|
||||
date {
|
||||
match => [ "syslog_timestamp", "MMM d HH:mm:ss", "MMM dd HH:mm:ss" ]
|
||||
}
|
||||
mutate {
|
||||
# replace => [ "message", "%{syslog_message}" ]
|
||||
rename => [ "syslog_message", "message" ]
|
||||
}
|
||||
}
|
||||
if [syslog_program] == "mwg"{
|
||||
kv{
|
||||
field_split => "|"
|
||||
transform_key => "lowercase"
|
||||
trim_value => '"'
|
||||
target => "mwg"
|
||||
add_tag => "mwg"
|
||||
remove_field => "message"
|
||||
include_keys => [
|
||||
"devtime",
|
||||
"referer",
|
||||
"httpmethod",
|
||||
"bytes",
|
||||
"mediatype",
|
||||
"useragent",
|
||||
"blockreason",
|
||||
"url",
|
||||
"dst",
|
||||
"httpstatus",
|
||||
"src",
|
||||
"urlcategories",
|
||||
"usrname"
|
||||
]
|
||||
}
|
||||
mutate{
|
||||
convert => { "mwg[bytes]" => "integer" }
|
||||
}
|
||||
date{
|
||||
match => ["mwg[devtime]" , "UNIX_MS"]
|
||||
# target => "mwg[timestamp]"
|
||||
}
|
||||
useragent {
|
||||
source => "mwg[useragent]"
|
||||
target => "mwg[ua]"
|
||||
}
|
||||
geoip {
|
||||
source => "mwg[dst]"
|
||||
target => "mwg[dst_geo]"
|
||||
}
|
||||
# geoip {
|
||||
# source => "mwg[src]"
|
||||
# target => "mwg[src_geo]"
|
||||
# }
|
||||
|
||||
|
||||
# mutate {
|
||||
# split => { "syslog_message" => "|" }
|
||||
# }
|
||||
|
||||
}
|
||||
}
|
||||
output {
|
||||
elasticsearch { hosts => ["elasticsearch:9200"] }
|
||||
# stdout { codec => rubydebug }
|
||||
}
|
||||
28
dockerfiles/logstash/configs/first.conf
Normal file
28
dockerfiles/logstash/configs/first.conf
Normal file
@@ -0,0 +1,28 @@
|
||||
input {
|
||||
tcp {
|
||||
port => 514
|
||||
type => syslog
|
||||
}
|
||||
udp {
|
||||
port => 514
|
||||
type => syslog
|
||||
}
|
||||
}
|
||||
|
||||
filter {
|
||||
if [type] == "syslog" {
|
||||
grok {
|
||||
match => { "message" => "(<%{POSINT:syslog_pri}>|)%{SYSLOGTIMESTAMP:syslog_timestamp} (%{SYSLOGHOST:syslog_hostname} |)%{DATA:syslog_program}(?:\[%{POSINT:syslog_pid}\])?: %{GREEDYDATA:syslog_message}" }
|
||||
add_field => [ "received_at", "%{@timestamp}" ]
|
||||
add_field => [ "received_from", "%{host}" ]
|
||||
}
|
||||
date {
|
||||
match => [ "syslog_timestamp", "MMM d HH:mm:ss", "MMM dd HH:mm:ss" ]
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
output {
|
||||
elasticsearch { hosts => ["elasticsearch:9200"] }
|
||||
stdout { codec => rubydebug }
|
||||
}
|
||||
90
dockerfiles/logstash/configs/syslog.conf
Normal file
90
dockerfiles/logstash/configs/syslog.conf
Normal file
@@ -0,0 +1,90 @@
|
||||
input {
|
||||
# tcp {
|
||||
# port => 5000
|
||||
# type => syslog
|
||||
# }
|
||||
# udp {
|
||||
# port => 5000
|
||||
# type => syslog
|
||||
# }
|
||||
file{
|
||||
type => syslog
|
||||
path => "/data/*"
|
||||
start_position => "beginning"
|
||||
}
|
||||
}
|
||||
filter {
|
||||
if [type] == "syslog" {
|
||||
syslog_pri{}
|
||||
grok {
|
||||
match => { "message" => [
|
||||
"%{SYSLOGTIMESTAMP:syslog_timestamp} (%{SYSLOGHOST:syslog_hostname} |)%{DATA:syslog_program}(?:\[%{POSINT:syslog_pid}\])?: %{GREEDYDATA:syslog_message}",
|
||||
"%{SYSLOGTIMESTAMP:syslog_timestamp} (%{SYSLOGHOST:syslog_hostname} |)%{GREEDYDATA:syslog_message}"
|
||||
]
|
||||
}
|
||||
add_field => [ "received_at", "%{@timestamp}" ]
|
||||
add_field => [ "received_from", "%{host}" ]
|
||||
}
|
||||
date {
|
||||
match => [ "syslog_timestamp", "MMM d HH:mm:ss", "MMM dd HH:mm:ss" ]
|
||||
}
|
||||
mutate {
|
||||
# replace => [ "message", "%{syslog_message}" ]
|
||||
rename => [ "syslog_message", "message" ]
|
||||
}
|
||||
}
|
||||
if [syslog_program] == "mwg"{
|
||||
kv{
|
||||
field_split => "|"
|
||||
transform_key => "lowercase"
|
||||
trim_value => '"'
|
||||
target => "mwg"
|
||||
add_tag => "mwg"
|
||||
remove_field => "message"
|
||||
include_keys => [
|
||||
"devtime",
|
||||
"referer",
|
||||
"httpmethod",
|
||||
"bytes",
|
||||
"mediatype",
|
||||
"useragent",
|
||||
"blockreason",
|
||||
"url",
|
||||
"dst",
|
||||
"httpstatus",
|
||||
"src",
|
||||
"urlcategories",
|
||||
"usrname"
|
||||
]
|
||||
}
|
||||
mutate{
|
||||
convert => { "mwg[bytes]" => "integer" }
|
||||
}
|
||||
date{
|
||||
match => ["mwg[devtime]" , "UNIX_MS"]
|
||||
# target => "mwg[timestamp]"
|
||||
}
|
||||
useragent {
|
||||
source => "mwg[useragent]"
|
||||
target => "mwg[ua]"
|
||||
}
|
||||
geoip {
|
||||
source => "mwg[dst]"
|
||||
target => "mwg[dst_geo]"
|
||||
}
|
||||
# geoip {
|
||||
# source => "mwg[src]"
|
||||
# target => "mwg[src_geo]"
|
||||
# }
|
||||
|
||||
|
||||
# mutate {
|
||||
# split => { "syslog_message" => "|" }
|
||||
# }
|
||||
|
||||
}
|
||||
}
|
||||
output {
|
||||
elasticsearch { hosts => ["elasticsearch:9200"] }
|
||||
# stdout { codec => rubydebug }
|
||||
}
|
||||
10
dockerfiles/plaso140/Dockerfile
Normal file
10
dockerfiles/plaso140/Dockerfile
Normal file
@@ -0,0 +1,10 @@
|
||||
FROM ubuntu:xenial
|
||||
LABEL maintainer="d4n6"
|
||||
WORKDIR /data
|
||||
RUN apt update && apt upgrade -y
|
||||
RUN apt install python-pip -y
|
||||
RUN pip install pip --upgrade
|
||||
RUN pip install bencode artifacts pyelasticsearch
|
||||
RUN apt install plaso -y
|
||||
ENTRYPOINT ["/bin/bash"]
|
||||
|
||||
7
dockerfiles/plasonew/Dockerfile
Normal file
7
dockerfiles/plasonew/Dockerfile
Normal file
@@ -0,0 +1,7 @@
|
||||
FROM ubuntu:xenial
|
||||
LABEL maintainer="d4n6"
|
||||
WORKDIR /data
|
||||
RUN apt update && apt upgrade -y ; apt install software-properties-common python-software-properties -y
|
||||
RUN add-apt-repository ppa:gift/stable -y ; apt update ; apt install python-plaso plaso-tools -y
|
||||
ENTRYPOINT ["/bin/bash"]
|
||||
|
||||
6
dockerfiles/volatility/Dockerfile
Normal file
6
dockerfiles/volatility/Dockerfile
Normal file
@@ -0,0 +1,6 @@
|
||||
FROM ubuntu:xenial
|
||||
LABEL maintainer="d4n6"
|
||||
WORKDIR /data
|
||||
COPY vol /usr/local/bin/volatility
|
||||
RUN chmod +x /usr/local/bin/volatility
|
||||
ENTRYPOINT ["/bin/bash"]
|
||||
Reference in New Issue
Block a user