Added a lot of notebook and Docker stuff

logstash config
This commit is contained in:
root
2018-04-03 10:28:02 +02:00
parent e85d8a9f85
commit a6cc17b436
11 changed files with 310 additions and 0 deletions

View File

@@ -8,6 +8,7 @@ alias mv="mv -vi"
alias cp="cp -vi" alias cp="cp -vi"
alias nasmount="pushd . ; sudo mount ~/mnt/nas ; cd ~/mnt/nas" alias nasmount="pushd . ; sudo mount ~/mnt/nas ; cd ~/mnt/nas"
alias nasunmount="sudo umount ~/mnt/nas" alias nasunmount="sudo umount ~/mnt/nas"
alias docker_status="docker run -it --rm -v /var/run/docker.sock:/var/run/docker.sock moncho/dry"
alias kali='docker run --rm -it --dns=192.168.130.1 -v ${workdir-`pwd`}:/root/workdir kali' alias kali='docker run --rm -it --dns=192.168.130.1 -v ${workdir-`pwd`}:/root/workdir kali'
alias remnux_viper='docker run --rm -it --dns=192.168.130.1 -v ${workdir-`pwd`}:/home/nonroot/workdir remnux/viper bash' alias remnux_viper='docker run --rm -it --dns=192.168.130.1 -v ${workdir-`pwd`}:/home/nonroot/workdir remnux/viper bash'
alias remnux_pescanner='docker run --rm -it --dns=192.168.130.1 -v ${workdir-`pwd`}:/home/nonroot/workdir remnux/pescanner bash' alias remnux_pescanner='docker run --rm -it --dns=192.168.130.1 -v ${workdir-`pwd`}:/home/nonroot/workdir remnux/pescanner bash'

View File

@@ -5,6 +5,8 @@ awk '{a[$0]+=1; if (a[$0]<2)print}'
arecord -f cd -t raw | oggenc - -r | ssh <remote> mplayer - arecord -f cd -t raw | oggenc - -r | ssh <remote> mplayer -
#and reverse #and reverse
ssh <user>@<remotehost> 'arecord -f cd -t raw | oggenc - -r' | mplayer - ssh <user>@<remotehost> 'arecord -f cd -t raw | oggenc - -r' | mplayer -
#go to sleep for an hour, then wake up
sudo rtcwake -m mem -t +`date -u +%s -d +60minutes`
#Zahlen verteilung analysieren #Zahlen verteilung analysieren
primes 1 100 |gnuplot -p -e 'plot "/dev/stdin"' primes 1 100 |gnuplot -p -e 'plot "/dev/stdin"'

View File

@@ -0,0 +1,41 @@
# Single-node ELK stack (Elasticsearch + Kibana) on a private bridge
# network, with Elasticsearch data persisted in a named volume.
version: '2'
services:
  elasticsearch:
    # NOTE(review): unpinned image tag — consider pinning a version so
    # rebuilds are reproducible.
    image: elasticsearch
    container_name: elasticsearch
    hostname: elasticsearch
    ports:
      # Quoted to avoid YAML's sexagesimal/number parsing of port maps.
      - "9200:9200"   # HTTP API
      - "9300:9300"   # transport
    volumes:
      - elastic-data:/usr/share/elasticsearch/data
    environment:
      # Lock the JVM heap in RAM (requires the memlock ulimit below).
      - bootstrap.memory_lock=true
      - "ES_JAVA_OPTS=-Xms2g -Xmx2g"
    ulimits:
      memlock:
        soft: -1
        hard: -1
    networks:
      - elk
  kibana:
    image: kibana
    hostname: kibana
    ports:
      - "5601:5601"   # Kibana web UI
    networks:
      - elk
    depends_on:
      - elasticsearch
networks:
  elk:
    driver: bridge
volumes:
  elastic-data:
    driver: local

View File

@@ -0,0 +1,32 @@
# Logstash pipeline: ingest an ArcSight CSV export from a local file
# and index it into Elasticsearch under the "arcsight" index.
input {
# tcp {
# port => 5000
# type => syslog
# }
# udp {
# port => 5000
# type => syslog
# }
# Tail a local file, replaying it from the beginning on first run.
file{
type => as_csv
path => "/tmp/logfile"
# path => "/data/*"
start_position => "beginning"
}
}
filter {
# Split each line into named columns (ArcSight-style field names).
csv{
columns => ["endTime","Name","requestUrl","sourceAddress","destinationAddress","priority","Device Vendor","Device Product","Non-CEF Raw Message"]
}
# Extract a timestamp from the raw line into "datum" ...
grok { match => { "message" => ["%{DATESTAMP:datum}"] } }
# ... and promote it to @timestamp (two-digit-year format, CET zone).
date { match => [ "datum","yy/MM/dd HH:mm:ss" ] timezone => "CET" }
}
output {
# Ship to the Elasticsearch node reachable on the compose network.
elasticsearch {
hosts => ["elasticsearch:9200"]
index => "arcsight"
# index => "arcsight-%{+YYYY.MM.dd}"
}
# stdout { codec => rubydebug }
}

View File

@@ -0,0 +1,90 @@
# Logstash pipeline: replay syslog files from /data, parse the generic
# syslog envelope, then specially decode "mwg" program events
# (presumably McAfee Web Gateway key|value logs — confirm with source).
input {
# tcp {
# port => 5000
# type => syslog
# }
# udp {
# port => 5000
# type => syslog
# }
# Read every file under /data from the beginning (bulk replay).
file{
type => syslog
path => "/data/*"
start_position => "beginning"
}
}
filter {
if [type] == "syslog" {
# Decode the syslog priority into facility/severity fields.
syslog_pri{}
# Two patterns: with program[pid], and a bare-message fallback;
# hostname is optional in both.
grok {
match => { "message" => [
"%{SYSLOGTIMESTAMP:syslog_timestamp} (%{SYSLOGHOST:syslog_hostname} |)%{DATA:syslog_program}(?:\[%{POSINT:syslog_pid}\])?: %{GREEDYDATA:syslog_message}",
"%{SYSLOGTIMESTAMP:syslog_timestamp} (%{SYSLOGHOST:syslog_hostname} |)%{GREEDYDATA:syslog_message}"
]
}
add_field => [ "received_at", "%{@timestamp}" ]
add_field => [ "received_from", "%{host}" ]
}
# Use the in-message timestamp as @timestamp (no year in syslog time).
date {
match => [ "syslog_timestamp", "MMM d HH:mm:ss", "MMM dd HH:mm:ss" ]
}
# Keep only the payload part as "message".
mutate {
# replace => [ "message", "%{syslog_message}" ]
rename => [ "syslog_message", "message" ]
}
}
if [syslog_program] == "mwg"{
# Parse pipe-separated key|value pairs into the [mwg] subtree,
# keeping only a whitelist of useful keys; the raw message is dropped.
kv{
field_split => "|"
transform_key => "lowercase"
trim_value => '"'
target => "mwg"
add_tag => "mwg"
remove_field => "message"
include_keys => [
"devtime",
"referer",
"httpmethod",
"bytes",
"mediatype",
"useragent",
"blockreason",
"url",
"dst",
"httpstatus",
"src",
"urlcategories",
"usrname"
]
}
# Make byte counts numeric so ES can aggregate them.
mutate{
convert => { "mwg[bytes]" => "integer" }
}
# Device time is epoch milliseconds; overrides @timestamp.
date{
match => ["mwg[devtime]" , "UNIX_MS"]
# target => "mwg[timestamp]"
}
# Expand the user-agent string into structured fields.
useragent {
source => "mwg[useragent]"
target => "mwg[ua]"
}
# GeoIP only on the destination; source lookup is disabled below.
geoip {
source => "mwg[dst]"
target => "mwg[dst_geo]"
}
# geoip {
# source => "mwg[src]"
# target => "mwg[src_geo]"
# }
# mutate {
# split => { "syslog_message" => "|" }
# }
}
}
output {
elasticsearch { hosts => ["elasticsearch:9200"] }
# stdout { codec => rubydebug }
}

View File

@@ -0,0 +1,28 @@
# Logstash pipeline: live syslog listener on TCP/UDP 514, parsed into
# structured fields and shipped to Elasticsearch.
# NOTE(review): binding port 514 requires elevated privileges (or a
# container port mapping) — confirm how this is deployed.
input {
tcp {
port => 514
type => syslog
}
udp {
port => 514
type => syslog
}
}
filter {
if [type] == "syslog" {
# Optional <PRI> prefix and hostname; program[pid] and message split out.
grok {
match => { "message" => "(<%{POSINT:syslog_pri}>|)%{SYSLOGTIMESTAMP:syslog_timestamp} (%{SYSLOGHOST:syslog_hostname} |)%{DATA:syslog_program}(?:\[%{POSINT:syslog_pid}\])?: %{GREEDYDATA:syslog_message}" }
add_field => [ "received_at", "%{@timestamp}" ]
add_field => [ "received_from", "%{host}" ]
}
# Use the in-message timestamp as @timestamp.
date {
match => [ "syslog_timestamp", "MMM d HH:mm:ss", "MMM dd HH:mm:ss" ]
}
}
}
output {
elasticsearch { hosts => ["elasticsearch:9200"] }
# NOTE(review): rubydebug stdout is still enabled here (it is commented
# out in the sibling pipelines) — debugging leftover?
stdout { codec => rubydebug }
}

View File

@@ -0,0 +1,90 @@
# Logstash pipeline: replay syslog files from /data, parse the syslog
# envelope, then decode "mwg" program events into structured fields.
# NOTE(review): this file appears byte-identical to another pipeline in
# the same commit — confirm whether the duplication is intentional.
input {
# tcp {
# port => 5000
# type => syslog
# }
# udp {
# port => 5000
# type => syslog
# }
# Read every file under /data from the beginning (bulk replay).
file{
type => syslog
path => "/data/*"
start_position => "beginning"
}
}
filter {
if [type] == "syslog" {
# Decode the syslog priority into facility/severity fields.
syslog_pri{}
# Two patterns: with program[pid], and a bare-message fallback.
grok {
match => { "message" => [
"%{SYSLOGTIMESTAMP:syslog_timestamp} (%{SYSLOGHOST:syslog_hostname} |)%{DATA:syslog_program}(?:\[%{POSINT:syslog_pid}\])?: %{GREEDYDATA:syslog_message}",
"%{SYSLOGTIMESTAMP:syslog_timestamp} (%{SYSLOGHOST:syslog_hostname} |)%{GREEDYDATA:syslog_message}"
]
}
add_field => [ "received_at", "%{@timestamp}" ]
add_field => [ "received_from", "%{host}" ]
}
# Use the in-message timestamp as @timestamp.
date {
match => [ "syslog_timestamp", "MMM d HH:mm:ss", "MMM dd HH:mm:ss" ]
}
# Keep only the payload part as "message".
mutate {
# replace => [ "message", "%{syslog_message}" ]
rename => [ "syslog_message", "message" ]
}
}
if [syslog_program] == "mwg"{
# Parse pipe-separated key|value pairs into the [mwg] subtree,
# keeping only a whitelist of keys; the raw message is dropped.
kv{
field_split => "|"
transform_key => "lowercase"
trim_value => '"'
target => "mwg"
add_tag => "mwg"
remove_field => "message"
include_keys => [
"devtime",
"referer",
"httpmethod",
"bytes",
"mediatype",
"useragent",
"blockreason",
"url",
"dst",
"httpstatus",
"src",
"urlcategories",
"usrname"
]
}
# Make byte counts numeric so ES can aggregate them.
mutate{
convert => { "mwg[bytes]" => "integer" }
}
# Device time is epoch milliseconds; overrides @timestamp.
date{
match => ["mwg[devtime]" , "UNIX_MS"]
# target => "mwg[timestamp]"
}
# Expand the user-agent string into structured fields.
useragent {
source => "mwg[useragent]"
target => "mwg[ua]"
}
# GeoIP only on the destination; source lookup is disabled below.
geoip {
source => "mwg[dst]"
target => "mwg[dst_geo]"
}
# geoip {
# source => "mwg[src]"
# target => "mwg[src_geo]"
# }
# mutate {
# split => { "syslog_message" => "|" }
# }
}
}
output {
elasticsearch { hosts => ["elasticsearch:9200"] }
# stdout { codec => rubydebug }
}

View File

@@ -0,0 +1,10 @@
# Forensics image: plaso from the Ubuntu archive plus supporting
# Python libraries installed via pip. Drops into a shell in /data.
FROM ubuntu:xenial
LABEL maintainer="d4n6"
WORKDIR /data
# Use apt-get (stable CLI for scripts, unlike "apt"), skip recommended
# packages, and remove the apt lists to keep the layer small.
RUN apt-get update && apt-get upgrade -y \
    && apt-get install -y --no-install-recommends python-pip plaso \
    && rm -rf /var/lib/apt/lists/*
RUN pip install --upgrade pip \
    && pip install bencode artifacts pyelasticsearch
ENTRYPOINT ["/bin/bash"]

View File

@@ -0,0 +1,7 @@
# Forensics image: plaso installed from the GIFT stable PPA.
# Drops into a shell in /data.
FROM ubuntu:xenial
LABEL maintainer="d4n6"
WORKDIR /data
# Chain with && (not ";") so a failed step aborts the build instead of
# silently continuing; clean apt lists to keep layers small.
RUN apt-get update && apt-get upgrade -y \
    && apt-get install -y software-properties-common python-software-properties
RUN add-apt-repository -y ppa:gift/stable \
    && apt-get update \
    && apt-get install -y python-plaso plaso-tools \
    && rm -rf /var/lib/apt/lists/*
ENTRYPOINT ["/bin/bash"]

View File

@@ -0,0 +1,6 @@
# Memory-forensics image: installs a prebuilt "vol" binary from the
# build context onto PATH as "volatility". Drops into a shell in /data.
FROM ubuntu:xenial
LABEL maintainer="d4n6"
WORKDIR /data
# Copy the bundled binary and make it executable.
COPY vol /usr/local/bin/volatility
RUN chmod +x /usr/local/bin/volatility
ENTRYPOINT ["/bin/bash"]

3
toggle_display.sh Executable file
View File

@@ -0,0 +1,3 @@
#!/bin/bash
# Make the first connected HDMI output the primary display.
#
# xrandr lines look like "HDMI-1 connected ...": grep -w keeps only
# "connected" (not "disconnected"), then the HDMI line, then the name.
display=$(xrandr | grep -wi connected | grep -i hdmi | cut -f1 -d" ")

# Guard against no connected HDMI output: without this, xrandr would be
# called with an empty --output argument and fail confusingly.
if [ -z "${display}" ]; then
    echo "toggle_display: no connected HDMI display found" >&2
    exit 1
fi

# Quoted expansion so an unexpected name with spaces cannot word-split.
xrandr --output "${display}" --primary