Restructure repository: organize tools by purpose, create what search tool

- Move single-file tools to tools/ organized by category (security, forensics, data, etc.)
- Move multi-file projects to projects/ (go-tools, puzzlebox, timesketch, rust-tools)
- Move system scripts to scripts/ (proxy, display, setup, windows)
- Organize config files in config/ (shell, visidata, applications)
- Move experimental tools to archive/experimental
- Create 'what' fuzzy search tool with progressive enhancement (ollama->fzf->grep)
- Add initial metadata database for intelligent tool discovery
- Preserve git history using 'git mv' commands
This commit is contained in:
tobias
2025-08-24 19:50:00 +02:00
parent 9518290544
commit 619b0bc432
124 changed files with 1063 additions and 0 deletions

BIN
tools/.DS_Store vendored Normal file

Binary file not shown.

98
tools/cloud/cloudsend.py Executable file
View File

@@ -0,0 +1,98 @@
#!/usr/bin/env python3
import argparse
import logging
import owncloud
import gnupg
import os
import requests
import re
from icecream import ic
def isurl(text):
    """Return a regex match object if *text* starts with an http(s) URL, else None."""
    # Raw string avoids invalid-escape-sequence warnings; pattern unchanged.
    pattern = r'http[s]?://(?:[a-zA-Z]|[0-9]|[$-_@.&+]|[!*\(\),]|(?:%[0-9a-fA-F][0-9a-fA-F]))+'
    return re.match(pattern, text)
def upload(file, url):
    """Upload *file* to the public share link *url* with the owncloud client.

    Returns the drop_file() response on success, or None when the upload
    fails with an HTTP error (the error is logged).

    Bug fix: the original ignored both parameters and read the module
    globals `args.url` and `fn` instead.
    """
    try:
        oc = owncloud.Client.from_public_link(url)
        ic(oc)
        response = oc.drop_file(file)
        ic(response)
        return response
    except owncloud.owncloud.HTTPResponseError as e:
        logging.error(f'Error while uploading file {file} <{e}>')
        return None
def upload_rq(file, url, verify=True):
    """Upload *file* to a Nextcloud/ownCloud public share via plain WebDAV.

    Implements the curl call ported from cloudsend.sh (which the original
    body only carried as commented-out bash plus a hard-coded test
    request): the share link ``https://host/s/<token>`` is split into the
    base URL and the folder token, then the file is PUT to
    ``<base>/public.php/webdav/<basename>`` authenticated with the token.

    Returns the requests.Response object.
    """
    # Split the share link into base URL and folder token (see cloudsend.sh).
    if 'index.php' in url:
        cloud_url = url.split('/index.php/s/')[0]
    else:
        cloud_url = url.split('/s/')[0]
    folder_token = url.rsplit('/s/', 1)[-1]
    pub_suffix = "public.php/webdav"
    headers = {
        'X-Requested-With': 'XMLHttpRequest',
    }
    target = "{}/{}/{}".format(cloud_url, pub_suffix, os.path.basename(file))
    with open(file, 'rb') as f:
        return requests.put(target, headers=headers, data=f,
                            verify=verify, auth=(folder_token, ''))
# --- CLI entry point ---------------------------------------------------
# -k/--insecure uses store_false, so requests verifies TLS by default and
# -k disables verification (verify=args.insecure below).
parser = argparse.ArgumentParser()
parser.add_argument("-k", "--insecure", action="store_false")
parser.add_argument("-x", "--encryption", action="store", default=None, const='*', nargs="?", type=str)
parser.add_argument("url")
parser.add_argument("file", nargs="+")
args = parser.parse_args()

if args.encryption is not None:
    ic(args.encryption)

# A bad URL only warns; the upload attempt below will surface the error.
if not isurl(args.url):
    logging.warning(f"URL '{args.url}' is not valid")
ic(args)

for fn in args.file:
    ic(os.path.isdir(fn))
    ic(os.path.isfile(fn))
    if os.path.isdir(fn):
        # Typo fix in the warning text; directories are skipped.
        logging.warning("Folder support not implemented yet")
        continue
    if upload(fn, args.url):
        logging.info(f"{fn} successfully uploaded")
    else:
        logging.warning(f"Error uploading {fn}")

151
tools/cloud/cloudsend.sh Executable file
View File

@@ -0,0 +1,151 @@
#!/usr/bin/env bash
############################################################
# MIGRATED TO REPOSITORY
# https://github.com/tavinus/cloudsend.sh
#
# This gist will NOT be updated anymore
############################################################
############################################################
#
# cloudsend.sh
#
# Uses curl to send files to a shared
# Nextcloud/Owncloud folder
#
# Usage: ./cloudsend.sh <file> <folderLink>
# Help: ./cloudsend.sh -h
#
# Gustavo Arnosti Neves
# https://github.com/tavinus
#
# Contributors:
# @MG2R @gessel
#
# Get this script to current folder with:
# curl -O 'https://raw.githubusercontent.com/tavinus/cloudsend.sh/master/cloudsend.sh' && chmod +x cloudsend.sh
#
############################################################
CS_VERSION="0.1.6"
# https://cloud.mydomain.net/s/fLDzToZF4MLvG28
# curl -k -T myFile.ext -u "fLDzToZF4MLvG28:" -H 'X-Requested-With: XMLHttpRequest' https://cloud.mydomain.net/public.php/webdav/myFile.ext
# Print $1 unless quiet mode set VERBOSE to " -s".
log() {
    if [ "$VERBOSE" != " -s" ]; then
        printf "%s\n" "$1"
    fi
}
# Print the program name and version string.
printVersion() {
    printf 'CloudSender v%s\n' "$CS_VERSION"
}
# Report a startup error ($1) on stderr and abort with status 1.
initError() {
    {
        printVersion
        printf "%s\n" "Init Error! $1"
        printf "%s\n" "Try: $0 --help"
    } >&2
    exit 1
}
# Print version, option summary and usage examples.
usage() {
    printVersion
    printf "\n%s%s\n" "Parameters:" "
    -h | --help Print this help and exits
    -q | --quiet Be quiet
    -x | --encrypt Encrypt Upload with Password
    -V | --version Prints version and exits
    -k | --insecure Uses curl with -k option (https insecure)
    -p | --password Uses env var \$CLOUDSEND_PASSWORD as share password
    You can 'export CLOUDSEND_PASSWORD' at your system, or set it at the call.
    Please remember to also call -p to use the password set."
    # Typo fix above: "remeber" -> "remember".
    printf "\n%s\n%s\n%s\n" "Use:" " $0 <filepath> <folderLink>" " CLOUDSEND_PASSWORD='MySecretPass' $0 -p <filepath> <folderLink>"
    printf "\n%s\n%s\n%s\n" "Example:" " $0 './myfile.txt' 'https://cloud.mydomain.net/s/fLDzToZF4MLvG28'" " CLOUDSEND_PASSWORD='MySecretPass' $0 -p './myfile.txt' 'https://cloud.mydomain.net/s/fLDzToZF4MLvG28'"
}
##########################
# Process parameters
if [ "$1" = "-h" ] || [ "$1" = "--help" ]; then
    usage
    exit 0
fi
if [ "$1" = "-V" ] || [ "$1" = "--version" ]; then
    printVersion
    exit 0
fi
if [ "$1" = "-q" ] || [ "$1" = "--quiet" ]; then
    VERBOSE=" -s"
    shift
fi
if [ "$1" = "-k" ] || [ "$1" = "--insecure" ]; then
    INSECURE=' -k'
    log " > Insecure mode ON"
    shift
fi
if [ "$1" = "-p" ] || [ "$1" = "--password" ]; then
    PASSWORD=${CLOUDSEND_PASSWORD}
    log " > Using password from env"
    shift
fi

##########################
# Validate input
FILENAME="$1"
CLOUDURL=''
# if we have index.php in the URL, process accordingly
if [[ $2 == *"index.php"* ]]; then
    CLOUDURL="${2%/index.php/s/*}"
else
    CLOUDURL="${2%/s/*}"
fi
FOLDERTOKEN="${2##*/s/}"

if [ ! -f "$FILENAME" ]; then
    initError "Invalid input file: $FILENAME"
fi
if [ -z "$CLOUDURL" ]; then
    initError "Empty URL! Nowhere to send..."
fi
if [ -z "$FOLDERTOKEN" ]; then
    initError "Empty Folder Token! Nowhere to send..."
fi

##########################
# Check for curl
CURLBIN='/usr/bin/curl'
if [ ! -x "$CURLBIN" ]; then
    # `command -v` is POSIX; `which` is not guaranteed to exist.
    CURLBIN="$(command -v curl 2>/dev/null)"
    if [ ! -x "$CURLBIN" ]; then
        initError "No curl found on system!"
    fi
fi

##########################
# WebDAV endpoint and required header.
# Bug fix: these variables were referenced below but never assigned in
# this script as committed, producing a malformed upload URL and an
# empty -H argument.
PUBSUFFIX="public.php/webdav"
HEADER='X-Requested-With: XMLHttpRequest'

##########################
# Extract base filename (quoted: filenames may contain spaces)
BFILENAME=$(basename "$FILENAME")

##########################
# Send file
"$CURLBIN"$INSECURE$VERBOSE -T "$FILENAME" -u "$FOLDERTOKEN":"$PASSWORD" -H "$HEADER" "$CLOUDURL/$PUBSUFFIX/$BFILENAME"

26
tools/cloud/speech.py Normal file
View File

@@ -0,0 +1,26 @@
import speech_recognition as sr

rec = sr.Recognizer()

# --- record a short sample from the default microphone ---
with sr.Microphone() as source:
    print("Adjusting noise ")
    rec.adjust_for_ambient_noise(source, duration=1)
    print("Recording for 4 seconds")
    recorded_audio = rec.listen(source, timeout=4)
    print("Done recording")

# --- run offline speech-to-text (pocketsphinx, German model) ---
try:
    print("Recognizing the text")
    text = rec.recognize_sphinx(recorded_audio, language="de-DE")
    print("Decoded Text : {}".format(text))
except Exception as ex:
    print(ex)

84
tools/cloud/vqa3.py Normal file
View File

@@ -0,0 +1,84 @@
import os
import argparse
from PIL import Image
from transformers import CLIPProcessor, CLIPModel
from collections import defaultdict
def classify_images(model_name, image_paths, class_names):
    """Zero-shot classify each image into one of *class_names* with CLIP,
    then print the image paths grouped under their predicted label."""
    model = CLIPModel.from_pretrained(model_name)
    processor = CLIPProcessor.from_pretrained(model_name)
    grouped = defaultdict(list)

    for image_path in image_paths:
        try:
            image = Image.open(image_path)
            inputs = processor(
                text=class_names,
                images=image,
                return_tensors="pt",
                padding=True
            )
            outputs = model(**inputs)
            # Softmax over the labels; the highest-probability label wins.
            probs = outputs.logits_per_image.softmax(dim=1)
            best = probs.argmax(dim=1).item()
            grouped[class_names[best]].append(image_path)
        except Exception as e:
            # Unreadable/non-image files are reported and skipped.
            print(f"Skipping {image_path} due to error: {e}")

    for label, images in grouped.items():
        print(f"{label}:")
        for image_path in images:
            print(f" {image_path}")
def main():
    """CLI entry point: collect image paths and run CLIP classification."""
    available_models = [
        "openai/clip-vit-large-patch14",
        "openai/clip-vit-base-patch32",
        "openai/clip-vit-base-patch16"
    ]
    parser = argparse.ArgumentParser(description="CLIP-based Image Classifier")
    parser.add_argument("--model", type=str, default="openai/clip-vit-base-patch16",
                        help="Model name to use for classification (default: openai/clip-vit-base-patch16)")
    # Bug fix: with action="append" a non-empty default list is NOT
    # replaced by user-supplied values -- user categories were appended to
    # the two defaults.  Default to None and apply the fallback afterwards.
    parser.add_argument("-c", "--category", action="append", default=None,
                        help="Add a classification category (e.g., 'man', 'woman', 'child', 'animal'). If not specified, the default categories will be 'safe for work' and 'not safe for work'.")
    parser.add_argument("paths", metavar="path", type=str, nargs="+",
                        help="List of image file paths or directories")
    args = parser.parse_args()

    if args.category is None:
        args.category = ["image is safe for work", "image is not safe for work"]

    if args.model.lower() == "list":
        print("Available models:")
        for model in available_models:
            print(f" {model}")
        return

    image_paths = []
    for path in args.paths:
        if os.path.isdir(path):
            # Note: picks up every directory entry; non-image files are
            # skipped later by classify_images' error handling.
            image_paths.extend([os.path.join(path, file) for file in os.listdir(path)])
        elif os.path.isfile(path):
            image_paths.append(path)
        else:
            print(f"Skipping {path}, not a valid file or directory")
    classify_images(args.model, image_paths, args.category)

14
tools/cloud/youtube_resolve.sh Executable file
View File

@@ -0,0 +1,14 @@
#!/bin/bash
# Extract the first YouTube watch/embed URL from the arguments, fetch the
# page title, and print "<url>;"<decoded title>"".
url=$(echo -ne "${*}" | grep -Pio -m1 'https://www.youtube.com/(watch\?[^&,|]+|embed/[^?/,|]+)')
if [[ -n "${url}" ]] ; then
    title=$(wget -q -O- "${url}" | grep -Po "(?<=title>).*(?=</title)")
    # Bug fix: the title used to be interpolated into the python source,
    # so any quote character in a video title broke the script.  Feed it
    # through stdin instead.
    title_parsed=$(printf '%s' "${title}" | python3 -c '
import sys
from urllib.parse import unquote
from html import unescape
print(unescape(unquote(sys.stdin.read())))
')
    echo "${url};\"${title_parsed}\""
fi

13
tools/ctf/filtertext.py Normal file
View File

@@ -0,0 +1,13 @@
import sys
from nltk.corpus import stopwords
from nltk.tokenize import word_tokenize

# Read the input file given on the command line.
# Bug fix: sys.argv[0] is the script itself; the input file is sys.argv[1].
with open(sys.argv[1], 'r') as f:
    text = " ".join(f.readlines())

stop_words = set(stopwords.words('english'))
word_tokens = word_tokenize(text)

# Keep words longer than 3 chars that are not English stopwords.
for word in [w for w in word_tokens if len(w) > 3 and not w in stop_words]:
    word = word.strip(' \n,.=!_\'')
    # Bug fix: str.replace returns a new string; the result was discarded.
    word = word.replace(".", "_")
    print(word)

2
tools/ctf/getjs.py Normal file
View File

@@ -0,0 +1,2 @@
def getjss(text):
    """Return a JS String.fromCharCode(...) expression that rebuilds *text*."""
    codes = ",".join(str(ord(ch)) for ch in text)
    return "String.fromCharCode({})".format(codes)

22
tools/ctf/guess.py Normal file
View File

@@ -0,0 +1,22 @@
import requests
import sys
from pprint import pprint


# Build a JavaScript String.fromCharCode(...) expression whose evaluation
# yields *text* (used to smuggle characters past input filtering).
def getjss(text):
    return "String.fromCharCode({})".format(",".join(["{}".format(ord(x)) for x in text]))


# Wrap *teststring* in a JSON/JS injection payload.
# NOTE(review): the triple-quote tokens here are deliberate --
# '''test' + ''' is the string "test' + " -- so the return value is JS
# source of the form: test' + String.fromCharCode(...) + ' + 'test.
# Do not "fix" the quoting.
def test(teststring):
    return '''test' + ''' + getjss('},'+teststring+',{"guess":"') + ''' + 'test'''


# CTF target endpoint and browser headers captured with Burp.
burp0_url = "http://cxvhbgkymde5cg.code.unibw-muenchen.de:80/a81b583202982d472bde5e9f4a89becd/guess"
burp0_headers = {"User-Agent": "Mozilla/5.0 (X11; Linux x86_64; rv:60.0) Gecko/20100101 Firefox/60.0", "Accept": "application/json, text/plain, */*", "Accept-Language": "en-US,en;q=0.5", "Accept-Encoding": "gzip, deflate", "Referer": "http://cxvhbgkymde5cg.code.unibw-muenchen.de/a81b583202982d472bde5e9f4a89becd/", "Content-Type": "application/json;charset=utf-8", "Authorization": "Basic dX==", "Connection": "close"}

# Build the payload from the first CLI argument and POST it as the guess.
s=test(sys.argv[1])
burp0_json={"guess": s }
print(s)
r=requests.post(burp0_url, headers=burp0_headers, json=burp0_json)
pprint(r.text)
# Dump the response headers for inspection.
for head in r.headers:
    print("{}\t{}".format(head,r.headers[head]))

112
tools/ctf/ps_.py Normal file
View File

@@ -0,0 +1,112 @@
import psutil
import os
import pwd
import sys
from collections import defaultdict
# PID of this script (not referenced again below; kept for debugging).
mypid=os.getpid()

#Check if run as root
# Process names that must never be treated as escalated.
white_list_pname = [ "systemd", "kthreadd", "apport-gtk"]
# PIDs of this script and all of its ancestors (filled only when root).
white_list_pid =[]
if (os.geteuid()) != 0:
    print("[-] Not Root")
else:
    #whitelist this python script and all parents
    cursor=psutil.Process()
    ende=0
    while cursor != None:
        white_list_pid.append(cursor.pid)
        cursor=cursor.parent()
    print(white_list_pid)

# ps_dict maps str(pid) -> process-info dict; mydict is unused scratch.
mydict = defaultdict(list)
ps_dict = defaultdict(list)
def on_terminate(proc):
    """Callback for psutil.wait_procs(): report a child that terminated."""
    print("[+] Terminating Child: %s" % str(proc))
def killpid(pid):
    """Terminate process *pid* and its whole subtree, children first.

    Children receive SIGTERM, then SIGKILL after a 3 second grace period;
    finally the parent itself is terminated.
    NOTE(review): parent.kill() runs even when parent.wait() already
    succeeded, so the except branch can report a failure for an
    already-dead parent -- confirm this is intended.
    """
    parent = psutil.Process(pid)
    print(len(parent.children()))
    children=parent.children(recursive=True)
    # First pass: polite SIGTERM to every descendant.
    for child in children:
        try:
            child.terminate()
        except Exception as e :
            print("[-] FAILED - Terminating Child: %s" % (str(child)))
            print("[-] ERROR: %s" % str(e))
    # Give them 3 seconds, then hard-kill whatever survived.
    gone, still_alive = psutil.wait_procs(children, timeout=3, callback=on_terminate)
    for child in still_alive:
        try:
            child.kill()
        except Exception as e :
            print("[-] FAILED - Terminating Child: %s" % (str(child)))
            print("[-] ERROR: %s" % str(e))
        else:
            print("[+] Terminating Child: %s" % (str(child)))
    # Finally take down the parent itself.
    try:
        parent.terminate()
        parent.wait(timeout=3)
        parent.kill()
    except Exception as e:
        print("[-] FAILED - Killing Process: %s" % (str(parent)))
        print("[-] ERROR: %s" % str(e))
    else:
        # NOTE(review): "Killes" is a typo in this log message ("Killed").
        print("[+] Process Killes: %s" % (str(parent)))
def printproc(p: psutil.Process):
    """Return a short "name(pid)" description of *p*.

    Bug fix: psutil.Process.pid is an attribute, not a method; the
    original called p.pid() and raised TypeError on every call.
    """
    return "{0}({1})".format(p.name(), p.pid)
def printchild(p: psutil.Process):
    """Return a one-line description of *p* followed by its direct children.

    Bug fix: the original built this string but never returned (or
    printed) it, so every call was a no-op.  Callers that ignore the
    return value keep working.
    """
    output = printproc(p) + "-"
    for c in p.children():
        output += printproc(c)
    return output
#Fill ps_dict with processes
for proc in psutil.process_iter():
    try:
        pinfo = proc.as_dict(attrs=['pid','uids','ppid','name','create_time','terminal','username'])
    except psutil.NoSuchProcess:
        # Process vanished between iteration and inspection; skip it.
        pass
    else:
        pid=str(pinfo['pid'])
        ps_dict[pid]=pinfo

#Walk ps_dict and fill in missing information
# Attach the parent's name/owner/uids/start time to each entry (pp* keys).
for key in ps_dict:
    p=ps_dict[key]
    ppid=str(p['ppid'])
    if ppid in ps_dict:
        pp=ps_dict[ppid]
        p['ppname'] = pp['name']
        p['ppusername'] = pp['username']
        p['ppuids'] = pp['uids']
        p['ppcreate_time'] = pp['create_time']

#Kill all escalators
# A process counts as escalated when it runs with real uid 0 while its
# parent belongs to a different (non-root) user.
to_kill=[]
for key in ps_dict:
    p=ps_dict[key]
    if 'ppusername' in p and 'real=0' in str(p['uids']) and p['username'] not in p['ppusername']:
        if p['name'] not in white_list_pname:
            print("[+] Escalted Process found: %s (%s)" % (str(p['name']),str(p['pid'])))
            printchild(psutil.Process(p['pid']))
# NOTE(review): to_kill is never appended to above, so this loop is a
# no-op as written -- confirm whether escalated PIDs should be collected.
for pid in to_kill:
    if pid not in white_list_pid:
        killpid(pid)

17
tools/ctf/search.py Normal file
View File

@@ -0,0 +1,17 @@
# NOTE(review): Python 2 syntax (print statements) -- this file does not
# run under Python 3.
import math

# Search for an integer x whose square has an odd 16s-digit
# (floor(x^2/16) odd) and a remainder mod 16 strictly between 1 and 9.
x=1
notfound=1
while notfound:
    silber=math.pow(x,2)
    ungerade=math.floor(silber/16.)%2
    rest=silber%16
    # print str(silber) + " " + str(ungerade)
    # NOTE(review): squares mod 16 can only be 0, 1, 4 or 9, and for
    # x % 4 == 2 (the rest==4 case) floor(x^2/16) is always even -- this
    # condition appears unsatisfiable, so the loop may never terminate.
    if ungerade == 1 and rest>1 and rest<9:
        print "rest passt"
        print x
        print silber
        print rest
        print 16-rest
        notfound=0
    x+=1

3
tools/ctf/submit_flag.sh Normal file
View File

@@ -0,0 +1,3 @@
#!/bin/bash
# Submit a flag ($1) to the CTF scoreboard, then pause to respect the
# submission rate limit.
# Bug fix: the JSON body was single-quoted, so the literal text "$1" was
# sent instead of the flag value.
curl 'https://score.code.unibw-muenchen.de/quali/flag' -H 'Authorization: Basic Y3RmMjAxOXF1YWxpOmN0ZjIwMTl0aDM1dGhlbGVtM250' -H 'Sec-Fetch-Site: same-origin' -H 'Origin: https://score.code.unibw-muenchen.de' -H 'Accept-Encoding: gzip, deflate, br' -H 'Accept-Language: en-DE,en;q=0.9,de-DE;q=0.8,de;q=0.7,en-US;q=0.6' -H 'User-Agent: Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/77.0.3865.120 Safari/537.36' -H 'Sec-Fetch-Mode: cors' -H 'Content-Type: application/json;charset=UTF-8' -H 'Accept: application/json, text/plain, */*' -H 'Referer: https://score.code.unibw-muenchen.de/quali/' -H 'Cookie: connect.sid=s%3AYfJKqsKR9tYJTPFRUfgTGr3-r306-LL2.yo4tGwhIG%2FaqwiHCmEJgj%2Blr1m7wTd1OKN0BHGLEHt4; io=uqljJkFKOYy_3X_QAAlQ' -H 'Connection: keep-alive' -H 'DNT: 1' --data-binary "{\"flag\":\"$1\"}" --compressed
sleep 6

125
tools/ctf/transpose.py Normal file
View File

@@ -0,0 +1,125 @@
#!/usr/bin/env python3
import pprint
import math
import itertools
# tqdm is optional: only used for the brute-force progress bar.
try:
    import tqdm
    has_tqdm=True
except ImportError:
    print("Install tqdm for Progressbar! (pip3 install tqdm)")
    has_tqdm=False

# Ciphertext of a double columnar transposition (CTF challenge).
secret="OUHRSTHFSOENOFETURFELIRFTSNEMOEEMELNTARETOKCAETBFIHFTTTNMEELEEOHYBAERORCRSEDNCEUUTHITOYRSTEDSBEIEOTNLRMOEFPOHHAYLAGXYISNIARAUABGBURILFERPEEHTECDINNDITHFFIEHTKESYTDHEREOALGNABSMWEHVEFSOAMETAOCRFTAHEOFSINAMEOTRNGRINTHFFIEHTIEGMELNTSTEOMCOHEOWTEWREAIDANHTRARARTEHEETVFIYREAHVSAONDPROSTRAEUOYCTTTHWISANMUHETENTIISEDHETSUSENTEITNG OOLEEB L"
# Partially known keys; missing characters are brute-forced below.
col_key="EJALMVWUSTRPOBY" # (16)missing 1 char
row_key="GHPTYPAMTAPQRNDHD" # (21) missing 4 chars one of which is 'D'
# Alphabets used to enumerate the missing key characters.
col_alpha="ABCDEFGHIJKLMNOPQRSTUVWXYZ"
row_alpha="ABCDEFGHIJKLMNOPQRSTUVWXYZ"
def cell_length(text_length, key_length):
    """Rows needed when text_length characters fill key_length columns."""
    return -(-text_length // key_length)  # ceiling division
def padded_length(text_length, key_length):
    """Length after padding the text to a whole multiple of key_length."""
    full_rows = cell_length(text_length, key_length)
    return full_rows * key_length
def revert_key(enc_key):
    """Return the permutation that undoes a stable sort by *enc_key*.

    Element i of the result is the original index of the i-th smallest
    key character (ties keep their original order).
    """
    return sorted(range(len(enc_key)), key=lambda i: enc_key[i])
def mosh(text, enc_key):
    """Reorder *text* by stably sorting its positions by *enc_key*."""
    paired = sorted(zip(text, enc_key), key=lambda pair: pair[1])
    return [item for item, _ in paired]
def cols(text, key_length):
    """Distribute *text* round-robin into key_length column strings."""
    # Column i collects the characters at positions i, i+k, i+2k, ...
    return ["".join(text[start::key_length]) for start in range(key_length)]
def rows(text, key_length):
    """Split *text* into consecutive chunks of key_length characters."""
    starts = range(0, len(text), key_length)
    return [text[s:s + key_length] for s in starts]
def cols_to_str(a):
    """Interleave the column strings in *a* back into a single string.

    Reads row by row: character i of each column, in column order.
    Shorter columns simply stop contributing (ragged input is fine).

    Fixes: the bare `except` also hid unrelated errors -- replaced with an
    explicit bounds check; empty input no longer raises (max() had no
    default and crashed on an empty list).
    """
    max_length = max((len(col) for col in a), default=0)
    result = ""
    for i in range(max_length):
        for col in a:
            if i < len(col):
                result += col[i]
    return result
def rows_to_str(a):
    """Concatenate all row strings of *a* into one string."""
    return str().join(a)
def pcols(a):
    """Pretty-print column data *a*, re-flowed into rows of len(a) chars."""
    print("COLUMS:")
    merged = cols_to_str(a)
    for line in rows(merged, len(a)):
        print(line)
def prows(a, header=None):
    """Print each row of *a* prefixed by its index (or *header* label)."""
    print("ROWS:")
    for counter, row in enumerate(a):
        label = header[counter] if header else counter
        heading = "{}".format(label).ljust(5)
        print("%s : %s" % (heading, row))
def encode(text, key):
    """Columnar-transposition encode *text* with *key*, padding with '_'."""
    padded = text.ljust(padded_length(len(text), len(key)), '_')
    shuffled_columns = mosh(cols(padded, len(key)), key)
    return rows_to_str(shuffled_columns)
def decode(text, key):
    """Invert encode(): regroup into rows and undo the key permutation."""
    grouped = rows(text, cell_length(len(text), len(key)))
    restored = mosh(grouped, revert_key(key))
    return cols_to_str(restored)
def get_col_keys():
    """Yield every candidate column key: known prefix plus one trial letter."""
    for candidate in col_alpha:
        yield col_key + candidate
def get_row_keys():
    """Yield candidate row keys.

    Four characters are missing and one of them is known to be 'D', so
    'D' is tried in each of the four open slots while the remaining three
    are brute-forced over the alphabet.
    """
    for x in row_alpha:
        for y in row_alpha:
            for z in row_alpha:
                # 'D' rotated through each of the four trailing positions.
                yield row_key + "D" + x + y + z
                yield row_key + x + "D" + y + z
                yield row_key + x + y + "D" + z
                yield row_key + x + y + z + "D"
def normalize_keys(key_generator):
    """Canonicalize keys to their induced permutations and drop duplicates.

    revert_key(revert_key(k)) converts a key string into the column
    permutation it induces, so different key strings that order the
    columns identically collapse into one entry.
    """
    perms = sorted(revert_key(revert_key(k)) for k in key_generator)
    return [perm for perm, _ in itertools.groupby(perms)]
def decryptor():
    """Brute-force all (row_key, col_key) candidates and dump every
    double-encoding of the secret to normalized2.txt for offline search."""
    rowkeys=normalize_keys(get_row_keys())
    colkeys=normalize_keys(get_col_keys())
    if has_tqdm:
        pbar=tqdm.tqdm(total=(len(rowkeys)*len(colkeys)))
    # NOTE(review): the loop variables shadow the module-level
    # col_key/row_key used by the generators above -- harmless here
    # because the generators were already consumed, but fragile.
    with open("normalized2.txt",'w') as f:
        for col_key in colkeys:
            for row_key in rowkeys:
                # One line per candidate pair: "rowkey;colkey;ciphertext".
                text=encode(encode(secret,col_key),row_key)
                f.write("{};{};{}\n".format(row_key,col_key,text))
                if has_tqdm:
                    pbar.update(1)
    if has_tqdm:
        pbar.close()
# Run the brute-force search when the module is executed.
decryptor()

50
tools/data/json_save.py Executable file
View File

@@ -0,0 +1,50 @@
import simplejson
import json
def put(data, filename):
    """Serialize *data* as pretty-printed, key-sorted JSON into *filename*.

    Unserializable dict keys are skipped (skipkeys=True).  Errors are
    reported on stdout instead of raised, matching the original
    best-effort behaviour.

    Fixes: uses a `with` block so the handle is closed even when write()
    fails; narrows the catch-all except; uses the stdlib json module
    (identical output) instead of simplejson, consistent with get().
    """
    try:
        jsondata = json.dumps(data, indent=4, skipkeys=True, sort_keys=True)
        with open(filename, 'w') as fd:
            fd.write(jsondata)
    except (TypeError, ValueError, OSError) as e:
        print('ERROR writing', filename)
        print(e)
def get(filename):
    """Read *filename* and return its parsed JSON content.

    Returns {} when the file cannot be read or parsed (best-effort, like
    the original).

    Bug fix: the original called json.read(), which does not exist in the
    json module -- every call raised AttributeError (swallowed by the
    bare except) and returned {}.  The correct call is json.loads().
    """
    returndata = {}
    try:
        with open(filename, 'r') as fd:
            returndata = json.loads(fd.read())
    except (OSError, ValueError):
        print('COULD NOT LOAD:', filename)
    return returndata
# print(mail.filename)
# print(mail.status)
# import gzip
# import json
#
# # writing
# with gzip.GzipFile(jsonfilename, 'w') as outfile:
# for obj in objects:
# outfile.write(json.dumps(obj) + '\n')
#
# # reading
# with gzip.GzipFile(jsonfilename, 'r') as isfile:
# for line in infile:
# obj = json.loads(line)
# # process obj
# picklefile=open("mails.dump",'wb')
# for mail in list_of_mail:
# pickle.dump(mail, picklefile )
#
# picklefile.close()

28
tools/data/kv_parse.py Executable file
View File

@@ -0,0 +1,28 @@
#!/usr/bin/env python3
import re
import json
import argparse
import sys
# Parse key=value tokens (values may be double-quoted) from every input
# line and emit the whole input as a JSON list of dicts.
parser = argparse.ArgumentParser()
parser.add_argument("-p", "--preserve", action='store_true', help="preserve original logline in dict")
parser.add_argument('infile', nargs='?', type=argparse.FileType('r'), default=sys.stdin)
parser.add_argument('outfile', nargs='?', type=argparse.FileType('w'), default=sys.stdout)
args = parser.parse_args()

data = args.infile.readlines()
# key = run without '=' or space; value = quoted string or bare token.
kv_pat = re.compile('(?P<key>[^= ]+)=(?P<value>"[^"]+"|\S+)')
log=[]
for line in data:
    line_dict={}
    line = line.strip()
    matches=kv_pat.findall(line)
    for match in matches:
        # Strip the surrounding quotes from quoted values.
        line_dict[match[0]] = match[1].strip('"')
    if args.preserve:
        line_dict['original_logline'] = line
    log.append(line_dict)
json.dump(log,args.outfile)

9
tools/data/uniq.py Executable file
View File

@@ -0,0 +1,9 @@
#!/usr/bin/env python3
# Order-preserving line dedupe for streams: print each stdin line only
# the first time it is seen (like `uniq` but without sorted input).
import sys

# Stores hash() of each line instead of the line itself to bound memory.
# NOTE(review): a hash collision would silently drop a distinct line.
hashes=set()
for line in sys.stdin:
    h = hash(line)
    if not h in hashes:
        hashes.add(h)
        print(line,end="")

View File

@@ -0,0 +1,28 @@
#!/usr/bin/python
#
# Decode VBA Macro based on chr() obfuscation
# Xavier Mertens <xavier@rootshell.be>
#
# NOTE(review): Python 2 script (print statement below); it will not run
# unmodified under Python 3.
import re
import sys
import argparse


# re.sub callback: rewrite Chr(...)/ChrW$(...) calls to chr(int(...)) and
# evaluate them down to the literal character.
# NOTE(review): eval() on attacker-controlled macro text is risky -- the
# digits-only capture group limits exposure, but verify before reuse.
def do_chr(m):
    if m.group(0):
        return eval(re.sub(r'[cC][hH][rR][wW\$]*\(([\d\+\-\s.]*)\)',r'chr(int(\1))', m.group(0)))
    return ""


# Replace every chr-obfuscated fragment on stdin and drop VBA's " & "
# string-concatenation operator.
for line in sys.stdin.readlines():
    line = re.sub(r'[cC][hH][rR][wW\$]*\(([\d+\+\-\s\.]*)\)', do_chr, line)
    line = re.sub(" & ", "", line)
    print line.rstrip()
exit
# NOTE(review): everything below is dead/broken -- bare `exit` above is a
# no-op expression, main() is never defined (the __main__ guard would
# raise NameError), and mname() is never called.
if __name__ == '__main__':
    main()


def mname(self, arg):
    do_chr(1);
    pass

View File

@@ -0,0 +1,37 @@
# Scan a SQLite database file for tables with credential-looking columns.
import sqlite3
import sys
import re

# First CLI argument: path to the database file.
dbfile=sys.argv[1]
# dbfile="/home/skyhawk/Documents/test.db"
try:
    db=sqlite3.connect(dbfile)
    cur = db.cursor()
    cur.execute("SELECT name FROM sqlite_master WHERE type='table' ORDER BY name;")
    tables=cur.fetchall()
    # for row in db.execute("pragma table_info('sqlite_master')").fetchall():
    #     print(row)
    # Map of table name -> list of interesting column names.
    nice_tables={}
    for table in tables:
        # print(table)
        nice_rows=[]
        # NOTE(review): the table name is concatenated into the pragma --
        # fine for local triage, but unsafe for untrusted table names.
        for row in db.execute("pragma table_info(" + str(table[0]) +")").fetchall():
            # print(row[1])
            # row[1] is the column name; look for hash/password columns.
            if re.match('hash|pass',row[1], re.IGNORECASE):
                nice_rows.append(row[1])
        if len(nice_rows) > 0:
            nice_tables[table[0]]=nice_rows
except Exception as e:
    # Not a valid/readable SQLite file: exit quietly with status 1.
    # print("Error opening DB %s" % dbfile)
    # sys.std.write(e)
    exit(1)
print("[+] %s is Valid DB " % dbfile)
if len(nice_tables)>0:
    for tab in nice_tables:
        print(nice_tables[tab])
db.close()

View File

@@ -0,0 +1,23 @@
import subprocess
import sys

# Carve files out of a filesystem image with sleuthkit: list the entries
# at *inode* with fls, then dump each entry's content with icat.
image = sys.argv[1]
inode = sys.argv[2]

# Security fix: shell=True with an f-string allowed shell injection via
# the CLI arguments; pass an argument list with shell=False instead.
output = subprocess.check_output(["fls", "-F", image, inode])
output = output.decode()

# fls lines look like "r/r 12-128-1: filename"; map inode id -> name.
result = {}
for row in output.split('\n'):
    if ':' in row:
        key, value = row.split(':')
        idx = key.split(" ")[-1]
        fsid = idx.split("-")[0]
        result[fsid] = value.strip()

for fsid in result:
    print(f"Writing Inode {fsid} -> {result[fsid]} ")
    # `with` closes the output file even when icat fails (the original
    # leaked one handle per carved file).
    with open(result[fsid], 'w') as outfile:
        subprocess.run(["icat", image, fsid], stdout=outfile)

272
tools/forensics/process_leak.py Executable file
View File

@@ -0,0 +1,272 @@
#!/usr/bin/python3
import os
import re
import mmh3
import string
import sys
from os import walk
from chardet.universaldetector import UniversalDetector
from elasticsearch import Elasticsearch
from elasticsearch.helpers import bulk
from multiprocessing import Pool,Lock
import multiprocessing
import hashlib
import json
import argparse
# Shared lock serializing log writes across worker processes.
lock = Lock()


def log_to_file(text):
    """Append *text* as one line to the shared progress log (thread-safe)."""
    global log_filename
    with lock, open(log_filename, 'a+') as file_log:  # one writer at a time
        file_log.write("{}\n".format(text))
def log_to_console(text):
    """Print *text* prefixed with the worker PID, serialized via the lock."""
    worker = multiprocessing.current_process()
    with lock:
        print("[{}]:{}".format(worker.pid, text))
def get_mask(s):
    """Hashcat-style mask of *s*: ?d digit, ?l lower, ?u upper, ?s other."""
    def classify(ch):
        if ch.isdigit():
            return "?d"
        if ch.islower():
            return "?l"
        if ch.isupper():
            return "?u"
        return "?s"
    return "".join(classify(ch) for ch in s)
def check_special(s):
    """True if *s* contains any punctuation or whitespace character."""
    return any(ch in string.punctuation or ch.isspace() for ch in s)
def check_upper(s):
    """True if any character of *s* is uppercase."""
    for ch in s:
        if ch.isupper():
            return True
    return False
def check_lower(s):
    """True if any character of *s* is lowercase."""
    return any(map(str.islower, s))
def check_digit(s):
    """True if any character of *s* is a decimal digit."""
    return any(map(str.isdigit, s))
def get_file_enconding(file):
    """Detect *file*'s text encoding with chardet, sampling up to 1000 lines.

    Returns the encoding name, or None when detection fails.
    (The function name keeps the original misspelling; callers use it.)
    """
    detector = UniversalDetector()
    with open(file, 'rb') as daf:
        # Stop after 1000 lines, or earlier once the detector is confident.
        i = 1000
        for line in daf.readlines():
            i -= 1
            detector.feed(line)
            if detector.done or i == 0:
                break
    detector.close()
    r = detector.result
    return r["encoding"]
# Matches "localpart@domain<sep>password", where <sep> is whitespace, ':'
# or ';'.  Raw string fixes the invalid escape sequences (\. \s) that a
# plain string literal produces DeprecationWarnings for.
patter = re.compile(r"([^@]+)@([^@]+\.[^@]+)(\s|:|;)(.*)")


def extract_email(line):
    """Split a leak line into (user, domain, password), or None when the
    line does not look like an email:password record.

    group(3) -- the separator character -- is deliberately dropped.
    """
    global patter
    match = patter.search(line)
    if match:
        return (match.group(1), match.group(2), match.group(4))
    return None
def strip_badbytes(b, encoding):
    """Decode bytes *b* with *encoding*, dropping undecodable bytes and
    surrounding whitespace."""
    decoded = b.decode(encoding, errors='ignore')
    return decoded.strip()
def get_files(dir):
    """Yield files under *dir*, skipping files already parsed successfully.

    The progress log (log_filename) holds "path;success_rate" lines;
    files whose recorded rate exceeds the module-level `threshold` are
    skipped.
    """
    files_in_log={}
    global threshold
    try:
        with open(log_filename,'r') as file_log:
            for line in file_log.readlines():
                try:
                    filedata=line.split(";")
                    files_in_log[filedata[0]]=float(filedata[1])
                except:
                    log_to_console("Can't parse Line")
                    pass
    except:
        # A missing log file on the first run is expected; start fresh.
        log_to_console("Can't open Logfile")
        pass
    for (dirpath, dirnames, filenames) in walk(dir):
        for file in filenames:
            full_filename=os.path.join(dirpath, file)
            if full_filename in files_in_log and files_in_log[full_filename] > threshold:
                log_to_console('[~] Skipping file [Already Parsed]: %s' % full_filename)
                continue
            yield full_filename
def get_lines(file, encoding=None):
    """Return all lines of *file* decoded with *encoding* (auto-detected
    when falsy), with undecodable bytes stripped out."""
    if not encoding:
        encoding = get_file_enconding(file)
    with open(file, 'rb') as handle:
        raw_lines = handle.readlines()
    return [strip_badbytes(raw, encoding) for raw in raw_lines]
def get_parsable_lines(file, encoding):
    """Yield (user, domain, password) tuples parsed from *file*.

    Once the file is exhausted, the per-file parse success rate is logged
    to the console and appended to the progress log.

    Bug fix: an empty file made success + failure == 0 and raised
    ZeroDivisionError (the old comment claimed success started at 1, but
    it was initialized to 0); the rate is now 0.0 in that case.
    """
    global log_filename
    success = 0
    failure = 0
    for line in get_lines(file, encoding):
        doc = extract_email(line)
        if doc:
            success += 1
            yield doc
        else:
            failure += 1
    total = success + failure
    success_rate = (success / total) if total else 0.0
    log_to_console('[+] Done parsing file: {} ({})'.format(file, success_rate))
    log_to_file("{};{}".format(file, success_rate))
def get_hash(text):
    """Hex MD5 digest of *text* (UTF-8 encoded)."""
    return hashlib.md5(text.encode()).hexdigest()
def get_user_pw_hash(text):
    """Document-ID hash; currently just delegates to get_hash()."""
    return get_hash(text)
def create_doc(file,encoding):
    """Yield (shard_suffix, doc_id, document) triples for indexing.

    shard_suffix is the first hex digit of the MD5 id, used below to
    spread documents across 16 per-suffix indices.
    """
    for cred in get_parsable_lines(file,encoding):
        doc = {
            "user" : cred[0],
            "domain" : cred[1],
            # Stored password is truncated to 129 chars.
            # NOTE(review): length/mask/flags below are computed from the
            # FULL password while "password" stores the truncated value --
            # confirm this asymmetry is intended.
            "password" : cred[2][:129],
            "file" : file,
            "length" : len(cred[2]),
            "passwordMask" : get_mask(cred[2]),
            "containsDigits" : check_digit(cred[2]),
            "containsLowerCase" : check_lower(cred[2]),
            "containsUpperCase" : check_upper(cred[2]),
            "containsSpecial" : check_special(cred[2])
        }
        # Records shaped "displayname;user@..." carry a name before ';'.
        username_split=cred[0].split(";")
        if len(username_split)==2:
            if len(username_split[0]) > 0 and len(username_split[1]) > 0:
                doc["username"]=username_split[0]
                doc["user"]=username_split[1]
        id_hash=get_user_pw_hash("{}{}{}".format(doc["user"],doc["domain"],doc["password"]))
        id_domain=id_hash[:1]
        yield id_domain, id_hash, doc
def process_file(input_file, encoding):
    """Wrap each parsed credential as an Elasticsearch bulk action dict."""
    global index, doc_type_name
    for shard, doc_id, document in create_doc(input_file, encoding):
        action = {
            "_index": "{}_{}".format(index, shard),
            "_type": doc_type_name,
            "_id": doc_id,
            "_source": document,
        }
        yield action
def index_file(input_file):
    """Parse *input_file* and bulk-index its credentials into Elasticsearch.

    Files whose encoding cannot be detected are skipped entirely.
    """
    encoding=get_file_enconding(input_file)
    if encoding:
        # NOTE(review): ES host is hard-coded; http_compress shrinks the
        # bulk payloads on the wire.
        es = Elasticsearch(["172.16.1.141"],http_compress=True)
        log_to_console('[*] Indexing file: {}'.format(input_file))
        try:
            # raise_on_error=False: per-document failures are collected
            # instead of aborting the whole file.
            success, _ = bulk(es, process_file(input_file,encoding), chunk_size=10000, initial_backoff=60, max_retries=3, request_timeout=60, raise_on_error=False, raise_on_exception=True)
            log_to_console('[!] Indexing done: {} [{} lines committed]'.format(input_file,success))
        except Exception as e:
            # NOTE(review): assumes a BulkIndexError exposing .errors --
            # any other exception type raises AttributeError right here.
            log_to_console('[!] Indexing failed for: {}\n[!] REASON:{}'.format(input_file,str((e.errors[0]))))
    else:
        log_to_console('[~] Skipping file [Unknown Encoding]: {}'.format(input_file))
def bench_file(input_file):
    """Parse *input_file* like index_file() but discard the documents,
    measuring parse throughput without writing to Elasticsearch.

    Bug fix: the failure log format string had four placeholders but only
    three arguments, and used Exception.message (removed in Python 3) --
    so the error handler itself crashed.  Also closes the devnull handle.
    """
    ps = multiprocessing.current_process()
    encoding = get_file_enconding(input_file)
    devnull = open(os.devnull, 'w')
    if encoding:
        es = Elasticsearch()
        log_to_console('[{}:*] Benching file: {}'.format(ps.pid, input_file))
        docs = 0
        try:
            # Serialize every action to /dev/null to mimic indexing cost.
            for doc in process_file(input_file, encoding):
                docs += 1
                devnull.write(json.dumps(doc))
            log_to_console('[{}:*] Benching Done: {} [processed {} docs]'.format(ps.pid, input_file, docs))
        except Exception as e:
            log_to_console('[{}:!] Benching failed for: {}\n[{}:!] REASON: {}'.format(ps.pid, input_file, ps.pid, str(e)))
    else:
        log_to_console('[{}:~] Skipping file [Unknown Encoding]: {}'.format(ps.pid, input_file))
    devnull.close()
# Target index prefix; set from the CLI in main().
index=""
# Elasticsearch document type for every credential record.
doc_type_name = "credential"
# Progress log file: one "path;success_rate" line per processed file.
log_filename = "processed_files"
threshold = -1 #threshold for reparsing an already parsed file
def main():
    """CLI entry point: fan worker processes out over the leak files."""
    global index
    parser = argparse.ArgumentParser(description="Put Leakdata into local Elasticsearch")
    parser.add_argument("-p",help="how many workers (default:4)",default=4,type=int,nargs='?')
    parser.add_argument("-i",help="index suffix",default="leak_data")
    parser.add_argument("-b",help="dont write to es just benchmark",action='store_true')
    parser.add_argument('folder')
    args = parser.parse_args()
    index=args.i
    workers=args.p
    dir=args.folder
    # Resource fix: the context manager terminates the pool even when a
    # worker raises (the original leaked the worker processes).
    with Pool(workers) as p:
        if args.b:
            p.map(bench_file,get_files(dir))
        else:
            p.map(index_file,get_files(dir))
if __name__ == '__main__':
    main()

59
tools/formats/convert2pdf.sh Executable file
View File

@@ -0,0 +1,59 @@
#!/bin/bash
# Convert an office/HTML document to PDF via a throwaway Gotenberg
# container, then open the result in Evince.

# Check if a file argument is provided
if [ "$#" -ne 1 ]; then
    echo "Usage: $0 <file-to-convert>"
    exit 1
fi

# Assign the file argument to a variable
FILE_TO_CONVERT=$1
FILE_NAME=$(basename "$FILE_TO_CONVERT")

# Define the Gotenberg Docker image
GOTENBERG_IMAGE="gotenberg/gotenberg:8"

# Start the Gotenberg Docker container in the background
docker run --rm -d -p 3000:3000 --name gotenberg $GOTENBERG_IMAGE

# Stop the container whenever the script exits, however it exits.
function stop_container {
    docker stop gotenberg
}
trap stop_container EXIT

# Wait for a moment to ensure that the container is up and running
sleep 3

# Determine the correct API endpoint based on the file extension
EXTENSION="${FILE_TO_CONVERT##*.}"
API_ENDPOINT="/forms/libreoffice/convert"

# Special handling for HTML files to use Chromium conversion
if [ "$EXTENSION" == "html" ]; then
    API_ENDPOINT="/forms/chromium/convert/html"
    # Gotenberg's chromium route requires the file to be named index.html
    if [ "$FILE_NAME" != "index.html" ]; then
        echo "Renaming $FILE_NAME to index.html for conversion"
        cp "$FILE_TO_CONVERT" "/tmp/index.html"
        FILE_TO_CONVERT="/tmp/index.html"
    fi
fi

# Bug fix: a stale converted.pdf from an earlier run made the failure
# check below pass even when the conversion itself failed.
rm -f converted.pdf

# Convert the file to PDF using the Gotenberg API
curl --request POST \
    --url http://localhost:3000$API_ENDPOINT \
    --header 'Content-Type: multipart/form-data' \
    --form files=@"$FILE_TO_CONVERT" \
    -o converted.pdf

# Check if the conversion was successful
if [ ! -f converted.pdf ]; then
    echo "Failed to convert the file to PDF."
    exit 3
fi

# Display the PDF with Evince
evince converted.pdf &

16
tools/formats/flatpdf.sh Executable file
View File

@@ -0,0 +1,16 @@
#!/bin/bash
# View a PDF "flattened" through the tabledevil/flatpdf docker image.
if ! which zathura 1>/dev/null 2>&1 ; then
    echo "zathura pdf viewer not found"
    echo "sudo apt install zathura"
    exit 1
fi
if ! which docker 1>/dev/null 2>&1 ; then
    echo "docker not found"
    echo "sudo apt install docker.io"
    exit 1
fi
if [[ -f "${1}" ]] ; then
    # Input redirection instead of `cat |` (useless use of cat).
    docker run -i --rm tabledevil/flatpdf < "${1}" | zathura -
else
    # Bug fix: a missing/invalid argument previously exited silently
    # with status 0.
    echo "Usage: $0 <pdf-file>" >&2
    exit 1
fi

View File

@@ -1,56 +0,0 @@
package main
import (
	"encoding/csv"
	"encoding/json"
	"flag"
	"fmt"
	"io"
	"log"
	"os"
)
// csvToJson streams the CSV read from inputSource to stdout as one JSON
// object per record, using the first row as the set of keys.
// Malformed input terminates the process via log.Fatal.
func csvToJson(inputSource *os.File) {
	csvReader := csv.NewReader(inputSource)
	headers, err := csvReader.Read()
	if err != nil {
		log.Fatal("Failed to read headers: ", err)
	}
	for {
		record, err := csvReader.Read()
		// Fix: compare against io.EOF instead of matching the error string,
		// which is fragile and not guaranteed stable.
		if err == io.EOF {
			break
		}
		if err != nil {
			log.Fatal("Failed to read the data: ", err)
		}
		if len(record) != len(headers) {
			log.Fatal("The number of columns in the record is not equal to the number of headers")
		}
		jsonData := make(map[string]string, len(headers))
		for i, value := range record {
			jsonData[headers[i]] = value
		}
		jsonOutput, err := json.Marshal(jsonData)
		if err != nil {
			log.Fatal("Failed to convert to json: ", err)
		}
		fmt.Println(string(jsonOutput))
	}
}
// main wires the optional filename argument (default: stdin) into csvToJson.
func main() {
	flag.Parse()
	source := os.Stdin
	if flag.NArg() > 0 {
		f, err := os.Open(flag.Arg(0))
		if err != nil {
			log.Fatalf("Failed to open file: %v\n", err)
		}
		defer f.Close()
		source = f
	}
	csvToJson(source)
}

View File

@@ -1,140 +0,0 @@
package main
import (
"bufio"
"flag"
"fmt"
"log"
"os"
"regexp"
"strconv"
"strings"
)
// printUsage writes the CLI help text to stderr and exits with status 1.
func printUsage() {
	w := os.Stderr
	fmt.Fprintf(w, "Usage: %s [options] <range specification> [file]\n", os.Args[0])
	fmt.Fprintln(w, "Options:")
	flag.PrintDefaults()
	fmt.Fprintln(w, "\nRange specification examples:")
	fmt.Fprintln(w, "10,20 Reads lines 10 to 20 from the input")
	fmt.Fprintln(w, "15:+5 Reads 5 lines starting from line 15")
	fmt.Fprintln(w, "3 Reads from line 3 to the end of the file")
	fmt.Fprintln(w, "+2 Reads the first 2 lines")
	fmt.Fprintln(w, "Please ensure you input valid range specifications and file paths.")
	os.Exit(1)
}
// parseArgs extracts a line-range specification and an optional filename from
// the command-line arguments. It returns (start, end, filename, error) where
// end == -1 means "to end of input". On an unrecognized spec it calls
// printUsage, which exits the process.
func parseArgs(args []string) (int, int, string, error) {
	var filename string
	var start, length, end int
	found := false

	// Concatenate all arguments to a single string for easier parsing;
	// the range spec may then appear anywhere among the arguments.
	joinedArgs := strings.Join(args, " ")

	// Patterns, tried most-specific first:
	//   "10,20" / "15:+5" -> start plus end (or relative length)
	//   "3,"    / "3:"    -> start to end of input
	//   "+2" / "7"        -> first N lines / from line N to end
	rangeRegex := regexp.MustCompile(`((\d+)?([ :,;-]))(\+)?(\d+)`)
	range2Regex := regexp.MustCompile(`((\d+)([ :,;-]))`)
	range3Regex := regexp.MustCompile(`(\+)?(\d+)`)

	if matches := rangeRegex.FindStringSubmatch(joinedArgs); matches != nil {
		if matches[2] != "" {
			start, _ = strconv.Atoi(matches[2]) // explicit start line
		} else {
			start = 1 // no start given: begin at the first line
		}
		if matches[4] == "+" { // relative length
			length, _ = strconv.Atoi(matches[5])
			end = start + length - 1
		} else { // absolute end line
			end, _ = strconv.Atoi(matches[5])
		}
		joinedArgs = strings.Replace(joinedArgs, matches[0], "", 1)
		found = true
	} else if matches := range2Regex.FindStringSubmatch(joinedArgs); matches != nil {
		start, _ = strconv.Atoi(matches[2])
		end = -1 // open-ended range
		joinedArgs = strings.Replace(joinedArgs, matches[0], "", 1)
		found = true
	} else if matches := range3Regex.FindStringSubmatch(joinedArgs); matches != nil {
		if matches[1] == "+" { // "+N": first N lines
			length, _ = strconv.Atoi(matches[2])
			start = 1
			end = start + length - 1
		} else { // "N": from line N to end
			start, _ = strconv.Atoi(matches[2])
			end = -1
		}
		joinedArgs = strings.Replace(joinedArgs, matches[0], "", 1)
		found = true
	} else {
		printUsage() // exits the process
	}

	// Fix: the original had two identical branches assigning the filename;
	// whatever is left after stripping the range spec is the filename.
	filename = strings.TrimSpace(joinedArgs)

	if !found {
		return 0, 0, "", fmt.Errorf("no valid range or line number found")
	}
	return start, end, filename, nil
}
// main parses the range spec, opens the input, and prints the selected lines.
func main() {
	flag.Parse()
	if flag.NArg() < 1 {
		printUsage()
		os.Exit(1)
	}
	startIndex, endIndex, filename, err := parseArgs(flag.Args())
	if err != nil {
		fmt.Fprintf(os.Stderr, "Error parsing arguments: %v\n", err)
		os.Exit(1)
	}
	// Determine the input source (file or stdin).
	source := os.Stdin
	if filename != "" {
		source, err = os.Open(filename)
		if err != nil {
			log.Fatalf("Failed to open file: %v\n", err)
		}
		defer source.Close()
	}
	// Echo every line whose 1-based number is in [startIndex, endIndex];
	// endIndex == -1 means "until EOF".
	lineNo := 0
	scanner := bufio.NewScanner(source)
	for scanner.Scan() {
		lineNo++
		if lineNo >= startIndex && (endIndex == -1 || lineNo <= endIndex) {
			fmt.Println(scanner.Text())
		}
	}
	if err := scanner.Err(); err != nil {
		log.Fatal(err)
	}
}

View File

@@ -1,3 +0,0 @@
module git.ktf.ninja/tabledevil/goinfo
go 1.22.5

View File

@@ -1,52 +0,0 @@
package main
import (
"fmt"
"log"
"net"
"os"
"os/user"
)
// getHostname returns the local hostname, or "" if it cannot be determined.
func getHostname() string {
	name, err := os.Hostname()
	if err != nil {
		log.Println("Could not retrieve Hostname")
		return ""
	}
	return name
}
// getUsernameDomain returns the current user's login name.
// The local is renamed so it no longer shadows the os/user package.
func getUsernameDomain() string {
	u, err := user.Current()
	if err != nil {
		log.Fatal(err)
	}
	return u.Username
}
// getIP resolves hostname via DNS and returns the first address found.
func getIP(hostname string) string {
	addresses, err := net.LookupIP(hostname)
	if err != nil {
		log.Fatal(err)
	}
	first := addresses[0]
	return first.String()
}
// getGroups returns the numeric group IDs of the current process.
func getGroups() []int {
	ids, err := os.Getgroups()
	if err != nil {
		log.Fatal(err)
	}
	return ids
}
// main prints a short host/user/group summary plus a sample DNS lookup
// (www.google.de) to verify name resolution works.
func main() {
	fmt.Println("hostname: ", getHostname())
	fmt.Println("username: ", getUsernameDomain())
	fmt.Println("groups: ", getGroups())
	fmt.Println("google: ", getIP("www.google.de"))
}

Binary file not shown.

View File

@@ -1,376 +0,0 @@
// ipgrep.go
package main
import (
"bytes"
"encoding/csv"
"encoding/json"
"flag"
"fmt"
"io"
"io/ioutil"
"net/http"
"os"
"os/exec"
"regexp"
"sort"
"sync"
"time"
)
// IPInfo holds the data we want from ipinfo.io
type IPInfo struct {
	IP       string `json:"ip"`       // the queried address
	Hostname string `json:"hostname"` // reverse-DNS name, if any
	City     string `json:"city"`
	Region   string `json:"region"`
	Country  string `json:"country"`
	Org      string `json:"org"` // AS / organisation string
}
// main extracts IPv4 addresses (or MAC addresses with -m) from the input and,
// depending on flags, sorts/dedupes them, filters to reachable hosts (-p),
// resolves them (-r), or enriches them via ipinfo.io and emits CSV (-l).
// Input comes from -f FILE, positional file arguments, or stdin.
func main() {
	// Command-line flags.
	var (
		sortFlag    bool
		uniqFlag    bool
		macFlag     bool
		pingable    bool
		resolveFlag bool
		lookupFlag  bool
		fileName    string
	)
	flag.BoolVar(&uniqFlag, "u", false, "only show uniq IPs/MACs (implies -s)")
	flag.BoolVar(&sortFlag, "s", false, "sort output")
	flag.BoolVar(&macFlag, "m", false, "grep MAC-IDs instead of IPs")
	flag.BoolVar(&pingable, "p", false, "only show 'pingable' entries (MACs still beta)")
	flag.BoolVar(&resolveFlag, "r", false, "resolve (uses host for ip and arping for mac)")
	flag.BoolVar(&lookupFlag, "l", false, "lookup ip info using ipinfo.io and output CSV: ip,country,region,city,org,hostname")
	flag.StringVar(&fileName, "f", "", "input file")
	flag.Usage = func() {
		fmt.Fprintf(os.Stderr, "Usage: %s [-u] [-s] [-m] [-p] [-r] [-l] [-f filename] [file...]\n", os.Args[0])
		flag.PrintDefaults()
	}
	flag.Parse()
	// If pingable is set, force sorting and uniqueness.
	if pingable || lookupFlag {
		sortFlag = true
		uniqFlag = true
	}
	if lookupFlag && macFlag {
		fmt.Fprintln(os.Stderr, "Lookup mode (-l) only works for IP addresses, not MAC addresses.")
		os.Exit(1)
	}
	// Regular expressions for IPs or MACs.
	var pattern string
	if macFlag {
		// Supports MAC formats: xx:xx:xx:xx:xx:xx or xxxx.xxxx.xxxx
		pattern = `(([a-fA-F0-9]{2}[:-]){5}[a-fA-F0-9]{2})|([a-fA-F0-9]{4}\.[a-fA-F0-9]{4}\.[a-fA-F0-9]{4})`
	} else {
		// Matches valid IPv4 addresses.
		pattern = `(((25[0-5])|(2[0-4][0-9])|([0-1]?\d?\d))\.){3}((25[0-5])|(2[0-4][0-9])|([0-1]?\d?\d))`
	}
	re, err := regexp.Compile(pattern)
	if err != nil {
		fmt.Fprintf(os.Stderr, "Error compiling regex: %v\n", err)
		os.Exit(1)
	}
	// Read input from -f file, extra args, or stdin.
	var inputData []byte
	if fileName != "" {
		inputData, err = ioutil.ReadFile(fileName)
		if err != nil {
			fmt.Fprintf(os.Stderr, "Error reading file %s: %v\n", fileName, err)
			os.Exit(1)
		}
	} else if flag.NArg() > 0 {
		// Multiple positional files are concatenated; a read error on one
		// file is reported but does not abort the others.
		var buf bytes.Buffer
		for _, fname := range flag.Args() {
			data, err := ioutil.ReadFile(fname)
			if err != nil {
				fmt.Fprintf(os.Stderr, "Error reading file %s: %v\n", fname, err)
				continue
			}
			buf.Write(data)
			buf.WriteByte('\n')
		}
		inputData = buf.Bytes()
	} else {
		inputData, err = io.ReadAll(os.Stdin)
		if err != nil {
			fmt.Fprintf(os.Stderr, "Error reading stdin: %v\n", err)
			os.Exit(1)
		}
	}
	// Filter matches using the regex.
	matches := re.FindAllString(string(inputData), -1)
	// No matches at all: exit quietly with status 0 (like grep -q style use).
	if matches == nil {
		os.Exit(0)
	}
	if sortFlag {
		sort.Strings(matches)
	}
	if uniqFlag {
		matches = unique(matches)
	}
	if pingable {
		matches = filterPingable(matches, macFlag)
		// Re-sort: filterPingable collects results in nondeterministic order.
		if sortFlag {
			sort.Strings(matches)
		}
	}
	// If lookup flag is set, perform ipinfo.io lookups with caching.
	if lookupFlag {
		cache := loadCache()
		var cacheMu sync.Mutex
		results := lookupIPInfo(matches, cache, &cacheMu)
		// Sort the results by IP.
		sort.Slice(results, func(i, j int) bool {
			return results[i].IP < results[j].IP
		})
		// Save the updated cache.
		saveCache(cache)
		// Output CSV using csv.Writer for proper CSV formatting.
		w := csv.NewWriter(os.Stdout)
		// Write header.
		if err := w.Write([]string{"ip", "country", "region", "city", "org", "hostname"}); err != nil {
			fmt.Fprintf(os.Stderr, "Error writing CSV header: %v\n", err)
			os.Exit(1)
		}
		for _, info := range results {
			record := []string{
				info.IP,
				info.Country,
				info.Region,
				info.City,
				info.Org,
				info.Hostname,
			}
			if err := w.Write(record); err != nil {
				fmt.Fprintf(os.Stderr, "Error writing CSV record: %v\n", err)
				os.Exit(1)
			}
		}
		w.Flush()
		if err := w.Error(); err != nil {
			fmt.Fprintf(os.Stderr, "Error flushing CSV data: %v\n", err)
		}
		return
	}
	// If resolve flag is set, perform resolution.
	if resolveFlag {
		results := resolveEntries(matches, macFlag)
		for _, r := range results {
			fmt.Print(r)
			// MAC results already end with a newline (arping output); IPs don't.
			if !macFlag {
				fmt.Println()
			}
		}
		return
	}
	// Otherwise, just output the matches.
	for _, m := range matches {
		fmt.Println(m)
	}
}
// unique returns input with duplicates removed, keeping the first
// occurrence of each string in its original position.
func unique(input []string) []string {
	seen := map[string]struct{}{}
	var out []string
	for _, v := range input {
		if _, dup := seen[v]; dup {
			continue
		}
		seen[v] = struct{}{}
		out = append(out, v)
	}
	return out
}
// filterPingable probes every entry concurrently with ping (or arping for
// MACs) and returns only those that answered. Result order is
// nondeterministic (goroutine completion order).
func filterPingable(entries []string, mac bool) []string {
	var (
		wg    sync.WaitGroup
		mu    sync.Mutex
		alive []string
	)
	for _, entry := range entries {
		wg.Add(1)
		go func(e string) {
			defer wg.Done()
			if !isPingable(e, mac) {
				return
			}
			mu.Lock()
			alive = append(alive, e)
			mu.Unlock()
		}(entry)
	}
	wg.Wait()
	return alive
}
// isPingable reports whether entry answers a single probe:
// `ping -c 1 -w 1` for IPs, `arping -c 1 -w 5000000` for MACs.
func isPingable(entry string, mac bool) bool {
	name, args := "ping", []string{"-c", "1", "-w", "1", entry}
	if mac {
		name, args = "arping", []string{"-c", "1", "-w", "5000000", entry}
	}
	cmd := exec.Command(name, args...)
	cmd.Stdout = nil
	cmd.Stderr = nil
	return cmd.Run() == nil
}
// resolveEntries resolves all entries concurrently: IPs via the external
// `host` command (appending the PTR name when one is found), MACs via
// `arping`. The returned slice preserves input order.
//
// Fix: Go's regexp package (RE2) does not support lookbehind, so the
// original pattern `(?<=pointer\s)` made regexp.MustCompile panic at
// runtime. A capture group after "pointer" achieves the same extraction.
// The compile is also hoisted out of the goroutines (it is loop-invariant).
func resolveEntries(entries []string, mac bool) []string {
	var wg sync.WaitGroup
	results := make([]string, len(entries))
	rePtr := regexp.MustCompile(`(?i)pointer\s+(\S+)`)
	for i, entry := range entries {
		wg.Add(1)
		go func(i int, e string) {
			defer wg.Done()
			if mac {
				cmd := exec.Command("arping", "-q", "-c", "1", "-w", "5000000", e)
				if err := cmd.Run(); err == nil {
					cmd2 := exec.Command("arping", "-c", "1", e)
					out, err := cmd2.CombinedOutput()
					if err == nil {
						results[i] = string(out)
					} else {
						results[i] = e + "\n"
					}
				} else {
					results[i] = e + "\n"
				}
			} else {
				cmd := exec.Command("host", e)
				out, err := cmd.CombinedOutput()
				if err == nil {
					// Extract the hostname after "pointer" in the PTR answer.
					if m := rePtr.FindStringSubmatch(string(out)); m != nil {
						results[i] = fmt.Sprintf("%s %s", e, m[1])
					} else {
						results[i] = e
					}
				} else {
					results[i] = e
				}
			}
		}(i, entry)
	}
	wg.Wait()
	return results
}
// lookupIPInfo queries ipinfo.io for each IP concurrently,
// checking a local cache before going to the network.
// It returns a slice of IPInfo.
// cacheMu guards cache; a second mutex guards the shared results slice.
func lookupIPInfo(entries []string, cache map[string]IPInfo, cacheMu *sync.Mutex) []IPInfo {
	var wg sync.WaitGroup
	var mu sync.Mutex // guards results
	var results []IPInfo
	client := &http.Client{
		Timeout: 5 * time.Second,
	}
	for _, ip := range entries {
		wg.Add(1)
		go func(ip string) {
			defer wg.Done()
			// Check cache first.
			cacheMu.Lock()
			info, found := cache[ip]
			cacheMu.Unlock()
			if found {
				mu.Lock()
				results = append(results, info)
				mu.Unlock()
				return
			}
			// Not in cache; perform HTTP lookup.
			// NOTE(review): lookup failures are silently dropped — the IP is
			// simply missing from the result set. Confirm this is intended.
			url := fmt.Sprintf("https://ipinfo.io/%s", ip)
			resp, err := client.Get(url)
			if err != nil {
				return
			}
			defer resp.Body.Close()
			body, err := io.ReadAll(resp.Body)
			if err != nil {
				return
			}
			var newInfo IPInfo
			if err := json.Unmarshal(body, &newInfo); err != nil {
				return
			}
			// Only add valid responses.
			if newInfo.IP == "" {
				return
			}
			// Update cache.
			cacheMu.Lock()
			cache[ip] = newInfo
			cacheMu.Unlock()
			mu.Lock()
			results = append(results, newInfo)
			mu.Unlock()
		}(ip)
	}
	wg.Wait()
	return results
}
// loadCache reads the JSON lookup cache from ~/.ipgrep.db.
// Any failure (no home dir, unreadable file, bad JSON) yields an empty cache.
func loadCache() map[string]IPInfo {
	home, err := os.UserHomeDir()
	if err != nil {
		return make(map[string]IPInfo)
	}
	cachePath := home + "/.ipgrep.db"
	// os.ReadFile replaces the deprecated ioutil.ReadFile (same semantics).
	data, err := os.ReadFile(cachePath)
	if err != nil {
		// File doesn't exist or can't be read, start with an empty cache.
		return make(map[string]IPInfo)
	}
	var cache map[string]IPInfo
	if err := json.Unmarshal(data, &cache); err != nil {
		// If unmarshal fails, use an empty cache.
		return make(map[string]IPInfo)
	}
	return cache
}
// saveCache persists the lookup cache to ~/.ipgrep.db as indented JSON.
// Failures are reported on stderr but never abort the program.
func saveCache(cache map[string]IPInfo) {
	home, err := os.UserHomeDir()
	if err != nil {
		return
	}
	cachePath := home + "/.ipgrep.db"
	data, err := json.MarshalIndent(cache, "", " ")
	if err != nil {
		fmt.Fprintf(os.Stderr, "Error marshaling cache: %v\n", err)
		return
	}
	// os.WriteFile replaces the deprecated ioutil.WriteFile (same semantics).
	if err := os.WriteFile(cachePath, data, 0644); err != nil {
		fmt.Fprintf(os.Stderr, "Error writing cache file: %v\n", err)
	}
}

Binary file not shown.

View File

@@ -1,502 +0,0 @@
package main
import (
"encoding/json"
"fmt"
"log"
"os/exec"
"regexp"
"runtime"
"strings"
"time"
)
// Software holds information about installed software
type Software struct {
	Source      string `json:"source"` // package manager that reported the entry
	Name        string `json:"name"`
	Version     string `json:"version"`
	InstallDate string `json:"install_date"` // often empty: most sources don't report it
}
// main collects installed-software entries for the current OS and prints
// them as indented JSON on stdout.
func main() {
	var all []Software
	switch runtime.GOOS {
	case "windows":
		all = append(all, enumerateWindows()...)
	case "linux":
		all = append(all, enumerateLinux()...)
	}
	out, err := json.MarshalIndent(all, "", " ")
	if err != nil {
		log.Println("Error marshalling software list:", err)
		return
	}
	fmt.Println(string(out))
}
// enumerateWindows aggregates every Windows package source we know about.
func enumerateWindows() []Software {
	var list []Software
	for _, enum := range []func() []Software{
		enumerateWinget,
		enumerateChocolatey,
		enumerateScoop,
		enumerateCommonWindows,
		enumerateNuGet,
		enumerateMicrosoftStore,
	} {
		list = append(list, enum()...)
	}
	return list
}
// enumerateLinux aggregates every Linux (and cross-platform) package source
// we know about.
func enumerateLinux() []Software {
	var list []Software
	for _, enum := range []func() []Software{
		enumerateApt,
		enumerateSnap,
		enumeratePip,
		enumerateNpm,
		enumerateFlatpak,
		enumerateRpm,
		enumeratePacman,
		enumerateHomebrew,
		enumerateConda,
	} {
		list = append(list, enum()...)
	}
	return list
}
// enumerateNuGet lists packages via `nuget list -AllVersions`
// ("name version" per line).
func enumerateNuGet() []Software {
	out, err := runCommand("nuget", "list", "-AllVersions")
	if err != nil {
		log.Println("Error enumerating NuGet packages:", err)
		return nil
	}
	var list []Software
	for _, line := range strings.Split(out, "\n") {
		cols := strings.Fields(line)
		if len(cols) < 2 {
			continue
		}
		list = append(list, Software{Source: "nuget", Name: cols[0], Version: cols[1]})
	}
	return list
}
// enumerateMicrosoftStore lists AppX packages via PowerShell Get-AppxPackage.
func enumerateMicrosoftStore() []Software {
	out, err := runCommand("powershell", "Get-AppxPackage", "|", "Select-Object", "Name,PackageFullName,InstallDate")
	if err != nil {
		log.Println("Error enumerating Microsoft Store packages:", err)
		return nil
	}
	var list []Software
	for _, line := range strings.Split(out, "\n") {
		cols := strings.Fields(line)
		if len(cols) < 3 {
			continue
		}
		list = append(list, Software{
			Source:      "microsoft store",
			Name:        cols[0],
			Version:     cols[1],
			InstallDate: parseInstallDate(cols[2]),
		})
	}
	return list
}
// enumerateFlatpak lists packages via `flatpak list`.
func enumerateFlatpak() []Software {
	out, err := runCommand("flatpak", "list")
	if err != nil {
		log.Println("Error enumerating Flatpak packages:", err)
		return nil
	}
	var list []Software
	for _, line := range strings.Split(out, "\n") {
		cols := strings.Fields(line)
		if len(cols) < 2 {
			continue
		}
		list = append(list, Software{Source: "flatpak", Name: cols[0], Version: cols[1]})
	}
	return list
}
// enumerateRpm lists packages via `rpm -qa` with a "NAME VERSION" format.
func enumerateRpm() []Software {
	out, err := runCommand("rpm", "-qa", "--queryformat", "%{NAME} %{VERSION}\n")
	if err != nil {
		log.Println("Error enumerating RPM packages:", err)
		return nil
	}
	var list []Software
	for _, line := range strings.Split(out, "\n") {
		cols := strings.Fields(line)
		if len(cols) < 2 {
			continue
		}
		list = append(list, Software{Source: "rpm", Name: cols[0], Version: cols[1]})
	}
	return list
}
// enumeratePacman lists packages via `pacman -Q` ("name version" per line).
func enumeratePacman() []Software {
	out, err := runCommand("pacman", "-Q")
	if err != nil {
		log.Println("Error enumerating Pacman packages:", err)
		return nil
	}
	var list []Software
	for _, line := range strings.Split(out, "\n") {
		cols := strings.Fields(line)
		if len(cols) < 2 {
			continue
		}
		list = append(list, Software{Source: "pacman", Name: cols[0], Version: cols[1]})
	}
	return list
}
// enumerateHomebrew lists packages via `brew list --versions`.
func enumerateHomebrew() []Software {
	out, err := runCommand("brew", "list", "--versions")
	if err != nil {
		log.Println("Error enumerating Homebrew packages:", err)
		return nil
	}
	var list []Software
	for _, line := range strings.Split(out, "\n") {
		cols := strings.Fields(line)
		if len(cols) < 2 {
			continue
		}
		list = append(list, Software{Source: "homebrew", Name: cols[0], Version: cols[1]})
	}
	return list
}
// enumerateConda lists packages via `conda list --json`.
// Fix: field access is now defensive — the original unchecked type
// assertions (pkg["name"].(string)) panicked on any unexpected JSON entry;
// malformed entries are skipped instead.
func enumerateConda() []Software {
	output, err := runCommand("conda", "list", "--json")
	if err != nil {
		log.Println("Error enumerating Conda packages:", err)
		return nil
	}
	var condaPackages []map[string]interface{}
	if err := json.Unmarshal([]byte(output), &condaPackages); err != nil {
		log.Println("Error parsing Conda JSON output:", err)
		return nil
	}
	var softwareList []Software
	for _, pkg := range condaPackages {
		name, okName := pkg["name"].(string)
		version, okVersion := pkg["version"].(string)
		if !okName || !okVersion {
			continue
		}
		softwareList = append(softwareList, Software{Source: "conda", Name: name, Version: version})
	}
	return softwareList
}
// runCommand executes cmd with args and returns its stdout as a string;
// on failure it returns an empty string plus the error.
func runCommand(cmd string, args ...string) (string, error) {
	raw, err := exec.Command(cmd, args...).Output()
	if err != nil {
		return "", err
	}
	return string(raw), nil
}
// parseInstallDate normalizes a "YYYY-MM-DD" date string, returning ""
// when the input does not parse.
// NOTE(review): AppX/WMI sources often report dates in other formats
// (e.g. yyyymmdd), which this rejects — confirm the expected input format.
func parseInstallDate(dateString string) string {
	const layout = "2006-01-02"
	parsed, err := time.Parse(layout, dateString)
	if err != nil {
		return ""
	}
	return parsed.Format(layout)
}
// enumerateWinget lists packages via `winget list --source winget`.
// The regexp whitespace split is kept for byte-identical column behaviour
// (leading whitespace yields an empty first field), but the compile is
// hoisted out of the per-line loop — the original recompiled it per line.
func enumerateWinget() []Software {
	output, err := runCommand("winget", "list", "--source", "winget")
	if err != nil {
		log.Println("Error enumerating Winget packages:", err)
		return nil
	}
	ws := regexp.MustCompile(`\s+`)
	var softwareList []Software
	for _, line := range strings.Split(output, "\n") {
		parts := ws.Split(line, -1)
		if len(parts) < 4 {
			continue
		}
		softwareList = append(softwareList, Software{Source: "winget", Name: parts[0], Version: parts[1]})
	}
	return softwareList
}
// enumerateChocolatey lists packages via `choco list --local-only`,
// splitting each line on '|'.
func enumerateChocolatey() []Software {
	out, err := runCommand("choco", "list", "--local-only")
	if err != nil {
		log.Println("Error enumerating Chocolatey packages:", err)
		return nil
	}
	var list []Software
	for _, line := range strings.Split(out, "\n") {
		cols := strings.Split(line, "|")
		if len(cols) < 2 {
			continue
		}
		list = append(list, Software{Source: "chocolatey", Name: cols[0], Version: cols[1]})
	}
	return list
}
// enumerateScoop lists packages via `scoop list`, skipping the
// "Installed apps" header line.
func enumerateScoop() []Software {
	out, err := runCommand("scoop", "list")
	if err != nil {
		log.Println("Error enumerating Scoop packages:", err)
		return nil
	}
	var list []Software
	for _, line := range strings.Split(out, "\n") {
		if strings.HasPrefix(line, "Installed apps") {
			continue
		}
		cols := strings.Fields(line)
		if len(cols) < 2 {
			continue
		}
		list = append(list, Software{Source: "scoop", Name: cols[0], Version: cols[1]})
	}
	return list
}
// enumerateCommonWindows lists MSI-installed software via WMI Win32_Product.
func enumerateCommonWindows() []Software {
	out, err := runCommand("powershell", "Get-WmiObject", "-Class", "Win32_Product", "|", "Select-Object", "Name,Version,InstallDate")
	if err != nil {
		log.Println("Error enumerating common Windows installations:", err)
		return nil
	}
	var list []Software
	for _, line := range strings.Split(out, "\n") {
		cols := strings.Fields(line)
		if len(cols) < 3 {
			continue
		}
		list = append(list, Software{
			Source:      "common windows installation",
			Name:        cols[0],
			Version:     cols[1],
			InstallDate: parseInstallDate(cols[2]),
		})
	}
	return list
}
// enumerateApt lists Debian packages via dpkg-query with an explicit format.
func enumerateApt() []Software {
	out, err := runCommand("dpkg-query", "-W", "-f=${binary:Package} ${Version} ${Installed-Size}\n")
	if err != nil {
		log.Println("Error enumerating APT packages:", err)
		return nil
	}
	var list []Software
	for _, line := range strings.Split(out, "\n") {
		cols := strings.Fields(line)
		if len(cols) < 2 {
			continue
		}
		list = append(list, Software{Source: "apt", Name: cols[0], Version: cols[1]})
	}
	return list
}
// enumerateSnap lists packages via `snap list` (needs at least 3 columns).
func enumerateSnap() []Software {
	out, err := runCommand("snap", "list")
	if err != nil {
		log.Println("Error enumerating Snap packages:", err)
		return nil
	}
	var list []Software
	for _, line := range strings.Split(out, "\n") {
		cols := strings.Fields(line)
		if len(cols) < 3 {
			continue
		}
		list = append(list, Software{Source: "snap", Name: cols[0], Version: cols[1]})
	}
	return list
}
// enumeratePip lists installed pip packages via `pip list --format=json`.
func enumeratePip() []Software {
	out, err := runCommand("pip", "list", "--format=json")
	if err != nil {
		log.Println("Error enumerating Pip packages:", err)
		return nil
	}
	var entries []map[string]string
	if err := json.Unmarshal([]byte(out), &entries); err != nil {
		log.Println("Error parsing Pip JSON output:", err)
		return nil
	}
	var list []Software
	for _, e := range entries {
		list = append(list, Software{Source: "pip", Name: e["name"], Version: e["version"]})
	}
	return list
}
// enumerateNpm lists global npm packages via `npm list -g --depth=0 --json`.
// Fix: the "dependencies" object is accessed defensively — the original
// unchecked type assertions panicked when the key was missing (empty global
// install) or an entry was malformed; such cases are now skipped.
func enumerateNpm() []Software {
	output, err := runCommand("npm", "list", "-g", "--depth=0", "--json")
	if err != nil {
		log.Println("Error enumerating NPM packages:", err)
		return nil
	}
	var npmPackages map[string]interface{}
	if err := json.Unmarshal([]byte(output), &npmPackages); err != nil {
		log.Println("Error parsing NPM JSON output:", err)
		return nil
	}
	dependencies, ok := npmPackages["dependencies"].(map[string]interface{})
	if !ok {
		return nil
	}
	var softwareList []Software
	for name, info := range dependencies {
		infoMap, ok := info.(map[string]interface{})
		if !ok {
			continue
		}
		version, ok := infoMap["version"].(string)
		if !ok {
			continue
		}
		softwareList = append(softwareList, Software{Source: "npm", Name: name, Version: version})
	}
	return softwareList
}

View File

@@ -1,75 +0,0 @@
package main
import (
"bufio"
"flag"
"fmt"
"hash/fnv"
"log"
"os"
)
// hashLine returns the 32-bit FNV-1a hash of s, used as a compact line identity.
func hashLine(s string) uint32 {
	h := fnv.New32a()
	_, _ = h.Write([]byte(s))
	return h.Sum32()
}
// main deduplicates lines from a file or stdin: by default it prints each
// line the first time it is seen; with -d it prints only repeated lines.
func main() {
	// Define command line flags
	reverse := flag.Bool("d", false, "Print only lines that appear more than once.")
	help := flag.Bool("h", false, "Display help and usage information.")
	flag.Usage = func() {
		fmt.Fprintf(flag.CommandLine.Output(), "Usage of %s:\n", os.Args[0])
		fmt.Println("This program reads from a file or standard input, deduplicates lines, and outputs the results.")
		fmt.Println("Options:")
		flag.PrintDefaults()
		fmt.Println("Example usage:")
		fmt.Println("\t", os.Args[0], "[options] [filename]")
		fmt.Println("\t", os.Args[0], "-d filename # Only print duplicates")
		fmt.Println("\t", "cat /some/text/file |", os.Args[0], "# Read from standard input")
	}
	flag.Parse()
	// Check for help flag
	if *help {
		flag.Usage()
		os.Exit(0)
	}
	// Determine the input source (file or stdin)
	inputSource := os.Stdin
	var err error
	if flag.NArg() > 0 {
		inputSource, err = os.Open(flag.Args()[0])
		if err != nil {
			log.Fatalf("Failed to open file: %v\n", err)
		}
		defer inputSource.Close()
	}
	// NOTE(review): line identity is the 32-bit FNV hash only — two
	// *different* lines that collide are treated as duplicates. Presumably a
	// deliberate memory trade-off; confirm for correctness-critical use.
	seenLines := make(map[uint32]int)
	scanner := bufio.NewScanner(inputSource)
	// Read in lines
	for scanner.Scan() {
		line := scanner.Text()
		hash := hashLine(line)
		seenLines[hash]++
		if *reverse {
			// Print only lines that appear more than once
			if seenLines[hash] > 1 {
				fmt.Println(line)
			}
		} else {
			// Normal mode, print only unique lines
			if seenLines[hash] == 1 {
				fmt.Println(line)
			}
		}
	}
	// Check for errors during scanning
	if err := scanner.Err(); err != nil {
		log.Fatalf("Failed to read input: %v\n", err)
	}
}

15
tools/hashing/hashzip.py Normal file
View File

@@ -0,0 +1,15 @@
#!/usr/bin/env python3
import zipfile
import sys
import hashlib
zip_file_name = sys.argv[1]
with zipfile.ZipFile(zip_file_name, 'r') as zf:
print(f"======== Filelisting for {zip_file_name} ========")
for file_info in zf.filelist:
date_time = file_info.date_time
with zf.open(file_info) as zip_file:
content = zip_file.read()
md5 = hashlib.md5(content).hexdigest()
print(f"{file_info.filename} ({file_info.file_size}) {md5} {date_time[0]}/{date_time[1]:02}/{date_time[2]:02} {date_time[3]:02}:{date_time[4]:02}:{date_time[5]:02}")

32
tools/network/fritzshark.sh Executable file
View File

@@ -0,0 +1,32 @@
#!/bin/sh
# Stolen from Hippie2000 and modified by Jackfritt ;)
# Stolen from Jackfritt and modified by Chaosmaster :-P
# Stream a FritzBox capture of the internet interface into wireshark.
#
# Fix: the script declared `ipddr` but assigned the optional third argument
# to `ipaddr`, so a user-supplied IP/hostname was silently ignored. One
# consistent variable name is used throughout now.
ipaddr="fritz.box"
unset dumpfile
unset passwd
if [ "$1" = "-h" ] || [ "$1" = "--help" ] || [ ! $1 ]; then
    echo "Usage: $0 <PASSWORD> [<DUMPFILE>] [<IP>]"
    exit 1
fi
[ $1 ] && passwd=$1
[ $2 ] && dumpfile="-w $2"
[ $3 ] && ipaddr=$3
# Fetch the login challenge
ChallengeXML=`wget -O - "http://$ipaddr/cgi-bin/webcm?getpage=../html/login_sid.xml" 2>/dev/null| grep Challenge`
Challenge=`echo $ChallengeXML | awk '{match($0,/>[^<>]+</); print substr($0,RSTART+1,RLENGTH-2)}'`
# Build and hash the login response
CPSTR="$Challenge-$passwd"
MD5=`echo -n $CPSTR | iconv -f ISO8859-1 -t UTF-16LE | md5sum -b | awk '{print substr($0,1,32)}'`
RESPONSE="$Challenge-$MD5"
POSTDATA="login:command/response=$RESPONSE&getpage=../html/de/menus/menu2.html"
# Send the login and extract the session id (SID)
SID=`wget -O - --post-data="$POSTDATA" "http://$ipaddr/cgi-bin/webcm" 2>/dev/null| grep "name=\"sid\"" | head -1 | awk '{match($0,/value="[^"]+"/); print substr($0,RSTART+7,RLENGTH-8)}'`
# Start the capture on the internet interface (3-17) and pipe it to wireshark
wget -O - "http://$ipaddr/cgi-bin/capture_notimeout?ifaceorminor=3-17 \
&snaplen=1600&capture=Start&sid=$SID" 2>/dev/null | \
wireshark -k $dumpfile -i -

30
tools/network/fritzshark2.sh Executable file
View File

@@ -0,0 +1,30 @@
#!/bin/sh
# Capture FritzBox traffic (newer firmware login via login_sid.lua) and
# write a rotating pcap via tcpdump. The commented wget lines below document
# the interface ids for alternative capture targets.
IP="fritz.box"
echo -n Password:
read -s Passwd
# Fetch the login challenge
Challenge=`wget -O - "http://$IP/login_sid.lua" 2>/dev/null | sed 's/.*<Challenge>\(.*\)<\/Challenge>.*/\1/'`
# Build and hash the login response
CPSTR="$Challenge-$Passwd"
MD5=`echo -n $CPSTR | iconv -f ISO8859-1 -t UTF-16LE | md5sum -b | awk '{print substr($0,1,32)}'`
RESPONSE="$Challenge-$MD5"
POSTDATA="?username=&response=$RESPONSE"
# Send the login and fish out the session id (SID)
SID=`wget -O - --post-data="$POSTDATA" "http://$IP/login_sid.lua" 2>/dev/null | sed 's/.*<SID>\(.*\)<\/SID>.*/\1/'`
# Internet Capture
# Interface 1 (internet) = 3-17
#wget -O - "http://$IP/cgi-bin/capture_notimeout?ifaceorminor=3-17 \
# all interfaces = 3-0
#wget -O - "http://$IP/cgi-bin/capture_notimeout?ifaceorminor=3-0 \
#&snaplen=1600&capture=Start&sid=$SID" 2>/dev/null | \
#tshark -i - -S -l -N nmtC
#wget -O - "http://$IP/cgi-bin/capture_notimeout?ifaceorminor=3-0 \
# external interface
#wget -O - "http://$IP/cgi-bin/capture_notimeout?ifaceorminor=3-17 \
# local LAN
#wget -O - "http://$IP/cgi-bin/capture_notimeout?ifaceorminor=1-eth0&snaplen=1600&capture=Start&sid=$SID" 2>/dev/null | tshark -i - -S -l -N nmtC
# Capture eth0 and rotate pcaps: 48 files, 30 min (-G 1800) or 100 MB (-C) each
wget -O - "http://$IP/cgi-bin/capture_notimeout?ifaceorminor=1-eth0&snaplen=1600&capture=Start&sid=$SID" 2>/dev/null | tcpdump -r - -w /tmp/trace -W 48 -G 1800 -C 100 -K -n

113
tools/security/certwipe Executable file
View File

@@ -0,0 +1,113 @@
#!/bin/bash
###################Wipe (optional)
# Target block device; may be passed as $1, otherwise chosen interactively.
DEVICE=${1}
# Seconds the warning dialog counts down before the wipe proceeds.
wipedelay=20
# Required packages
REQUIRED_PACKAGES=("hdparm" "dialog" "dc3dd" "util-linux")
# Check for missing packages; abort with a hint if any are absent.
# Fix: the original never said WHICH package was missing.
check_missing_packages()
{
    for package in "${REQUIRED_PACKAGES[@]}"; do
        if ! dpkg -s "${package}" >/dev/null 2>&1; then
            echo "Missing package: ${package}"
            echo "Wipe script requires the following packages:"
            for p in "${REQUIRED_PACKAGES[@]}"; do
                echo "    ${p}"
            done
            exit 1
        fi
    done
}
# Ask the user to pick a disk whenever none (or an invalid one) was supplied
# on the command line; the result lands in the global DEVICE variable.
get_device()
{
    if [ -z "$DEVICE" ] || [ ! -b "$DEVICE" ]; then
        # Build dialog menu pairs: "/dev/<name>" "<size model type>"
        entries=()
        while read -r line; do
            dev=$(echo $line | cut -f1 -d" ")
            rest=$(echo $line | cut -f2- -d" " | tr -s " ")
            entries+=("/dev/${dev}" "${rest}")
        done < <(lsblk -l -oname,size,model,type | grep -e disk)
        # Let the user choose; dialog prints the selection on the swapped fds.
        DEVICE=$(dialog --backtitle "CERTBw - SecureErase" --title "Available Devices" --menu "Which disk should be wiped?" 24 80 17 "${entries[@]}" 3>&2 2>&1 1>&3)
    fi
}
# cleanup function to unset the ATA Password if execution gets interrupted;
# without this the drive would remain security-locked after an aborted erase.
cleanup()
{
    echo
    echo "==WIPE : Removing ATA password due to user interruption..."
    hdparm --user-master u --security-disable certbw "${DEVICE}"
    echo "==WIPE : ATA password removed."
    exit 1
}
# Display warning and countdown. The --pause dialog auto-continues after
# ${wipedelay} seconds; Cancel/ESC make dialog return non-zero and we abort.
display_warning()
{
    dialog --backtitle "CERTBw - SecureErase" --defaultno --cancel-label "Cancel" --colors --title "\Z1!WARNING!\Zn" --pause "\n\Z1The device ${DEVICE} will be completely erased!\Zn\n\nThe SecureErase process must not be interrupted, as this will lock the device, and it will need to be manually unlocked afterward.\n\n\nThe process will automatically continue after the countdown expires.\n\nTo cancel the DiskWipe, you can:\n \Z4Select \"Cancel\"\n Press \"ESC\"\n Press \"CTRL + C\"\n Turn off the computer\Zn" 24 80 ${wipedelay}
    if [ "$?" -gt 0 ]; then
        echo "==WIPE : Wipe was canceled by the user."
        sleep 1
        read -p "Press [ENTER] key for Shell..."
        exit 1
    fi
}
# Securely erase the device: prefer the drive's ATA Secure Erase (after
# unfreezing it via suspend/resume if needed); fall back to overwriting
# with dc3dd when Secure Erase is unsupported or the drive stays frozen.
secure_erase()
{
    if hdparm -I "${DEVICE}" | grep supported | grep -q erase; then
        echo "==WIPE : Secure Erase is supported by ${DEVICE}"
        if ! (hdparm -I "${DEVICE}" | grep not | grep -q frozen); then
            echo "==WIPE : The device ${DEVICE} is frozen"
            echo "==WIPE : The notebook will now be put to sleep for 10 seconds."
            echo "==WIPE : Do not turn off the notebook."
            sleep 5
            # Suspend/resume to clear the 'frozen' state; the re-check below
            # confirms whether it actually worked.
            rtcwake -s 10 -m mem
            echo "==WIPE : The notebook has woken up. Checking the status of ${DEVICE}."
        fi
        if hdparm -I "${DEVICE}" | grep not | grep -q frozen; then
            echo "==WIPE : The device ${DEVICE} is 'not frozen'"
            echo
            echo "==WIPE : A temporary ATA password (certbw) must be set for SecureErase."
            echo "==WIPE : If the SecureErase process is interrupted, the disk will be unusable until manually unlocked."
            echo "==WIPE : Do not turn off the notebook."
            sleep 5
            # Set a trap to catch SIGINT and call the cleanup function
            trap 'cleanup' SIGINT
            # Set ATA password
            hdparm --user-master u --security-set-pass certbw "${DEVICE}"
            # Issue Secure Erase command
            hdparm --user-master u --security-erase certbw "${DEVICE}"
            # Remove the trap after the Secure Erase is completed
            trap - SIGINT
        else
            # Normal wipe because unfreeze didn't work
            echo "==WIPE : The device could not be unfrozen."
            echo "==WIPE : The device ${DEVICE} will be overwritten."
            /usr/bin/dc3dd wipe="${DEVICE}"
        fi
    else
        # Normal wipe because Secure Erase is not supported
        echo "==WIPE : Secure Erase is NOT supported."
        echo "==WIPE : The device ${DEVICE} will be overwritten."
        /usr/bin/dc3dd wipe="${DEVICE}"
    fi
}
# --- main flow: verify prerequisites, pick the device, warn, then erase ---
check_missing_packages
get_device
if [ ! -b "${DEVICE}" ]; then
    # German: "No valid BLOCK device selected."
    # NOTE(review): the rest of the script logs in English — consider
    # translating this message for consistency.
    echo "==WIPE : Kein gültiges BLOCK-Device ausgewählt."
    sleep 1
    read -p "Press [ENTER] key for Shell..."
    exit 1
fi
display_warning
secure_erase

5
tools/security/imphash.py Executable file
View File

@@ -0,0 +1,5 @@
#!/usr/bin/env python3
"""Print the import hash (imphash) of a PE file given on the command line."""
import pefile
import sys

# Fail with a usage message instead of an IndexError traceback.
if len(sys.argv) != 2:
    print("Usage: {} <pe_file>".format(sys.argv[0]), file=sys.stderr)
    sys.exit(1)

pe = pefile.PE(sys.argv[1])
print(pe.get_imphash())

33
tools/security/scan_vt.py Executable file
View File

@@ -0,0 +1,33 @@
#!/usr/bin/python3
import requests
import sys
import hashlib
from os.path import expanduser
out_sep = ';'
# Read the VirusTotal API key from the user's home directory.
with open(expanduser('~/.virustotal_api_key')) as api_f:
    api_key = api_f.readline().strip()
# Hash the file locally and query by hash only (the file is never uploaded).
# Renamed from "hash", which shadowed the builtin.
with open(sys.argv[1], 'rb') as f:
    file_hash = hashlib.md5(f.read())
params = {'apikey': api_key, 'resource': file_hash.hexdigest()}
headers = {
    "Accept-Encoding": "gzip, deflate",
    "User-Agent": "gzip,python_requests,scan_vt.py"
}
response = requests.get('https://www.virustotal.com/vtapi/v2/file/report',
                        params=params, headers=headers)
try:
    json_response = response.json()
except ValueError:  # non-JSON reply (quota exceeded, HTML error page, ...)
    print(response)
    exit(1)
# response_code truthy => sample known to VT; print detections and permalink.
if json_response["response_code"]:
    print("{}{}{}{}{}/{}{}{}".format(sys.argv[1], out_sep, file_hash.hexdigest(), out_sep,
                                     json_response["positives"], json_response["total"],
                                     out_sep, json_response["permalink"]))
else:
    print("{}{}{}{}{}".format(sys.argv[1], out_sep, file_hash.hexdigest(), out_sep, out_sep))

View File

@@ -0,0 +1,47 @@
from scapy.all import srp, Ether, ARP
from threading import Thread
from ipaddress import IPv4Network
from pprint import pprint
from time import sleep, time
# Shared between the main thread and the Scanner workers; only list.append
# is performed concurrently, which is atomic under the GIL.
threads = []
clients = list()
class Scanner(Thread):
    """Thread that ARP-probes a single IPv4 address and records responders."""

    def __init__(self, ip):
        super().__init__()
        self.ip = ip

    def run(self):
        # Broadcast an ARP who-has request for our target address.
        # (Technique based on thepythoncode.com's network-scanner article.)
        frame = Ether(dst="ff:ff:ff:ff:ff:ff") / ARP(pdst=self.ip)
        # srp() returns (answered, unanswered); we only need the answers.
        answered = srp(frame, timeout=3, verbose=0)[0]
        # Each answer pairs the sent frame with the reply; the reply's
        # psrc/hwsrc fields are the responder's IP and MAC address.
        for _sent, reply in answered:
            clients.append({"ip": reply.psrc, "mac": reply.hwsrc})
        # Pace the scan: pause one second before this worker finishes.
        sleep(1)
if __name__ == "__main__":
    start = time()
    # Spawn one scanner thread per host address in the /24 network.
    # NOTE(review): the subnet is hard-coded — confirm 192.168.178.0/24
    # matches the local network before running.
    for ip in IPv4Network('192.168.178.0/24').hosts():
        t = Scanner(str(ip))
        threads.append(t)
        t.start()
    # Wait for every probe to finish, then report responders and runtime.
    for t in threads:
        t.join()
    pprint(clients)
    print(f"Executed in {time() - start} seconds.")

View File

@@ -0,0 +1,30 @@
import socket as sk
import sys

# Simple TCP connect() port scanner.
# Usage: <script> <ip> [<start_port> - <end_port> | <port> ...]
print(sys.argv)
print(len(sys.argv))
print("Host:", sys.argv[1])

# Well-known ports scanned when no explicit ports are given.
default = (21, 22, 23, 80, 110, 111, 135, 139, 389, 443, 515, 631, 3306, 3389)


def usage():
    """Print a short usage synopsis."""
    print("Usage:", sys.argv[0], "<ip> ( [<start_port> - <end_port>] | [<port>] ) ")


if (len(sys.argv) == 5) and sys.argv[3] == '-':
    # "<ip> <start> - <end>": scan the inclusive range (original range()
    # silently skipped the end port).
    try:
        ports = range(int(sys.argv[2]), int(sys.argv[4]) + 1)
    except ValueError:
        usage()
        ports = default
elif len(sys.argv) > 2:
    # Explicit port list. Fixes two bugs: "sys.arv" typo and raw string
    # ports (connect() with a str port raised and was silently swallowed).
    ports = [int(p) for p in sys.argv[2:]]
else:
    ports = default

print("Ports:", ports)
for port in ports:
    try:
        s = sk.socket(sk.AF_INET, sk.SOCK_STREAM)
        s.settimeout(1)
        s.connect((sys.argv[1], port))
        print('%d:OPEN' % port)
        s.close()  # was "s.close" without the call — sockets leaked
    except OSError:
        continue

View File

@@ -0,0 +1,35 @@
#!/usr/bin/env python
# banner.py
import sys
import socket
import argparse
def grab(ip, port):
    """Connect to ip:port over TCP, read up to 1024 bytes and return the
    decoded, whitespace-stripped banner, or a "Connection error: ..." string
    on any socket failure.

    :param ip: target host (IP address or name accepted by connect())
    :param port: target TCP port
    :return: decoded banner text, or an error description string
    """
    sock = None
    try:
        sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)  # TCP
        sock.settimeout(5)  # Set a timeout of 5 seconds
        sock.connect((ip, port))
        ret = sock.recv(1024)
        return ret.strip().decode()
    except socket.error as e:
        return f"Connection error: {e}"
    finally:
        # Guard: socket.socket() itself may raise, leaving sock unset;
        # the original unconditionally called sock.close() here and could
        # mask the real error with an UnboundLocalError.
        if sock is not None:
            sock.close()
def main():
    """Parse the command line and print the banner for the requested host/port."""
    parser = argparse.ArgumentParser(
        description="Retrieve banner information from the specified IP and port.")
    parser.add_argument("ip", help="The target IP address")
    parser.add_argument("-p", "--port", type=int, default=25,
                        help="The target port (default: 25)")
    args = parser.parse_args()
    # Fetch and display the banner in one step.
    print(grab(args.ip, args.port))


if __name__ == "__main__":
    main()

26
tools/security/testpw.py Executable file
View File

@@ -0,0 +1,26 @@
#!/usr/bin/python3
"""Check a password against the haveibeenpwned.com Pwned Passwords API.

Uses the k-anonymity range endpoint: only the first 5 hex characters of the
SHA-1 hash are transmitted; the returned suffix list is compared locally.
"""
import sys
import hashlib
import requests

if len(sys.argv) != 2:
    print("Usage: python testpw.py <password>")
    exit(1)

url = "https://api.pwnedpasswords.com/range/"
hash_object = hashlib.sha1(sys.argv[1].encode("UTF-8"))
pw_hash = hash_object.hexdigest()
first_part = pw_hash[:5]   # sent to the API
second_part = pw_hash[5:]  # compared locally against the returned suffixes
print(pw_hash)
furl = "{}{}".format(url, first_part)
print("Das gehashte Passwort lautet: {}".format(pw_hash))
print("Es werden lediglich die ersten 5 Zeichen des Hashes übertragen ({})".format(first_part))
# Typo fixes in the user-facing text: "Rückschlusse"/"da" -> "Rückschlüsse"/"das".
print("Dies lässt keinerlei Rückschlüsse auf das Passwort zu.")
response = requests.get(furl)
for line in response.text.splitlines():
    # Each line is "<HASH-SUFFIX>:<count>". Compare the suffix exactly:
    # the original substring test could also match inside the count field.
    suffix, _, count = line.partition(":")
    if suffix.lower() == second_part.lower():
        print("Passwort wurde {} mal im Datenbestand gefunden".format(count))
        exit(0)
print("Passwort wurde nicht im Datenbestand gefunden")

41
tools/security/vt_download.py Executable file
View File

@@ -0,0 +1,41 @@
#!/usr/bin/python3
"""Download a sample from VirusTotal (v2 private API) by hash.

Usage: vt_download.py <md5|sha1|sha256>
Reads the API key from the first line of ~/.virustotal_api_key and writes
the sample to the current directory under its hash.
"""
import sys
import pprint
import requests
import os.path

with open(os.path.expanduser('~/.virustotal_api_key')) as api_f:
    api_key = api_f.readline().strip()

sample_hash = sys.argv[1]  # renamed from "hash", which shadowed the builtin
url = 'https://www.virustotal.com/vtapi/v2/file/download'
params = {'apikey': api_key, 'hash': sample_hash}
headers = {
    "Accept-Encoding": "gzip, deflate",
    "User-Agent": "gzip,python_requests,vt_pdns.py"
}
try:
    response = requests.get(url, params=params, headers=headers)
    if response.ok:
        with open(sample_hash, 'wb') as f:
            f.write(response.content)
    else:
        print("NOTFOUND:{}".format(sample_hash))
except requests.exceptions.ProxyError as e:
    print("Proxy Error")
    print(e)
    exit(1)

56
tools/security/vt_ip.py Normal file
View File

@@ -0,0 +1,56 @@
#!/usr/bin/python3
"""Short VirusTotal (v2 API) report for an IP address.

Usage: vt_ip.py <ip>
Reads the VT API key from ~/.virustotal_api_key; if ~/.ipinfo_api_key
exists, an ipinfo.io lookup is printed first.
"""
import sys
import pprint
import requests
import os.path

with open(os.path.expanduser('~/.virustotal_api_key')) as api_f:
    api_key = api_f.readline().strip()

# Optional enrichment via ipinfo.io when an API token is available.
if os.path.exists(os.path.expanduser('~/.ipinfo_api_key')):
    with open(os.path.expanduser('~/.ipinfo_api_key')) as api_g:
        ipinfo_api_key = api_g.readline().strip()
    ipinfo_data = requests.get('http://ipinfo.io/{}'.format(sys.argv[1]), params={'token': ipinfo_api_key})
    print(ipinfo_data.json())

ip = sys.argv[1]
url = 'https://www.virustotal.com/vtapi/v2/ip-address/report'
params = {'apikey': api_key, 'ip': ip}
headers = {
    "Accept-Encoding": "gzip, deflate",
    "User-Agent": "gzip,python_requests,vt_pdns.py"
}
try:
    response = requests.get(url, params=params, headers=headers)
    response_data = response.json()
except requests.exceptions.ProxyError as e:
    print("Proxy Error")
    print(e)
    exit(1)

print("=== Short report for : {} ===".format(ip))
print(response_data['verbose_msg'])
if 'detected_urls' in response_data:
    print("{} detected URLs found".format(len(response_data['detected_urls'])))
if 'detected_downloaded_samples' in response_data:
    print("{} detected Downloads found".format(len(response_data['detected_downloaded_samples'])))
if 'resolutions' in response_data:
    print("== Resolutions ==")
    # Sort chronologically by last_resolved when there is more than one entry.
    data = sorted(response_data['resolutions'], key=lambda i: i['last_resolved']) if len(response_data['resolutions']) > 1 else response_data['resolutions']
    for r in data:
        print("    {} : {}".format(r["last_resolved"], r["hostname"]))
# Dump every remaining field from the already-parsed report
# (was: iterating response.json(), which re-parsed the body needlessly).
for k in response_data:
    print("=== {} ===".format(k))
    print(response_data[k])

102
tools/security/vt_pdns.py Executable file
View File

@@ -0,0 +1,102 @@
#!/usr/bin/python3
"""Short VirusTotal (v2 API) domain / passive-DNS report.

Usage: vt_pdns.py <domain>
Reads the API key from the first line of ~/.virustotal_api_key.
"""
import sys
import pprint
import requests
from os.path import expanduser

out_sep = ';'
with open(expanduser('~/.virustotal_api_key')) as api_f:
    api_key = api_f.readline().strip()

domain = sys.argv[1]
url = 'https://www.virustotal.com/vtapi/v2/domain/report'
params = {'apikey': api_key, 'domain': domain}
headers = {
    "Accept-Encoding": "gzip, deflate",
    "User-Agent": "gzip,python_requests,vt_pdns.py"
}
# Vendor-category fields of interest in the v2 domain report.
cat_fields = ["Alexa category",
              "categories",
              "BitDefender category",
              "TrendMicro category",
              "Forcepoint ThreatSeeker category"]
# Other fields the report may contain (kept for reference):
#   whois, WOT domain info, Webutation domain info, BitDefender domain info,
#   Alexa domain info, Opera domain info, Alexa rank, domain_siblings,
#   detected_downloaded_samples, detected_urls, detected_communicating_samples,
#   detected_referrer_samples, undetected_downloaded_samples,
#   undetected_referrer_samples, undetected_urls,
#   undetected_communicating_samples, resolutions, response_code,
#   verbose_msg, pcaps

try:
    response = requests.get(url, params=params, headers=headers)
    response_data = response.json()
except requests.exceptions.ProxyError as e:
    print("Proxy Error")
    print(e)
    exit(1)


def get(key, record):
    """Print record[key] (when present) prefixed with the vendor name.

    Multi-word keys like "BitDefender category" are prefixed with their first
    word; single-word keys get a generic "VT: " prefix.
    (Parameter renamed from "dict", which shadowed the builtin.)
    """
    split_key = key.split(sep=" ")
    if len(split_key) > 1:
        prefix = "{}: ".format(split_key[0])
    else:
        prefix = "VT: "
    if key in record:
        print("{}{}".format(prefix, record[key]))


print("=== Short report for : {} ===".format(domain))
print(response_data['verbose_msg'])
if 'detected_urls' in response_data:
    print("{} detected URLs found".format(len(response_data['detected_urls'])))
if 'detected_downloaded_samples' in response_data:
    print("{} detected Downloads found".format(len(response_data['detected_downloaded_samples'])))
if any([True for x in cat_fields if x in response_data]):
    print("== Categories ==")
    for cat in cat_fields:
        get(cat, response_data)
if 'resolutions' in response_data:
    print("== Resolutions ==")
    # Sort chronologically by last_resolved when there is more than one entry.
    data = sorted(response_data['resolutions'], key=lambda i: i['last_resolved']) if len(response_data['resolutions']) > 1 else response_data['resolutions']
    for r in data:
        print("    {} : {}".format(r["last_resolved"], r["ip_address"]))

65
tools/system/ltop.py Executable file
View File

@@ -0,0 +1,65 @@
#!/usr/bin/env python3
import sys
import curses
from operator import itemgetter
import time
# Number of top items to be displayed
# NOTE(review): this module-level N appears unused — gen_output() declares
# its own N=10 default and main() relies on that default.
N = 10
def gen_output(item_dict, N=10):
    """
    Generate a formatted output string for the top N items in item_dict.

    :param item_dict: A dictionary containing items and their counts
    :param N: The number of top items to be displayed
    :return: A generator yielding (row_index, formatted_line) tuples for
             each of the top N items, highest count first
    """
    # Guard: max() below raises ValueError on an empty dict; yield nothing.
    if not item_dict:
        return
    top_items = dict(sorted(item_dict.items(), key=itemgetter(1), reverse=True)[:N])
    # Right-align counts to the widest count among the displayed items.
    count_length = len(str(max(top_items.values())))
    for i, key in enumerate(top_items):
        yield i, f'{i + 1:3} : [{top_items[key]:{count_length}}] {key}'
def main(screen):
    """
    Main function to read input lines, maintain a count of each unique line, and
    periodically display the top N lines with the highest counts using curses.

    :param screen: A curses window object
    """
    if not sys.stdin.isatty():  # Check if the input comes from a pipe
        # Initialize an empty dictionary to store unique input lines and their counts
        toplist = {}
        # Set the next screen update time
        t_update = time.time() + 1
        for line in sys.stdin:
            line = line.strip()
            # Increment the count for each unique input line
            if line in toplist:
                toplist[line] += 1
            else:
                toplist[line] = 1
            # Periodically update the screen with the top N lines
            # (at most once per second, so fast input doesn't stall on redraws)
            if time.time() > t_update:
                for idx, line in gen_output(toplist):
                    screen.addstr(idx, 0, line)
                screen.refresh()
                t_update = time.time() + 1
        # Clean up the curses environment and print the final top N lines
        # (endwin() must run first so the prints reach the restored terminal)
        curses.endwin()
        for idx, line in gen_output(toplist):
            print(line)
    else:
        print("Usage: cat input_file.txt | ./top_lines.py")
        print("Or: ./top_lines.py < input_file.txt")
# Initialize the curses library, run the main function, and restore the terminal state
# (wrapper() also restores the terminal if main() raises an exception).
curses.wrapper(main)

101
tools/system/wipe.sh Normal file
View File

@@ -0,0 +1,101 @@
#!/bin/bash
# Disable kernel logging to the console so it doesn't overwrite dialog screens.
echo '2 4 1 7' > /proc/sys/kernel/printk
# Compute a horizontally centered window position from the terminal width
# and the given window width. Example: 'mitte 50'
function mitte(){
cols=$(tput cols)
mitte=$(echo $(( $cols / 2 - $1 / 2 )) )
echo $mitte
}
# Show an info message for the given number of seconds.
# Example: 'info "text" 5'
function info(){
text=${1}
# Box width: message length plus padding for the dialog border.
text_len=$(( ${#1} + 4 ))
timeout=${2}
dialog --backtitle "CERTBw - Zero-Wipe" --infobox "$text" 3 $text_len; sleep $timeout
}
# Show an overview of block devices and ask which one should be wiped.
# Prints the chosen device on stdout; returns dialog's exit status
# (non-zero on cancel/ESC).
function ask_4_device(){
# Remove a stale device list from a previous run. The original test was
# inverted ('[ -e file ] || rm file' only ran rm when the file did NOT
# exist); rm -f handles both cases safely.
rm -f /tmp/devicelist
# One line per device: NAME SIZE TYPE (FSTYPE), stripped of odd characters.
lsblk -o NAME,SIZE,TYPE,FSTYPE | tail -n+2 | tr -cd ',.\n [:alnum:]' | awk '{printf "%-5s%6s %s (%s) \n" , $1,$2,$3,$4}' | sed -e "s/()//g" >/tmp/devicelist
devlines=$(( $(cat /tmp/devicelist | wc -l) + 2 ))
dialog --backtitle "CERTBw - Zero-Wipe" --begin 2 $(mitte 30) --title "Available Devices" --progressbox $devlines 30 --and-widget --stdout --inputbox 'Welche Platte soll gewipet werden?' 7 60 '/dev/sda' < /tmp/devicelist
result=${?}
return $result
}
# Check the return value of the preceding 'dialog' window for cancellation
# and restart the menu if the user aborted.
# Note: ${?} at function entry is the exit status of the command executed
# just before this function was called (dialog: 1 = Cancel, 255 = ESC).
function check_result(){
result=${?}
if ([ $result = 1 ] || [ $result = 255 ]); then
info 'CANCELED' 1
menu
exit 0
fi
}
# Copy zeros onto the given device and show the progress with 'dialog'.
function wipe(){
# Create named pipes for the data stream and the status messages.
mkfifo data
mkfifo status
# Device size in bytes (blockdev reports the count of 512-byte sectors).
size_512=$(blockdev --getsz $1)
size=$((512 * ${size_512}))
echo "wiping Disk $1:"
(while read -r line
do
# Condense pv's status line into a format 'dialog --gauge' understands.
split=$(echo $line | tr -d "%[]=<>" | xargs)
space=$(echo "$split" | cut -f1 -d" ")
time=$(echo "$split" | cut -f2 -d" ")
rate=$(echo "$split" | cut -f3 -d" ")
prozent=$(echo "$split" | cut -f4 -d" ")
eta=$(echo "$split" | cut -f6 -d" ")
# "XXX ... XXX" blocks update both the gauge percentage and its text.
echo "XXX"
echo $prozent
echo "Wiped $space in $time so far. ($rate)"
echo "ETA : $eta"
echo "XXX"
# pv streams /dev/zero into the 'data' pipe and its progress into 'status';
# dd copies 'data' onto the device; tr turns pv's \r updates into lines.
done < <(pv -f -s $size /dev/zero 1>data 2>status | dd bs=1M iflag=fullblock oflag=nocache if=data of=$1 2>/dev/null | stdbuf -oL tr "\r" "\n" <status) ) | dialog --backtitle "CERTBw - Zero-Wipe" --title "Wiping $1" --gauge "Please wait" 7 70 0
rm data
rm status
}
# Post-wipe menu: reboot, power off, verify (stub), re-wipe, or drop to a shell.
function menu(){
menu=$(dialog --stdout --backtitle "CERTBw - Zero-Wipe" --title "Wiping Complete" --menu "Action:" 0 0 5 1 Reboot 2 Poweroff 3 Verify 4 Re-Wipe 5 Shell)
case "$menu" in
1) info "REBOOTING" 1; reboot
exit 0
;;
2) info "SHUTTING DOWN" 1; poweroff
exit 0
;;
3) info "Verify - Not yet implemented" 3
menu
;;
# Re-wipe by re-executing this script from its installed location.
4) /etc/wipe.sh
exit 0
;;
5) exit 0
;;
*) info 'CANCELED' 1
exit 0
;;
esac
}
## Simple main flow: pick a device, bail out on cancel, wipe it, show the menu.
drive=$(ask_4_device)
check_result
wipe $drive
menu
exit 0

18
tools/text/depth Executable file
View File

@@ -0,0 +1,18 @@
#!/bin/bash
# depth — track the nesting depth of an (X)HTML/XML tag in a file.
#
# Usage: depth <file> <tagname>
# Streams every opening/closing occurrence of the tag and prints the current
# nesting level and the maximum seen so far ("<open> - <max>") per tag.
file="${1}"
stag="${2}"
max=0
open=0
# Feed the loop via process substitution instead of a pipe: a piped 'while'
# runs in a subshell, where updates to open/max live in their own copy.
while read -r tag; do
if [[ "$tag" == "<${stag}" ]] ; then
(( open++ ))
else
(( open-- ))
fi
# Update the maximum BEFORE printing so the displayed value is current
# (the original updated it after the echo, lagging one line behind).
if [[ $open -gt $max ]] ; then
max=$open
fi
echo "$open - $max"
done < <(grep -Po "</?${stag}" "${file}")

29
tools/text/probability.py Normal file
View File

@@ -0,0 +1,29 @@
#!/usr/bin/env python
import sys
import random
from random import shuffle
from collections import Counter
def main(trials=1000000):
    """Monte-Carlo estimate of the probability that exactly 4 of the first 11
    employees in a random shuffle belong to the 19-person group.

    The pool is 19 employees marked 1 and 23 marked 0 (42 total); each trial
    shuffles a copy and checks the first 11 positions.

    :param trials: number of simulated shuffles (default keeps the original
                   behaviour of one million runs)
    """
    # 19 ones and 23 zeros, built idiomatically instead of two append loops.
    employees = [1] * 19 + [0] * 23
    count = 0
    for _ in range(trials):
        temp = employees[:]
        shuffle(temp)
        # Count trials where exactly 4 of the first 11 belong to group "1".
        if Counter(temp[0:11])[1] == 4:
            count += 1
    print(count / trials)


if __name__ == '__main__':
    main()
    sys.exit(0)
sys.exit(0)