Restructure repository: organize tools by purpose, create what search tool

- Move single-file tools to tools/ organized by category (security, forensics, data, etc.)
- Move multi-file projects to projects/ (go-tools, puzzlebox, timesketch, rust-tools)
- Move system scripts to scripts/ (proxy, display, setup, windows)
- Organize config files in config/ (shell, visidata, applications)
- Move experimental tools to archive/experimental
- Create 'what' fuzzy search tool with progressive enhancement (ollama->fzf->grep)
- Add initial metadata database for intelligent tool discovery
- Preserve git history using 'git mv' commands
This commit is contained in:
tobias
2025-08-24 19:50:00 +02:00
parent 9518290544
commit 619b0bc432
124 changed files with 1063 additions and 0 deletions

BIN
.DS_Store vendored Normal file

Binary file not shown.

149
.what_db.json Normal file
View File

@@ -0,0 +1,149 @@
{
"version": "1.0",
"tools": {
"tools/security/scan_vt.py": {
"path": "tools/security/scan_vt.py",
"name": "scan_vt.py",
"type": "python script",
"summary": "Scans files against VirusTotal using MD5 hashes and displays detection results with positives/total ratios and permalink.",
"purpose": "Malware detection and threat analysis",
"short_description": "VirusTotal file scanner with detection ratios",
"executable": true
},
"tools/security/imphash.py": {
"path": "tools/security/imphash.py",
"name": "imphash.py",
"type": "python script",
"summary": "Calculates and displays the import hash (imphash) of PE files using pefile library for malware analysis.",
"purpose": "Malware analysis and PE file fingerprinting",
"short_description": "PE import hash calculator",
"executable": true
},
"tools/security/scapy_arp.py": {
"path": "tools/security/scapy_arp.py",
"name": "scapy_arp.py",
"type": "python script",
"summary": "Multi-threaded ARP network scanner using Scapy to discover live hosts on a /24 network range with MAC addresses.",
"purpose": "Network discovery and reconnaissance",
"short_description": "threaded ARP network scanner",
"executable": true
},
"tools/data/domgrep.py": {
"path": "tools/data/domgrep.py",
"name": "domgrep.py",
"type": "python script",
"summary": "Extracts domain names from URLs read from stdin, filtering out IP addresses and handling malformed URLs gracefully.",
"purpose": "Data extraction and URL processing",
"short_description": "extract domains from URL lists",
"executable": true
},
"tools/data/unum.py": {
"path": "tools/data/unum.py",
"name": "unum.py",
"type": "python script",
"summary": "Analyzes Unicode characters showing decimal/hex codes, categories, and official Unicode names with proper formatting.",
"purpose": "Text analysis and Unicode debugging",
"short_description": "detailed Unicode character analyzer",
"executable": true
},
"tools/forensics/chechsqlite.py": {
"path": "tools/forensics/chechsqlite.py",
"name": "chechsqlite.py",
"type": "python script",
"summary": "Scans SQLite databases for tables containing password or hash-related columns for security analysis.",
"purpose": "Database security analysis",
"short_description": "find password/hash columns in SQLite DBs",
"executable": true
},
"tools/hashing/scatterhash.py": {
"path": "tools/hashing/scatterhash.py",
"name": "scatterhash.py",
"type": "python script",
"summary": "Performs sparse hashing of large files by sampling blocks across the file for efficient integrity checking and validation.",
"purpose": "Large file integrity verification",
"short_description": "sparse hashing for huge files",
"executable": true
},
"tools/hashing/libarchivesum.py": {
"path": "tools/hashing/libarchivesum.py",
"name": "libarchivesum.py",
"type": "python script",
"summary": "Calculates hashes of individual files within archives (zip, tar, etc.) without extracting them.",
"purpose": "Archive analysis and integrity checking",
"short_description": "like md5sum but for files inside archives",
"executable": true
},
"tools/system/ltop.py": {
"path": "tools/system/ltop.py",
"name": "ltop.py",
"type": "python script",
"summary": "Real-time frequency counter for stdin lines, showing top N most common entries with live updates using curses.",
"purpose": "Log analysis and monitoring",
"short_description": "like top but for line frequency in streams",
"executable": true
},
"tools/network/ipgrep": {
"path": "tools/network/ipgrep",
"name": "ipgrep",
"type": "shell script",
"summary": "Comprehensive IP and MAC address extractor with sorting, deduplication, ping testing, and DNS resolution capabilities.",
"purpose": "Network analysis and IP processing",
"short_description": "advanced IP/MAC extractor with ping testing",
"executable": true
},
"tools/security/certwipe": {
"path": "tools/security/certwipe",
"name": "certwipe",
"type": "shell script",
"summary": "Professional disk wiping tool supporting ATA SecureErase with frozen disk handling and fallback to dc3dd overwriting.",
"purpose": "Data destruction and security",
"short_description": "professional disk wiper with SecureErase",
"executable": true
},
"tools/system/watchgrowth.sh": {
"path": "tools/system/watchgrowth.sh",
"name": "watchgrowth.sh",
"type": "shell script",
"summary": "Monitors file/directory size growth in real-time, showing transfer speeds and optional progress percentage.",
"purpose": "File monitoring and transfer analysis",
"short_description": "real-time file growth monitor",
"executable": true
},
"projects/timesketch/deploy_timesketch.sh": {
"path": "projects/timesketch/deploy_timesketch.sh",
"name": "deploy_timesketch.sh",
"type": "shell script",
"summary": "Automated deployment script for Timesketch digital forensics timeline analysis platform with Docker Compose setup.",
"purpose": "Digital forensics infrastructure deployment",
"short_description": "deploy Timesketch forensic timeline platform",
"executable": true
},
"tools/system/backup_docker.sh": {
"path": "tools/system/backup_docker.sh",
"name": "backup_docker.sh",
"type": "shell script",
"summary": "Comprehensive Docker Compose stack backup including images, configs, and volumes with incremental storage optimization.",
"purpose": "Container infrastructure backup",
"short_description": "backup entire Docker Compose stacks",
"executable": true
},
"tools/cloud/cloudsend.py": {
"path": "tools/cloud/cloudsend.py",
"name": "cloudsend.py",
"type": "python script",
"summary": "Uploads files to NextCloud/OwnCloud public shares with optional GPG encryption support via command line interface.",
"purpose": "Cloud file sharing and backup",
"short_description": "upload files to NextCloud public shares",
"executable": true
},
"tools/cloud/vqa3.py": {
"path": "tools/cloud/vqa3.py",
"name": "vqa3.py",
"type": "python script",
"summary": "AI-powered image classification using OpenAI CLIP models for content categorization with customizable classification categories.",
"purpose": "AI image analysis and content filtering",
"short_description": "AI image classifier using CLIP models",
"executable": true
}
}
}

141
WARP.md Normal file
View File

@@ -0,0 +1,141 @@
# WARP.md
This file provides guidance to WARP (warp.dev) when working with code in this repository.
## Repository Overview
This is a collection of utility scripts, tools, and gists organized for cybersecurity, forensics, data analysis, and system administration tasks. The repository contains standalone utilities rather than a cohesive application, with scripts written in Python, Bash, Go, JavaScript, PowerShell, and C.
## Key Directory Structure
- **`codegrab/`** - Main collection of security and analysis tools
- `ctf/` - CTF challenge solving scripts
- `puzzlebox/` - 3D puzzle solving algorithms with visualization
- **`tools/`** - System utilities and data processing tools
- **`config/`** - System configuration and installation scripts
- **`systemscripts/`** - System administration and environment setup
- `proxy/` - Network proxy configuration utilities
- **`dockerfiles/`** - Docker container build scripts
- **`collected/`** - Archive of older utilities with documentation
## Common Development Tasks
### Running Security Analysis Tools
Most security tools are standalone and follow this pattern:
```bash
# VirusTotal scanning
./codegrab/scan_vt.py <filename>
# Import hash calculation
python3 codegrab/imphash.py <pe_file>
# Network analysis
./codegrab/scapy_arp.py
./codegrab/simple_portscan.py
```
### Data Processing Utilities
```bash
# Hash utilities for archives
python3 tools/libarchivesum.py archive.zip
# Unicode character analysis
echo "text" | python3 tools/unum.py
# Domain extraction from URLs
cat urls.txt | python3 tools/domgrep.py
# File organization by MIME type
python3 tools/rename.mime.py
```
### Docker Environment Management
```bash
# Backup Docker Compose stacks
./tools/backup_docker.sh docker-compose.yml
# Restore Docker environments
./tools/restore_docker.sh
# Build forensics containers
./dockerfiles/build_kali.sh
```
### System Configuration
```bash
# Install dependencies and configure environment
./config/install.sh
# Proxy configuration
./systemscripts/proxy/get_proxy.sh
./systemscripts/proxy/update_apt_proxy.sh
```
## Architecture and Patterns
### Security Tools Pattern
Most security utilities in `codegrab/` follow this pattern:
- Standalone executables with shebang
- Take file paths or stdin as input
- Output results in structured format (often CSV-like with custom separators)
- Use external APIs (VirusTotal, etc.) with API keys from `~/.virustotal_api_key`
### Data Processing Pattern
Tools in `tools/` directory typically:
- Accept multiple file inputs via command line arguments
- Use argparse for option handling
- Support multiple hash algorithms or processing modes
- Include error handling for malformed inputs
### System Scripts Pattern
Scripts in `systemscripts/` are designed for:
- Environment detection and configuration
- Proxy and network setup automation
- Service management and monitoring
- Display and hardware management
### Specialized Solvers
The `puzzlebox/` directory contains algorithmic solvers featuring:
- 3D spatial problem solving with numpy
- Visualization using matplotlib
- Recursive backtracking algorithms
- Multi-processing optimization variants
## Key Dependencies
The repository relies on various Python packages that should be available:
- **Security**: `pefile`, `requests`, `scapy`
- **Data Processing**: `libarchive-c`, `openpyxl`, `visidata`
- **Scientific**: `numpy`, `matplotlib`, `scipy`
- **Forensics**: `AnalyzeMFT`, `pymisp`
- **System**: `ntplib`, `mac-vendor-lookup`, `dateparser`
## API Keys and Configuration
Several tools expect API keys in home directory files:
- `~/.virustotal_api_key` - VirusTotal API access
- Tools may also use environment variables for proxy configuration (`http_proxy`, etc.)
## Testing and Validation
Tools are typically tested individually:
```bash
# Test with sample data
python3 codegrab/chechsqlite.py sample.db
python3 tools/quickchardet.py sample.txt
# Validate with CTF challenges
python3 codegrab/ctf/solve.py
```
## Development Notes
- Most utilities are designed as single-file executables for easy deployment
- Scripts include minimal error handling suitable for command-line usage
- Many tools output to stdout in formats suitable for piping to other commands
- Docker-based tools assume availability of container runtime
- Forensics tools may require elevated privileges for certain operations

BIN
codegrab/.DS_Store vendored Normal file

Binary file not shown.

1
codegrab/imphash.go Normal file
View File

@@ -0,0 +1 @@
// Package main — empty stub; no implementation yet.
// NOTE(review): presumably intended as a Go port of imphash.py (the PE
// import-hash calculator) — confirm before building on it.
package main

BIN
config/.DS_Store vendored Normal file

Binary file not shown.

BIN
dockerfiles/.DS_Store vendored Normal file

Binary file not shown.

View File

@@ -0,0 +1,55 @@
use std::env;
use std::fs::File;
use std::io::{self, BufRead, BufReader};
/// Print lines `start..=end` (1-indexed) from the named files, or from
/// stdin when no files are given.
///
/// Argument forms:
///   prog N-M [file...]        — explicit inclusive range
///   prog N [count] [file...]  — `count` lines starting at N (default 1)
fn main() -> io::Result<()> {
    let args: Vec<String> = env::args().collect();
    if args.len() < 2 {
        eprintln!("Usage: {} start-end [count] [file...]", args[0]);
        std::process::exit(1);
    }
    // Parse a line number, exiting with a clear diagnostic instead of the
    // cryptic panic the previous `parse().unwrap()` produced on bad input.
    let parse_num = |s: &str| -> usize {
        s.parse::<usize>().unwrap_or_else(|_| {
            eprintln!("Invalid line number: {}", s);
            std::process::exit(1);
        })
    };
    let range = &args[1];
    // "N-M" is an explicit range; a bare "N" leaves end == 0 as a sentinel
    // meaning "derive end from the optional count argument" (this also
    // means an explicit "N-0" is treated like the bare-number form).
    let (start, mut end) = if let Some(dash_pos) = range.find('-') {
        (parse_num(&range[..dash_pos]), parse_num(&range[dash_pos + 1..]))
    } else {
        (parse_num(range), 0)
    };
    let mut count = 1;
    let mut files_start = 2;
    if end == 0 {
        if args.len() > 2 {
            if let Ok(c) = args[2].parse::<usize>() {
                count = c;
                files_start = 3;
            }
        }
        // saturating_sub: "0" with count 0 previously underflowed and panicked.
        end = (start + count).saturating_sub(1);
    }
    if args.len() > files_start {
        for filename in &args[files_start..] {
            let file = File::open(filename)?;
            let reader = BufReader::new(file);
            process_lines(reader, start, end)?;
        }
    } else {
        // No files provided, read from stdin
        let stdin = io::stdin();
        let reader = stdin.lock();
        process_lines(reader, start, end)?;
    }
    Ok(())
}
/// Print every line of `reader` whose 1-indexed number lies in `start..=end`.
///
/// Fixes over the previous version: read errors are propagated via `?`
/// instead of being silently dropped by `filter_map(.. line.ok() ..)`
/// (the `io::Result` return was never an `Err` before), and iteration
/// stops as soon as `end` is passed instead of draining the whole input.
fn process_lines<R: BufRead>(reader: R, start: usize, end: usize) -> io::Result<()> {
    for (i, line) in reader.lines().enumerate() {
        let lineno = i + 1;
        if lineno > end {
            break; // nothing further can match; skip the rest of the input
        }
        let line = line?;
        if lineno >= start {
            println!("{}", line);
        }
    }
    Ok(())
}

BIN
projects/rust-tools/uniq Executable file

Binary file not shown.

View File

@@ -0,0 +1,41 @@
use std::env;
use std::fs::File;
use std::io::{self, BufRead, BufReader};
use std::collections::HashSet;
use std::hash::{Hash, Hasher};
use std::collections::hash_map::DefaultHasher;
/// Entry point: de-duplicate each file named on the command line, or
/// stdin when no arguments are given. The seen-set lives inside
/// `remove_duplicates`, so duplicates are only suppressed per input,
/// not across files.
fn main() -> io::Result<()> {
    let argv: Vec<String> = env::args().collect();
    match argv.len() {
        // No file arguments: filter standard input instead.
        0 | 1 => {
            let input = io::stdin();
            remove_duplicates(input.lock())?;
        }
        _ => {
            for path in argv.iter().skip(1) {
                let handle = BufReader::new(File::open(path)?);
                remove_duplicates(handle)?;
            }
        }
    }
    Ok(())
}
/// Stream `reader` to stdout, printing each line only the first time it is
/// seen. Only the 64-bit DefaultHasher digest of each line is retained, so
/// memory stays bounded by the number of distinct lines — at the (tiny)
/// cost that two different lines with colliding digests would be treated
/// as duplicates.
fn remove_duplicates<R: BufRead>(reader: R) -> io::Result<()> {
    // Reduce a line to its 64-bit fingerprint.
    let fingerprint = |s: &str| -> u64 {
        let mut state = DefaultHasher::new();
        s.hash(&mut state);
        state.finish()
    };
    let mut seen = HashSet::new();
    for maybe_line in reader.lines() {
        let text = maybe_line?;
        // insert() is false when the digest was already present.
        if seen.insert(fingerprint(&text)) {
            println!("{}", text);
        }
    }
    Ok(())
}

BIN
projects/rust-tools/uniq2 Executable file

Binary file not shown.

View File

@@ -0,0 +1,36 @@
use std::collections::HashSet;
use std::hash::{Hash, Hasher};
use std::collections::hash_map::DefaultHasher;
/// A membership set that stores only the 64-bit DefaultHasher digest of
/// each item, never the item itself — constant memory per entry, with a
/// small risk of false positives on digest collisions.
struct HashOnlySet {
    set: HashSet<u64>,
}

impl HashOnlySet {
    /// Create an empty set.
    fn new() -> HashOnlySet {
        HashOnlySet { set: HashSet::new() }
    }

    /// Record `item`'s digest; returns true when it was not already present.
    fn insert<T: Hash>(&mut self, item: &T) -> bool {
        let digest = Self::hash_item(item);
        self.set.insert(digest)
    }

    /// True when an item with the same digest has been inserted before.
    fn contains<T: Hash>(&self, item: &T) -> bool {
        self.set.contains(&Self::hash_item(item))
    }

    /// Reduce `item` to its DefaultHasher digest.
    fn hash_item<T: Hash>(item: &T) -> u64 {
        let mut state = DefaultHasher::new();
        item.hash(&mut state);
        state.finish()
    }
}
/// Tiny demo of HashOnlySet: insert one string, then probe for a hit and a miss.
fn main() {
    let mut seen = HashOnlySet::new();
    seen.insert(&"Hello, world!");
    let hit = seen.contains(&"Hello, world!");
    let miss = seen.contains(&"Goodbye, world!");
    println!("Contains 'Hello, world!': {}", hit);
    println!("Contains 'Goodbye, world!': {}", miss);
}

213
restructure_git.sh Executable file
View File

@@ -0,0 +1,213 @@
#!/bin/bash
# Git-aware repository restructuring script
# Uses 'git mv' to preserve file history during reorganization.
#
# One-shot migration: expects the pre-restructure layout (codegrab/, tools/,
# systemscripts/, config/, collected/) and moves everything into the new
# hierarchy (tools/<category>/, projects/, scripts/, config/, archive/).
set -e

echo "=== Git-aware Repository Restructuring ==="
echo "This script will reorganize files using 'git mv' to preserve history"
echo ""

# Detect whether we are inside a git work tree; fall back to plain 'mv'
# so the script also works on an unversioned copy of the tree.
if ! git rev-parse --git-dir > /dev/null 2>&1; then
  echo "Warning: Not in a git repository. Using regular 'mv' commands."
  use_git=0
else
  echo "Git repository detected. Using 'git mv' to preserve history."
  use_git=1
fi

# mv_cmd SRC... DST
# Dispatch to 'git mv' or plain 'mv'. A function (instead of an unquoted
# $MV_CMD string that relied on word splitting, ShellCheck SC2086) keeps
# every argument safely quoted; '--' guards against odd path names.
mv_cmd() {
  if (( use_git )); then
    git mv -- "$@"
  else
    mv -- "$@"
  fi
}

echo ""
echo "=== Creating new directory structure ==="
# Create the new directory structure.
# NOTE: projects/go-tools, projects/puzzlebox, projects/rust-tools,
# scripts/proxy and archive/collected are deliberately NOT pre-created:
# 'mv' (and 'git mv') RENAME a directory when the destination is absent,
# but NEST it inside when the destination already exists — pre-creating
# them produced e.g. projects/go-tools/go instead of projects/go-tools.
mkdir -p tools/{security,forensics,data,hashing,network,formats,cloud,system,ctf,text}
mkdir -p projects/timesketch
mkdir -p scripts/{display,setup,windows}
mkdir -p config/{shell,visidata/plugins,applications}
mkdir -p archive/{experimental,binaries,awk}

echo "=== Moving security tools ==="
mv_cmd codegrab/scan_vt.py tools/security/
mv_cmd codegrab/vt_download.py tools/security/
mv_cmd codegrab/vt_ip.py tools/security/
mv_cmd codegrab/vt_pdns.py tools/security/
mv_cmd codegrab/imphash.py tools/security/
mv_cmd codegrab/scapy_arp.py tools/security/
mv_cmd codegrab/simple_portscan.py tools/security/
mv_cmd codegrab/smtpbanner.py tools/security/
mv_cmd codegrab/testpw.py tools/security/
mv_cmd codegrab/certwipe tools/security/

echo "=== Moving forensics tools ==="
mv_cmd codegrab/chechsqlite.py tools/forensics/
mv_cmd codegrab/process_leak.py tools/forensics/
mv_cmd codegrab/extractfolder.py tools/forensics/

echo "=== Moving data processing tools ==="
mv_cmd tools/domgrep.py tools/data/
mv_cmd tools/geturls.py tools/data/
mv_cmd tools/urldecode.py tools/data/
mv_cmd tools/unum.py tools/data/
mv_cmd codegrab/vba_chr_decode.py tools/data/
mv_cmd tools/quickchardet.py tools/data/
mv_cmd codegrab/kv_parse.py tools/data/
mv_cmd tools/concat.py tools/data/
mv_cmd tools/split_linewise.py tools/data/
mv_cmd codegrab/json_save.py tools/data/
mv_cmd tools/csv_get tools/data/
mv_cmd codegrab/uniq.py tools/data/
mv_cmd tools/between tools/data/

echo "=== Moving hashing tools ==="
mv_cmd tools/libarchivesum.py tools/hashing/
mv_cmd tools/tarsum.py tools/hashing/
mv_cmd codegrab/hashzip.py tools/hashing/
mv_cmd tools/scatterhash.py tools/hashing/

echo "=== Moving network tools ==="
mv_cmd tools/ipgrep tools/network/
mv_cmd codegrab/fritzshark.sh tools/network/
mv_cmd codegrab/fritzshark2.sh tools/network/
mv_cmd tools/get_stp.sh tools/network/
mv_cmd tools/get_ntp.py tools/network/

echo "=== Moving format conversion tools ==="
mv_cmd codegrab/convert2pdf.sh tools/formats/
mv_cmd codegrab/flatpdf.sh tools/formats/
mv_cmd tools/rename.mime.py tools/formats/

echo "=== Moving cloud service tools ==="
mv_cmd codegrab/cloudsend.py tools/cloud/
mv_cmd codegrab/cloudsend.sh tools/cloud/
mv_cmd codegrab/speech.py tools/cloud/
mv_cmd codegrab/vqa3.py tools/cloud/
mv_cmd codegrab/youtube_resolve.sh tools/cloud/

echo "=== Moving system utilities ==="
mv_cmd tools/backup_docker.sh tools/system/
mv_cmd tools/restore_docker.sh tools/system/
mv_cmd tools/watchgrowth.sh tools/system/
mv_cmd codegrab/wipe.sh tools/system/
mv_cmd codegrab/ltop.py tools/system/

echo "=== Moving CTF tools ==="
mv_cmd codegrab/ctf/filtertext.py tools/ctf/
mv_cmd codegrab/ctf/getjs.py tools/ctf/
mv_cmd codegrab/ctf/guess.py tools/ctf/
mv_cmd codegrab/ctf/search.py tools/ctf/
mv_cmd codegrab/ctf/transpose.py tools/ctf/
mv_cmd codegrab/ctf/ps_.py tools/ctf/
mv_cmd codegrab/ctf/submit_flag.sh tools/ctf/

echo "=== Moving text analysis tools ==="
mv_cmd codegrab/probability.py tools/text/
mv_cmd codegrab/depth tools/text/

echo "=== Moving experimental tools to archive ==="
mv_cmd codegrab/kv.py archive/experimental/
mv_cmd codegrab/flm.py archive/experimental/
mv_cmd codegrab/hydrogentest.py archive/experimental/
mv_cmd codegrab/matplottest.py archive/experimental/
mv_cmd codegrab/lpic.sh archive/experimental/
mv_cmd codegrab/fuzz.sh archive/experimental/

echo "=== Moving multi-file projects ==="
# Destinations do not exist yet, so these are true renames (see NOTE above).
mv_cmd tools/go projects/go-tools
mv_cmd codegrab/puzzlebox projects/
mv_cmd codegrab/deploy_timesketch.sh projects/timesketch/
# Move Rust tools if they exist
if [ -d tools/rs ]; then
  mv_cmd tools/rs projects/rust-tools
fi

echo "=== Moving system scripts ==="
mv_cmd systemscripts/proxy scripts/
mv_cmd systemscripts/reset_screens.sh scripts/display/
mv_cmd systemscripts/toggle_display.sh scripts/display/
mv_cmd systemscripts/toggle_touchpad scripts/display/
mv_cmd systemscripts/terminal-logs.sh scripts/setup/
mv_cmd systemscripts/automountctl scripts/setup/
# Move additional system scripts if they exist
[ -f systemscripts/mount_container ] && mv_cmd systemscripts/mount_container scripts/setup/
[ -f systemscripts/fullhd ] && mv_cmd systemscripts/fullhd scripts/setup/
[ -f systemscripts/share.sh ] && mv_cmd systemscripts/share.sh scripts/setup/

echo "=== Moving PowerShell scripts ==="
mv_cmd codegrab/Get-ZimmermanTools.ps1 scripts/windows/
mv_cmd codegrab/sbom.ps1 scripts/windows/
if [ -d codegrab/powershell ]; then
  mv_cmd codegrab/powershell/getscreen.psm1 scripts/windows/
fi

echo "=== Organizing configuration files ==="
mv_cmd config/bash_aliases config/shell/
mv_cmd config/bash_prompt config/shell/
mv_cmd config/shell_aliases config/shell/
mv_cmd config/inputrc config/shell/
mv_cmd systemscripts/agnoster.zsh-theme config/shell/
mv_cmd systemscripts/solarized.dircolors config/shell/
mv_cmd config/visidatarc config/visidata/
mv_cmd config/visidataplugins/hidecol.py config/visidata/plugins/
[ -f config/access_log.vdj ] && mv_cmd config/access_log.vdj config/visidata/
# Applications directory should already be in the right place
# Just ensure it exists
mkdir -p config/applications

echo "=== Moving items to archive ==="
mv_cmd collected archive/
# Move binaries and scripts to archive
[ -f codegrab/csv_cols ] && mv_cmd codegrab/csv_cols archive/binaries/
[ -f codegrab/mapping ] && mv_cmd codegrab/mapping archive/binaries/
[ -f tools/csv2dot ] && mv_cmd tools/csv2dot archive/binaries/
[ -f tools/mailunpack ] && mv_cmd tools/mailunpack archive/binaries/
[ -f tools/noerr ] && mv_cmd tools/noerr archive/binaries/
[ -f tools/openflattenpdf.sh ] && mv_cmd tools/openflattenpdf.sh archive/binaries/
[ -f tools/sep_test.sh ] && mv_cmd tools/sep_test.sh archive/binaries/
[ -f tools/showgm.sh ] && mv_cmd tools/showgm.sh archive/binaries/
[ -f tools/showosm.sh ] && mv_cmd tools/showosm.sh archive/binaries/
[ -f tools/sparsecmp.sh ] && mv_cmd tools/sparsecmp.sh archive/binaries/
[ -f tools/trunc_by_hash.py ] && mv_cmd tools/trunc_by_hash.py archive/binaries/
# Move AWK scripts
[ -f codegrab/ips.awk ] && mv_cmd codegrab/ips.awk archive/awk/
[ -f codegrab/map.awk ] && mv_cmd codegrab/map.awk archive/awk/
# Move any remaining compiled binaries
[ -f codegrab/rootshell.c ] && mv_cmd codegrab/rootshell.c archive/binaries/
[ -f codegrab/usbreset.c ] && mv_cmd codegrab/usbreset.c archive/binaries/
[ -f codegrab/imphash.go ] && mv_cmd codegrab/imphash.go archive/binaries/

echo "=== Cleaning up empty directories ==="
# Remove empty directories (but be careful with git)
if (( use_git )); then
  echo "Leaving directory cleanup for manual review due to git"
else
  rmdir codegrab/ctf codegrab/powershell codegrab systemscripts tools config/visidataplugins 2>/dev/null || true
fi

echo ""
echo "=== Repository restructuring complete! ==="
echo ""
echo "New structure:"
echo "├── tools/ - Single-file utilities by purpose"
echo "├── projects/ - Multi-file projects"
echo "├── scripts/ - System management scripts"
echo "├── config/ - Configuration files"
echo "├── dockerfiles/ - Docker configurations (unchanged)"
echo "└── archive/ - Legacy and experimental items"
echo ""
if (( use_git )); then
  echo "All moves used 'git mv' - file history preserved!"
  echo "You can review changes with: git status"
  echo "Commit when ready with: git commit -m 'Restructure repository for better organization'"
else
  echo "Standard 'mv' used - consider initializing git if needed"
fi

View File

@@ -0,0 +1,4 @@
#!/bin/bash
# Three-monitor layout: a portrait 1080p panel on the left edge, then two
# 2560x1440 displays side by side, with DP-1 as the primary output.
# NOTE(review): output names and pixel positions are machine-specific —
# verify with `xrandr -q` before reusing on other hardware.
xrandr --output DVI-I-1 --mode 1920x1080 --rotate left --pos 0x0
xrandr --output DP-1 --primary --mode 2560x1440 --pos 1080x350
xrandr --output DP-2 --mode 2560x1440 --pos 3640x350

BIN
tools/.DS_Store vendored Normal file

Binary file not shown.

Some files were not shown because too many files have changed in this diff Show More