Initial commit

This commit is contained in:
darnodo
2025-06-14 17:03:07 +02:00
commit d999440f13
10 changed files with 449 additions and 0 deletions

5
.gitignore vendored Normal file
View File

@@ -0,0 +1,5 @@
# Cache and anonymize logs
data/
# TLS CA certificate and private key (never commit these)
ssl/

37
Dockerfile Normal file
View File

@@ -0,0 +1,37 @@
# syntax=docker/dockerfile:1
FROM ubuntu:25.10

# Build-time only: suppress interactive apt prompts without baking
# DEBIAN_FRONTEND into the runtime environment of the image.
ARG DEBIAN_FRONTEND=noninteractive

# Install required packages (alphabetized, no recommends) and clean the
# apt lists in the same layer so the cache never reaches the image.
RUN apt-get update && apt-get install -y --no-install-recommends \
    ca-certificates \
    curl \
    dnsutils \
    openssl \
    squid-openssl \
    sudo \
    && rm -rf /var/lib/apt/lists/*

# Base directory for the SSL-bump CA material
RUN mkdir -p /etc/squid/ssl

# Squid configuration plus the CA certificate and key used for SSL bumping.
# COPY --chmod sets permissions at copy time (avoids extra chmod layers).
# NOTE(review): copying a private key into an image layer makes it part of
# the image history; acceptable for this lab, but use a runtime bind mount
# or a BuildKit secret for anything beyond a lab.
COPY squid.conf /etc/squid/squid.conf
COPY --chmod=644 ssl/squid-ca-cert.pem /etc/squid/ssl/
COPY --chmod=600 ssl/squid-ca-key.pem /etc/squid/ssl/

# SSL certificate-DB initialization script (run by the entrypoint)
COPY --chmod=755 init-ssl.sh /usr/local/bin/init-ssl.sh

# Container entrypoint
COPY --chmod=755 entrypoint.sh /entrypoint.sh

# Documentation only: the proxy listens on 3128 (published via compose)
EXPOSE 3128
ENTRYPOINT ["/entrypoint.sh"]

138
README.md Normal file
View File

@@ -0,0 +1,138 @@
# Squid SSL Bumping Lab
This project demonstrates how to deploy a Squid proxy that performs SSL bumping (man-in-the-middle) to inspect HTTPS traffic, and how to collect and export its logs with Fluent Bit (OTLP) or read them locally. It's packaged with Docker and Docker Compose for easy lab deployment.
---
## Features
- **SSL Bumping**
Intercept and decrypt HTTPS traffic using a custom CA certificate.
- **Logging**
- Export logs in real time to an OpenTelemetry/HTTP endpoint (e.g. SigNoz).
- Or retain logs locally in plain text files.
- **Reproducible**
Dockerfile + `docker-compose.yml` spin up everything (Squid, Fluent Bit).
---
## Repository Layout
```text
squid-ssl-bumping-lab/
├── ssl/ # Your custom CA cert + key (ignored by Git)
│ ├── squid-ca-cert.pem
│ └── squid-ca-key.pem
├── data/ # Runtime data (ignored by Git)
│ ├── fluent-bit-db/ # Fluent Bit position database
│ ├── squid-cache/ # Squid cache directory
│ └── squid-logs/ # Squid logs (access.log, cache.log)
├── fluent-bit/
│ └── conf/
│ ├── fluent-bit.conf # Fluent Bit main configuration
│ ├── parsers.conf # Log parsing rules for Squid
│ └── transform.lua # Lua filter to reshape records
├── squid.conf # Squid configuration (SSL bump, logformat)
├── init-ssl.sh # Initialize Squid SSL DB on startup
├── entrypoint.sh # Container entrypoint (dirs, permissions, init)
├── Dockerfile # Build Squid with SSL bump support
├── docker-compose.yml # Compose for Squid + optional Fluent Bit
└── .gitignore # data/ and ssl/ are not committed
```
---
## Prerequisites
- Docker >= 20.x
- Docker Compose >= 2.x
- A custom CA certificate (`.pem`) and private key in `ssl/`
---
## Initial Setup
1. **Clone the repo**
```bash
git clone <repo-url> squid-ssl-bumping-lab && cd squid-ssl-bumping-lab
```
2. **Generate or copy your CA** into the `ssl/` directory:
- `ssl/squid-ca-cert.pem`
- `ssl/squid-ca-key.pem`
3. **Create runtime directories** (they are in `.gitignore`):
```bash
mkdir -p data/squid-cache data/squid-logs data/fluent-bit-db
```
---
## Deployment
### 1. With Log Export (Fluent Bit → OTLP)
This will start both Squid and Fluent Bit and forward each request to your OTLP endpoint.
```bash
docker-compose --profile logging up --build -d
```
- `squid` service listens on port **3128** (host).
- `fluent-bit` reads `/var/log/squid/access.log`, transforms and ships to OTLP at port **4318**.
### 2. Without Log Export
Run only the Squid proxy and keep logs locally:
```bash
docker-compose up --build -d
```
Under the hood, `docker-compose` will skip the `fluent-bit` service because it's attached to the `logging` profile.
---
## Reading Logs Locally
If you did **not** enable Fluent Bit, Squid will write logs into:
- `data/squid-logs/access.log`
- `data/squid-logs/cache.log`
To tail the access log:
```bash
tail -f data/squid-logs/access.log
```
Or, inside the container:
```bash
docker-compose exec squid tail -f /var/log/squid/access.log
```
Use your favorite tools (`less`, `grep`, `awk`) to analyze stored logs.
---
## SSL Bump & Certificates
1. The entrypoint script runs `init-ssl.sh` to build a Squid SSL DB under `/var/cache/squid/ssl_db`.
2. Squid's `squid.conf` points at your `ssl/squid-ca-cert.pem` and `ssl/squid-ca-key.pem`.
3. Clients must trust your CA (import the `squid-ca-cert.pem` into their browser/system).
---
## Cleanup
To stop and remove containers, networks, volumes:
```bash
docker-compose down
rm -rf data/*
```
---
## License & Credits
This lab is provided “as is” for educational purposes. Feel free to adapt it to your security-lab environment!

29
docker-compose.yml Normal file
View File

@@ -0,0 +1,29 @@
services:
  # Squid proxy with SSL bumping (built from the local Dockerfile)
  squid:
    build: .
    container_name: squid-ssl-proxy
    ports:
      - "3128:3128"
    volumes:
      # Persist cache and logs on the host (both are in .gitignore)
      - ./data/squid-cache:/var/cache/squid
      - ./data/squid-logs:/var/log/squid
    restart: unless-stopped
    cap_add:
      - NET_ADMIN
  # Optional log shipper; started only with `--profile logging`
  fluent-bit:
    image: fluent/fluent-bit
    container_name: fluent-bit-logger
    profiles:
      - logging
    volumes:
      - ./fluent-bit/conf/:/fluent-bit/etc/
      # Read-only view of Squid's logs
      - ./data/squid-logs:/var/log/squid:ro
      # Tail-position database survives restarts
      - ./data/fluent-bit-db:/fluent-bit/db
    ports:
      - "2020:2020"
    depends_on:
      - squid
    extra_hosts:
      # Static name -> IP mapping for the OTLP destination
      - "my-log-server:192.168.1.100"
    restart: unless-stopped

32
entrypoint.sh Normal file
View File

@@ -0,0 +1,32 @@
#!/bin/bash
# Container entrypoint: prepare directories and permissions, initialize the
# SSL certificate DB and the disk cache, validate the configuration, then
# run Squid in the foreground as PID 1.
# Fail fast: without set -e a broken config or failed init would previously
# still fall through to starting Squid.
set -euo pipefail

echo "=== Starting Squid SSL container ==="

# Runtime directories (cache/logs may be empty bind mounts on first start)
mkdir -p /var/cache/squid /var/log/squid /run/squid

# Squid drops privileges to the 'proxy' user; it must own its writable paths
chown -R proxy:proxy /var/cache/squid /var/log/squid /run/squid
chown proxy:proxy /etc/squid/ssl/*

# Build the ssl_db used by security_file_certgen (idempotent)
/usr/local/bin/init-ssl.sh

# First-run cache initialization: `squid -z` creates the swap directories
# (00..FF); the presence of 00 is used as the "already initialized" marker.
if [ ! -d /var/cache/squid/00 ]; then
    echo "Initializing Squid cache..."
    squid -N -z
    echo "Cache initialized"
fi

# Abort startup on a broken configuration (set -e stops us here on failure)
echo "Testing Squid configuration..."
squid -k parse

# exec so Squid replaces the shell, becomes PID 1 and receives signals
echo "Starting Squid..."
exec squid -N -d 1

View File

@@ -0,0 +1,40 @@
# Fluent Bit pipeline: tail Squid's access log, reshape records with Lua,
# and ship them to an OpenTelemetry (OTLP/HTTP) endpoint.
[SERVICE]
# Flush buffered records to outputs every 5 seconds
Flush 5
# Log level for Fluent Bit itself (useful for debugging)
Log_Level info
# Location of parser definitions (resolved relative to this file's directory)
Parsers_File parsers.conf
# [INPUT] - Where do the logs come from?
[INPUT]
Name tail
Tag squid.access
Path /var/log/squid/access.log
# Use the squid_parser regex defined in parsers.conf
Parser squid_parser
# Database file to persist the read offset across container restarts
DB /fluent-bit/db/squid.db
# Read the whole file from the start on first run (don't skip old lines)
Read_from_Head true
# [FILTER] - How to transform logs?
[FILTER]
Name lua
Match squid.access
# Lua script file to use (remap_records must be a global function there)
script transform.lua
# Function to call in the script for each record
call remap_records
# [OUTPUT] - Where to send the logs?
[OUTPUT]
Name opentelemetry
Match squid.access
# Host and port of your OTLP/HTTP receiver (e.g. SigNoz); 4318 is the
# standard OTLP/HTTP port
Host my-log-instance.com
Port 4318
# API path for logs per the OTLP/HTTP spec
logs_uri /v1/logs
# Do not use TLS for an http:// connection
tls Off

View File

@@ -0,0 +1,11 @@
# Parser for the custom Squid "full_url_log" logformat:
#   <unix_ts.ms> <client_ip> <status> <size> <method> <url> "<user_agent>"
[PARSER]
Name squid_parser
Format regex
# Regex adapted to the real log format (with User-Agent in quotes);
# fields are whitespace-separated, the URL must contain no spaces
Regex ^(?<time>\S+)\s+(?<client_ip>\S+)\s+(?<status_code>\S+)\s+(?<response_size>\S+)\s+(?<method>\S+)\s+(?<url>\S+)\s+"(?<user_agent>[^"]*)"$
# Indicates which field contains the timestamp
Time_Key time
# Unix epoch seconds with milliseconds (matches Squid's %ts.%03tu)
Time_Format %s.%L
# Automatically convert the byte count to an integer
Types response_size:integer

View File

@@ -0,0 +1,83 @@
--[[
  Lua transform script for Squid logs.
  Enriches parsed records before they are shipped to SigNoz:
    * maps raw fields onto standard http.* / client.* attribute names
    * pulls the search term out of URLs carrying a q= query parameter
    * tags the record with the search engine that served the query
]]

-- Known search-engine domains and their display names. Extend this table
-- to recognize additional engines.
local search_engines = {
    ["google.com"]     = "Google",
    ["youtube.com"]    = "YouTube",
    ["bing.com"]       = "Bing",
    ["duckduckgo.com"] = "DuckDuckGo",
    ["yahoo.com"]      = "Yahoo",
    ["qwant.com"]      = "Qwant",
    ["ecosia.org"]     = "Ecosia"
}

-- Decode a URL-encoded string ('+' -> space, %XX -> the byte 0xXX).
-- Kept local: only this file uses it.
local function url_decode(encoded)
    if encoded == nil then return nil end
    local plussed = encoded:gsub("+", " ")
    -- Parentheses drop gsub's second return value (the match count)
    return (plussed:gsub("%%(%x%x)", function(hex)
        return string.char(tonumber(hex, 16))
    end))
end

--
-- Main transform function, invoked by Fluent Bit once per record.
-- MUST remain global so the Fluent Bit engine can resolve it by name.
---@diagnostic disable-next-line: lowercase-global
function remap_records(tag, timestamp, record)
    -- Unparsed lines (no method field) pass through unmodified (code 0)
    if record["method"] == nil then
        return 0, nil, nil
    end

    -- Remap raw parser fields onto standard attribute names
    local out = {
        ["http.method"] = record["method"],
        ["http.url"] = record["url"],
        ["http.status_code"] = record["status_code"],
        ["http.response_content_length"] = record["response_size"],
        ["client.ip"] = record["client_ip"],
        ["log_body"] = "Squid request"
    }
    if record["user_agent"] then
        out["http.user_agent"] = record["user_agent"]
    end

    -- Search enrichment: only for URLs that carry a q= query parameter
    local url = record["url"]
    if url and url:match("[?&]q=") then
        local term = url:match("[?&]q=([^&]*)")
        if term and term ~= "" then
            out["search.query"] = url_decode(term)
            -- Default, overridden below when a known domain matches
            out["search.engine"] = "Unknown"
            for domain, label in pairs(search_engines) do
                -- plain (non-pattern) substring search
                if url:find(domain, 1, true) then
                    out["search.engine"] = label
                    break
                end
            end
        end
    end

    -- Resource attributes attached to every emitted record
    out["resource"] = {
        ["service.name"] = "squid-ssl-proxy-final",
        ["deployment.environment"] = "lab"
    }
    -- Code 2: record modified, original timestamp kept
    return 2, timestamp, out
end

27
init-ssl.sh Normal file
View File

@@ -0,0 +1,27 @@
#!/bin/bash
# Initialize the Squid SSL certificate database (ssl_db) used by
# security_file_certgen to mint dynamically generated bump certificates.
# Idempotent: safe to run on every container start.
set -euo pipefail

SSL_DB_DIR="/var/cache/squid/ssl_db"

echo "Checking SSL database..."

# Ensure the directory exists with permissions Squid's helper can use
# (mkdir -p is a no-op when it already exists)
mkdir -p "$SSL_DB_DIR"
chown -R proxy:proxy "$SSL_DB_DIR"
chmod 755 "$SSL_DB_DIR"

# A populated DB has a certs/ subdirectory; (re)build from scratch otherwise.
# Absolute paths throughout — the original `cd` + relative `rm -rf ssl_db`
# would delete the wrong path if the cd ever failed.
if [ ! -d "$SSL_DB_DIR/certs" ]; then
    echo "Initializing SSL database..."
    rm -rf "$SSL_DB_DIR"
    # -c create a new DB, -s DB location, -M max on-disk size
    /usr/lib/squid/security_file_certgen -c -s "$SSL_DB_DIR" -M 4MB
    chown -R proxy:proxy "$SSL_DB_DIR"
    echo "SSL database initialized successfully"
else
    echo "SSL database already initialized"
fi

47
squid.conf Normal file
View File

@@ -0,0 +1,47 @@
# Squid configuration for the SSL-bumping lab.
# Listening port with SSL Bump enabled; serves dynamically generated
# certificates signed by the CA below.
http_port 3128 ssl-bump cert=/etc/squid/ssl/squid-ca-cert.pem key=/etc/squid/ssl/squid-ca-key.pem generate-host-certificates=on dynamic_cert_mem_cache_size=4MB
# SSL Bump rules: peek at the TLS ClientHello (step 1) to learn the SNI,
# then bump (decrypt) every connection.
acl step1 at_step SslBump1
ssl_bump peek step1
ssl_bump bump all
# Helper that generates per-host certificates, backed by the ssl_db
# created by init-ssl.sh
sslcrtd_program /usr/lib/squid/security_file_certgen -s /var/cache/squid/ssl_db -M 4MB
sslcrtd_children 5
# Basic ACLs: RFC1918 + IPv6 local ranges count as "localnet"
acl localnet src 10.0.0.0/8
acl localnet src 172.16.0.0/12
acl localnet src 192.168.0.0/16
acl localnet src fc00::/7
acl localnet src fe80::/10
acl SSL_ports port 443
acl Safe_ports port 80
acl Safe_ports port 21
acl Safe_ports port 443
acl Safe_ports port 1025-65535
acl CONNECT method CONNECT
# Custom log format with the full URL and quoted User-Agent; field order
# must stay in sync with squid_parser in fluent-bit's parsers.conf:
#   <unix_ts.ms> <client_ip> <status> <size> <method> <url> "<user_agent>"
logformat full_url_log %ts.%03tu %>a %>Hs %<st %rm %ru "%{User-Agent}>h"
# Main logs only - we'll filter afterwards
cache_log /var/log/squid/cache.log
access_log /var/log/squid/access.log full_url_log
# Access control rules (first match wins)
http_access deny !Safe_ports
http_access deny CONNECT !SSL_ports
http_access allow localnet
http_access allow localhost
http_access deny all
# Keep query strings in access.log so search terms can be extracted
strip_query_terms off
# Cache settings: 100 MB ufs cache, but `cache deny all` below disables
# actual object caching (the dir is still needed for ssl_db/coredumps)
cache_dir ufs /var/cache/squid 100 16 256
coredump_dir /var/cache/squid
cache deny all