feat: mega admin panel expansion, contact form, 9 blog posts

- admin.html: 14 sections (dashboard, posts, editor, tracks, settings, homepage editor, services, navigation, links, API keys, theme, SEO, contact settings, backups)
- admin.js: 1554 lines, full AdminApp with CRUD for all sections
- admin.css: 1972 lines, responsive mobile nav, all new section styles
- app.py: 42 endpoints, new routes for homepage/services/nav/links/apikeys/theme/seo/contact/backups
- 9 JSON data files for new settings
- Contact form wired to POST /api/contact
- 9 blog posts with full HUD metadata
- .gitignore added
This commit is contained in:
jae 2026-03-31 22:19:27 +00:00
parent 167bcb15a9
commit 252c7b95b3
17 changed files with 4312 additions and 685 deletions

6
.gitignore vendored Normal file
View file

@ -0,0 +1,6 @@
__pycache__/
*.pyc
.env
*.swp
*.swo
.DS_Store

1202
admin.html

File diff suppressed because it is too large Load diff

View file

@ -1,10 +1,12 @@
#!/usr/bin/env python3
"""JAESWIFT HUD Backend API"""
import json, os, time, subprocess, random, datetime, hashlib
import json, os, time, subprocess, random, datetime, hashlib, zipfile, io, smtplib
from functools import wraps
from pathlib import Path
from email.mime.text import MIMEText
from email.mime.multipart import MIMEMultipart
from flask import Flask, request, jsonify, abort
from flask import Flask, request, jsonify, abort, send_file
from flask_cors import CORS
import jwt
import requests as req
@ -17,13 +19,18 @@ JWT_SECRET = 'jaeswift-hud-s3cr3t-2026!x'
ADMIN_USER = 'jae'
ADMIN_PASS = 'HUDAdmin2026!'
ARRAY_FILES = {
'posts.json', 'tracks.json', 'navigation.json', 'links.json',
'managed_services.json', 'messages.json'
}
# ─── Helpers ─────────────────────────────────────────
def load_json(name):
p = DATA_DIR / name
if p.exists():
with open(p) as f:
return json.load(f)
return [] if name.endswith('posts.json') else {}
return [] if name in ARRAY_FILES else {}
def save_json(name, data):
p = DATA_DIR / name
@ -249,7 +256,6 @@ def delete_track(index):
return jsonify({'ok': True, 'removed': removed})
abort(404)
# ─── Git Activity (from Gitea API) ───────────────────
@app.route('/api/git-activity')
def git_activity():
@ -305,6 +311,363 @@ def update_settings():
save_json('settings.json', d)
return jsonify(d)
# ═════════════════════════════════════════════════════
# NEW ENDPOINTS
# ═════════════════════════════════════════════════════
# ─── Homepage Config ─────────────────────────────────
@app.route('/api/homepage')
def get_homepage():
    """Return the homepage configuration (public endpoint)."""
    return jsonify(load_json('homepage.json'))

@app.route('/api/homepage', methods=['POST'])
@require_auth
def save_homepage():
    """Overwrite homepage.json with the posted JSON document (admin only)."""
    try:
        payload = request.get_json(force=True)
        save_json('homepage.json', payload)
        return jsonify(payload)
    except Exception as exc:
        return jsonify({'error': str(exc)}), 500
# ─── Managed Services ───────────────────────────────
@app.route('/api/services/managed')
def get_managed_services():
    """Return the list of managed services (public endpoint)."""
    return jsonify(load_json('managed_services.json'))

@app.route('/api/services/managed', methods=['POST'])
@require_auth
def add_managed_service():
    """Append a {name, url} entry to managed_services.json (admin only)."""
    try:
        body = request.get_json(force=True)
        services = load_json('managed_services.json')
        if not isinstance(services, list):
            services = []  # recover from a corrupted / non-list file
        entry = {
            'name': body.get('name', ''),
            'url': body.get('url', ''),
        }
        services.append(entry)
        save_json('managed_services.json', services)
        return jsonify(entry), 201
    except Exception as exc:
        return jsonify({'error': str(exc)}), 500

@app.route('/api/services/managed/<int:index>', methods=['DELETE'])
@require_auth
def delete_managed_service(index):
    """Remove the service at list position *index*; 404 when out of range."""
    services = load_json('managed_services.json')
    if not isinstance(services, list):
        services = []
    if not (0 <= index < len(services)):
        abort(404)
    removed = services.pop(index)
    save_json('managed_services.json', services)
    return jsonify({'ok': True, 'removed': removed})
# ─── Navigation ──────────────────────────────────────
@app.route('/api/navigation')
def get_navigation():
    """Return the site navigation entries (public endpoint)."""
    return jsonify(load_json('navigation.json'))

@app.route('/api/navigation', methods=['POST'])
@require_auth
def add_navigation():
    """Append a {label, url, order} entry to navigation.json (admin only)."""
    try:
        body = request.get_json(force=True)
        nav = load_json('navigation.json')
        if not isinstance(nav, list):
            nav = []  # recover from a corrupted / non-list file
        entry = {
            'label': body.get('label', ''),
            'url': body.get('url', ''),
            # Default position: append after the current last entry.
            'order': body.get('order', len(nav) + 1),
        }
        nav.append(entry)
        save_json('navigation.json', nav)
        return jsonify(entry), 201
    except Exception as exc:
        return jsonify({'error': str(exc)}), 500

@app.route('/api/navigation/<int:index>', methods=['DELETE'])
@require_auth
def delete_navigation(index):
    """Remove the nav entry at position *index*; 404 when out of range."""
    nav = load_json('navigation.json')
    if not isinstance(nav, list):
        nav = []
    if not (0 <= index < len(nav)):
        abort(404)
    removed = nav.pop(index)
    save_json('navigation.json', nav)
    return jsonify({'ok': True, 'removed': removed})
# ─── Links ───────────────────────────────────────────
@app.route('/api/links')
def get_links():
    """Return the link-board entries (public endpoint)."""
    return jsonify(load_json('links.json'))

@app.route('/api/links', methods=['POST'])
@require_auth
def add_link():
    """Append a {name, url, icon, category} entry to links.json (admin only)."""
    try:
        body = request.get_json(force=True)
        links = load_json('links.json')
        if not isinstance(links, list):
            links = []  # recover from a corrupted / non-list file
        entry = {
            'name': body.get('name', ''),
            'url': body.get('url', ''),
            'icon': body.get('icon', ''),
            'category': body.get('category', ''),
        }
        links.append(entry)
        save_json('links.json', links)
        return jsonify(entry), 201
    except Exception as exc:
        return jsonify({'error': str(exc)}), 500

@app.route('/api/links/<int:index>', methods=['DELETE'])
@require_auth
def delete_link(index):
    """Remove the link at position *index*; 404 when out of range."""
    links = load_json('links.json')
    if not isinstance(links, list):
        links = []
    if not (0 <= index < len(links)):
        abort(404)
    removed = links.pop(index)
    save_json('links.json', links)
    return jsonify({'ok': True, 'removed': removed})
# ─── API Keys ────────────────────────────────────────
def mask_value(val):
    """Mask a secret string for display.

    Non-string and empty values are returned unchanged. Short values
    (8 characters or fewer) are fully masked — revealing the last 4
    characters of a 5–8 char secret would leak most of it. Longer
    values keep their last 4 characters visible for identification.
    The result always contains '••••' so is_masked() recognises it.
    """
    if not isinstance(val, str) or len(val) == 0:
        return val
    if len(val) <= 8:
        return '••••'
    return '••••••' + val[-4:]
def is_masked(val):
    """Return True when *val* is a string containing the mask placeholder."""
    return isinstance(val, str) and '••••' in val
@app.route('/api/apikeys')
@require_auth
def get_apikeys():
    """Return the stored API keys with every secret value masked (admin only)."""
    try:
        stored = load_json('apikeys.json')
        redacted = {}
        for group, fields in stored.items():
            if isinstance(fields, dict):
                redacted[group] = {name: mask_value(value)
                                   for name, value in fields.items()}
            else:
                redacted[group] = mask_value(fields)
        return jsonify(redacted)
    except Exception as exc:
        return jsonify({'error': str(exc)}), 500
@app.route('/api/apikeys', methods=['POST'])
@require_auth
def save_apikeys():
    """Merge posted field values into one group of apikeys.json (admin only).

    Masked placeholders and empty strings are skipped so a client that
    round-trips the masked view never clobbers real secrets on disk.
    """
    try:
        body = request.get_json(force=True)
        group = body.get('group', '')
        data = body.get('data', {})
        if not group or not isinstance(data, dict):
            return jsonify({'error': 'Invalid request: need group and data'}), 400
        keys = load_json('apikeys.json')
        target = keys.setdefault(group, {})
        for field, value in data.items():
            # Masked or blank values carry no new secret — leave stored ones.
            if isinstance(value, str) and (is_masked(value) or value == ''):
                continue
            target[field] = value
        save_json('apikeys.json', keys)
        return jsonify({'ok': True, 'group': group})
    except Exception as exc:
        return jsonify({'error': str(exc)}), 500
# ─── Theme ───────────────────────────────────────────
@app.route('/api/theme')
def get_theme():
    """Return the theme configuration (public endpoint)."""
    return jsonify(load_json('theme.json'))

@app.route('/api/theme', methods=['POST'])
@require_auth
def save_theme():
    """Overwrite theme.json with the posted JSON document (admin only)."""
    try:
        payload = request.get_json(force=True)
        save_json('theme.json', payload)
        return jsonify(payload)
    except Exception as exc:
        return jsonify({'error': str(exc)}), 500
# ─── SEO ─────────────────────────────────────────────
@app.route('/api/seo')
def get_seo():
    """Return the SEO configuration (public endpoint)."""
    return jsonify(load_json('seo.json'))

@app.route('/api/seo', methods=['POST'])
@require_auth
def save_seo():
    """Overwrite seo.json with the posted JSON document (admin only)."""
    try:
        payload = request.get_json(force=True)
        save_json('seo.json', payload)
        return jsonify(payload)
    except Exception as exc:
        return jsonify({'error': str(exc)}), 500
# ─── Contact Settings ───────────────────────────────
@app.route('/api/contact-settings')
@require_auth
def get_contact_settings():
    """Return contact-form settings (admin only — may contain the owner email)."""
    return jsonify(load_json('contact_settings.json'))

@app.route('/api/contact-settings', methods=['POST'])
@require_auth
def save_contact_settings():
    """Overwrite contact_settings.json with the posted document (admin only)."""
    try:
        payload = request.get_json(force=True)
        save_json('contact_settings.json', payload)
        return jsonify(payload)
    except Exception as exc:
        return jsonify({'error': str(exc)}), 500
# ─── Contact Form (public) ──────────────────────────
@app.route('/api/contact', methods=['POST'])
def contact_form():
    """Accept a public contact-form submission.

    The message is always persisted to messages.json; email delivery
    through the configured SMTP credentials is best-effort and reported
    back as ``email_sent``. Returns 400 on missing fields, 403 when the
    form is disabled, 500 on unexpected errors.
    """
    try:
        d = request.get_json(force=True)
        # `or ''` guards against explicit JSON nulls, which would otherwise
        # raise AttributeError on .strip() and surface as a 500 instead of 400.
        name = (d.get('name') or '').strip()
        email = (d.get('email') or '').strip()
        message = (d.get('message') or '').strip()
        if not name or not email or not message:
            return jsonify({'error': 'All fields are required'}), 400
        # Check if form is enabled
        settings = load_json('contact_settings.json')
        if not settings.get('form_enabled', True):
            return jsonify({'error': 'Contact form is currently disabled'}), 403
        contact_email = settings.get('email', '')
        auto_reply = settings.get('auto_reply', '')
        # Persist the message first so nothing is lost if email fails.
        messages = load_json('messages.json')
        if not isinstance(messages, list):
            messages = []
        messages.append({
            'name': name,
            'email': email,
            'message': message,
            'timestamp': datetime.datetime.utcnow().isoformat() + 'Z'
        })
        save_json('messages.json', messages)
        # Try to send email via SMTP if configured
        keys = load_json('apikeys.json')
        smtp_cfg = keys.get('smtp', {})
        smtp_host = smtp_cfg.get('host', '')
        smtp_port = smtp_cfg.get('port', '587')
        smtp_user = smtp_cfg.get('user', '')
        smtp_pass = smtp_cfg.get('pass', '')
        email_sent = False
        if smtp_host and smtp_user and smtp_pass and contact_email:
            try:
                # Notification to the site owner.
                msg = MIMEMultipart()
                msg['From'] = smtp_user
                msg['To'] = contact_email
                msg['Subject'] = f'[JAESWIFT] Contact from {name}'
                body = f"Name: {name}\nEmail: {email}\n\nMessage:\n{message}"
                msg.attach(MIMEText(body, 'plain'))
                # Context manager guarantees the connection is closed even if
                # starttls/login/send raises (previously it leaked on failure).
                with smtplib.SMTP(smtp_host, int(smtp_port)) as server:
                    server.starttls()
                    server.login(smtp_user, smtp_pass)
                    server.send_message(msg)
                    # Auto-reply to the sender, if configured.
                    if auto_reply:
                        reply = MIMEText(auto_reply, 'plain')
                        reply['From'] = smtp_user
                        reply['To'] = email
                        reply['Subject'] = 'Re: Your message to JAESWIFT'
                        server.send_message(reply)
                email_sent = True
            except Exception:
                pass  # Email is best-effort; the message is already saved.
        return jsonify({
            'ok': True,
            'email_sent': email_sent,
            'message': 'Message received. Thanks for reaching out!'
        })
    except Exception as e:
        return jsonify({'error': str(e)}), 500
# ─── Backups ─────────────────────────────────────────
def _backup_json_file(name):
    """Serve one JSON data file as an attachment download.

    Returns a 404 JSON error when the file does not exist and a 500
    JSON error on unexpected failures. Shared by the three per-file
    backup routes below (previously three copy-pasted bodies).
    """
    try:
        p = DATA_DIR / name
        if not p.exists():
            label = name.rsplit('.', 1)[0]
            return jsonify({'error': f'No {label} data found'}), 404
        return send_file(p, as_attachment=True, download_name=name,
                         mimetype='application/json')
    except Exception as e:
        return jsonify({'error': str(e)}), 500

@app.route('/api/backups/posts')
@require_auth
def backup_posts():
    """Download posts.json (admin only)."""
    return _backup_json_file('posts.json')

@app.route('/api/backups/tracks')
@require_auth
def backup_tracks():
    """Download tracks.json (admin only)."""
    return _backup_json_file('tracks.json')

@app.route('/api/backups/settings')
@require_auth
def backup_settings():
    """Download settings.json (admin only)."""
    return _backup_json_file('settings.json')
@app.route('/api/backups/all')
@require_auth
def backup_all():
    """Bundle every *.json data file into a timestamped ZIP download (admin only)."""
    try:
        archive = io.BytesIO()
        with zipfile.ZipFile(archive, 'w', zipfile.ZIP_DEFLATED) as zf:
            for path in DATA_DIR.glob('*.json'):
                zf.write(path, path.name)
        archive.seek(0)  # rewind so send_file streams from the start
        stamp = datetime.datetime.utcnow().strftime('%Y%m%d_%H%M%S')
        return send_file(
            archive,
            as_attachment=True,
            download_name=f'jaeswift_backup_{stamp}.zip',
            mimetype='application/zip'
        )
    except Exception as e:
        return jsonify({'error': str(e)}), 500
# ─── Run ─────────────────────────────────────────────
if __name__ == '__main__':
    # Listen on all interfaces, port 5000, with debug disabled.
    app.run(host='0.0.0.0', port=5000, debug=False)

37
api/data/apikeys.json Normal file
View file

@ -0,0 +1,37 @@
{
"weather": {
"api_key": ""
},
"spotify": {
"client_id": "",
"client_secret": "",
"refresh_token": ""
},
"smtp": {
"host": "",
"port": "587",
"user": "",
"pass": ""
},
"discord": {
"webhook": ""
},
"github": {
"token": ""
},
"custom1": {
"name": "",
"key": "",
"url": ""
},
"custom2": {
"name": "",
"key": "",
"url": ""
},
"custom3": {
"name": "",
"key": "",
"url": ""
}
}

View file

@ -0,0 +1,5 @@
{
"email": "jaeswift@jaeswift.xyz",
"form_enabled": true,
"auto_reply": "Thanks for reaching out. I'll get back to you soon."
}

38
api/data/homepage.json Normal file
View file

@ -0,0 +1,38 @@
{
"hero_title": "JAESWIFT",
"hero_subtitle": "SECURITY ENGINEER // DEVELOPER",
"hero_tagline": "Building the future, one exploit at a time.",
"sections": [
{
"id": "about",
"label": "About",
"visible": true
},
{
"id": "blog",
"label": "Blog",
"visible": true
},
{
"id": "dev-cards",
"label": "Dev Cards",
"visible": true
},
{
"id": "links",
"label": "Links",
"visible": true
},
{
"id": "contact",
"label": "Contact",
"visible": true
},
{
"id": "terminal",
"label": "Terminal",
"visible": true
}
],
"about_text": "Cybersecurity enthusiast and full-stack developer based in the UK."
}

14
api/data/links.json Normal file
View file

@ -0,0 +1,14 @@
[
{
"name": "GitHub",
"url": "https://github.com/jaeswift",
"icon": "\u2b21",
"category": "social"
},
{
"name": "Discord",
"url": "#",
"icon": "\u25c8",
"category": "social"
}
]

View file

@ -0,0 +1,14 @@
[
{
"name": "Main Site",
"url": "https://jaeswift.xyz"
},
{
"name": "Gitea",
"url": "https://git.jaeswift.xyz"
},
{
"name": "Yoink",
"url": "https://jaeswift.xyz/yoink/"
}
]

1
api/data/messages.json Normal file
View file

@ -0,0 +1 @@
[]

17
api/data/navigation.json Normal file
View file

@ -0,0 +1,17 @@
[
{
"label": "HOME",
"url": "/",
"order": 1
},
{
"label": "BLOG",
"url": "/blog",
"order": 2
},
{
"label": "ADMIN",
"url": "/admin",
"order": 3
}
]

View file

@ -1,12 +1,40 @@
[
{
"title": "Building This Dashboard: A Cyberpunk Dev Log",
"slug": "building-this-dashboard",
"date": "2026-03-30",
"time_written": "04:22",
"excerpt": "The story behind jaeswift.xyz \u2014 how I built a cyberpunk HUD dashboard as a personal homepage using nothing but vanilla HTML, CSS, and JavaScript.",
"tags": [
"webdev",
"css",
"javascript",
"design",
"devlog"
],
"content": "# Building This Dashboard: A Cyberpunk Dev Log\n\nYou're looking at it. This site. The one with the scanlines, the particle system, the glitching text, the fake terminal, and the server metrics that are actually real.\n\n## Why?\n\nEvery developer has a portfolio site. Most look the same \u2014 clean, minimal, forgettable. I wanted something that felt like logging into a system. Something that made you feel like you just SSH'd into my brain.\n\n## The Rules\n\n1. **No frameworks** \u2014 Vanilla HTML, CSS, JS only\n2. **No templates** \u2014 Every line written from scratch\n3. **Real data** \u2014 Server metrics, weather, and now-playing are live\n4. **Cyberpunk aesthetic** \u2014 Dark theme, neon accents, monospace everything\n\n## The HUD Grid\n\nThe hero section is a CSS Grid layout. Left column: UK map (SVG with animated pulse rings) + identity block. Right column: Server metrics + containers + weather + now playing.\n\n## The Particle System\n\nThe background particles are drawn on a canvas element. Each frame: move particles based on velocity, check distance to mouse cursor, draw connection lines between nearby particles, recycle particles that leave the viewport. ~200 particles at 60fps. Surprisingly lightweight.\n\n## Live Data Integration\n\nThe server metrics aren't fake. They hit a Flask API running on the same VPS. Same for weather (Open-Meteo API) and the now-playing widget (rotating through my playlist).\n\n## The Glitch Effect\n\nThe title uses CSS clip-path animations with pseudo-elements offset by a few pixels in cyan and red. Triggered randomly every few seconds. Subtle enough to not be annoying.\n\n## What I'd Do Differently\n\nHonestly? Not much. This was a passion project built at 3AM with coffee and beats. It's not perfect, but it's mine. The entire site is about 1,600 lines of CSS and 800 lines of JS. No build step, no dependencies, no npm. Just files and a server.",
"mood": "creative",
"energy": 3,
"motivation": 5,
"focus": 4,
"difficulty": 3,
"coffee": 6,
"heart_rate": 76,
"threat_level": "LOW"
},
{
"id": 1,
"title": "Building a Self-Hosted Empire",
"slug": "self-hosted-empire",
"date": "2026-03-28",
"excerpt": "How I ditched big tech and built my own infrastructure from the ground up. Gitea, Plex, search engines — all on one box.",
"content": "## The Breaking Point\n\nThere comes a moment in every developer's life when you look at your Google Drive, your Gmail, your hosted repos on GitHub, and you think: *why am I handing all of this to someone else?* For me, that moment came at 2 AM on a Tuesday, staring at a Terms of Service update email that basically said 'we own your soul now.'\n\nSo I did what any rational person would do. I bought a VPS and started building.\n\n## The Stack\n\nThe foundation is a Debian box sitting in a data centre somewhere in Europe. On top of that:\n\n- **Gitea** for git hosting — lightweight, fast, and mine\n- **Plex** for media — because why pay for streaming when you have a 2TB drive\n- **Nginx** as the reverse proxy tying it all together\n- **Docker** containers for everything that can be containerised\n- **WireGuard** for secure remote access\n\nThe whole thing runs on 4GB of RAM and barely breaks a sweat.\n\n## The Hard Parts\n\nDNS was a nightmare at first. Getting wildcard SSL certs with Let's Encrypt took three attempts and a lot of swearing. Docker networking still makes me want to throw my laptop out the window sometimes.\n\nBut the worst part? Email. Self-hosted email is a rabbit hole I'm still climbing out of. Every major provider treats your IP as spam by default. You need SPF, DKIM, DMARC, a reverse DNS entry, and probably a blood sacrifice to get Gmail to accept your messages.\n\n## Was It Worth It?\n\nAbsolutely. I own my data. I control my infrastructure. When a service goes down, it's my fault and I can fix it. There's something deeply satisfying about `ssh root@mybox` and knowing that everything running on that machine is mine.\n\nThe total cost? About £8 a month for the VPS. That's less than a single streaming subscription.\n\n## What's Next\n\nI'm looking at adding a self-hosted AI inference server, a personal search engine, and maybe a Matrix server for comms. The empire keeps growing.",
"tags": ["self-hosted", "infrastructure", "linux", "docker"],
"excerpt": "How I ditched big tech and built my own infrastructure from the ground up. Gitea, Plex, search engines \u2014 all on one box.",
"content": "## The Breaking Point\n\nThere comes a moment in every developer's life when you look at your Google Drive, your Gmail, your hosted repos on GitHub, and you think: *why am I handing all of this to someone else?* For me, that moment came at 2 AM on a Tuesday, staring at a Terms of Service update email that basically said 'we own your soul now.'\n\nSo I did what any rational person would do. I bought a VPS and started building.\n\n## The Stack\n\nThe foundation is a Debian box sitting in a data centre somewhere in Europe. On top of that:\n\n- **Gitea** for git hosting \u2014 lightweight, fast, and mine\n- **Plex** for media \u2014 because why pay for streaming when you have a 2TB drive\n- **Nginx** as the reverse proxy tying it all together\n- **Docker** containers for everything that can be containerised\n- **WireGuard** for secure remote access\n\nThe whole thing runs on 4GB of RAM and barely breaks a sweat.\n\n## The Hard Parts\n\nDNS was a nightmare at first. Getting wildcard SSL certs with Let's Encrypt took three attempts and a lot of swearing. Docker networking still makes me want to throw my laptop out the window sometimes.\n\nBut the worst part? Email. Self-hosted email is a rabbit hole I'm still climbing out of. Every major provider treats your IP as spam by default. You need SPF, DKIM, DMARC, a reverse DNS entry, and probably a blood sacrifice to get Gmail to accept your messages.\n\n## Was It Worth It?\n\nAbsolutely. I own my data. I control my infrastructure. When a service goes down, it's my fault and I can fix it. There's something deeply satisfying about `ssh root@mybox` and knowing that everything running on that machine is mine.\n\nThe total cost? About \u00a38 a month for the VPS. That's less than a single streaming subscription.\n\n## What's Next\n\nI'm looking at adding a self-hosted AI inference server, a personal search engine, and maybe a Matrix server for comms. The empire keeps growing.",
"tags": [
"self-hosted",
"infrastructure",
"linux",
"docker"
],
"mood": 4,
"energy": 5,
"motivation": 5,
@ -18,14 +46,41 @@
"time_written": "02:34 AM",
"word_count": 347
},
{
"title": "Homelab Chronicles: Docker, Proxmox & 10 Containers Deep",
"slug": "homelab-chronicles",
"date": "2026-03-28",
"time_written": "02:15",
"excerpt": "A deep dive into my self-hosted infrastructure \u2014 from bare metal to a fleet of Docker containers running everything from Gitea to media servers.",
"tags": [
"homelab",
"docker",
"self-hosting",
"infrastructure"
],
"content": "# Homelab Chronicles: Docker, Proxmox & 10 Containers Deep\n\nThere's something deeply satisfying about running your own infrastructure. No cloud bills. No vendor lock-in. Just you, a box of silicon, and a terminal.\n\n## The Hardware\n\nMy current setup is a single Contabo VPS \u2014 nothing fancy, but it punches well above its weight:\n\n- **CPU:** 6 vCPU AMD EPYC\n- **RAM:** 16GB DDR4\n- **Storage:** 400GB NVMe SSD\n- **OS:** Debian 12 (Bookworm)\n- **Location:** EU datacenter\n\n## The Stack\n\nEverything runs in Docker containers behind an Nginx reverse proxy with Let's Encrypt SSL. Here's what's running:\n\n### Core Infrastructure\n1. **Nginx Proxy Manager** \u2014 SSL termination, reverse proxy for everything\n2. **Portainer** \u2014 Container management UI (though I mostly use CLI)\n3. **Uptime Kuma** \u2014 Monitoring dashboard for all services\n\n### Development\n4. **Gitea** \u2014 Self-hosted Git at git.jaeswift.xyz\n5. **Drone CI** \u2014 Continuous integration pipelines\n6. **Agent Zero** \u2014 AI agent framework for automation\n\n### Applications\n7. **Yoink** \u2014 Custom SvelteKit media downloader\n8. **Vane** \u2014 Weather dashboard built with Next.js\n9. **Ghost** \u2014 Blog platform (since migrated to custom static)\n10. **Wireguard** \u2014 VPN tunnel for secure access\n\n## Docker Compose Setup\n\nThe entire stack is defined in a single `docker-compose.yml` with proper networking. Each service gets its own network segment. Only services that need external access join the proxy network.\n\n## Monitoring\n\nUptime Kuma pings every service every 60 seconds. I get Discord webhook alerts if anything drops. Current uptime across all services: **99.7%** over the last 30 days.\n\n## Lessons Learned\n\n1. **Always have backups** \u2014 I learned this the hard way when a disk migration went wrong\n2. **Document everything** \u2014 Future you will thank present you\n3. 
**Don't over-engineer** \u2014 Start simple, add complexity when needed\n4. **Security first** \u2014 Fail2ban, UFW, SSH keys only, no root login\n\nThe homelab never sleeps. Neither do I, apparently.",
"mood": "productive",
"energy": 4,
"motivation": 5,
"focus": 4,
"difficulty": 3,
"coffee": 3,
"heart_rate": 72,
"threat_level": "LOW"
},
{
"id": 2,
"title": "Securing the Perimeter: VPS Hardening 101",
"slug": "vps-hardening-101",
"date": "2026-03-25",
"excerpt": "Your fresh VPS is a sitting duck. Here's how I lock mine down — SSH keys, fail2ban, firewalls, and a healthy dose of paranoia.",
"content": "## You Just Deployed a Server. You're Already Under Attack.\n\nI'm not being dramatic. Within five minutes of spinning up a fresh VPS, check your auth logs. You'll see hundreds of brute-force SSH attempts from IPs all over the world. Bots are scanning every IP range constantly, looking for default credentials and open ports.\n\nWelcome to the internet.\n\n## Step 1: SSH Hardening\n\nFirst thing, always:\n\n```bash\n# Generate a key pair on your local machine\nssh-keygen -t ed25519 -C \"your@email.com\"\n\n# Copy it to the server\nssh-copy-id root@your-server\n\n# Then disable password auth\nsed -i 's/#PasswordAuthentication yes/PasswordAuthentication no/' /etc/ssh/sshd_config\nsystemctl restart sshd\n```\n\nChange the default SSH port while you're at it. Security through obscurity isn't real security, but it cuts the noise in your logs by 95%.\n\n## Step 2: Firewall\n\nUFW is your friend:\n\n```bash\nufw default deny incoming\nufw default allow outgoing\nufw allow 2222/tcp # your SSH port\nufw allow 80/tcp\nufw allow 443/tcp\nufw enable\n```\n\nThat's it. Everything else gets dropped.\n\n## Step 3: Fail2Ban\n\nFail2ban watches your logs and auto-bans IPs that fail authentication too many times. Install it, configure it for SSH, and forget about it. It'll do its job quietly in the background.\n\n## Step 4: Unattended Upgrades\n\nSecurity patches should install themselves. You shouldn't need to remember to run `apt update` every day:\n\n```bash\napt install unattended-upgrades\ndpkg-reconfigure unattended-upgrades\n```\n\n## Step 5: The Paranoia Layer\n\n- Disable root login over SSH (use a regular user + sudo)\n- Set up 2FA with Google Authenticator PAM module\n- Monitor with Netdata or similar\n- Regular `lynis audit system` scans\n- Keep backups. Always keep backups.\n\n## The Mindset\n\nSecurity isn't a destination, it's a process. You're never 'done' securing a server. New vulnerabilities drop daily. 
The trick is making your box harder to crack than the next one. Attackers are lazy — they'll move on to easier targets.",
"tags": ["security", "linux", "sysadmin", "hardening"],
"excerpt": "Your fresh VPS is a sitting duck. Here's how I lock mine down \u2014 SSH keys, fail2ban, firewalls, and a healthy dose of paranoia.",
"content": "## You Just Deployed a Server. You're Already Under Attack.\n\nI'm not being dramatic. Within five minutes of spinning up a fresh VPS, check your auth logs. You'll see hundreds of brute-force SSH attempts from IPs all over the world. Bots are scanning every IP range constantly, looking for default credentials and open ports.\n\nWelcome to the internet.\n\n## Step 1: SSH Hardening\n\nFirst thing, always:\n\n```bash\n# Generate a key pair on your local machine\nssh-keygen -t ed25519 -C \"your@email.com\"\n\n# Copy it to the server\nssh-copy-id root@your-server\n\n# Then disable password auth\nsed -i 's/#PasswordAuthentication yes/PasswordAuthentication no/' /etc/ssh/sshd_config\nsystemctl restart sshd\n```\n\nChange the default SSH port while you're at it. Security through obscurity isn't real security, but it cuts the noise in your logs by 95%.\n\n## Step 2: Firewall\n\nUFW is your friend:\n\n```bash\nufw default deny incoming\nufw default allow outgoing\nufw allow 2222/tcp # your SSH port\nufw allow 80/tcp\nufw allow 443/tcp\nufw enable\n```\n\nThat's it. Everything else gets dropped.\n\n## Step 3: Fail2Ban\n\nFail2ban watches your logs and auto-bans IPs that fail authentication too many times. Install it, configure it for SSH, and forget about it. It'll do its job quietly in the background.\n\n## Step 4: Unattended Upgrades\n\nSecurity patches should install themselves. You shouldn't need to remember to run `apt update` every day:\n\n```bash\napt install unattended-upgrades\ndpkg-reconfigure unattended-upgrades\n```\n\n## Step 5: The Paranoia Layer\n\n- Disable root login over SSH (use a regular user + sudo)\n- Set up 2FA with Google Authenticator PAM module\n- Monitor with Netdata or similar\n- Regular `lynis audit system` scans\n- Keep backups. Always keep backups.\n\n## The Mindset\n\nSecurity isn't a destination, it's a process. You're never 'done' securing a server. New vulnerabilities drop daily. 
The trick is making your box harder to crack than the next one. Attackers are lazy \u2014 they'll move on to easier targets.",
"tags": [
"security",
"linux",
"sysadmin",
"hardening"
],
"mood": 3,
"energy": 4,
"motivation": 5,
@ -37,14 +92,65 @@
"time_written": "11:47 PM",
"word_count": 312
},
{
"title": "CTF Write-Up: Breaking Into the Matrix (HackTheBox)",
"slug": "ctf-matrix-writeup",
"date": "2026-03-25",
"time_written": "23:48",
"excerpt": "A detailed write-up of a medium-difficulty HackTheBox challenge involving SSRF, JWT manipulation, and a satisfying root shell.",
"tags": [
"ctf",
"hackthebox",
"pentesting",
"write-up",
"security"
],
"content": "# CTF Write-Up: Breaking Into the Matrix\n\n**Platform:** HackTheBox\n**Difficulty:** Medium\n**Time:** 4 hours 23 minutes\n**Tools:** nmap, burpsuite, john, linpeas\n\n---\n\n## Reconnaissance\n\nStarted with the usual nmap scan. Ports open: 22 (OpenSSH 8.9), 80 (Nginx redirects to matrix.htb), 8080 (Node.js API).\n\n## Web Enumeration\n\nThe main site was a futuristic dashboard (ironic, given my own site). Gobuster revealed /api/v1/users, /api/v1/auth, /api/v1/admin (403), and /internal/health (SSRF candidate).\n\n## The SSRF\n\nThe /api/v1/users endpoint had a profile picture feature that fetched URLs server-side. Classic SSRF. Fetched the internal health endpoint which leaked server config including a JWT secret.\n\n## JWT Manipulation\n\nWith the secret, I forged an admin token using PyJWT. Used this to access /api/v1/admin \u2014 which had a command execution endpoint.\n\n## User Shell\n\nUsed the command exec endpoint to fire a reverse shell. Got in as www-data. User flag captured.\n\n## Privilege Escalation\n\nRan linpeas. Found a SUID binary in /opt/matrix-monitor owned by root. Strings revealed it called curl without a full path. Path hijack \u2014 created a fake curl that spawns bash -p. Root shell. GG.\n\n## Takeaways\n\n- SSRF is still everywhere \u2014 always test URL input fields\n- JWT secrets in internal endpoints = instant admin\n- SUID binaries with relative paths are free root\n- Always run linpeas. Always.",
"mood": "focused",
"energy": 5,
"motivation": 5,
"focus": 5,
"difficulty": 4,
"coffee": 5,
"heart_rate": 88,
"threat_level": "HIGH"
},
{
"title": "OpSec for the Paranoid: My Privacy Stack",
"slug": "opsec-privacy-stack",
"date": "2026-03-22",
"time_written": "01:30",
"excerpt": "A breakdown of the tools, habits, and configurations I use to stay private online \u2014 from DNS to email to browsing.",
"tags": [
"privacy",
"security",
"opsec",
"linux",
"self-hosting"
],
"content": "# OpSec for the Paranoid: My Privacy Stack\n\nI'm not hiding anything. I just don't think anyone's entitled to my data.\n\n## DNS\n\nSelf-hosted Pi-hole pointing to Quad9 (9.9.9.9) with DNSSEC enabled. Every DNS query on my network is filtered and encrypted. The blocklist catches ~30% of all queries.\n\n## Email\n\nProtonMail for personal. Self-hosted Postfix/Dovecot on jaeswift.xyz for project mail. Both with SPF, DKIM, and DMARC. I use email aliases extensively \u2014 every service gets a unique address.\n\n## Browser\n\nFirefox with uBlock Origin (hard mode), Privacy Badger, HTTPS Everywhere, Multi-Account Containers, and a custom user.js based on arkenfox. No Chrome. Ever.\n\n## VPN\n\nWireGuard tunnel to my VPS. All traffic routed through it on public networks. Dead simple config.\n\n## Passwords\n\nKeePassXC locally synced via Syncthing. No cloud password manager. The database is backed up encrypted to three locations.\n\n## Messaging\n\nSignal for anything sensitive. Discord for community (privacy settings maxed). Never SMS for anything important.\n\n## The Mindset\n\nPrivacy isn't just tools \u2014 it's habits:\n\n- **Compartmentalise** \u2014 Different emails, browsers, identities for different contexts\n- **Minimise** \u2014 Don't give data you don't have to\n- **Verify** \u2014 Check permissions, read privacy policies, audit your attack surface\n- **Update** \u2014 Patch everything, always\n\nStay safe out there.",
"mood": "focused",
"energy": 4,
"motivation": 4,
"focus": 5,
"difficulty": 2,
"coffee": 2,
"heart_rate": 65,
"threat_level": "MEDIUM"
},
{
"id": 3,
"title": "AI Agents at 3AM: Building Agent Zero",
"slug": "ai-agents-3am",
"date": "2026-03-20",
"excerpt": "What happens when you give an AI root access to your server and tell it to build things? Chaos, learning, and surprisingly good code.",
"content": "## The Rabbit Hole\n\nIt started with a simple question: what if an AI could actually *do* things on my server, not just talk about doing them?\n\nI'd been messing around with LLMs for months — ChatGPT, Claude, local models on my GPU. They're brilliant at explaining things and writing snippets, but there's always that gap between 'here's the code' and 'it's actually running.' You still have to copy, paste, debug, fix the hallucinated import, debug again.\n\nThen I found Agent Zero.\n\n## What Is It?\n\nAgent Zero is an AI agent framework. You give it access to a terminal, a browser, memory, and tools. Then you tell it what you want. It figures out the steps, writes the code, runs it, checks the output, and iterates until it works.\n\nThe first time I watched it SSH into my VPS, install nginx, write a config file, test it, and reload the service — all without me touching the keyboard — I felt something between excitement and genuine unease.\n\n## The Good\n\n- It handles complex multi-step tasks that would take me an hour of googling\n- It remembers previous solutions and applies them to new problems\n- It can debug its own mistakes (most of the time)\n- It never gets frustrated at 3 AM\n\n## The Bad\n\n- It sometimes gets stuck in loops, trying the same failing approach repeatedly\n- Token costs add up fast on complex tasks\n- You need to be specific — vague instructions produce vague results\n- It occasionally decides to 'improve' things you didn't ask it to touch\n\n## The Philosophy\n\nThere's a real debate about whether giving AI tools like these is a good idea. I get it. But here's my take: these tools exist. They're getting better. The question isn't whether people will use them, it's whether you'll understand how they work when they become mainstream.\n\nI'd rather be the person who built with the early versions than the person trying to catch up later.\n\n## Current Setup\n\nMy Agent Zero instance runs in Docker on the VPS. 
It has access to the terminal, can browse the web, and maintains long-term memory. I use it for everything from server maintenance to building websites.\n\nYes, including this one.",
"tags": ["ai", "agent-zero", "automation", "development"],
"content": "## The Rabbit Hole\n\nIt started with a simple question: what if an AI could actually *do* things on my server, not just talk about doing them?\n\nI'd been messing around with LLMs for months \u2014 ChatGPT, Claude, local models on my GPU. They're brilliant at explaining things and writing snippets, but there's always that gap between 'here's the code' and 'it's actually running.' You still have to copy, paste, debug, fix the hallucinated import, debug again.\n\nThen I found Agent Zero.\n\n## What Is It?\n\nAgent Zero is an AI agent framework. You give it access to a terminal, a browser, memory, and tools. Then you tell it what you want. It figures out the steps, writes the code, runs it, checks the output, and iterates until it works.\n\nThe first time I watched it SSH into my VPS, install nginx, write a config file, test it, and reload the service \u2014 all without me touching the keyboard \u2014 I felt something between excitement and genuine unease.\n\n## The Good\n\n- It handles complex multi-step tasks that would take me an hour of googling\n- It remembers previous solutions and applies them to new problems\n- It can debug its own mistakes (most of the time)\n- It never gets frustrated at 3 AM\n\n## The Bad\n\n- It sometimes gets stuck in loops, trying the same failing approach repeatedly\n- Token costs add up fast on complex tasks\n- You need to be specific \u2014 vague instructions produce vague results\n- It occasionally decides to 'improve' things you didn't ask it to touch\n\n## The Philosophy\n\nThere's a real debate about whether giving AI tools like these is a good idea. I get it. But here's my take: these tools exist. They're getting better. The question isn't whether people will use them, it's whether you'll understand how they work when they become mainstream.\n\nI'd rather be the person who built with the early versions than the person trying to catch up later.\n\n## Current Setup\n\nMy Agent Zero instance runs in Docker on the VPS. 
It has access to the terminal, can browse the web, and maintains long-term memory. I use it for everything from server maintenance to building websites.\n\nYes, including this one.",
"tags": [
"ai",
"agent-zero",
"automation",
"development"
],
"mood": 5,
"energy": 3,
"motivation": 4,
@ -56,6 +162,29 @@
"time_written": "03:22 AM",
"word_count": 340
},
{
"title": "Terminal Ricing: My Kali Workflow in 2026",
"slug": "terminal-ricing-2026",
"date": "2026-03-20",
"time_written": "22:15",
"excerpt": "A tour of my terminal setup \u2014 from Zsh to Tmux to custom aliases. Optimised for pentesting, development, and looking absolutely sick.",
"tags": [
"linux",
"terminal",
"workflow",
"customisation",
"dotfiles"
],
"content": "# Terminal Ricing: My Kali Workflow in 2026\n\nI spend 90% of my time in a terminal. It better look good and work fast.\n\n## The Shell: Zsh + Oh My Zsh\n\nBash is fine. Zsh is better. With Oh My Zsh and the Powerlevel10k prompt, I get git branch/status, command execution time, Python virtualenv indicator, error code highlighting, and auto-suggestions from history.\n\n## Key Aliases\n\nMy .zshrc has about 200 lines of aliases and functions. Essentials: ll for detailed listing, gs/gp for git status/push, dc for docker compose, serve for quick http server, myip for external IP, ports for open connections.\n\n## Tmux\n\nAlways running with custom config: Ctrl+A prefix, mouse support, custom status bar with hostname/session/time, pipe for vertical split, dash for horizontal. Colours match the cyberpunk theme.\n\n## Tools I Can't Live Without\n\nfzf (fuzzy finder), bat (syntax-highlighted cat), ripgrep (fast grep), htop (process monitor), lazydocker (Docker TUI), nnn (file manager), jq (JSON processing).\n\n## Neovim\n\nRunning LazyVim with Treesitter, LSP for Python/JS/Go, Telescope, Which-key, and a custom cyberpunk colour scheme.\n\n## The Font\n\nJetBrains Mono with Nerd Font patches. Ligatures, readable at small sizes, all the icons Powerlevel10k needs.\n\n## Why It Matters\n\nA good terminal setup isn't just aesthetics \u2014 it's productivity. Every alias saves seconds. Every keybind saves keystrokes. Over thousands of commands a day, it adds up. Plus, it looks absolutely sick in screenshots.",
"mood": "zen",
"energy": 3,
"motivation": 4,
"focus": 3,
"difficulty": 1,
"coffee": 4,
"heart_rate": 62,
"threat_level": "LOW"
},
{
"id": 4,
"title": "The 4AM Deploy: When Everything Goes Wrong",
@ -63,7 +192,12 @@
"date": "2026-03-15",
"excerpt": "A war story about a production deploy that went sideways, the frantic debugging that followed, and what I learned from nuking my own DNS.",
"content": "## It Was Supposed To Be Quick\n\nFamous last words. All I wanted to do was update the nginx config to add a new subdomain. Five minutes, tops. I'd done it a hundred times before.\n\n```bash\nnginx -t && systemctl reload nginx\n```\n\nExcept this time, `nginx -t` returned an error I'd never seen. Something about a duplicate server name. I'd accidentally created a circular include that referenced itself through a symlink.\n\n## The Cascade\n\nIn my sleep-deprived wisdom, I decided to 'fix' it by removing what I thought was the duplicate config file. Turns out, that was the main config for jaeswift.xyz. Not the duplicate. The main one.\n\nNginx went down. All services went dark. Git, Plex, the docs site, everything. At 4 AM.\n\n## The Panic\n\nMy phone started buzzing. Uptime monitors screaming. I tried to restore from the backup. The backup was three days old because I'd been meaning to set up daily snapshots but hadn't got round to it.\n\nThree days of config changes, gone.\n\n## The Recovery\n\nI spent the next two hours reconstructing the nginx config from memory and bash history. `history | grep nginx` became my best friend. I found most of the server blocks in various terminal scrollback sessions.\n\nBy 6 AM, everything was back online. Mostly. The Plex config took another day to get right because I'd customised the proxy headers and couldn't remember the exact settings.\n\n## The Lessons\n\n1. **Backup your configs to git.** Not tomorrow. Now.\n2. **Never deploy after midnight.** Your brain is lying to you about how awake you are.\n3. **Test in staging.** I know, I know. But actually do it.\n4. **Document everything.** Past-you is the best resource for future-you, but only if past-you wrote things down.\n5. **Set up automated backups.** I now have hourly snapshots of all critical configs.\n\n## Silver Lining\n\nThe reconstructed config was actually cleaner than the original. 
Sometimes you need to burn it down to build it better.\n\nBut maybe not at 4 AM.",
"tags": ["devops", "war-story", "nginx", "lessons"],
"tags": [
"devops",
"war-story",
"nginx",
"lessons"
],
"mood": 2,
"energy": 1,
"motivation": 2,

7
api/data/seo.json Normal file
View file

@ -0,0 +1,7 @@
{
"title": "JAESWIFT // Security Engineer & Developer",
"description": "Personal site of JAE - cybersecurity engineer, developer, and hacker based in the UK.",
"keywords": "cybersecurity, hacking, developer, pentesting, UK",
"og_image": "",
"favicon": ""
}

10
api/data/theme.json Normal file
View file

@ -0,0 +1,10 @@
{
"accent_color": "#00ffc8",
"bg_color": "#0a0e17",
"text_color": "#e0e0e0",
"scanlines": true,
"particles": true,
"glitch": true,
"grid_bg": true,
"font_size": 14
}

File diff suppressed because it is too large Load diff

View file

@ -523,19 +523,19 @@
<form class="contact-form" id="contactForm">
<div class="form-group">
<label class="form-label">CALLSIGN</label>
<input type="text" class="form-input" placeholder="Enter your name..." required>
<input type="text" class="form-input" id="contactName" placeholder="Enter your name..." required>
</div>
<div class="form-group">
<label class="form-label">FREQUENCY</label>
<input type="email" class="form-input" placeholder="your@email.com" required>
<input type="email" class="form-input" id="contactEmail" placeholder="your@email.com" required>
</div>
<div class="form-group">
<label class="form-label">SUBJECT</label>
<input type="text" class="form-input" placeholder="Transmission subject...">
<input type="text" class="form-input" id="contactSubject" placeholder="Transmission subject...">
</div>
<div class="form-group">
<label class="form-label">MESSAGE PAYLOAD</label>
<textarea class="form-input form-textarea" placeholder="Enter your message..." rows="5" required></textarea>
<textarea class="form-input form-textarea" id="contactMessage" placeholder="Enter your message..." rows="5" required></textarea>
</div>
<button type="submit" class="form-submit">
<span class="submit-text">TRANSMIT</span>

File diff suppressed because it is too large Load diff

View file

@ -463,29 +463,59 @@
const form = $('#contactForm');
if (!form) return;
form.addEventListener('submit', (e) => {
form.addEventListener('submit', async (e) => {
e.preventDefault();
const btn = form.querySelector('.form-submit');
const originalText = btn.querySelector('.submit-text').textContent;
const submitText = btn.querySelector('.submit-text');
const originalText = submitText.textContent;
btn.querySelector('.submit-text').textContent = 'TRANSMITTING...';
const name = $('#contactName').value.trim();
const email = $('#contactEmail').value.trim();
const subject = $('#contactSubject').value.trim();
const message = $('#contactMessage').value.trim();
if (!name || !email || !message) return;
submitText.textContent = 'TRANSMITTING...';
btn.disabled = true;
btn.style.borderColor = 'var(--warning)';
btn.style.color = 'var(--warning)';
setTimeout(() => {
btn.querySelector('.submit-text').textContent = '✓ TRANSMITTED';
btn.style.borderColor = 'var(--accent)';
btn.style.color = 'var(--accent)';
try {
const payload = {
name: name,
email: email,
message: subject ? `[${subject}] ${message}` : message
};
const res = await fetch('/api/contact', {
method: 'POST',
headers: { 'Content-Type': 'application/json' },
body: JSON.stringify(payload)
});
const data = await res.json();
setTimeout(() => {
btn.querySelector('.submit-text').textContent = originalText;
btn.disabled = false;
btn.style.borderColor = '';
btn.style.color = '';
if (res.ok) {
submitText.textContent = '✓ TRANSMITTED';
btn.style.borderColor = 'var(--accent)';
btn.style.color = 'var(--accent)';
form.reset();
}, 2000);
}, 1500);
} else {
submitText.textContent = '✗ FAILED';
btn.style.borderColor = '#ff006e';
btn.style.color = '#ff006e';
}
} catch (err) {
submitText.textContent = '✗ ERROR';
btn.style.borderColor = '#ff006e';
btn.style.color = '#ff006e';
}
setTimeout(() => {
submitText.textContent = originalText;
btn.disabled = false;
btn.style.borderColor = '';
btn.style.color = '';
}, 2500);
});
}