Update Apache SSL configuration and enhance security features across multiple files. Change X-Frame-Options to SAMEORIGIN, add optional Content Security Policy headers for testing, and strengthen password handling with HaveIBeenPwned checks during user registration and password reset. Implement passkey login in the authentication flow, including UI updates for a better user experience. Add size limits and validation to image upload processing, and add rate limiting to various API endpoints to prevent abuse.
Some checks failed
Code Analysis (JS/Vue) / analyze (push) Failing after 51s

Torsten Schulz (local)
2026-01-05 11:50:57 +01:00
parent 8bd7ed76cd
commit 673c34ac9d
47 changed files with 1738 additions and 83 deletions

server/utils/audit-log.js (new normal file, 35 additions)

@@ -0,0 +1,35 @@
import fs from 'fs/promises'
import path from 'path'
// nosemgrep: javascript.lang.security.audit.path-traversal.path-join-resolve-traversal.path-join-resolve-traversal
const getDataPath = (filename) => {
const cwd = process.cwd()
if (cwd.endsWith('.output')) {
// nosemgrep
return path.join(cwd, '../server/data', filename)
}
// nosemgrep
return path.join(cwd, 'server/data', filename)
}
const AUDIT_LOG_FILE = getDataPath('audit.log.jsonl')
function safeStr(v, max = 500) {
return String(v == null ? '' : v).slice(0, max)
}
export async function writeAuditLog(eventType, data = {}) {
const enabled = (process.env.AUDIT_LOG_ENABLED || 'true').toLowerCase() !== 'false'
if (!enabled) return
const entry = {
ts: new Date().toISOString(),
type: safeStr(eventType, 100),
data
}
await fs.mkdir(path.dirname(AUDIT_LOG_FILE), { recursive: true })
await fs.appendFile(AUDIT_LOG_FILE, JSON.stringify(entry) + '\n', 'utf-8')
}

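A minimal usage sketch for writeAuditLog (not part of this commit; route, event names and fields are illustrative, and the h3 helpers could equally come from Nitro's auto-imports):

// Hypothetical handler, e.g. server/api/auth/login.post.js
import { defineEventHandler, readBody } from 'h3'
import { writeAuditLog } from '../../utils/audit-log.js'

export default defineEventHandler(async (event) => {
  const body = await readBody(event)
  const ok = body?.password === process.env.DEMO_PASSWORD // stand-in for the real credential check
  // Appends one JSON line per call to server/data/audit.log.jsonl
  // (no-op when AUDIT_LOG_ENABLED=false)
  await writeAuditLog(ok ? 'login.success' : 'login.failed', { email: body?.email })
  return { success: ok }
})
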
View File

@@ -117,7 +117,7 @@ export async function readUsers() {
}
return users
-} catch (_error) {
+} catch (error) {
if (error.code === 'ENOENT') {
return []
}
@@ -133,7 +133,7 @@ export async function writeUsers(users) {
const encryptedData = encryptObject(users, encryptionKey)
await fs.writeFile(USERS_FILE, encryptedData, 'utf-8')
return true
-} catch (_error) {
+} catch (error) {
console.error('Fehler beim Schreiben der Benutzerdaten:', error)
return false
}
@@ -183,7 +183,7 @@ export async function readSessions() {
await writeSessions(sessions)
return sessions
}
-} catch (_error) {
+} catch (error) {
if (error.code === 'ENOENT') {
return []
}
@@ -199,7 +199,7 @@ export async function writeSessions(sessions) {
const encryptedData = encryptObject(sessions, encryptionKey)
await fs.writeFile(SESSIONS_FILE, encryptedData, 'utf-8')
return true
-} catch (_error) {
+} catch (error) {
console.error('Fehler beim Schreiben der Sessions:', error)
return false
}

server/utils/cookies.js (new normal file, 46 additions)

@@ -0,0 +1,46 @@
function isProduction() {
return process.env.NODE_ENV === 'production'
}
export function getCookieSecureDefault() {
// In production: always Secure (even when HTTPS is terminated by Apache).
// In dev: default false so login works via http://localhost.
if (process.env.COOKIE_SECURE === 'true') return true
if (process.env.COOKIE_SECURE === 'false') return false
return isProduction()
}
export function getSameSiteDefault() {
// Expectation from the security feedback: Strict. In dev possibly Lax so SSO/flows don't get in the way.
const v = (process.env.COOKIE_SAMESITE || '').toLowerCase().trim()
if (v === 'strict' || v === 'lax' || v === 'none') return v
return isProduction() ? 'strict' : 'lax'
}
export function getAuthCookieOptions() {
return {
httpOnly: true,
secure: getCookieSecureDefault(),
sameSite: getSameSiteDefault(),
maxAge: 60 * 60 * 24 * 7 // 7 days
}
}
export function getDownloadCookieOptions() {
// The download token is short-lived; SameSite strict is fine.
return {
httpOnly: true,
secure: getCookieSecureDefault(),
sameSite: 'strict',
maxAge: 60 * 60 * 24 // 24 hours
}
}
export function getDownloadCookieOptionsWithMaxAge(maxAgeSeconds) {
return {
...getDownloadCookieOptions(),
maxAge: Number(maxAgeSeconds) || getDownloadCookieOptions().maxAge
}
}

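A sketch of how these option helpers might be applied when issuing the session cookie (cookie name, token source and route are assumptions):

import crypto from 'crypto'
import { defineEventHandler, setCookie } from 'h3'
import { getAuthCookieOptions } from '../../utils/cookies.js'

export default defineEventHandler(async (event) => {
  const sessionToken = crypto.randomUUID() // stand-in for the real session creation
  // httpOnly + Secure + SameSite follow NODE_ENV / COOKIE_SECURE / COOKIE_SAMESITE
  setCookie(event, 'auth_token', sessionToken, getAuthCookieOptions())
  return { success: true }
})
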
View File

@@ -1,9 +1,16 @@
import crypto from 'crypto'
// Encryption configuration
-const ALGORITHM = 'aes-256-cbc'
-const IV_LENGTH = 16
+// v1 (legacy): aes-256-cbc (no authenticity protection)
+const LEGACY_ALGORITHM = 'aes-256-cbc'
+const LEGACY_IV_LENGTH = 16
+// v2 (default): aes-256-gcm (AEAD, state of the art)
+const ALGORITHM = 'aes-256-gcm'
+const IV_LENGTH = 12
+const AUTH_TAG_LENGTH = 16
const SALT_LENGTH = 32
+const VERSION_PREFIX = 'v2:'
/**
 * Derives a key from a password and a salt
@@ -12,35 +19,87 @@ function deriveKey(password, salt) {
return crypto.pbkdf2Sync(password, salt, 100000, 32, 'sha512')
}
function encryptV2GCM(text, password) {
// Generate salt
const salt = crypto.randomBytes(SALT_LENGTH)
// Derive key
const key = deriveKey(password, salt)
// Generate IV (12 bytes is best practice for GCM)
const iv = crypto.randomBytes(IV_LENGTH)
// Create cipher
const cipher = crypto.createCipheriv(ALGORITHM, key, iv)
// Encrypt
const encrypted = Buffer.concat([
cipher.update(text, 'utf8'),
cipher.final()
])
// Get the auth tag
const tag = cipher.getAuthTag()
// Combine salt + IV + tag + ciphertext
const combined = Buffer.concat([salt, iv, tag, encrypted])
return `${VERSION_PREFIX}${combined.toString('base64')}`
}
function decryptLegacyCBC(encryptedData, password) {
// Decode base64
const combined = Buffer.from(encryptedData, 'base64')
// Extract components (v1: salt(32) + iv(16) + ciphertext)
const salt = combined.subarray(0, SALT_LENGTH)
const iv = combined.subarray(SALT_LENGTH, SALT_LENGTH + LEGACY_IV_LENGTH)
const encrypted = combined.subarray(SALT_LENGTH + LEGACY_IV_LENGTH)
// Derive key
const key = deriveKey(password, salt)
// Create decipher
const decipher = crypto.createDecipheriv(LEGACY_ALGORITHM, key, iv)
// Decrypt
const decrypted = Buffer.concat([
decipher.update(encrypted),
decipher.final()
])
return decrypted.toString('utf8')
}
function decryptV2GCM(encryptedData, password) {
const b64 = encryptedData.slice(VERSION_PREFIX.length)
const combined = Buffer.from(b64, 'base64')
// v2: salt(32) + iv(12) + tag(16) + ciphertext
const salt = combined.subarray(0, SALT_LENGTH)
const iv = combined.subarray(SALT_LENGTH, SALT_LENGTH + IV_LENGTH)
const tagStart = SALT_LENGTH + IV_LENGTH
const tag = combined.subarray(tagStart, tagStart + AUTH_TAG_LENGTH)
const encrypted = combined.subarray(tagStart + AUTH_TAG_LENGTH)
const key = deriveKey(password, salt)
const decipher = crypto.createDecipheriv(ALGORITHM, key, iv)
decipher.setAuthTag(tag)
const decrypted = Buffer.concat([
decipher.update(encrypted),
decipher.final()
])
return decrypted.toString('utf8')
}
/**
 * Encrypts a string
*/
export function encrypt(text, password) {
try {
-// Generate salt
-const salt = crypto.randomBytes(SALT_LENGTH)
-// Derive key
-const key = deriveKey(password, salt)
-// Generate IV
-const iv = crypto.randomBytes(IV_LENGTH)
-// Create cipher
-const cipher = crypto.createCipheriv(ALGORITHM, key, iv)
-// Encrypt
-let encrypted = cipher.update(text, 'utf8', 'hex')
-encrypted += cipher.final('hex')
-// Combine salt + IV + encrypted text
-const combined = Buffer.concat([
-salt,
-iv,
-Buffer.from(encrypted, 'hex')
-])
-return combined.toString('base64')
+return encryptV2GCM(text, password)
} catch (error) {
console.error('Verschlüsselungsfehler:', error)
throw new Error('Fehler beim Verschlüsseln der Daten')
@@ -52,25 +111,12 @@ export function encrypt(text, password) {
*/
export function decrypt(encryptedData, password) {
try {
-// Decode base64
-const combined = Buffer.from(encryptedData, 'base64')
-// Extract components
-const salt = combined.subarray(0, SALT_LENGTH)
-const iv = combined.subarray(SALT_LENGTH, SALT_LENGTH + IV_LENGTH)
-const encrypted = combined.subarray(SALT_LENGTH + IV_LENGTH)
-// Derive key
-const key = deriveKey(password, salt)
-// Create decipher
-const decipher = crypto.createDecipheriv(ALGORITHM, key, iv)
-// Decrypt
-let decrypted = decipher.update(encrypted, null, 'utf8')
-decrypted += decipher.final('utf8')
-return decrypted
+if (typeof encryptedData === 'string' && encryptedData.startsWith(VERSION_PREFIX)) {
+return decryptV2GCM(encryptedData, password)
+}
+// Fallback: legacy CBC without prefix
+return decryptLegacyCBC(encryptedData, password)
} catch (error) {
console.error('Entschlüsselungsfehler:', error)
throw new Error('Fehler beim Entschlüsseln der Daten')

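A quick round-trip sketch for the module above (module path and key source assumed): new ciphertexts get the v2: prefix and AES-256-GCM, while prefix-less values written before this commit fall back to the legacy CBC path.

import { encrypt, decrypt } from './encryption.js' // actual module path may differ

const key = process.env.ENCRYPTION_KEY || 'dev-only-key'
const ciphertext = encrypt(JSON.stringify({ hello: 'world' }), key)

console.log(ciphertext.startsWith('v2:'))         // true: salt(32) + iv(12) + tag(16) + data, base64
console.log(JSON.parse(decrypt(ciphertext, key))) // { hello: 'world' }
// A value without the 'v2:' prefix is routed to decryptLegacyCBC() instead.
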
server/utils/hibp.js (new normal file, 95 additions)

@@ -0,0 +1,95 @@
import crypto from 'crypto'
const cache = globalThis.__HTC_HIBP_CACHE__ || new Map()
globalThis.__HTC_HIBP_CACHE__ = cache
function nowMs() {
return Date.now()
}
function sha1UpperHex(input) {
return crypto.createHash('sha1').update(String(input), 'utf8').digest('hex').toUpperCase()
}
function parseRangeResponse(text) {
// Format: "SUFFIX:COUNT" per line
const map = new Map()
for (const line of String(text || '').split('\n')) {
const trimmed = line.trim()
if (!trimmed) continue
const [suffix, count] = trimmed.split(':')
if (suffix && count) map.set(suffix.trim().toUpperCase(), Number(count.trim()) || 0)
}
return map
}
async function fetchWithTimeout(url, { timeoutMs = 4000, headers = {} } = {}) {
const ctrl = new AbortController()
const t = setTimeout(() => ctrl.abort(), timeoutMs)
try {
return await fetch(url, { headers, signal: ctrl.signal })
} finally {
clearTimeout(t)
}
}
/**
 * Checks a password against HIBP Pwned Passwords (k-anonymity).
 * Returns: { pwned: boolean, count: number }
*/
export async function checkPasswordPwned(password) {
const enabled = (process.env.HIBP_ENABLED || '').toLowerCase() === 'true'
if (!enabled) return { pwned: false, count: 0 }
const hash = sha1UpperHex(password)
const prefix = hash.slice(0, 5)
const suffix = hash.slice(5)
// Per-prefix cache (TTL)
const ttlMs = Number(process.env.HIBP_CACHE_TTL_MS || 6 * 60 * 60 * 1000) // 6h
const cached = cache.get(prefix)
const now = nowMs()
if (cached && cached.expiresAt > now && cached.map) {
const count = cached.map.get(suffix) || 0
return { pwned: count > 0, count }
}
const ua = process.env.HIBP_USER_AGENT || 'harheimertc'
const url = `https://api.pwnedpasswords.com/range/${prefix}`
const res = await fetchWithTimeout(url, {
timeoutMs: Number(process.env.HIBP_TIMEOUT_MS || 4000),
headers: {
'User-Agent': ua,
// HIBP recommends this optional header for response padding; we enable it by default.
'Add-Padding': 'true'
}
})
if (!res.ok) {
const failClosed = (process.env.HIBP_FAIL_CLOSED || '').toLowerCase() === 'true'
if (failClosed) {
throw createError({ statusCode: 503, statusMessage: 'Passwortprüfung derzeit nicht verfügbar. Bitte später erneut versuchen.' })
}
// fail-open
return { pwned: false, count: 0 }
}
const text = await res.text()
const map = parseRangeResponse(text)
cache.set(prefix, { expiresAt: now + ttlMs, map })
const count = map.get(suffix) || 0
return { pwned: count > 0, count }
}
export async function assertPasswordNotPwned(password) {
const { pwned } = await checkPasswordPwned(password)
if (pwned) {
throw createError({
statusCode: 400,
message: 'Dieses Passwort wurde in bekannten Datenleaks gefunden. Bitte wählen Sie ein anderes Passwort.'
})
}
}

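Sketch of wiring the check into a registration or password-reset handler (route, field names and the length check are assumptions; with HIBP_ENABLED unset, checkPasswordPwned short-circuits and nothing is blocked):

import { defineEventHandler, readBody, createError } from 'h3'
import { assertPasswordNotPwned } from '../../utils/hibp.js'

export default defineEventHandler(async (event) => {
  const { password } = await readBody(event)
  if (!password || password.length < 12) {
    throw createError({ statusCode: 400, message: 'Passwort zu kurz.' })
  }
  // Throws a 400 if the password shows up in the Pwned Passwords corpus
  await assertPasswordNotPwned(password)
  // ... hash the password and create/update the user (omitted) ...
  return { success: true }
})
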
server/utils/rate-limit.js (new normal file, 131 additions)

@@ -0,0 +1,131 @@
/**
 * Very simple in-memory rate limits for Nitro/h3.
 *
 * Note: in-memory state is per process/instance. For horizontal scaling
 * this should be moved to Redis or similar (see docs).
*/
const buckets = globalThis.__HTC_RATE_LIMIT_BUCKETS__ || new Map()
// Persist across hot reloads
globalThis.__HTC_RATE_LIMIT_BUCKETS__ = buckets
function nowMs() {
return Date.now()
}
function sleep(ms) {
return new Promise(resolve => setTimeout(resolve, ms))
}
export function getClientIp(event) {
const xff = getHeader(event, 'x-forwarded-for')
if (xff) {
// First IP in list is original client
const first = xff.split(',')[0]?.trim()
if (first) return first
}
const realIp = getHeader(event, 'x-real-ip')
if (realIp) return realIp.trim()
return event?.node?.req?.socket?.remoteAddress || 'unknown'
}
function getBucket(key) {
let b = buckets.get(key)
if (!b) {
b = {
windowStart: nowMs(),
count: 0,
consecutiveFails: 0,
lockedUntil: 0
}
buckets.set(key, b)
}
return b
}
function normalizeKeyPart(part) {
return String(part || '')
.trim()
.toLowerCase()
.replace(/\s+/g, ' ')
.slice(0, 200)
}
function buildKey(name, keyParts) {
const parts = (Array.isArray(keyParts) ? keyParts : [keyParts]).map(normalizeKeyPart)
return `${name}:${parts.join(':')}`
}
function resetWindowIfNeeded(bucket, windowMs, now) {
if (now - bucket.windowStart >= windowMs) {
bucket.windowStart = now
bucket.count = 0
// consecutiveFails is deliberately kept (backoff for pesky clients)
}
}
export function assertRateLimit(event, options) {
const {
name,
keyParts,
windowMs = 10 * 60 * 1000,
maxAttempts = 10,
lockoutMs = 15 * 60 * 1000,
statusCode = 429,
message = 'Zu viele Versuche. Bitte später erneut versuchen.'
} = options || {}
const key = buildKey(name, keyParts)
const bucket = getBucket(key)
const now = nowMs()
if (bucket.lockedUntil && bucket.lockedUntil > now) {
const retryAfterSec = Math.ceil((bucket.lockedUntil - now) / 1000)
setHeader(event, 'Retry-After', String(retryAfterSec))
throw createError({ statusCode, statusMessage: message })
}
resetWindowIfNeeded(bucket, windowMs, now)
if (bucket.count >= maxAttempts) {
bucket.lockedUntil = now + lockoutMs
const retryAfterSec = Math.ceil(lockoutMs / 1000)
setHeader(event, 'Retry-After', String(retryAfterSec))
throw createError({ statusCode, statusMessage: message })
}
// Count the attempt
bucket.count += 1
}
export async function registerRateLimitFailure(event, options) {
const {
name,
keyParts,
delayBaseMs = 300,
delayMaxMs = 5000
} = options || {}
const key = buildKey(name, keyParts)
const bucket = getBucket(key)
bucket.consecutiveFails = Math.min((bucket.consecutiveFails || 0) + 1, 30)
// Exponential backoff: base * 2^(n-1)
const delay = Math.min(delayBaseMs * Math.pow(2, bucket.consecutiveFails - 1), delayMaxMs)
await sleep(delay)
}
export function registerRateLimitSuccess(_event, options) {
const { name, keyParts } = options || {}
const key = buildKey(name, keyParts)
const bucket = getBucket(key)
bucket.consecutiveFails = 0
// After a success, counters start fresh
bucket.count = 0
bucket.windowStart = nowMs()
bucket.lockedUntil = 0
}

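Sketch of the intended call pattern around a credential check (hypothetical login route; the credential check itself is a stand-in):

import { defineEventHandler, readBody, createError } from 'h3'
import {
  assertRateLimit,
  registerRateLimitFailure,
  registerRateLimitSuccess,
  getClientIp
} from '../../utils/rate-limit.js'

export default defineEventHandler(async (event) => {
  const { email, password } = await readBody(event)
  const keyParts = [getClientIp(event), email]

  // Throws 429 (with Retry-After) once the window or lockout is exceeded
  assertRateLimit(event, { name: 'login', keyParts, maxAttempts: 10 })

  const valid = password === process.env.DEMO_PASSWORD // stand-in for the real credential check
  if (!valid) {
    // Counts the failure and sleeps with exponential backoff before responding
    await registerRateLimitFailure(event, { name: 'login', keyParts })
    throw createError({ statusCode: 401, statusMessage: 'Ungültige Anmeldedaten' })
  }

  // Clears counters and lockout so legitimate users start fresh
  registerRateLimitSuccess(event, { name: 'login', keyParts })
  return { success: true }
})
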
View File

@@ -0,0 +1,21 @@
import fs from 'fs/promises'
export async function assertPdfMagicHeader(filePath) {
const fh = await fs.open(filePath, 'r')
try {
const buf = Buffer.alloc(5)
await fh.read(buf, 0, 5, 0)
const header = buf.toString('utf8')
if (header !== '%PDF-') {
throw createError({ statusCode: 400, statusMessage: 'Ungültige Datei: kein PDF' })
}
} finally {
await fh.close()
}
}
export function clamp(n, min, max) {
return Math.max(min, Math.min(max, n))
}

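Sketch of validating an uploaded file before accepting it (module name, route and temp path are assumptions; the multipart handling itself is omitted):

import { defineEventHandler } from 'h3'
import { assertPdfMagicHeader, clamp } from '../../utils/upload-validation.js' // module name assumed

export default defineEventHandler(async (event) => {
  const tmpPath = '/tmp/upload-123.pdf' // would come from the upload/multipart handling
  // Rejects with 400 unless the file starts with the literal bytes "%PDF-"
  await assertPdfMagicHeader(tmpPath)
  // clamp() is useful for user-supplied dimensions, e.g. thumbnail sizes
  const width = clamp(Number(5000), 16, 2000) // -> 2000
  return { ok: true, width }
})
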
View File

@@ -0,0 +1,46 @@
const regChallenges = globalThis.__HTC_WEBAUTHN_REG_CHALLENGES__ || new Map()
const authChallenges = globalThis.__HTC_WEBAUTHN_AUTH_CHALLENGES__ || new Map()
globalThis.__HTC_WEBAUTHN_REG_CHALLENGES__ = regChallenges
globalThis.__HTC_WEBAUTHN_AUTH_CHALLENGES__ = authChallenges
function nowMs() {
return Date.now()
}
function cleanup(map) {
const now = nowMs()
for (const [k, v] of map.entries()) {
if (!v || !v.expiresAt || v.expiresAt <= now) map.delete(k)
}
}
export function setRegistrationChallenge(userId, challenge, ttlMs = 5 * 60 * 1000) {
cleanup(regChallenges)
regChallenges.set(String(userId), { challenge, expiresAt: nowMs() + ttlMs })
}
export function getRegistrationChallenge(userId) {
cleanup(regChallenges)
const v = regChallenges.get(String(userId))
return v?.challenge || null
}
export function clearRegistrationChallenge(userId) {
regChallenges.delete(String(userId))
}
export function setAuthChallenge(challenge, ttlMs = 5 * 60 * 1000) {
cleanup(authChallenges)
authChallenges.set(String(challenge), { expiresAt: nowMs() + ttlMs })
}
export function consumeAuthChallenge(challenge) {
cleanup(authChallenges)
const key = String(challenge)
const v = authChallenges.get(key)
if (!v) return false
authChallenges.delete(key)
return true
}

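Sketch of the challenge lifecycle during passkey login (module name assumed; generating and verifying the actual WebAuthn options/response with a library is omitted):

import crypto from 'crypto'
import { setAuthChallenge, consumeAuthChallenge } from '../../utils/webauthn-challenges.js' // module name assumed

// 1) options step: create a challenge and remember it for ~5 minutes
const challenge = crypto.randomBytes(32).toString('base64url')
setAuthChallenge(challenge)

// 2) verify step: the challenge extracted from clientDataJSON must still be pending;
//    consuming it here makes replaying the same assertion impossible
if (!consumeAuthChallenge(challenge)) {
  throw new Error('Challenge unknown or expired')
}
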
View File

@@ -0,0 +1,26 @@
function deriveFromBaseUrl() {
const base = process.env.NUXT_PUBLIC_BASE_URL || 'http://localhost:3100'
try {
const u = new URL(base)
return {
origin: `${u.protocol}//${u.host}`,
rpId: u.hostname
}
} catch {
return { origin: 'http://localhost:3100', rpId: 'localhost' }
}
}
export function getWebAuthnConfig() {
const derived = deriveFromBaseUrl()
const rpId = process.env.WEBAUTHN_RP_ID || derived.rpId
const rpName = process.env.WEBAUTHN_RP_NAME || 'Harheimer TC'
const origin = process.env.WEBAUTHN_ORIGIN || derived.origin
const requireUV = (process.env.WEBAUTHN_REQUIRE_UV || '').toLowerCase() === 'true'
return { rpId, rpName, origin, requireUV }
}

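A small sketch of how the derived values behave (the base URL is an example; WEBAUTHN_* environment variables override the derivation):

import { getWebAuthnConfig } from '../../utils/webauthn-config.js' // module name assumed

// With NUXT_PUBLIC_BASE_URL=https://tennis.example and no WEBAUTHN_* overrides:
//   rpId      -> 'tennis.example'
//   origin    -> 'https://tennis.example'
//   rpName    -> 'Harheimer TC'
//   requireUV -> false
const { rpId, rpName, origin, requireUV } = getWebAuthnConfig()
console.log({ rpId, rpName, origin, requireUV })
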
View File

@@ -0,0 +1,34 @@
export function toBase64Url(buf) {
if (buf == null) return ''
if (typeof buf === 'string') return buf
const b = Buffer.isBuffer(buf) ? buf : Buffer.from(buf)
// Node supports 'base64url' on recent versions; keep fallback for safety.
try {
return b.toString('base64url')
} catch {
return b
.toString('base64')
.replace(/\+/g, '-')
.replace(/\//g, '_')
.replace(/=+$/g, '')
}
}
export function fromBase64Url(s) {
if (!s) return Buffer.alloc(0)
// Node supports 'base64url' on recent versions; keep fallback for safety.
try {
return Buffer.from(String(s), 'base64url')
} catch {
let v = String(s).replace(/-/g, '+').replace(/_/g, '/')
while (v.length % 4) v += '='
return Buffer.from(v, 'base64')
}
}
export function parseClientDataJSON(clientDataJSONB64Url) {
const json = fromBase64Url(clientDataJSONB64Url).toString('utf8')
return JSON.parse(json)
}
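
Round-trip sketch for the helpers above (module name assumed); the commented lines indicate where parseClientDataJSON fits during assertion verification:

import { toBase64Url, fromBase64Url, parseClientDataJSON } from '../../utils/webauthn-base64.js' // module name assumed

const raw = Buffer.from('hello webauthn')
const encoded = toBase64Url(raw)       // URL-safe: no '+', '/' or trailing '='
const decoded = fromBase64Url(encoded) // Buffer equal to `raw`
console.log(raw.equals(decoded))       // true

// clientDataJSON arrives base64url-encoded from the browser:
// const clientData = parseClientDataJSON(body.response.clientDataJSON)
// clientData.type      -> 'webauthn.get' for authentication
// clientData.origin    -> should match getWebAuthnConfig().origin
// clientData.challenge -> base64url challenge to check with consumeAuthChallenge()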