diff --git a/deploy-production.sh b/deploy-production.sh index 40cceec..f23c38c 100755 --- a/deploy-production.sh +++ b/deploy-production.sh @@ -100,6 +100,18 @@ if ls public/data/*.csv >/dev/null 2>&1; then else echo " No public CSVs to backup (public/data/*.csv not found)" fi +# Prefer internal public-data under server/data/public-data for backups; fallback to legacy public/data +if ls server/data/public-data/*.csv >/dev/null 2>&1; then + mkdir -p "$BACKUP_DIR/public-data" + cp -a server/data/public-data/*.csv "$BACKUP_DIR/public-data/" + echo " Backed up server/data/public-data/*.csv -> $BACKUP_DIR/public-data/" +elif ls public/data/*.csv >/dev/null 2>&1; then + mkdir -p "$BACKUP_DIR/public-data" + cp -a public/data/*.csv "$BACKUP_DIR/public-data/" + echo " Backed up public/data/*.csv -> $BACKUP_DIR/public-data/" +else + echo " No public CSVs to backup (server/data/public-data or public/data not found)" +fi # 2. Handle local changes and Git Pull echo "2. Handling local changes and pulling latest from git..." @@ -158,6 +170,38 @@ if [ -d ".output" ]; then if [ -d ".output" ]; then echo "ERROR: .output konnte auch nach erneutem Versuch nicht gelöscht werden!" 
echo "Bitte manuell prüfen und löschen: rm -rf .output" + if ls "$BACKUP_DIR/public-data"/*.csv >/dev/null 2>&1; then + # Restore into internal storage (server/data/public-data) + mkdir -p server/data/public-data + for csv_file in "$BACKUP_DIR/public-data"/*.csv; do + filename=$(basename "$csv_file") + cp -f "$csv_file" "server/data/public-data/$filename" + if [ -f "server/data/public-data/$filename" ]; then + backup_size=$(stat -f%z "$csv_file" 2>/dev/null || stat -c%s "$csv_file" 2>/dev/null || echo "0") + restored_size=$(stat -f%z "server/data/public-data/$filename" 2>/dev/null || stat -c%s "server/data/public-data/$filename" 2>/dev/null || echo "0") + if [ "$backup_size" = "$restored_size" ] && [ "$backup_size" != "0" ]; then + echo " \u2713 Restored server/data/public-data/$filename from backup ($backup_size bytes)" + else + echo " \u26a0 WARNING: server/data/public-data/$filename size mismatch (Backup: $backup_size, Restored: $restored_size)" + fi + else + echo " \u274c ERROR: Konnte server/data/public-data/$filename nicht wiederherstellen!" + fi + done + + echo " \u2713 All public-data files restored into server/data/public-data ($BACKUP_DIR/public-data)." + + # Optional: synchronize internal public-data into public/data for legacy builds + # This uses the project's sync script and forces overwrite in public/data. 
+ if command -v node >/dev/null 2>&1 && [ -f scripts/sync-public-data.js ]; then + echo " Synchronizing server/data/public-data -> public/data (using scripts/sync-public-data.js --force)" + node scripts/sync-public-data.js --force || echo " WARNING: sync script failed" + else + echo " Note: To publish CSVs to public/data run: node scripts/sync-public-data.js --force" + fi + else + echo " No public CSVs to restore" + fi exit 1 fi fi diff --git a/deploy-test.sh b/deploy-test.sh index 390c9dc..0e0f723 100755 --- a/deploy-test.sh +++ b/deploy-test.sh @@ -92,12 +92,17 @@ else exit 1 fi -if ls public/data/*.csv >/dev/null 2>&1; then +# Prefer internal public-data under server/data/public-data for backups; fallback to legacy public/data +if ls server/data/public-data/*.csv >/dev/null 2>&1; then + mkdir -p "$BACKUP_DIR/public-data" + cp -a server/data/public-data/*.csv "$BACKUP_DIR/public-data/" + echo " Backed up server/data/public-data/*.csv -> $BACKUP_DIR/public-data/" +elif ls public/data/*.csv >/dev/null 2>&1; then mkdir -p "$BACKUP_DIR/public-data" cp -a public/data/*.csv "$BACKUP_DIR/public-data/" echo " Backed up public/data/*.csv -> $BACKUP_DIR/public-data/" else - echo " No public CSVs to backup (public/data/*.csv not found)" + echo " No public CSVs to backup (server/data/public-data or public/data not found)" fi # 2. Handle local changes and Git Pull @@ -310,38 +315,33 @@ echo " Restored server/data from backup ($BACKUP_DIR/server-data)." 
# Stelle alle CSVs wieder her if ls "$BACKUP_DIR/public-data"/*.csv >/dev/null 2>&1; then - mkdir -p public/data - - # WICHTIG: Überschreibe auch Dateien, die aus dem Git-Repository kommen - # Verwende cp mit -f (force) um sicherzustellen, dass Backup-Dateien Vorrang haben + # Restore into internal storage (server/data/public-data) + mkdir -p server/data/public-data for csv_file in "$BACKUP_DIR/public-data"/*.csv; do filename=$(basename "$csv_file") - # Überschreibe explizit, auch wenn Datei bereits existiert - cp -f "$csv_file" "public/data/$filename" - # Stelle sicher, dass die Datei wirklich überschrieben wurde - if [ -f "public/data/$filename" ]; then - # Prüfe, ob die Datei wirklich vom Backup kommt (Größenvergleich) + cp -f "$csv_file" "server/data/public-data/$filename" + if [ -f "server/data/public-data/$filename" ]; then backup_size=$(stat -f%z "$csv_file" 2>/dev/null || stat -c%s "$csv_file" 2>/dev/null || echo "0") - restored_size=$(stat -f%z "public/data/$filename" 2>/dev/null || stat -c%s "public/data/$filename" 2>/dev/null || echo "0") + restored_size=$(stat -f%z "server/data/public-data/$filename" 2>/dev/null || stat -c%s "server/data/public-data/$filename" 2>/dev/null || echo "0") if [ "$backup_size" = "$restored_size" ] && [ "$backup_size" != "0" ]; then - echo " ✓ Restored public/data/$filename from backup ($backup_size bytes)" + echo " ✓ Restored server/data/public-data/$filename from backup ($backup_size bytes)" else - echo " ⚠ WARNING: public/data/$filename Größe stimmt nicht überein (Backup: $backup_size, Restored: $restored_size)" + echo " ⚠ WARNING: server/data/public-data/$filename size mismatch (Backup: $backup_size, Restored: $restored_size)" fi else - echo " ❌ ERROR: Konnte public/data/$filename nicht wiederherstellen!" + echo " ❌ ERROR: Konnte server/data/public-data/$filename nicht wiederherstellen!" fi done - - echo " ✓ All public/data/*.csv files restored from backup ($BACKUP_DIR/public-data)." 
- - # Zusätzliche Sicherheit: Entferne public/data Dateien aus Git-Index, falls sie getrackt sind - # (nach dem Restore, damit sie nicht beim nächsten git reset überschrieben werden) - if git ls-files --error-unmatch public/data/*.csv >/dev/null 2>&1; then - echo " WARNING: public/data/*.csv Dateien sind noch im Git getrackt!" - echo " Entferne sie aus dem Git-Index (Dateien bleiben erhalten)..." - git rm --cached public/data/*.csv 2>/dev/null || true - echo " ✓ public/data/*.csv aus Git-Index entfernt" + + echo " ✓ All public-data files restored into server/data/public-data ($BACKUP_DIR/public-data)." + + # Optional: synchronize internal public-data into public/data for legacy builds + # This uses the project's sync script and forces overwrite in public/data. + if command -v node >/dev/null 2>&1 && [ -f scripts/sync-public-data.js ]; then + echo " Synchronizing server/data/public-data -> public/data (using scripts/sync-public-data.js --force)" + node scripts/sync-public-data.js --force || echo " WARNING: sync script failed" + else + echo " Note: To publish CSVs to public/data run: node scripts/sync-public-data.js --force" fi else echo " No public CSVs to restore" diff --git a/package.json b/package.json index 58ccb2f..3ffd4f6 100644 --- a/package.json +++ b/package.json @@ -5,13 +5,16 @@ "private": true, "type": "module", "scripts": { - "dev": "nuxt dev --port 3100", + "dev": "nuxt dev --port 3100", "build": "nuxt build", "generate": "nuxt generate", "preview": "nuxt preview --port 3100", "start": "nuxt start --port 3100", "postinstall": "nuxt prepare", - "test": "vitest run", + "test": "vitest run", + "check-security": "node scripts/verify-no-public-writes.js", + "smoke-local": "BASE_URL=http://127.0.0.1:3100 node scripts/smoke-tests.js", + "sync-public-data": "node scripts/sync-public-data.js", "test:watch": "vitest watch", "lint": "eslint . 
--fix" }, diff --git a/scripts/inspect-forms.js b/scripts/inspect-forms.js index e408b36..ab99f7b 100644 --- a/scripts/inspect-forms.js +++ b/scripts/inspect-forms.js @@ -60,25 +60,34 @@ async function inspect(pdfPath) { async function main() { const repoRoot = process.cwd() const template = path.join(repoRoot, 'server', 'templates', 'mitgliedschaft-fillable.pdf') - // pick latest generated PDF in public/uploads that is not the sample - // nosemgrep: javascript.lang.security.audit.path-traversal.path-join-resolve-traversal.path-join-resolve-traversal - const uploads = path.join(repoRoot, 'public', 'uploads') + + // Prefer internal upload directory used by the API (server/data/uploads). + // If legacy files exist in public/uploads, warn and inspect them as well. + const internalUploads = path.join(repoRoot, 'server', 'data', 'uploads') + const publicUploads = path.join(repoRoot, 'public', 'uploads') + let pdfFiles = [] - if (fs.existsSync(uploads)) { - pdfFiles = fs.readdirSync(uploads).filter(f => f.toLowerCase().endsWith('.pdf')) + if (fs.existsSync(internalUploads)) { + pdfFiles = fs.readdirSync(internalUploads).filter(f => f.toLowerCase().endsWith('.pdf')) .map(f => { - // nosemgrep: javascript.lang.security.audit.path-traversal.path-join-resolve-traversal.path-join-resolve-traversal - const filePath = path.join(uploads, f) - return { f, mtime: fs.statSync(filePath).mtimeMs } + const filePath = path.join(internalUploads, f) + return { f, mtime: fs.statSync(filePath).mtimeMs, dir: internalUploads } }) - .sort((a,b) => b.mtime - a.mtime) - .map(x => x.f) } - const apiPdf = pdfFiles.find(n => !n.includes('sample')) || pdfFiles[0] + + // Do NOT fall back to public/uploads to avoid encouraging public exposure. + if (pdfFiles.length === 0) { + if (fs.existsSync(publicUploads)) { + console.warn('WARN: PDFs exist in public/uploads. 
Please migrate them to server/data/uploads using scripts/migrate-public-galerie-to-metadata.js') + } + } + + pdfFiles = pdfFiles.sort((a, b) => b.mtime - a.mtime) + const apiPdfEntry = pdfFiles.find(e => !e.f.includes('sample')) || pdfFiles[0] + await inspect(template) - // nosemgrep: javascript.lang.security.audit.path-traversal.path-join-resolve-traversal.path-join-resolve-traversal - if (apiPdf) await inspect(path.join(uploads, apiPdf)) - else console.log('No API-generated PDF found in public/uploads') + if (apiPdfEntry) await inspect(path.join(apiPdfEntry.dir, apiPdfEntry.f)) + else console.log('No API-generated PDF found in server/data/uploads or public/uploads') } main().catch(e => { console.error(e); process.exit(1) }) diff --git a/server/api/cms/satzung-upload.post.js b/server/api/cms/satzung-upload.post.js index 43f5bde..d549759 100644 --- a/server/api/cms/satzung-upload.post.js +++ b/server/api/cms/satzung-upload.post.js @@ -26,9 +26,12 @@ const getDataPath = (filename) => { } // Multer-Konfiguration für PDF-Uploads +// Store uploads in internal data directory instead of public/ +const DOCUMENTS_DIR = getDataPath('documents') + const storage = multer.diskStorage({ destination: (req, file, cb) => { - cb(null, 'public/documents/') + cb(null, DOCUMENTS_DIR) }, filename: (req, file, cb) => { cb(null, 'satzung.pdf') @@ -74,8 +77,9 @@ export default defineEventHandler(async (event) => { }) } - try { - await fs.mkdir(path.join(process.cwd(), 'public', 'documents'), { recursive: true }) + try { + // Ensure internal documents dir exists + await fs.mkdir(DOCUMENTS_DIR, { recursive: true }) // Multer-Middleware für File-Upload await new Promise((resolve, reject) => { @@ -133,8 +137,9 @@ export default defineEventHandler(async (event) => { configData.seiten = {} } + // Serve the uploaded statute via internal media proxy configData.seiten.satzung = { - pdfUrl: '/documents/satzung.pdf', + pdfUrl: '/api/media/documents/satzung.pdf', content: htmlContent } diff --git 
a/server/api/cms/save-csv.post.js b/server/api/cms/save-csv.post.js index a896b0b..0d91c23 100644 --- a/server/api/cms/save-csv.post.js +++ b/server/api/cms/save-csv.post.js @@ -45,15 +45,11 @@ export default defineEventHandler(async (event) => { }) } - // Wichtig: In Production werden statische Dateien aus `.output/public` ausgeliefert. - // Wenn PM2 `cwd` auf das Repo-Root setzt, ist `process.cwd()` NICHT `.output` – - // daher schreiben wir robust in alle sinnvollen Zielorte: - // - `.output/public/data/` (damit die laufende Instanz sofort die neuen Daten liefert) - // - `public/data/` (damit der nächste Build die Daten wieder übernimmt) - // - // nosemgrep: javascript.lang.security.audit.path-traversal.path-join-resolve-traversal.path-join-resolve-traversal - // filename is validated against allowlist above, path traversal prevented - const cwd = process.cwd() + // Neuer Ablauf (Option B): Schreibe CSVs ausschließlich in internes Datenverzeichnis, + // damit keine direkten Schreibzugriffe auf `public/` stattfinden. + // Später kann ein kontrollierter Deploy-/Sync-Prozess die Daten aus `server/data/public-data` + // in die öffentlich ausgelieferte `public/`-Location übernehmen. 
+ const cwd = process.cwd() const pathExists = async (p) => { try { @@ -97,23 +93,15 @@ export default defineEventHandler(async (event) => { } } - // Preferred: das tatsächlich ausgelieferte Verzeichnis in Production - // (Nuxt/Nitro serve static aus `.output/public`) - const preferredPaths = [] - if (await pathExists(path.join(cwd, '.output/public'))) { - preferredPaths.push(path.join(cwd, '.output/public/data', filename)) - } - if (await pathExists(path.join(cwd, '../.output/public'))) { - preferredPaths.push(path.join(cwd, '../.output/public/data', filename)) - } - - // Fallbacks: Source-Public (für Persistenz bei nächstem Build) und diverse cwd-Layouts - const fallbackPaths = [ - path.join(cwd, 'public/data', filename), - path.join(cwd, '../public/data', filename) + // Ziel: internes Datenverzeichnis unter `server/data/public-data` (persistente, interne Quelle) + const internalPaths = [ + path.join(cwd, 'server/data/public-data', filename), + path.join(cwd, '../server/data/public-data', filename) ] - const uniquePaths = [...new Set([...preferredPaths, ...fallbackPaths])] + // Behalte legacy `.output` write nur als optionalen, nicht-standardisierten Pfad + // (wird NICHT automatisch gefordert). Hauptsächlich schreiben wir intern. + const uniquePaths = [...new Set([...internalPaths])] const writeResults = [] const writeErrors = [] let wrotePreferred = false diff --git a/server/api/cms/users/list.get.js b/server/api/cms/users/list.get.js index 8809620..edbab63 100644 --- a/server/api/cms/users/list.get.js +++ b/server/api/cms/users/list.get.js @@ -17,25 +17,32 @@ export default defineEventHandler(async (event) => { const isVorstand = hasRole(currentUser, 'vorstand') - // Return users without Passwörter; Kontaktdaten nur für Vorstand + // Nur Admin oder Vorstand duerfen vollen Benutzer-Contact und Rollen sehen. 
+ const canSeePrivate = hasAnyRole(currentUser, 'admin', 'vorstand') + const safeUsers = users.map(u => { const migrated = migrateUserRoles({ ...u }) const roles = Array.isArray(migrated.roles) ? migrated.roles : (migrated.role ? [migrated.role] : ['mitglied']) - const email = isVorstand ? u.email : undefined - const phone = isVorstand ? (u.phone || '') : undefined - - return { - id: u.id, - email, - name: u.name, - roles: roles, - role: roles[0] || 'mitglied', // Rückwärtskompatibilität - phone, - active: u.active, - created: u.created, - lastLogin: u.lastLogin - } + return canSeePrivate + ? { + id: u.id, + email: u.email, + name: u.name, + roles: roles, + role: roles[0] || 'mitglied', + phone: u.phone || '', + active: u.active, + created: u.created, + lastLogin: u.lastLogin + } + : { + id: u.id, + name: u.name, + role: roles[0] || 'mitglied', + active: u.active, + lastLogin: u.lastLogin + } }) return { diff --git a/server/api/galerie/list.get.js b/server/api/galerie/list.get.js index d37e01b..2c5aa1c 100644 --- a/server/api/galerie/list.get.js +++ b/server/api/galerie/list.get.js @@ -45,35 +45,49 @@ export default defineEventHandler(async (event) => { } } - const metadata = await readGalerieMetadata() + let metadata = [] + try { + metadata = await readGalerieMetadata() + if (!Array.isArray(metadata)) { + console.warn('Galerie-Metadaten haben unerwartetes Format, verwende leere Liste') + metadata = [] + } + } catch (e) { + console.error('Fehler beim Lesen der Galerie-Metadaten, liefere leeres Ergebnis:', e.message) + metadata = [] + } // Filtere Bilder basierend auf Sichtbarkeit const visibleImages = metadata.filter(image => { - // Öffentliche Bilder sind für alle sichtbar + // Defensive checks + if (!image || typeof image !== 'object') return false if (image.isPublic) return true - // Private Bilder nur für eingeloggte Mitglieder return isLoggedIn }) - // Sortiere nach Upload-Datum (neueste zuerst) - visibleImages.sort((a, b) => new Date(b.uploadedAt) - new 
Date(a.uploadedAt)) + // Sortiere nach Upload-Datum (neueste zuerst) - defensive + visibleImages.sort((a, b) => { + const ta = new Date(a.uploadedAt || 0).getTime() + const tb = new Date(b.uploadedAt || 0).getTime() + return tb - ta + }) - // Pagination - const page = parseInt(getQuery(event).page) || 1 - const perPage = 10 + // Pagination (defensive defaults) + const page = Math.max(1, parseInt(getQuery(event).page) || 1) + const perPage = Math.max(1, parseInt(getQuery(event).perPage) || 10) const start = (page - 1) * perPage - const end = start + perPage - const paginatedImages = visibleImages.slice(start, end) + const paginatedImages = visibleImages.slice(start, start + perPage) + // Konsistente Rückgabeform return { success: true, images: paginatedImages.map(img => ({ - id: img.id, - title: img.title, - description: img.description, - isPublic: img.isPublic, - uploadedAt: img.uploadedAt, - previewFilename: img.previewFilename + id: img.id || img.filename || null, + title: img.title || '', + description: img.description || '', + isPublic: !!img.isPublic, + uploadedAt: img.uploadedAt || null, + previewFilename: img.previewFilename || null })), pagination: { page, diff --git a/server/api/mannschaften.get.js b/server/api/mannschaften.get.js index 5a307f8..98705fc 100644 --- a/server/api/mannschaften.get.js +++ b/server/api/mannschaften.get.js @@ -15,7 +15,10 @@ export default defineEventHandler(async (event) => { const cwd = process.cwd() const filename = 'mannschaften.csv' + // Prefer server/data, then .output/public/data, then public/data const candidates = [ + path.join(cwd, '.output/server/data', filename), + path.join(cwd, 'server/data', filename), path.join(cwd, '.output/public/data', filename), path.join(cwd, 'public/data', filename), path.join(cwd, '../.output/public/data', filename), diff --git a/server/api/members.get.js b/server/api/members.get.js index 3a5fedd..b5221d6 100644 --- a/server/api/members.get.js +++ b/server/api/members.get.js @@ -143,15 
+143,19 @@ export default defineEventHandler(async (event) => { // Sort by name mergedMembers.sort((a, b) => a.name.localeCompare(b.name)) - // Serverseitiger Datenschutz: Kontaktdaten nur für Vorstand + // Serverseitiger Datenschutz: nur Vorstands-Mitglieder erhalten volle Kontaktdaten/Logindaten const isVorstand = hasRole(currentUser, 'vorstand') + + // Für nicht-vorstandliche Anfragen liefern wir eine stark reduzierte, nicht-identifizierende + // Ansicht der Mitgliederliste (nur das Nötigste für öffentliche Anzeigen) const safeMembers = isVorstand ? mergedMembers : mergedMembers.map(m => ({ - ...m, - email: undefined, - phone: undefined, - address: undefined + // Minimale, unkritische Felder + id: m.id, + name: m.name, + source: m.source, + isMannschaftsspieler: !!m.isMannschaftsspieler })) return { diff --git a/server/api/membership/applications.get.js b/server/api/membership/applications.get.js index 637d960..1c024b6 100644 --- a/server/api/membership/applications.get.js +++ b/server/api/membership/applications.get.js @@ -4,6 +4,13 @@ import { decryptObject } from '../../utils/encryption.js' export default defineEventHandler(async (event) => { try { + // Nur Vorstand oder Admin darf Mitgliedschaftsantraege lesen + const token = getCookie(event, 'auth_token') + const currentUser = token ? 
await getUserFromToken(token) : null + if (!currentUser || !hasAnyRole(currentUser, 'admin', 'vorstand')) { + throw createError({ statusCode: 403, statusMessage: 'Zugriff verweigert' }) + } + const config = useRuntimeConfig() const encryptionKey = config.encryptionKey || 'local_development_encryption_key_change_in_production' @@ -73,7 +80,7 @@ export default defineEventHandler(async (event) => { // Nach Zeitstempel sortieren (neueste zuerst) applications.sort((a, b) => new Date(b.timestamp) - new Date(a.timestamp)) - return applications + return applications } catch (error) { console.error('Fehler beim Laden der Mitgliedschaftsanträge:', error) diff --git a/server/api/spielplan/pdf.get.js b/server/api/spielplan/pdf.get.js index d92867a..db8886b 100644 --- a/server/api/spielplan/pdf.get.js +++ b/server/api/spielplan/pdf.get.js @@ -13,10 +13,15 @@ export default defineEventHandler(async (event) => { }) } - // Lade Spielplandaten - const csvPath = path.join(process.cwd(), 'public/data/spielplan.csv') + // Lade Spielplandaten - bevorzugt aus server/data + let csvPath = path.join(process.cwd(), 'server/data/spielplan.csv') + try { + await fs.access(csvPath) + } catch { + csvPath = path.join(process.cwd(), 'public/data/spielplan.csv') + } + let csvContent - try { csvContent = await fs.readFile(csvPath, 'utf-8') } catch (_error) { diff --git a/server/api/termine.get.js b/server/api/termine.get.js index d1fd83b..679e2dc 100644 --- a/server/api/termine.get.js +++ b/server/api/termine.get.js @@ -4,13 +4,20 @@ import path from 'path' export default defineEventHandler(async (event) => { try { const cwd = process.cwd() - - // In production (.output/server), working dir is .output + + // Prefer internal server/data, fallback to public/data let csvPath if (cwd.endsWith('.output')) { - csvPath = path.join(cwd, '../public/data/termine.csv') + csvPath = path.join(cwd, '../server/data/termine.csv') + // fallback + if (!(await fs.access(csvPath).then(()=>true).catch(()=>false))) { + 
csvPath = path.join(cwd, '../public/data/termine.csv') + } } else { - csvPath = path.join(cwd, 'public/data/termine.csv') + csvPath = path.join(cwd, 'server/data/termine.csv') + if (!(await fs.access(csvPath).then(()=>true).catch(()=>false))) { + csvPath = path.join(cwd, 'public/data/termine.csv') + } } const csv = await fs.readFile(csvPath, 'utf-8') diff --git a/server/api/vereinsmeisterschaften.get.js b/server/api/vereinsmeisterschaften.get.js index fc88827..0639f17 100644 --- a/server/api/vereinsmeisterschaften.get.js +++ b/server/api/vereinsmeisterschaften.get.js @@ -4,13 +4,19 @@ import path from 'path' export default defineEventHandler(async (event) => { try { const cwd = process.cwd() - - // In production (.output/server), working dir is .output + + // Prefer internal server/data, fallback to public/data let csvPath if (cwd.endsWith('.output')) { - csvPath = path.join(cwd, '../public/data/vereinsmeisterschaften.csv') + csvPath = path.join(cwd, '../server/data/vereinsmeisterschaften.csv') + if (!(await fs.access(csvPath).then(()=>true).catch(()=>false))) { + csvPath = path.join(cwd, '../public/data/vereinsmeisterschaften.csv') + } } else { - csvPath = path.join(cwd, 'public/data/vereinsmeisterschaften.csv') + csvPath = path.join(cwd, 'server/data/vereinsmeisterschaften.csv') + if (!(await fs.access(csvPath).then(()=>true).catch(()=>false))) { + csvPath = path.join(cwd, 'public/data/vereinsmeisterschaften.csv') + } } // CSV-Datei direkt als Text zurückgeben (keine Caching-Probleme) diff --git a/server/utils/termine.js b/server/utils/termine.js index a07d399..19afce5 100644 --- a/server/utils/termine.js +++ b/server/utils/termine.js @@ -2,20 +2,16 @@ import { promises as fs } from 'fs' import path from 'path' import { randomUUID } from 'crypto' -// Handle both dev and production paths -// filename is always a hardcoded constant (e.g., 'termine.csv'), never user input +// Use internal server/data directory for Termine CSV to avoid writing to public/ const 
getDataPath = (filename) => { const cwd = process.cwd() - - // In production (.output/server), working dir is .output + + // Prefer server/data in both production and development + // e.g. project-root/server/data/termine.csv or .output/server/data/termine.csv if (cwd.endsWith('.output')) { - // nosemgrep: javascript.lang.security.audit.path-traversal.path-join-resolve-traversal.path-join-resolve-traversal - return path.join(cwd, '../public/data', filename) + return path.join(cwd, '../server/data', filename) } - - // In development, working dir is project root - // nosemgrep: javascript.lang.security.audit.path-traversal.path-join-resolve-traversal.path-join-resolve-traversal - return path.join(cwd, 'public/data', filename) + return path.join(cwd, 'server/data', filename) } const TERMINE_FILE = getDataPath('termine.csv')