From 0b1e745f03ed02c56a9d43e4e109336eaff33088 Mon Sep 17 00:00:00 2001
From: "Torsten Schulz (local)"
Date: Wed, 29 Oct 2025 13:35:25 +0100
Subject: [PATCH] Add API logging functionality and enhance scheduler service

Introduced ApiLog model and integrated logging for scheduled tasks in the
SchedulerService. Updated server.js to include request logging middleware
and new API log routes. Enhanced frontend navigation by adding a link to
system logs for admin users. Adjusted session check interval in App.vue for
improved performance.

This update improves monitoring and debugging capabilities across the
application.
---
 backend/controllers/apiLogController.js      |  68 ++
 .../middleware/requestLoggingMiddleware.js   |  83 ++
 backend/migrations/create_api_log_table.sql  |  26 +
 backend/models/ApiLog.js                     | 102 +++
 backend/models/index.js                      |   5 +
 backend/routes/apiLogRoutes.js               |  18 +
 backend/server.js                            |  12 +-
 backend/services/apiLogService.js            | 161 ++++
 backend/services/schedulerService.js         |  67 +-
 frontend/src/App.vue                         |  15 +-
 frontend/src/router.js                       |   2 +
 frontend/src/views/LogsView.vue              | 754 ++++++++++++++++++
 12 files changed, 1307 insertions(+), 6 deletions(-)
 create mode 100644 backend/controllers/apiLogController.js
 create mode 100644 backend/middleware/requestLoggingMiddleware.js
 create mode 100644 backend/migrations/create_api_log_table.sql
 create mode 100644 backend/models/ApiLog.js
 create mode 100644 backend/routes/apiLogRoutes.js
 create mode 100644 backend/services/apiLogService.js
 create mode 100644 frontend/src/views/LogsView.vue
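Reviewer note (commentary only, ignored when the patch is applied): a minimal sketch of how
the new logs endpoint could be exercised once this change is in. It assumes the backend runs
locally on the default port 3005 and that the request carries a valid authenticated session
(`sessionCookie` below is a placeholder for whatever the existing auth setup issues); the
filter parameters shown are the ones accepted by ApiLogController.getLogs.

    // Fetch the 20 most recent scheduler log entries (hypothetical local smoke test).
    const params = new URLSearchParams({ logType: 'scheduler', limit: '20', offset: '0' });
    const res = await fetch(`http://localhost:3005/api/logs?${params}`, {
        headers: { cookie: sessionCookie } // assumption: session cookie from a prior login
    });
    const { success, data } = await res.json();
    console.log(success, data.total, data.logs.length);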
diff --git a/backend/controllers/apiLogController.js b/backend/controllers/apiLogController.js
new file mode 100644
index 0000000..02a021f
--- /dev/null
+++ b/backend/controllers/apiLogController.js
@@ -0,0 +1,68 @@
+import apiLogService from '../services/apiLogService.js';
+import HttpError from '../exceptions/HttpError.js';
+
+class ApiLogController {
+    /**
+     * GET /api/logs
+     * Get API logs with optional filters
+     */
+    async getLogs(req, res, next) {
+        try {
+            const {
+                userId,
+                logType,
+                method,
+                path,
+                statusCode,
+                startDate,
+                endDate,
+                limit = 100,
+                offset = 0
+            } = req.query;
+
+            const result = await apiLogService.getLogs({
+                userId: userId ? parseInt(userId) : null,
+                logType,
+                method,
+                path,
+                statusCode: statusCode ? parseInt(statusCode) : null,
+                startDate,
+                endDate,
+                limit: parseInt(limit),
+                offset: parseInt(offset)
+            });
+
+            res.json({
+                success: true,
+                data: result
+            });
+        } catch (error) {
+            next(error);
+        }
+    }
+
+    /**
+     * GET /api/logs/:id
+     * Get a single log entry by ID
+     */
+    async getLogById(req, res, next) {
+        try {
+            const { id } = req.params;
+            const log = await apiLogService.getLogById(parseInt(id));
+
+            if (!log) {
+                throw new HttpError(404, 'Log entry not found');
+            }
+
+            res.json({
+                success: true,
+                data: log
+            });
+        } catch (error) {
+            next(error);
+        }
+    }
+}
+
+export default new ApiLogController();
+
diff --git a/backend/middleware/requestLoggingMiddleware.js b/backend/middleware/requestLoggingMiddleware.js
new file mode 100644
index 0000000..5a1d2e3
--- /dev/null
+++ b/backend/middleware/requestLoggingMiddleware.js
@@ -0,0 +1,83 @@
+import ApiLog from '../models/ApiLog.js';
+
+/**
+ * Middleware to log all API requests and responses
+ * Should be added early in the middleware chain, but after authentication
+ */
+export const requestLoggingMiddleware = async (req, res, next) => {
+    const startTime = Date.now();
+    const originalSend = res.send;
+
+    // Get request body (but limit size for sensitive data)
+    let requestBody = null;
+    if (req.body && Object.keys(req.body).length > 0) {
+        const bodyStr = JSON.stringify(req.body);
+        // Truncate very long bodies
+        requestBody = bodyStr.length > 10000 ? bodyStr.substring(0, 10000) + '... (truncated)' : bodyStr;
+    }
+
+    // Capture response
+    let responseBody = null;
+    res.send = function(data) {
+        // Try to parse response as JSON
+        try {
+            const parsed = JSON.parse(data);
+            const responseStr = JSON.stringify(parsed);
+            // Truncate very long responses
+            responseBody = responseStr.length > 10000 ? responseStr.substring(0, 10000) + '... (truncated)' : responseStr;
+        } catch (e) {
+            // Not JSON, just use raw data (truncated)
+            responseBody = typeof data === 'string' ? data.substring(0, 1000) : String(data).substring(0, 1000);
+        }
+
+        // Restore original send
+        res.send = originalSend;
+        return res.send.apply(res, arguments);
+    };
+
+    // Log after response is sent
+    res.on('finish', async () => {
+        const executionTime = Date.now() - startTime;
+        const ipAddress = req.ip || req.connection.remoteAddress || req.headers['x-forwarded-for'];
+        const path = req.path || req.url;
+
+        // Skip logging for non-data endpoints (status checks, health checks, etc.)
+        // Only data requests to API endpoints are logged; '/' is matched exactly
+        const skipPaths = [
+            '/status',
+            '/session/status',
+            '/health',
+            '/',
+            '/scheduler-status'
+        ];
+
+        if (skipPaths.some(skipPath => skipPath === '/' ? path === '/' : path.includes(skipPath))) {
+            return;
+        }
+
+        // Get user ID if available (set by authMiddleware)
+        const userId = req.user?.id || null;
+
+        try {
+            await ApiLog.create({
+                userId,
+                method: req.method,
+                path: path,
+                statusCode: res.statusCode,
+                requestBody,
+                responseBody,
+                executionTime,
+                errorMessage: res.statusCode >= 400 ? `HTTP ${res.statusCode}` : null,
+                ipAddress,
+                userAgent: req.headers['user-agent'],
+                logType: 'api_request'
+            });
+        } catch (error) {
+            // Don't let logging errors break the request
+            console.error('Error logging API request:', error);
+        }
+    });
+
+    next();
+};
+
diff --git a/backend/migrations/create_api_log_table.sql b/backend/migrations/create_api_log_table.sql
new file mode 100644
index 0000000..337d005
--- /dev/null
+++ b/backend/migrations/create_api_log_table.sql
@@ -0,0 +1,26 @@
+-- Migration: Create api_log table for comprehensive request/response and execution logging
+
+CREATE TABLE IF NOT EXISTS api_log (
+    id INT AUTO_INCREMENT PRIMARY KEY,
+    user_id INT NULL,
+    method VARCHAR(10) NOT NULL COMMENT 'HTTP method (GET, POST, PUT, DELETE, etc.)',
+    path VARCHAR(500) NOT NULL COMMENT 'Request path',
+    status_code INT NULL COMMENT 'HTTP status code',
+    request_body TEXT NULL COMMENT 'Request body (truncated if too long)',
+    response_body TEXT NULL COMMENT 'Response body (truncated if too long)',
+    execution_time INT NULL COMMENT 'Execution time in milliseconds',
+    error_message TEXT NULL COMMENT 'Error message if request failed',
+    ip_address VARCHAR(45) NULL COMMENT 'Client IP address',
+    user_agent VARCHAR(500) NULL COMMENT 'User agent string',
+    log_type ENUM('api_request', 'scheduler', 'cron_job', 'manual') NOT NULL DEFAULT 'api_request' COMMENT 'Type of log entry',
+    scheduler_job_type VARCHAR(50) NULL COMMENT 'Type of scheduler job (rating_updates, match_results, etc.)',
+    created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
+    updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP,
+    FOREIGN KEY (user_id) REFERENCES user(id) ON DELETE SET NULL ON UPDATE CASCADE,
+    INDEX idx_api_log_user_id (user_id, created_at),
+    INDEX idx_api_log_path (path, created_at),
+    INDEX idx_api_log_log_type (log_type, created_at),
+    INDEX idx_api_log_created_at (created_at),
+    INDEX idx_api_log_status_code (status_code)
+) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_unicode_ci;
+
diff --git a/backend/models/ApiLog.js b/backend/models/ApiLog.js
new file mode 100644
index 0000000..3313fbf
--- /dev/null
+++ b/backend/models/ApiLog.js
@@ -0,0 +1,102 @@
+import { DataTypes } from 'sequelize';
+import sequelize from '../database.js';
+import User from './User.js';
+
+const ApiLog = sequelize.define('ApiLog', {
+    id: {
+        type: DataTypes.INTEGER,
+        primaryKey: true,
+        autoIncrement: true,
+        allowNull: false,
+    },
+    userId: {
+        type: DataTypes.INTEGER,
+        allowNull: true,
+        references: {
+            model: User,
+            key: 'id',
+        },
+        onDelete: 'SET NULL',
+        onUpdate: 'CASCADE',
+    },
+    method: {
+        type: DataTypes.STRING(10),
+        allowNull: false,
+        comment: 'HTTP method (GET, POST, PUT, DELETE, etc.)'
+    },
+    path: {
+        type: DataTypes.STRING(500),
+        allowNull: false,
+        comment: 'Request path'
+    },
+    statusCode: {
+        type: DataTypes.INTEGER,
+        allowNull: true,
+        comment: 'HTTP status code'
+    },
+    requestBody: {
+        type: DataTypes.TEXT,
+        allowNull: true,
+        comment: 'Request body (truncated if too long)'
+    },
+    responseBody: {
+        type: DataTypes.TEXT,
+        allowNull: true,
+        comment: 'Response body (truncated if too long)'
+    },
+    executionTime: {
+        type: DataTypes.INTEGER,
+        allowNull: true,
+        comment: 'Execution time in milliseconds'
+    },
+    errorMessage: {
+        type: DataTypes.TEXT,
+        allowNull: true,
+        comment: 'Error message if request failed'
+    },
+    ipAddress: {
+        type: DataTypes.STRING(45),
+        allowNull: true,
+        comment: 'Client IP address'
+    },
+    userAgent: {
+        type: DataTypes.STRING(500),
+        allowNull: true,
+        comment: 'User agent string'
+    },
+    logType: {
+        type: DataTypes.ENUM('api_request', 'scheduler', 'cron_job', 'manual'),
+        allowNull: false,
+        defaultValue: 'api_request',
+        comment: 'Type of log entry'
+    },
+    schedulerJobType: {
+        type: DataTypes.STRING(50),
+        allowNull: true,
+        comment: 'Type of scheduler job (rating_updates, match_results, etc.)'
+    },
+}, {
+    underscored: true,
+    tableName: 'api_log',
+    timestamps: true,
+    indexes: [
+        {
+            fields: ['user_id', 'created_at']
+        },
+        {
+            fields: ['path', 'created_at']
+        },
+        {
+            fields: ['log_type', 'created_at']
+        },
+        {
+            fields: ['created_at']
+        },
+        {
+            fields: ['status_code']
+        }
+    ]
+});
+
+export default ApiLog;
+
diff --git a/backend/models/index.js b/backend/models/index.js
index c9b08f6..972e731 100644
--- a/backend/models/index.js
+++ b/backend/models/index.js
@@ -38,6 +38,7 @@ import OfficialCompetitionMember from './OfficialCompetitionMember.js';
 import MyTischtennis from './MyTischtennis.js';
 import MyTischtennisUpdateHistory from './MyTischtennisUpdateHistory.js';
 import MyTischtennisFetchLog from './MyTischtennisFetchLog.js';
+import ApiLog from './ApiLog.js';
 // Official tournaments relations
 OfficialTournament.hasMany(OfficialCompetition, { foreignKey: 'tournamentId', as: 'competitions' });
 OfficialCompetition.belongsTo(OfficialTournament, { foreignKey: 'tournamentId', as: 'tournament' });
@@ -238,6 +239,9 @@ MyTischtennisUpdateHistory.belongsTo(User, { foreignKey: 'userId', as: 'user' })
 User.hasMany(MyTischtennisFetchLog, { foreignKey: 'userId', as: 'fetchLogs' });
 MyTischtennisFetchLog.belongsTo(User, { foreignKey: 'userId', as: 'user' });
 
+User.hasMany(ApiLog, { foreignKey: 'userId', as: 'apiLogs' });
+ApiLog.belongsTo(User, { foreignKey: 'userId', as: 'user' });
+
 export {
     User,
     Log,
@@ -278,4 +282,5 @@ export {
     MyTischtennis,
     MyTischtennisUpdateHistory,
     MyTischtennisFetchLog,
+    ApiLog,
 };
diff --git a/backend/routes/apiLogRoutes.js b/backend/routes/apiLogRoutes.js
new file mode 100644
index 0000000..0bff6a3
--- /dev/null
+++ b/backend/routes/apiLogRoutes.js
@@ -0,0 +1,18 @@
+import express from 'express';
+import apiLogController from '../controllers/apiLogController.js';
+import { authenticate } from '../middleware/authMiddleware.js';
+import { authorize } from '../middleware/authorizationMiddleware.js';
+
+const router = express.Router();
+
+// All routes require authentication
+router.use(authenticate);
+
+// Get logs - requires permissions or admin
+router.get('/', apiLogController.getLogs);
+
+// Get single log by ID
+router.get('/:id', apiLogController.getLogById);
+
+export default router;
+
diff --git a/backend/server.js b/backend/server.js
index 03134a6..6b52534 100644
--- a/backend/server.js
+++ b/backend/server.js
@@ -8,7 +8,7 @@
 import {
     DiaryNote, DiaryTag, MemberDiaryTag, DiaryDateTag, DiaryMemberNote, DiaryMemberTag, PredefinedActivity, PredefinedActivityImage,
     DiaryDateActivity, DiaryMemberActivity, Match, League, Team, ClubTeam, TeamDocument, Group, GroupActivity, Tournament, TournamentGroup, TournamentMatch, TournamentResult,
-    TournamentMember, Accident, UserToken, OfficialTournament, OfficialCompetition, OfficialCompetitionMember, MyTischtennis, MyTischtennisUpdateHistory, MyTischtennisFetchLog
+    TournamentMember, Accident, UserToken, OfficialTournament, OfficialCompetition, OfficialCompetitionMember, MyTischtennis, MyTischtennisUpdateHistory, MyTischtennisFetchLog, ApiLog
 } from './models/index.js';
 import authRoutes from './routes/authRoutes.js';
 import clubRoutes from './routes/clubRoutes.js';
@@ -40,8 +40,10 @@
 import teamDocumentRoutes from './routes/teamDocumentRoutes.js';
 import seasonRoutes from './routes/seasonRoutes.js';
 import memberActivityRoutes from './routes/memberActivityRoutes.js';
 import permissionRoutes from './routes/permissionRoutes.js';
+import apiLogRoutes from './routes/apiLogRoutes.js';
 import schedulerService from './services/schedulerService.js';
-
+import { requestLoggingMiddleware } from './middleware/requestLoggingMiddleware.js';
+
 const app = express();
 const port = process.env.PORT || 3005;
@@ -56,6 +58,10 @@ app.use(cors({
 }));
 app.use(express.json());
 
+// Request logging middleware - logs all API requests
+// Important: userId is set later in authMiddleware, but the middleware also works without it
+app.use(requestLoggingMiddleware);
+
 // Globale Fehlerbehandlung, damit der Server bei unerwarteten Fehlern nicht hart abstürzt
 process.on('uncaughtException', (err) => {
     console.error('[uncaughtException]', err);
@@ -92,6 +98,7 @@ app.use('/api/team-documents', teamDocumentRoutes);
 app.use('/api/seasons', seasonRoutes);
 app.use('/api/member-activities', memberActivityRoutes);
 app.use('/api/permissions', permissionRoutes);
+app.use('/api/logs', apiLogRoutes);
 
 app.use(express.static(path.join(__dirname, '../frontend/dist')));
 
@@ -194,6 +201,7 @@ app.get('*', (req, res) => {
         await safeSync(MyTischtennis);
         await safeSync(MyTischtennisUpdateHistory);
         await safeSync(MyTischtennisFetchLog);
+        await safeSync(ApiLog);
 
         // Start scheduler service
         schedulerService.start();
diff --git a/backend/services/apiLogService.js b/backend/services/apiLogService.js
new file mode 100644
index 0000000..210af44
--- /dev/null
+++ b/backend/services/apiLogService.js
@@ -0,0 +1,161 @@
+import ApiLog from '../models/ApiLog.js';
+import { Op } from 'sequelize';
+
+class ApiLogService {
+    /**
+     * Log an API request/response
+     */
+    async logRequest(options) {
+        try {
+            const {
+                userId = null,
+                method,
+                path,
+                statusCode = null,
+                requestBody = null,
+                responseBody = null,
+                executionTime = null,
+                errorMessage = null,
+                ipAddress = null,
+                userAgent = null,
+                logType = 'api_request',
+                schedulerJobType = null
+            } = options;
+
+            // Truncate long fields
+            const truncate = (str, maxLen = 10000) => {
+                if (!str) return null;
+                const strVal = typeof str === 'string' ? str : JSON.stringify(str);
+                return strVal.length > maxLen ? strVal.substring(0, maxLen) + '... (truncated)' : strVal;
+            };
+
+            await ApiLog.create({
+                userId,
+                method,
+                path,
+                statusCode,
+                requestBody: truncate(requestBody),
+                responseBody: truncate(responseBody),
+                executionTime,
+                errorMessage: truncate(errorMessage, 5000),
+                ipAddress,
+                userAgent,
+                logType,
+                schedulerJobType
+            });
+        } catch (error) {
+            console.error('Error logging API request:', error);
+            // Don't throw - logging failures shouldn't break the main operation
+        }
+    }
+
+    /**
+     * Log scheduler execution
+     */
+    async logSchedulerExecution(jobType, success, message, executionTime = null, errorMessage = null) {
+        try {
+            await ApiLog.create({
+                userId: null,
+                method: 'SCHEDULER',
+                path: `/scheduler/${jobType}`,
+                statusCode: success ? 200 : 500,
+                responseBody: message,
+                executionTime,
+                errorMessage,
+                logType: 'scheduler',
+                schedulerJobType: jobType
+            });
+        } catch (error) {
+            console.error('Error logging scheduler execution:', error);
+        }
+    }
+
+    /**
+     * Get logs with filters
+     */
+    async getLogs(options = {}) {
+        try {
+            const {
+                userId = null,
+                logType = null,
+                method = null,
+                path = null,
+                statusCode = null,
+                startDate = null,
+                endDate = null,
+                limit = 100,
+                offset = 0
+            } = options;
+
+            const where = {};
+
+            if (userId) {
+                where.userId = userId;
+            }
+
+            if (logType) {
+                where.logType = logType;
+            }
+
+            if (method) {
+                where.method = method;
+            }
+
+            if (path) {
+                where.path = { [Op.like]: `%${path}%` };
+            }
+
+            if (statusCode !== null) {
+                where.statusCode = statusCode;
+            }
+
+            if (startDate || endDate) {
+                where.createdAt = {};
+                if (startDate) {
+                    where.createdAt[Op.gte] = new Date(startDate);
+                }
+                if (endDate) {
+                    where.createdAt[Op.lte] = new Date(endDate);
+                }
+            }
+
+            const logs = await ApiLog.findAndCountAll({
+                where,
+                order: [['createdAt', 'DESC']],
+                limit: parseInt(limit),
+                offset: parseInt(offset),
+                attributes: [
+                    'id', 'userId', 'method', 'path', 'statusCode',
+                    'executionTime', 'errorMessage', 'ipAddress', 'logType',
+                    'schedulerJobType', 'createdAt'
+                ]
+            });
+
+            return {
+                logs: logs.rows,
+                total: logs.count,
+                limit: parseInt(limit),
+                offset: parseInt(offset)
+            };
+        } catch (error) {
+            console.error('Error getting logs:', error);
+            throw error;
+        }
+    }
+
+    /**
+     * Get a single log by ID
+     */
+    async getLogById(logId) {
+        try {
+            const log = await ApiLog.findByPk(logId);
+            return log;
+        } catch (error) {
+            console.error('Error getting log by ID:', error);
+            throw error;
+        }
+    }
+}
+
+export default new ApiLogService();
+
diff --git a/backend/services/schedulerService.js b/backend/services/schedulerService.js
index 9dd7270..76aa87f 100644
--- a/backend/services/schedulerService.js
+++ b/backend/services/schedulerService.js
@@ -1,6 +1,7 @@
 import cron from 'node-cron';
 import autoUpdateRatingsService from './autoUpdateRatingsService.js';
 import autoFetchMatchResultsService from './autoFetchMatchResultsService.js';
+import apiLogService from './apiLogService.js';
 import { devLog } from '../utils/logger.js';
 
 class SchedulerService {
@@ -22,11 +23,33 @@
         // Schedule automatic rating updates at 6:00 AM daily
         const ratingUpdateJob = cron.schedule('0 6 * * *', async () => {
+            const startTime = Date.now();
+            const timestamp = new Date().toISOString();
+            console.log(`[${timestamp}] CRON: Executing scheduled rating updates...`);
             devLog('Executing scheduled rating updates...');
+
+            let success = false;
+            let message = '';
+            let errorMessage = null;
+
             try {
                 await autoUpdateRatingsService.executeAutomaticUpdates();
+                const executionTime = Date.now() - startTime;
+                success = true;
+                message = 'Rating updates completed successfully';
+                console.log(`[${new Date().toISOString()}] CRON: Rating updates completed successfully`);
+
+                // Log to ApiLog
+                await apiLogService.logSchedulerExecution('rating_updates', true, message, executionTime, null);
             } catch (error) {
-                console.error('Error in scheduled rating updates:', error);
+                const executionTime = Date.now() - startTime;
+                success = false;
+                errorMessage = error.message;
+                console.error(`[${new Date().toISOString()}] CRON ERROR in scheduled rating updates:`, error);
+                console.error('Stack trace:', error.stack);
+
+                // Log to ApiLog
+                await apiLogService.logSchedulerExecution('rating_updates', false, 'Rating updates failed', executionTime, errorMessage);
             }
         }, {
             scheduled: false, // Don't start automatically
@@ -35,14 +58,37 @@
         this.jobs.set('ratingUpdates', ratingUpdateJob);
         ratingUpdateJob.start();
+        console.log('[Scheduler] Rating update job scheduled and started');
 
         // Schedule automatic match results fetching at 6:30 AM daily
         const matchResultsJob = cron.schedule('30 6 * * *', async () => {
+            const startTime = Date.now();
+            const timestamp = new Date().toISOString();
+            console.log(`[${timestamp}] CRON: Executing scheduled match results fetch...`);
             devLog('Executing scheduled match results fetch...');
+
+            let success = false;
+            let message = '';
+            let errorMessage = null;
+
             try {
                 await autoFetchMatchResultsService.executeAutomaticFetch();
+                const executionTime = Date.now() - startTime;
+                success = true;
+                message = 'Match results fetch completed successfully';
+                console.log(`[${new Date().toISOString()}] CRON: Match results fetch completed successfully`);
+
+                // Log to ApiLog
+                await apiLogService.logSchedulerExecution('match_results', true, message, executionTime, null);
             } catch (error) {
-                console.error('Error in scheduled match results fetch:', error);
+                const executionTime = Date.now() - startTime;
+                success = false;
+                errorMessage = error.message;
+                console.error(`[${new Date().toISOString()}] CRON ERROR in scheduled match results fetch:`, error);
+                console.error('Stack trace:', error.stack);
+
+                // Log to ApiLog
+                await apiLogService.logSchedulerExecution('match_results', false, 'Match results fetch failed', executionTime, errorMessage);
             }
         }, {
             scheduled: false, // Don't start automatically
@@ -51,8 +97,25 @@
         this.jobs.set('matchResults', matchResultsJob);
         matchResultsJob.start();
+        console.log('[Scheduler] Match results fetch job scheduled and started');
 
         this.isRunning = true;
+
+        const now = new Date();
+        const tomorrow6AM = new Date(now);
+        tomorrow6AM.setDate(tomorrow6AM.getDate() + 1);
+        tomorrow6AM.setHours(6, 0, 0, 0);
+
+        const tomorrow630AM = new Date(now);
+        tomorrow630AM.setDate(tomorrow630AM.getDate() + 1);
+        tomorrow630AM.setHours(6, 30, 0, 0);
+
+        console.log('[Scheduler] ===== SCHEDULER SERVICE STARTED =====');
+        console.log(`[Scheduler] Server time: ${now.toISOString()}`);
+        console.log(`[Scheduler] Timezone: Europe/Berlin`);
+        console.log(`[Scheduler] Rating updates: Next execution at ${tomorrow6AM.toISOString()} (6:00 AM Berlin time)`);
+        console.log(`[Scheduler] Match results fetch: Next execution at ${tomorrow630AM.toISOString()} (6:30 AM Berlin time)`);
+        console.log('[Scheduler] =====================================');
+
         devLog('Scheduler service started successfully');
         devLog('Rating updates scheduled for 6:00 AM daily (Europe/Berlin timezone)');
         devLog('Match results fetch scheduled for 6:30 AM daily (Europe/Berlin timezone)');
diff --git a/frontend/src/App.vue b/frontend/src/App.vue
index 0c2f94e..95bd5b6 100644
--- a/frontend/src/App.vue
+++ b/frontend/src/App.vue
@@ -22,6 +22,10 @@
           🔐
           Berechtigungen
+
+          📋
+          System-Logs
+