Phase 5: Auto-cleanup and pagination for executions
Changes:

Server-side:
- Added automatic cleanup of old executions (runs daily)
- Configurable retention period via EXECUTION_RETENTION_DAYS env var (default: 30 days)
- Cleanup runs on server startup and every 24 hours
- Only cleans completed/failed executions, keeps running ones
- Added pagination support to the /api/executions endpoint
- Returns total count, limit, offset, and hasMore flag

Client-side:
- Implemented "Load More" button for execution pagination
- Loads 50 executions at a time
- Appends additional executions when "Load More" is clicked
- Shows total execution count info
- Backward compatible with the old API format

Benefits:
- Automatic database maintenance
- Prevents the execution table from growing indefinitely
- Better performance with large execution histories
- Users can browse all executions via pagination
- Configurable retention policy per deployment

Co-Authored-By: Claude Sonnet 4.5 <noreply@anthropic.com>
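For quick reference, here is a minimal sketch of the new pagination contract from a caller's point of view. The endpoint, query parameters, and response fields (executions, total, limit, offset, hasMore) come from the server change below; everything else (top-level await in a module, same-origin requests, no explicit SSO headers) is an assumption for illustration only.

// Sketch only: fetch the first page of executions using the new query
// parameters and read the pagination envelope added in this commit.
const res = await fetch('/api/executions?limit=50&offset=0');
const data = await res.json();
const executions = data.executions || data; // old API format returned a bare array
console.log(`Loaded ${executions.length} of ${data.total ?? executions.length} executions`);
console.log('More pages available:', Boolean(data.hasMore));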
@@ -969,22 +969,32 @@
 }
 }
 
-async function loadExecutions() {
-    try {
-        const response = await fetch('/api/executions');
-        const executions = await response.json();
-
-        const dashHtml = executions.length === 0 ?
-            '<div class="empty">No executions yet</div>' :
-            executions.slice(0, 5).map(e => `
-                <div class="execution-item" onclick="viewExecution('${e.id}')">
-                    <span class="status ${e.status}">${e.status}</span>
-                    <strong>${e.workflow_name || '[Quick Command]'}</strong>
-                    <div class="timestamp">by ${e.started_by} at ${new Date(e.started_at).toLocaleString()}</div>
-                </div>
-            `).join('');
-        document.getElementById('dashExecutions').innerHTML = dashHtml;
-
+let executionOffset = 0;
+const executionLimit = 50;
+
+async function loadExecutions(append = false) {
+    try {
+        if (!append) executionOffset = 0;
+
+        const response = await fetch(`/api/executions?limit=${executionLimit}&offset=${executionOffset}`);
+        const data = await response.json();
+        const executions = data.executions || data; // Handle old and new API format
+
+        // Dashboard view (always first 5)
+        if (!append) {
+            const dashHtml = executions.length === 0 ?
+                '<div class="empty">No executions yet</div>' :
+                executions.slice(0, 5).map(e => `
+                    <div class="execution-item" onclick="viewExecution('${e.id}')">
+                        <span class="status ${e.status}">${e.status}</span>
+                        <strong>${e.workflow_name || '[Quick Command]'}</strong>
+                        <div class="timestamp">by ${e.started_by} at ${new Date(e.started_at).toLocaleString()}</div>
+                    </div>
+                `).join('');
+            document.getElementById('dashExecutions').innerHTML = dashHtml;
+        }
+
+        // Full execution list
         const fullHtml = executions.length === 0 ?
             '<div class="empty">No executions yet</div>' :
             executions.map(e => `
@@ -997,12 +1007,29 @@
                 </div>
             </div>
         `).join('');
-        document.getElementById('executionList').innerHTML = fullHtml;
+
+        if (append) {
+            document.getElementById('executionList').innerHTML += fullHtml;
+        } else {
+            document.getElementById('executionList').innerHTML = fullHtml;
+        }
+
+        // Add "Load More" button if there are more executions
+        if (data.hasMore) {
+            const loadMoreBtn = `<button onclick="loadMoreExecutions()" style="width: 100%; margin-top: 15px;">[ Load More Executions ]</button>`;
+            document.getElementById('executionList').innerHTML += loadMoreBtn;
+        }
+
     } catch (error) {
         console.error('Error loading executions:', error);
     }
 }
 
+async function loadMoreExecutions() {
+    executionOffset += executionLimit;
+    await loadExecutions(true);
+}
+
 async function clearCompletedExecutions() {
     if (!confirm('Delete all completed and failed executions?')) return;
 
server.js (40 changed lines)
@@ -98,6 +98,27 @@ async function initDatabase() {
 }
 }
 
+// Auto-cleanup old executions (runs daily)
+async function cleanupOldExecutions() {
+    try {
+        const retentionDays = parseInt(process.env.EXECUTION_RETENTION_DAYS) || 30;
+        const [result] = await pool.query(
+            `DELETE FROM executions
+             WHERE status IN ('completed', 'failed')
+             AND started_at < DATE_SUB(NOW(), INTERVAL ? DAY)`,
+            [retentionDays]
+        );
+        console.log(`[Cleanup] Removed ${result.affectedRows} executions older than ${retentionDays} days`);
+    } catch (error) {
+        console.error('[Cleanup] Error removing old executions:', error);
+    }
+}
+
+// Run cleanup every 24 hours
+setInterval(cleanupOldExecutions, 24 * 60 * 60 * 1000);
+// Run cleanup on startup
+cleanupOldExecutions();
+
 // WebSocket connections
 const clients = new Set();
 const workers = new Map(); // Map worker_id -> WebSocket connection
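As a side note on the cleanup above, the retention cutoff the DELETE applies can be reasoned about as follows. This is an illustration only, not part of the commit; it mirrors the SQL DATE_SUB logic in JavaScript rather than replacing it.

// Illustration only (not in the commit): the cutoff applied by the DELETE above.
// With EXECUTION_RETENTION_DAYS unset, the default is 30 days.
const retentionDays = parseInt(process.env.EXECUTION_RETENTION_DAYS, 10) || 30;
const cutoff = new Date(Date.now() - retentionDays * 24 * 60 * 60 * 1000);
console.log(`Completed/failed executions started before ${cutoff.toISOString()} are eligible for cleanup`);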
@@ -409,10 +430,25 @@ app.post('/api/executions', authenticateSSO, async (req, res) => {
 
 app.get('/api/executions', authenticateSSO, async (req, res) => {
     try {
+        const limit = parseInt(req.query.limit) || 50;
+        const offset = parseInt(req.query.offset) || 0;
+
         const [rows] = await pool.query(
-            'SELECT e.*, w.name as workflow_name FROM executions e LEFT JOIN workflows w ON e.workflow_id = w.id ORDER BY e.started_at DESC LIMIT 50'
+            'SELECT e.*, w.name as workflow_name FROM executions e LEFT JOIN workflows w ON e.workflow_id = w.id ORDER BY e.started_at DESC LIMIT ? OFFSET ?',
+            [limit, offset]
         );
-        res.json(rows);
+
+        // Get total count
+        const [countRows] = await pool.query('SELECT COUNT(*) as total FROM executions');
+        const total = countRows[0].total;
+
+        res.json({
+            executions: rows,
+            total: total,
+            limit: limit,
+            offset: offset,
+            hasMore: offset + rows.length < total
+        });
     } catch (error) {
         res.status(500).json({ error: error.message });
     }
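Finally, a hypothetical helper (not part of this commit) showing how a client could walk the full history using the limit/offset/hasMore contract the endpoint now exposes; the function name and default page size are assumptions, and auth handling is omitted.

// Hypothetical helper: page through every execution using the new contract.
async function fetchAllExecutions(pageSize = 50) {
    const all = [];
    let offset = 0;
    let hasMore = true;
    while (hasMore) {
        const res = await fetch(`/api/executions?limit=${pageSize}&offset=${offset}`);
        const data = await res.json();
        const page = data.executions || data; // tolerate the old bare-array format
        all.push(...page);
        hasMore = Boolean(data.hasMore); // old format has no flag, so the loop stops after one page
        offset += pageSize;
    }
    return all;
}

For example, fetchAllExecutions().then(list => console.log(list.length)) would report how many executions are currently retained.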