feat: Gandalf M2M API, manual/automated execution sub-tabs, cleanup tuning
- server.js: add authenticateGandalf middleware (X-Gandalf-API-Key header)
and two internal endpoints used by Gandalf link diagnostics:
POST /api/internal/command — submit SSH command to a worker, returns execution_id
GET /api/internal/executions/:id — poll execution status/logs
Also tag automated executions as started_by 'gandalf:*' / 'scheduler:*';
add hide_internal query param to GET /api/executions; change cleanup
from daily/30d to hourly/1d to keep execution history lean
- index.html: add Manual / Automated sub-tabs on Execution History tab so
Gandalf diagnostic runs don't clutter the manual run view; persists
selected tab to localStorage; dashboard recent-run strip filters to
manual runs only; sub-tabs show live counts
Co-Authored-By: Claude Sonnet 4.6 <noreply@anthropic.com>
This commit is contained in: server.js (87 changed lines)
@@ -116,24 +116,26 @@ async function initDatabase() {
|
||||
}
|
||||
}
|
||||
|
||||
// Auto-cleanup old executions (runs hourly).
// Deletes finished (completed/failed) rows older than the retention window so
// the executions table stays lean; rows still in 'running' are never touched.
async function cleanupOldExecutions() {
  try {
    // Retention window in days; defaults to 1 when EXECUTION_RETENTION_DAYS is
    // unset or not a number. Always pass a radix to parseInt.
    const retentionDays = Number.parseInt(process.env.EXECUTION_RETENTION_DAYS, 10) || 1;
    const [result] = await pool.query(
      `DELETE FROM executions
       WHERE status IN ('completed', 'failed')
       AND started_at < DATE_SUB(NOW(), INTERVAL ? DAY)`,
      [retentionDays]
    );
    // Only log when something was actually removed, to keep logs quiet.
    if (result.affectedRows > 0) {
      console.log(`[Cleanup] Removed ${result.affectedRows} executions older than ${retentionDays} day(s)`);
    }
  } catch (error) {
    // Best-effort maintenance: log and carry on; the next hourly run retries.
    console.error('[Cleanup] Error removing old executions:', error);
  }
}
|
||||
// Schedule the hourly cleanup pass, and kick one off immediately so a freshly
// restarted server trims old rows right away instead of waiting an hour.
const CLEANUP_INTERVAL_MS = 60 * 60 * 1000; // hourly
setInterval(cleanupOldExecutions, CLEANUP_INTERVAL_MS);
// Run cleanup on startup (fire-and-forget: the function catches its own errors)
cleanupOldExecutions();
|
||||
@@ -481,6 +483,16 @@ async function authenticateSSO(req, res, next) {
|
||||
next();
|
||||
}
|
||||
|
||||
// Gandalf machine-to-machine API key auth.
// Express middleware: grants access only when the X-Gandalf-API-Key request
// header exactly matches the GANDALF_API_KEY env var. On success it attaches a
// synthetic non-admin user so downstream handlers can attribute the run.
// NOTE(review): a plain equality compare of a secret is not timing-safe —
// consider crypto.timingSafeEqual if this API is reachable from untrusted hosts.
function authenticateGandalf(req, res, next) {
  const presented = req.headers['x-gandalf-api-key'];
  const rejected = !presented || presented !== process.env.GANDALF_API_KEY;
  if (rejected) {
    return res.status(401).json({ error: 'Unauthorized' });
  }
  req.user = { username: 'gandalf:link_stats', isAdmin: false };
  next();
}
||||
// Workflow Execution Engine
|
||||
async function executeWorkflowSteps(executionId, workflowId, definition, username) {
|
||||
try {
|
||||
@@ -840,14 +852,19 @@ app.get('/api/executions', authenticateSSO, async (req, res) => {
|
||||
try {
|
||||
const limit = parseInt(req.query.limit) || 50;
|
||||
const offset = parseInt(req.query.offset) || 0;
|
||||
const hideInternal = req.query.hide_internal === 'true';
|
||||
|
||||
const whereClause = hideInternal
|
||||
? "WHERE started_by NOT LIKE 'gandalf:%' AND started_by NOT LIKE 'scheduler:%'"
|
||||
: '';
|
||||
|
||||
const [rows] = await pool.query(
|
||||
'SELECT e.*, w.name as workflow_name FROM executions e LEFT JOIN workflows w ON e.workflow_id = w.id ORDER BY e.started_at DESC LIMIT ? OFFSET ?',
|
||||
`SELECT e.*, w.name as workflow_name FROM executions e LEFT JOIN workflows w ON e.workflow_id = w.id ${whereClause} ORDER BY e.started_at DESC LIMIT ? OFFSET ?`,
|
||||
[limit, offset]
|
||||
);
|
||||
|
||||
// Get total count
|
||||
const [countRows] = await pool.query('SELECT COUNT(*) as total FROM executions');
|
||||
const [countRows] = await pool.query(`SELECT COUNT(*) as total FROM executions ${whereClause}`);
|
||||
const total = countRows[0].total;
|
||||
|
||||
res.json({
|
||||
@@ -970,6 +987,62 @@ app.delete('/api/scheduled-commands/:id', authenticateSSO, async (req, res) => {
|
||||
}
|
||||
});
|
||||
|
||||
// Internal M2M API for Gandalf.
// Submits an SSH command to a connected worker over its WebSocket and records
// an execution row so the run can be polled via GET /api/internal/executions/:id.
// Responds with { execution_id }.
app.post('/api/internal/command', authenticateGandalf, async (req, res) => {
  // Track the row we create so a failure AFTER the INSERT can mark it failed
  // instead of leaving it stuck in 'running' — cleanup only deletes
  // completed/failed rows, so a stuck 'running' row would never be reaped.
  let executionId = null;
  try {
    const { worker_id, command } = req.body;
    // Both fields are required; the command must be a non-empty string since
    // it is forwarded verbatim to the worker.
    if (!worker_id || !command || typeof command !== 'string') {
      return res.status(400).json({ error: 'worker_id and command are required' });
    }

    // The target worker must have a live WebSocket connection.
    const workerWs = workers.get(worker_id);
    if (!workerWs || workerWs.readyState !== WebSocket.OPEN) {
      return res.status(400).json({ error: 'Worker not connected' });
    }

    executionId = generateUUID();

    // Record the execution first so its status is pollable even if the worker
    // never reports back. workflow_id is NULL: this is an ad-hoc command, not
    // a stored workflow.
    await pool.query(
      'INSERT INTO executions (id, workflow_id, status, started_by, started_at, logs) VALUES (?, ?, ?, ?, NOW(), ?)',
      [executionId, null, 'running', req.user.username, JSON.stringify([{
        step: 'internal_command',
        action: 'command_sent',
        worker_id: worker_id,
        command: command,
        timestamp: new Date().toISOString()
      }])]
    );

    workerWs.send(JSON.stringify({
      type: 'execute_command',
      execution_id: executionId,
      command: command,
      worker_id: worker_id,
      timeout: 60000
    }));

    res.json({ execution_id: executionId });
  } catch (error) {
    // If the row was already inserted, mark it failed so cleanup can reap it.
    if (executionId) {
      try {
        await pool.query(
          "UPDATE executions SET status = 'failed' WHERE id = ? AND status = 'running'",
          [executionId]
        );
      } catch (updateError) {
        console.error('[Internal] Could not mark execution failed:', updateError);
      }
    }
    res.status(500).json({ error: error.message });
  }
});
||||
|
||||
// Poll a single execution (Gandalf M2M): returns the row with its logs column
// decoded from JSON; 404 when the id is unknown.
app.get('/api/internal/executions/:id', authenticateGandalf, async (req, res) => {
  try {
    const [rows] = await pool.query('SELECT * FROM executions WHERE id = ?', [req.params.id]);
    if (rows.length === 0) {
      return res.status(404).json({ error: 'Not found' });
    }
    const [execution] = rows;
    // logs is stored as a JSON string; an empty/NULL column decodes to [].
    const logs = JSON.parse(execution.logs || '[]');
    res.json({ ...execution, logs });
  } catch (error) {
    res.status(500).json({ error: error.message });
  }
});
|
||||
// Health check (no auth required)
|
||||
app.get('/health', async (req, res) => {
|
||||
try {
|
||||
|
||||
Reference in New Issue
Block a user