Histórico persistente de execuções com visualização detalhada

- Novo executionsStore em db.js com cache in-memory e escrita debounced
- Camada de cache (src/cache/index.js) com TTL e suporte opcional a Redis
- Persistência de execuções de agentes e pipelines com metadados completos
- Pipeline grava cada etapa com prompt, resultado, timestamps e status
- 4 endpoints REST: listagem paginada com filtros, detalhe, exclusão individual e limpeza total
- Componente frontend (history.js) com cards, filtros, paginação e modal de detalhe
- Timeline visual para pipelines com prompts colapsáveis por etapa
- Correção do executor: --max-turns em vez de --max-tokens, --permission-mode bypassPermissions
- Refatoração do scheduler com persistência melhorada e graceful shutdown
This commit is contained in:
Frederico Castro
2026-02-26 01:36:28 -03:00
parent 2f7a9d4c56
commit 4b6c876f36
13 changed files with 1536 additions and 398 deletions

View File

@@ -6,20 +6,23 @@ import { settingsStore } from '../store/db.js';
const CLAUDE_BIN = resolveBin();
const activeExecutions = new Map();
let maxConcurrent = settingsStore.get().maxConcurrent || 5;
/**
 * Update the in-memory concurrency cap, clamped to [1, 20].
 * Falls back to 5 when `value` is not a parseable integer.
 * @param {string|number} value - requested limit (e.g. from the settings API)
 */
export function updateMaxConcurrent(value) {
  // Number.parseInt with an explicit radix: the original bare parseInt(value)
  // relied on the default radix, which is a classic JS footgun.
  maxConcurrent = Math.max(1, Math.min(20, Number.parseInt(value, 10) || 5));
}
/**
 * Locate the Claude CLI binary.
 * Precedence: CLAUDE_BIN env var, then well-known install locations,
 * finally the bare command name `claude` (resolved via PATH at spawn time).
 * @returns {string} absolute path or bare command name
 */
function resolveBin() {
  if (process.env.CLAUDE_BIN) return process.env.CLAUDE_BIN;
  const home = process.env.HOME || '';
  const candidates = [
    `${home}/.local/bin/claude`,
    '/usr/local/bin/claude',
    '/usr/bin/claude',
  ];
  const found = candidates.find((candidate) => existsSync(candidate));
  return found ?? 'claude';
}
@@ -58,7 +61,6 @@ function buildArgs(agentConfig, prompt) {
}
args.push('--permission-mode', agentConfig.permissionMode || 'bypassPermissions');
return args;
}
@@ -89,13 +91,8 @@ function extractText(event) {
.join('');
}
if (event.type === 'content_block_delta' && event.delta?.text) {
return event.delta.text;
}
if (event.type === 'content_block_start' && event.content_block?.text) {
return event.content_block.text;
}
if (event.type === 'content_block_delta' && event.delta?.text) return event.delta.text;
if (event.type === 'content_block_start' && event.content_block?.text) return event.content_block.text;
if (event.type === 'result') {
if (typeof event.result === 'string') return event.result;
@@ -108,17 +105,10 @@ function extractText(event) {
}
if (event.type === 'text') return event.content || null;
return null;
}
/** Read the concurrency cap from persisted settings, defaulting to 5. */
function getMaxConcurrent() {
  const { maxConcurrent: limit } = settingsStore.get();
  return limit || 5;
}
export function execute(agentConfig, task, callbacks = {}) {
const maxConcurrent = getMaxConcurrent();
if (activeExecutions.size >= maxConcurrent) {
const err = new Error(`Limite de ${maxConcurrent} execuções simultâneas atingido`);
if (callbacks.onError) callbacks.onError(err, uuidv4());
@@ -145,9 +135,7 @@ export function execute(agentConfig, task, callbacks = {}) {
spawnOptions.cwd = agentConfig.workingDirectory;
}
console.log(`[executor] Iniciando: ${executionId}`);
console.log(`[executor] Modelo: ${agentConfig.model || 'claude-sonnet-4-6'}`);
console.log(`[executor] cwd: ${spawnOptions.cwd || process.cwd()}`);
console.log(`[executor] Iniciando: ${executionId} | Modelo: ${agentConfig.model || 'claude-sonnet-4-6'}`);
const child = spawn(CLAUDE_BIN, args, spawnOptions);
let hadError = false;
@@ -165,14 +153,12 @@ export function execute(agentConfig, task, callbacks = {}) {
let fullText = '';
child.stdout.on('data', (chunk) => {
const raw = chunk.toString();
const lines = (outputBuffer + raw).split('\n');
const lines = (outputBuffer + chunk.toString()).split('\n');
outputBuffer = lines.pop();
for (const line of lines) {
const parsed = parseStreamLine(line);
if (!parsed) continue;
const text = extractText(parsed);
if (text) {
fullText += text;
@@ -193,7 +179,6 @@ export function execute(agentConfig, task, callbacks = {}) {
});
child.on('close', (code) => {
console.log(`[executor][close] code=${code} hadError=${hadError}`);
activeExecutions.delete(executionId);
if (hadError) return;
@@ -206,15 +191,7 @@ export function execute(agentConfig, task, callbacks = {}) {
}
if (onComplete) {
onComplete(
{
executionId,
exitCode: code,
result: fullText,
stderr: errorBuffer,
},
executionId,
);
onComplete({ executionId, exitCode: code, result: fullText, stderr: errorBuffer }, executionId);
}
});
@@ -224,22 +201,19 @@ export function execute(agentConfig, task, callbacks = {}) {
/**
 * Terminate one running execution by id (SIGTERM) and drop it from the registry.
 * @returns {boolean} true when an active execution was found and signaled
 */
export function cancel(executionId) {
  const execution = activeExecutions.get(executionId);
  if (execution === undefined) return false;
  execution.process.kill('SIGTERM');
  activeExecutions.delete(executionId);
  return true;
}
/**
 * SIGTERM every active execution and clear the registry.
 * (The source contained the kill loop twice — interleaved old/new diff
 * residue — which would signal each process a second time; deduplicated.)
 */
export function cancelAllExecutions() {
  for (const [, exec] of activeExecutions) {
    exec.process.kill('SIGTERM');
  }
  activeExecutions.clear();
}
export function getActiveExecutions() {
return Array.from(activeExecutions.entries()).map(([id, exec]) => ({
executionId: id,
return Array.from(activeExecutions.values()).map((exec) => ({
executionId: exec.executionId,
startedAt: exec.startedAt,
agentConfig: exec.agentConfig,
}));

View File

@@ -1,5 +1,5 @@
import { v4 as uuidv4 } from 'uuid';
import { agentsStore, schedulesStore } from '../store/db.js';
import { agentsStore, schedulesStore, executionsStore } from '../store/db.js';
import * as executor from './executor.js';
import * as scheduler from './scheduler.js';
@@ -12,6 +12,9 @@ const DEFAULT_CONFIG = {
allowedTools: '',
};
const MAX_RECENT = 200;
const recentExecBuffer = [];
let dailyExecutionCount = 0;
let dailyCountDate = new Date().toDateString();
@@ -61,11 +64,8 @@ export function getAgentById(id) {
export function createAgent(data) {
const errors = validateAgent(data);
if (errors.length > 0) {
throw new Error(errors.join('; '));
}
const agentData = {
if (errors.length > 0) throw new Error(errors.join('; '));
return agentsStore.create({
agent_name: data.agent_name,
description: data.description || '',
tags: sanitizeTags(data.tags),
@@ -74,15 +74,12 @@ export function createAgent(data) {
status: data.status || 'active',
assigned_host: data.assigned_host || 'localhost',
executions: [],
};
return agentsStore.create(agentData);
});
}
export function updateAgent(id, data) {
const existing = agentsStore.getById(id);
if (!existing) return null;
const updateData = {};
if (data.agent_name !== undefined) updateData.agent_name = data.agent_name;
if (data.description !== undefined) updateData.description = data.description;
@@ -90,10 +87,7 @@ export function updateAgent(id, data) {
if (data.tasks !== undefined) updateData.tasks = data.tasks;
if (data.status !== undefined) updateData.status = data.status;
if (data.assigned_host !== undefined) updateData.assigned_host = data.assigned_host;
if (data.config !== undefined) {
updateData.config = { ...existing.config, ...data.config };
}
if (data.config !== undefined) updateData.config = { ...existing.config, ...data.config };
return agentsStore.update(id, updateData);
}
@@ -106,12 +100,25 @@ export function executeTask(agentId, task, instructions, wsCallback) {
if (!agent) throw new Error(`Agente ${agentId} não encontrado`);
if (agent.status !== 'active') throw new Error(`Agente ${agentId} está inativo`);
const executionRecord = {
const taskText = typeof task === 'string' ? task : task.description;
const startedAt = new Date().toISOString();
const historyRecord = executionsStore.create({
type: 'agent',
agentId,
agentName: agent.agent_name,
task: taskText,
instructions: instructions || '',
status: 'running',
startedAt,
});
const execRecord = {
executionId: null,
agentId,
agentName: agent.agent_name,
task: typeof task === 'string' ? task : task.description,
startedAt: new Date().toISOString(),
task: taskText,
startedAt,
status: 'running',
};
@@ -120,74 +127,70 @@ export function executeTask(agentId, task, instructions, wsCallback) {
{ description: task, instructions },
{
onData: (parsed, execId) => {
if (wsCallback) {
wsCallback({
type: 'execution_output',
executionId: execId,
agentId,
data: parsed,
});
}
if (wsCallback) wsCallback({ type: 'execution_output', executionId: execId, agentId, data: parsed });
},
onError: (err, execId) => {
updateAgentExecution(agentId, execId, { status: 'error', error: err.message, endedAt: new Date().toISOString() });
if (wsCallback) {
wsCallback({
type: 'execution_error',
executionId: execId,
agentId,
data: { error: err.message },
});
}
const endedAt = new Date().toISOString();
updateExecutionRecord(agentId, execId, { status: 'error', error: err.message, endedAt });
executionsStore.update(historyRecord.id, { status: 'error', error: err.message, endedAt });
if (wsCallback) wsCallback({ type: 'execution_error', executionId: execId, agentId, data: { error: err.message } });
},
onComplete: (result, execId) => {
updateAgentExecution(agentId, execId, { status: 'completed', result, endedAt: new Date().toISOString() });
if (wsCallback) {
wsCallback({
type: 'execution_complete',
executionId: execId,
agentId,
data: result,
});
}
const endedAt = new Date().toISOString();
updateExecutionRecord(agentId, execId, { status: 'completed', result, endedAt });
executionsStore.update(historyRecord.id, {
status: 'completed',
result: result.result || '',
exitCode: result.exitCode,
endedAt,
});
if (wsCallback) wsCallback({ type: 'execution_complete', executionId: execId, agentId, data: result });
},
}
);
if (!executionId) {
executionsStore.update(historyRecord.id, { status: 'error', error: 'Limite de execuções simultâneas atingido', endedAt: new Date().toISOString() });
throw new Error('Limite de execuções simultâneas atingido');
}
executionRecord.executionId = executionId;
execRecord.executionId = executionId;
executionsStore.update(historyRecord.id, { executionId });
incrementDailyCount();
const updatedAgent = agentsStore.getById(agentId);
const executions = [...(updatedAgent.executions || []), executionRecord];
const executions = [...(updatedAgent.executions || []), execRecord];
agentsStore.update(agentId, { executions: executions.slice(-100) });
recentExecBuffer.unshift({ ...execRecord });
if (recentExecBuffer.length > MAX_RECENT) recentExecBuffer.length = MAX_RECENT;
return executionId;
}
/**
 * Apply `updates` in place to the matching entry of the in-memory
 * recent-executions buffer (no-op when the id is not buffered).
 */
function updateRecentBuffer(executionId, updates) {
  const entry = recentExecBuffer.find((e) => e.executionId === executionId);
  if (entry) Object.assign(entry, updates);
}

/**
 * Persist `updates` onto the matching execution record stored on an agent.
 * No-op when the agent does not exist.
 * (Source had a leftover old function header and a duplicate
 * `const executions` declaration — diff residue causing a SyntaxError;
 * reconstructed to the intended new version.)
 */
function updateExecutionRecord(agentId, executionId, updates) {
  const agent = agentsStore.getById(agentId);
  if (!agent) return;
  const executions = (agent.executions || []).map((exec) =>
    exec.executionId === executionId ? { ...exec, ...updates } : exec,
  );
  agentsStore.update(agentId, { executions });
}
/**
 * @param {number} limit - max entries to return (capped at MAX_RECENT)
 * @returns most-recent executions first (buffer is unshifted on each run)
 */
export function getRecentExecutions(limit = 20) {
  const count = Math.min(limit, MAX_RECENT);
  return recentExecBuffer.slice(0, count);
}
export function scheduleTask(agentId, taskDescription, cronExpression, wsCallback) {
const agent = agentsStore.getById(agentId);
if (!agent) throw new Error(`Agente ${agentId} não encontrado`);
const scheduleId = uuidv4();
const items = schedulesStore.getAll();
items.push({
id: scheduleId,
@@ -223,13 +226,7 @@ export function updateScheduleTask(scheduleId, data, wsCallback) {
executeTask(agentId, taskDescription, null, wsCallback);
});
schedulesStore.update(scheduleId, {
agentId,
agentName: agent.agent_name,
taskDescription,
cronExpression,
});
schedulesStore.update(scheduleId, { agentId, agentName: agent.agent_name, taskDescription, cronExpression });
return schedulesStore.getById(scheduleId);
}
@@ -241,23 +238,9 @@ export function getActiveExecutions() {
return executor.getActiveExecutions();
}
export function getRecentExecutions(limit = 20) {
const agents = agentsStore.getAll();
const all = agents.flatMap((a) =>
(a.executions || []).map((e) => ({
...e,
agentName: a.agent_name,
agentId: a.id,
}))
);
all.sort((a, b) => new Date(b.startedAt) - new Date(a.startedAt));
return all.slice(0, limit);
}
export function exportAgent(agentId) {
const agent = agentsStore.getById(agentId);
if (!agent) return null;
return {
agent_name: agent.agent_name,
description: agent.description,
@@ -270,11 +253,8 @@ export function exportAgent(agentId) {
}
export function importAgent(data) {
if (!data.agent_name) {
throw new Error('agent_name é obrigatório para importação');
}
const agentData = {
if (!data.agent_name) throw new Error('agent_name é obrigatório para importação');
return agentsStore.create({
agent_name: data.agent_name,
description: data.description || '',
tags: sanitizeTags(data.tags),
@@ -283,9 +263,7 @@ export function importAgent(data) {
status: data.status || 'active',
assigned_host: data.assigned_host || 'localhost',
executions: [],
};
return agentsStore.create(agentData);
});
}
export function restoreSchedules(wsCallback) {

View File

@@ -1,9 +1,23 @@
import { v4 as uuidv4 } from 'uuid';
import { pipelinesStore } from '../store/db.js';
import { agentsStore } from '../store/db.js';
import { pipelinesStore, agentsStore, executionsStore } from '../store/db.js';
import * as executor from './executor.js';
import { mem } from '../cache/index.js';
const activePipelines = new Map();
const AGENT_MAP_TTL = 30_000;
/**
 * id → agent_name lookup Map, cached under 'agent:map' for AGENT_MAP_TTL ms
 * so pipeline enrichment does not re-read the agents store on every call.
 */
function getAgentMap() {
  const cachedMap = mem.get('agent:map');
  if (cachedMap !== undefined) return cachedMap;
  const map = new Map(agentsStore.getAll().map((a) => [a.id, a.agent_name]));
  mem.set('agent:map', map, AGENT_MAP_TTL);
  return map;
}
// Drop the cached id→name map; the routes call this after every agent
// create/update/delete so pipeline enrichment never shows stale names.
export function invalidateAgentMapCache() {
  mem.del('agent:map');
}
function validatePipeline(data) {
const errors = [];
@@ -13,8 +27,8 @@ function validatePipeline(data) {
if (!Array.isArray(data.steps) || data.steps.length === 0) {
errors.push('steps é obrigatório e deve ser um array não vazio');
} else {
data.steps.forEach((step, index) => {
if (!step.agentId) errors.push(`steps[${index}].agentId é obrigatório`);
data.steps.forEach((step, i) => {
if (!step.agentId) errors.push(`steps[${i}].agentId é obrigatório`);
});
}
return errors;
@@ -33,13 +47,8 @@ function buildSteps(steps) {
}
/**
 * Attach a human-readable agentName to each step, falling back to the raw
 * agentId when the agent is unknown. Uses the TTL-cached id→name map.
 * (Source contained both the old uncached body and the new cached one —
 * a duplicate `const agentMap` in one scope, i.e. a SyntaxError; kept the
 * new cached version.)
 */
function enrichStepsWithAgentNames(steps) {
  const agentMap = getAgentMap();
  return steps.map((s) => ({ ...s, agentName: agentMap.get(s.agentId) || s.agentId }));
}
function applyTemplate(template, input) {
@@ -64,9 +73,7 @@ function executeStepAsPromise(agentConfig, prompt, pipelineState, wsCallback, pi
});
}
},
onError: (err) => {
reject(err);
},
onError: (err) => reject(err),
onComplete: (result) => {
if (result.exitCode !== 0 && !result.result) {
reject(new Error(result.stderr || `Processo encerrado com código ${result.exitCode}`));
@@ -87,18 +94,23 @@ function executeStepAsPromise(agentConfig, prompt, pipelineState, wsCallback, pi
}
export async function executePipeline(pipelineId, initialInput, wsCallback) {
const pipeline = pipelinesStore.getById(pipelineId);
if (!pipeline) throw new Error(`Pipeline ${pipelineId} não encontrado`);
const pipelineState = {
currentExecutionId: null,
currentStep: 0,
canceled: false,
};
const pl = pipelinesStore.getById(pipelineId);
if (!pl) throw new Error(`Pipeline ${pipelineId} não encontrado`);
const pipelineState = { currentExecutionId: null, currentStep: 0, canceled: false };
activePipelines.set(pipelineId, pipelineState);
const steps = buildSteps(pipeline.steps);
const historyRecord = executionsStore.create({
type: 'pipeline',
pipelineId,
pipelineName: pl.name,
input: initialInput,
status: 'running',
startedAt: new Date().toISOString(),
steps: [],
});
const steps = buildSteps(pl.steps);
const results = [];
let currentInput = initialInput;
@@ -114,6 +126,7 @@ export async function executePipeline(pipelineId, initialInput, wsCallback) {
if (agent.status !== 'active') throw new Error(`Agente ${agent.agent_name} está inativo`);
const prompt = applyTemplate(step.inputTemplate, currentInput);
const stepStart = new Date().toISOString();
if (wsCallback) {
wsCallback({
@@ -133,6 +146,20 @@ export async function executePipeline(pipelineId, initialInput, wsCallback) {
currentInput = result;
results.push({ stepId: step.id, agentName: agent.agent_name, result });
const current = executionsStore.getById(historyRecord.id);
const savedSteps = current ? (current.steps || []) : [];
savedSteps.push({
stepIndex: i,
agentId: step.agentId,
agentName: agent.agent_name,
prompt: prompt.slice(0, 5000),
result,
startedAt: stepStart,
endedAt: new Date().toISOString(),
status: 'completed',
});
executionsStore.update(historyRecord.id, { steps: savedSteps });
if (wsCallback) {
wsCallback({
type: 'pipeline_step_complete',
@@ -145,19 +172,23 @@ export async function executePipeline(pipelineId, initialInput, wsCallback) {
}
activePipelines.delete(pipelineId);
executionsStore.update(historyRecord.id, {
status: pipelineState.canceled ? 'canceled' : 'completed',
endedAt: new Date().toISOString(),
});
if (!pipelineState.canceled && wsCallback) {
wsCallback({
type: 'pipeline_complete',
pipelineId,
results,
});
wsCallback({ type: 'pipeline_complete', pipelineId, results });
}
return results;
} catch (err) {
activePipelines.delete(pipelineId);
executionsStore.update(historyRecord.id, {
status: 'error',
error: err.message,
endedAt: new Date().toISOString(),
});
if (wsCallback) {
wsCallback({
type: 'pipeline_error',
@@ -166,7 +197,6 @@ export async function executePipeline(pipelineId, initialInput, wsCallback) {
error: err.message,
});
}
throw err;
}
}
@@ -174,13 +204,8 @@ export async function executePipeline(pipelineId, initialInput, wsCallback) {
/**
 * Flag a running pipeline as canceled and kill its in-flight step, if any.
 * (The guard + executor.cancel call appeared twice in the source — diff
 * residue; deduplicated.)
 * @returns {boolean} false when the pipeline is not currently running
 */
export function cancelPipeline(pipelineId) {
  const state = activePipelines.get(pipelineId);
  if (!state) return false;
  state.canceled = true;
  if (state.currentExecutionId) executor.cancel(state.currentExecutionId);
  activePipelines.delete(pipelineId);
  return true;
}
@@ -196,27 +221,22 @@ export function getActivePipelines() {
/**
 * Validate and persist a new pipeline.
 * @throws {Error} all validation messages joined by '; '
 * (Source interleaved the old `pipelineData` body with the new inline-object
 * call — invalid syntax; reconstructed to the new version.)
 */
export function createPipeline(data) {
  const errors = validatePipeline(data);
  if (errors.length > 0) throw new Error(errors.join('; '));
  return pipelinesStore.create({
    name: data.name,
    description: data.description || '',
    steps: buildSteps(data.steps),
    status: data.status || 'active',
  });
}
/**
 * Partially update a pipeline; only the fields present in `data` change.
 * Steps are normalized through buildSteps before persisting.
 * @returns the updated pipeline, or null when the id is unknown
 */
export function updatePipeline(id, data) {
  const existing = pipelinesStore.getById(id);
  if (!existing) return null;
  const patch = {};
  for (const field of ['name', 'description', 'status']) {
    if (data[field] !== undefined) patch[field] = data[field];
  }
  if (data.steps !== undefined) patch.steps = buildSteps(data.steps);
  return pipelinesStore.update(id, patch);
}
@@ -229,8 +249,7 @@ export function getPipeline(id) {
}
export function getAllPipelines() {
const pipelines = pipelinesStore.getAll();
return pipelines.map((p) => ({
return pipelinesStore.getAll().map((p) => ({
...p,
steps: enrichStepsWithAgentNames(p.steps || []),
}));

View File

@@ -7,11 +7,12 @@ const schedules = new Map();
const history = [];
const emitter = new EventEmitter();
const cronDateCache = new Map();
const CRON_CACHE_TTL = 60_000;
/**
 * Prepend a fired-task entry, trimming the buffer to HISTORY_LIMIT.
 * (The trim `if` appeared twice in the source — diff residue; deduplicated.)
 */
function addToHistory(entry) {
  history.unshift(entry);
  if (history.length > HISTORY_LIMIT) history.splice(HISTORY_LIMIT);
}
function matchesCronPart(part, value) {
@@ -25,7 +26,7 @@ function matchesCronPart(part, value) {
return parseInt(part) === value;
}
function nextCronDate(cronExpr) {
function computeNextCronDate(cronExpr) {
const parts = cronExpr.split(' ');
if (parts.length !== 5) return null;
@@ -37,36 +38,33 @@ function nextCronDate(cronExpr) {
candidate.setMinutes(candidate.getMinutes() + 1);
for (let i = 0; i < 525600; i++) {
const m = candidate.getMinutes();
const h = candidate.getHours();
const dom = candidate.getDate();
const mon = candidate.getMonth() + 1;
const dow = candidate.getDay();
if (
matchesCronPart(minute, m) &&
matchesCronPart(hour, h) &&
matchesCronPart(dayOfMonth, dom) &&
matchesCronPart(month, mon) &&
matchesCronPart(dayOfWeek, dow)
matchesCronPart(minute, candidate.getMinutes()) &&
matchesCronPart(hour, candidate.getHours()) &&
matchesCronPart(dayOfMonth, candidate.getDate()) &&
matchesCronPart(month, candidate.getMonth() + 1) &&
matchesCronPart(dayOfWeek, candidate.getDay())
) {
return candidate.toISOString();
}
candidate.setMinutes(candidate.getMinutes() + 1);
}
return null;
}
export function schedule(taskId, cronExpr, callback, persist = true) {
if (schedules.has(taskId)) {
unschedule(taskId, false);
}
/**
 * Memoized next-fire-time lookup. computeNextCronDate scans minute-by-minute
 * (up to a year), so results are cached per expression for CRON_CACHE_TTL ms;
 * the fire handler deletes the entry when a job actually runs.
 */
function nextCronDate(cronExpr) {
  const now = Date.now();
  const hit = cronDateCache.get(cronExpr);
  if (hit !== undefined && now - hit.at < CRON_CACHE_TTL) return hit.val;
  const val = computeNextCronDate(cronExpr);
  cronDateCache.set(cronExpr, { val, at: now });
  return val;
}
if (!cron.validate(cronExpr)) {
throw new Error(`Expressão cron inválida: ${cronExpr}`);
}
export function schedule(taskId, cronExpr, callback, persist = true) {
if (schedules.has(taskId)) unschedule(taskId, false);
if (!cron.validate(cronExpr)) throw new Error(`Expressão cron inválida: ${cronExpr}`);
const task = cron.schedule(
cronExpr,
@@ -74,47 +72,31 @@ export function schedule(taskId, cronExpr, callback, persist = true) {
const firedAt = new Date().toISOString();
addToHistory({ taskId, cronExpr, firedAt });
emitter.emit('scheduled-task', { taskId, firedAt });
cronDateCache.delete(cronExpr);
if (callback) callback({ taskId, firedAt });
},
{ scheduled: true }
);
schedules.set(taskId, {
taskId,
cronExpr,
task,
active: true,
createdAt: new Date().toISOString(),
});
schedules.set(taskId, { taskId, cronExpr, task, active: true, createdAt: new Date().toISOString() });
return { taskId, cronExpr };
}
/**
 * Stop and remove a scheduled task.
 * (The persist-delete appeared twice in the source — diff residue; deduplicated.)
 * @param {boolean} persist - when true, also remove it from the schedules store
 * @returns {boolean} true when the task existed in memory
 */
export function unschedule(taskId, persist = true) {
  const entry = schedules.get(taskId);
  if (!entry) return false;
  entry.task.stop();
  schedules.delete(taskId);
  if (persist) schedulesStore.delete(taskId);
  return true;
}
/**
 * Reschedule an existing task with a new cron expression.
 * Validation now happens BEFORE tearing down the current schedule: previously
 * the task was stopped and removed first, so an invalid expression threw and
 * silently lost the existing job. (Also removes a duplicated validation line
 * left over from interleaved diff residue.)
 * @returns {boolean} false when taskId is unknown
 * @throws {Error} when cronExpr is not a valid cron expression
 */
export function updateSchedule(taskId, cronExpr, callback) {
  const entry = schedules.get(taskId);
  if (!entry) return false;
  if (!cron.validate(cronExpr)) throw new Error(`Expressão cron inválida: ${cronExpr}`);
  entry.task.stop();
  schedules.delete(taskId);
  schedule(taskId, cronExpr, callback, false);
  return true;
}
@@ -122,32 +104,23 @@ export function updateSchedule(taskId, cronExpr, callback) {
/**
 * Pause or resume a scheduled task without removing it.
 * (Source contained both the old if/else and a new side-effect ternary —
 * diff residue that would start/stop the task twice; kept a single explicit
 * if/else, avoiding the ternary-for-side-effects anti-pattern.)
 * @returns {boolean} false when taskId is unknown
 */
export function setActive(taskId, active) {
  const entry = schedules.get(taskId);
  if (!entry) return false;
  if (active) {
    entry.task.start();
  } else {
    entry.task.stop();
  }
  entry.active = active;
  return true;
}
/**
 * Merge persisted schedules with in-process runtime state.
 * `active` reflects whether the task is scheduled in THIS process;
 * `nextRun` is the cached next fire time (null when the expression cannot
 * be computed). Supports both `cronExpression` and legacy `cronExpr` fields.
 * (Source interleaved the old push-loop with the new map version, leaving a
 * `const` statement inside an object literal — a SyntaxError; reconstructed
 * to the new map-based version.)
 */
export function getSchedules() {
  return schedulesStore.getAll().map((s) => {
    const inMemory = schedules.get(s.id);
    const cronExpr = s.cronExpression || s.cronExpr || '';
    return {
      ...s,
      cronExpr,
      active: inMemory ? inMemory.active : false,
      nextRun: nextCronDate(cronExpr),
    };
  });
}
export function getHistory() {
@@ -161,20 +134,15 @@ export function restoreSchedules(executeFn) {
for (const s of stored) {
if (!s.active) continue;
const cronExpr = s.cronExpression || s.cronExpr;
try {
schedule(s.id, cronExpr, () => {
executeFn(s.agentId, s.taskDescription);
}, false);
schedule(s.id, cronExpr, () => executeFn(s.agentId, s.taskDescription), false);
restored++;
} catch (err) {
console.log(`[scheduler] Falha ao restaurar agendamento ${s.id}: ${err.message}`);
console.log(`[scheduler] Falha ao restaurar ${s.id}: ${err.message}`);
}
}
if (restored > 0) {
console.log(`[scheduler] ${restored} agendamento(s) restaurado(s)`);
}
if (restored > 0) console.log(`[scheduler] ${restored} agendamento(s) restaurado(s)`);
}
export function on(event, listener) {

153
src/cache/index.js vendored Normal file
View File

@@ -0,0 +1,153 @@
/**
 * Process-local key/value cache with optional per-entry TTL (milliseconds).
 * Expired entries are dropped lazily on read and swept periodically by a
 * background interval (unref'd so it never keeps the process alive).
 */
class MemoryCache {
  #store = new Map();
  #sweeper;

  /** @param {number} cleanupIntervalMs - period between background sweeps */
  constructor(cleanupIntervalMs = 5 * 60 * 1000) {
    this.#sweeper = setInterval(() => this.#sweep(), cleanupIntervalMs);
    this.#sweeper.unref();
  }

  /** Drop every entry whose expiry timestamp has passed. */
  #sweep() {
    const now = Date.now();
    for (const [key, { exp }] of this.#store) {
      if (exp > 0 && now > exp) this.#store.delete(key);
    }
  }

  /** @returns {*} the cached value, or undefined when absent or expired */
  get(key) {
    const entry = this.#store.get(key);
    if (entry === undefined) return undefined;
    if (entry.exp > 0 && Date.now() > entry.exp) {
      this.#store.delete(key);
      return undefined;
    }
    return entry.val;
  }

  /**
   * Store a value. ttlMs <= 0 (the default) means "never expires".
   * @returns {MemoryCache} this, for chaining
   */
  set(key, value, ttlMs = 0) {
    const exp = ttlMs > 0 ? Date.now() + ttlMs : 0;
    this.#store.set(key, { val: value, exp });
    return this;
  }

  /** @returns {boolean} true when the key existed */
  del(key) {
    return this.#store.delete(key);
  }

  /** Expiry-aware membership test (a stale entry counts as absent). */
  has(key) {
    return this.get(key) !== undefined;
  }

  /**
   * Delete every key starting with `prefix`.
   * @returns {number} how many entries were removed
   */
  invalidatePrefix(prefix) {
    let removed = 0;
    for (const key of [...this.#store.keys()]) {
      if (!key.startsWith(prefix)) continue;
      this.#store.delete(key);
      removed += 1;
    }
    return removed;
  }

  /** Remove every entry; the sweep timer keeps running. */
  flush() {
    this.#store.clear();
  }

  /** @returns {number} entry count (may include not-yet-swept stale entries) */
  get size() {
    return this.#store.size;
  }

  /** Stop the sweep timer and empty the cache; do not reuse the instance. */
  destroy() {
    clearInterval(this.#sweeper);
    this.#store.clear();
  }
}
/**
 * Attempt to connect to Redis at `url`.
 * Resolves to the connected client, or null on ANY failure (ioredis not
 * installed, server unreachable, ping timeout) — the caller then keeps
 * running on the in-memory cache alone.
 */
async function tryRedis(url) {
  const options = {
    lazyConnect: true,
    connectTimeout: 3000,
    maxRetriesPerRequest: 1,
    enableOfflineQueue: false,
  };
  try {
    // Dynamic import keeps ioredis an optional dependency.
    const { default: Redis } = await import('ioredis');
    const client = new Redis(url, options);
    await client.ping();
    console.log('[cache] Redis conectado');
    return client;
  } catch (err) {
    console.log('[cache] Redis indisponível, usando memória:', err.message);
    return null;
  }
}
// Singleton L1 (in-process) cache shared by the whole app.
export const mem = new MemoryCache();

// Optional L2: Redis is attempted only when REDIS_URL is set. The connection
// resolves in the background; until then (or on failure) redisClient stays
// null and every redis* helper below is a no-op. tryRedis never rejects, so
// this floating .then() cannot produce an unhandled rejection.
let redisClient = null;
if (process.env.REDIS_URL) {
  tryRedis(process.env.REDIS_URL).then((c) => {
    redisClient = c;
  });
}
/**
 * Best-effort L2 read: resolves to the parsed value, or undefined when Redis
 * is not connected, the key is absent, or any error occurs.
 */
async function redisGet(key) {
  if (!redisClient) return undefined;
  try {
    const raw = await redisClient.get(key);
    return raw != null ? JSON.parse(raw) : undefined;
  } catch {
    return undefined;
  }
}
/**
 * Best-effort fire-and-forget L2 write. TTL is rounded UP to whole seconds
 * (Redis granularity); ttl <= 0 stores without expiry. Errors are swallowed —
 * Redis is an optional accelerator, never a source of truth.
 */
function redisSet(key, value, ttlMs) {
  if (!redisClient) return;
  const payload = JSON.stringify(value);
  const ttlSec = Math.ceil(ttlMs / 1000);
  const op = ttlSec > 0 ? redisClient.setex(key, ttlSec, payload) : redisClient.set(key, payload);
  op.catch(() => {});
}
/** Best-effort fire-and-forget L2 delete; no-op when Redis is not connected. */
function redisDel(key) {
  if (redisClient) redisClient.del(key).catch(() => {});
}
/**
 * Synchronous read-through cache: return the L1 hit, otherwise compute,
 * store in L1 for ttlMs, and asynchronously mirror to Redis.
 * Note: a computeFn returning undefined is never cached (undefined == miss).
 */
export function cached(key, ttlMs, computeFn) {
  const existing = mem.get(key);
  if (existing !== undefined) return existing;
  const fresh = computeFn();
  mem.set(key, fresh, ttlMs);
  redisSet(key, fresh, ttlMs);
  return fresh;
}
/**
 * Async read-through cache with two layers: L1 (memory) first, then Redis;
 * on a full miss, await computeFn and populate both layers.
 * A Redis hit is re-warmed into L1 with the caller's ttlMs.
 */
export async function cachedAsync(key, ttlMs, computeFn) {
  const fromMem = mem.get(key);
  if (fromMem !== undefined) return fromMem;
  const fromRedis = await redisGet(key);
  if (fromRedis !== undefined) {
    mem.set(key, fromRedis, ttlMs);
    return fromRedis;
  }
  const computed = await computeFn();
  mem.set(key, computed, ttlMs);
  redisSet(key, computed, ttlMs);
  return computed;
}
// Remove a single key from both cache layers (memory now, Redis best-effort).
export function invalidate(key) {
  mem.del(key);
  redisDel(key);
}
/**
 * Remove every key starting with `prefix` from L1, and best-effort from Redis.
 * NOTE(review): KEYS scans the whole Redis keyspace (O(N)) — acceptable for
 * small deployments; consider SCAN if the keyspace grows. TODO confirm scale.
 */
export function invalidatePrefix(prefix) {
  mem.invalidatePrefix(prefix);
  if (!redisClient) return;
  redisClient
    .keys(`${prefix}*`)
    .then((keys) => {
      if (keys.length > 0) redisClient.del(...keys);
    })
    .catch(() => {});
}

View File

@@ -2,10 +2,12 @@ import { Router } from 'express';
import { execSync } from 'child_process';
import os from 'os';
import * as manager from '../agents/manager.js';
import { tasksStore, settingsStore } from '../store/db.js';
import { tasksStore, settingsStore, executionsStore } from '../store/db.js';
import * as scheduler from '../agents/scheduler.js';
import * as pipeline from '../agents/pipeline.js';
import { getBinPath } from '../agents/executor.js';
import { getBinPath, updateMaxConcurrent } from '../agents/executor.js';
import { invalidateAgentMapCache } from '../agents/pipeline.js';
import { cached } from '../cache/index.js';
const router = Router();
@@ -21,11 +23,8 @@ export function setWsBroadcastTo(fn) {
}
/**
 * Route a WebSocket message: targeted to one client when clientId is known
 * and a targeted sender is registered, otherwise broadcast to everyone.
 * (Source contained the dispatch twice — interleaved old/new diff residue —
 * which would deliver every message two times; deduplicated.)
 */
function wsCallback(message, clientId) {
  if (clientId && wsBroadcastTo) {
    wsBroadcastTo(clientId, message);
  } else if (wsbroadcast) {
    wsbroadcast(message);
  }
}
router.get('/settings', (req, res) => {
@@ -45,6 +44,7 @@ router.put('/settings', (req, res) => {
}
if (data.maxConcurrent !== undefined) {
data.maxConcurrent = Math.max(1, Math.min(20, parseInt(data.maxConcurrent) || 5));
updateMaxConcurrent(data.maxConcurrent);
}
const saved = settingsStore.save(data);
res.json(saved);
@@ -74,6 +74,7 @@ router.get('/agents/:id', (req, res) => {
router.post('/agents', (req, res) => {
try {
const agent = manager.createAgent(req.body);
invalidateAgentMapCache();
res.status(201).json(agent);
} catch (err) {
res.status(400).json({ error: err.message });
@@ -83,6 +84,7 @@ router.post('/agents', (req, res) => {
router.post('/agents/import', (req, res) => {
try {
const agent = manager.importAgent(req.body);
invalidateAgentMapCache();
res.status(201).json(agent);
} catch (err) {
res.status(400).json({ error: err.message });
@@ -93,6 +95,7 @@ router.put('/agents/:id', (req, res) => {
try {
const agent = manager.updateAgent(req.params.id, req.body);
if (!agent) return res.status(404).json({ error: 'Agente não encontrado' });
invalidateAgentMapCache();
res.json(agent);
} catch (err) {
res.status(400).json({ error: err.message });
@@ -103,6 +106,7 @@ router.delete('/agents/:id', (req, res) => {
try {
const deleted = manager.deleteAgent(req.params.id);
if (!deleted) return res.status(404).json({ error: 'Agente não encontrado' });
invalidateAgentMapCache();
res.status(204).send();
} catch (err) {
res.status(500).json({ error: err.message });
@@ -113,12 +117,8 @@ router.post('/agents/:id/execute', (req, res) => {
try {
const { task, instructions } = req.body;
if (!task) return res.status(400).json({ error: 'task é obrigatório' });
const clientId = req.headers['x-client-id'] || null;
const executionId = manager.executeTask(
req.params.id, task, instructions,
(msg) => wsCallback(msg, clientId)
);
const executionId = manager.executeTask(req.params.id, task, instructions, (msg) => wsCallback(msg, clientId));
res.status(202).json({ executionId, status: 'started' });
} catch (err) {
const status = err.message.includes('não encontrado') ? 404 : 400;
@@ -157,8 +157,7 @@ router.get('/tasks', (req, res) => {
router.post('/tasks', (req, res) => {
try {
if (!req.body.name) return res.status(400).json({ error: 'name é obrigatório' });
const task = tasksStore.create(req.body);
res.status(201).json(task);
res.status(201).json(tasksStore.create(req.body));
} catch (err) {
res.status(400).json({ error: err.message });
}
@@ -257,8 +256,7 @@ router.get('/pipelines/:id', (req, res) => {
router.post('/pipelines', (req, res) => {
try {
const created = pipeline.createPipeline(req.body);
res.status(201).json(created);
res.status(201).json(pipeline.createPipeline(req.body));
} catch (err) {
res.status(400).json({ error: err.message });
}
@@ -288,7 +286,6 @@ router.post('/pipelines/:id/execute', (req, res) => {
try {
const { input } = req.body;
if (!input) return res.status(400).json({ error: 'input é obrigatório' });
const clientId = req.headers['x-client-id'] || null;
pipeline.executePipeline(req.params.id, input, (msg) => wsCallback(msg, clientId)).catch(() => {});
res.status(202).json({ pipelineId: req.params.id, status: 'started' });
@@ -308,54 +305,121 @@ router.post('/pipelines/:id/cancel', (req, res) => {
}
});
// Cache the aggregate status for a few seconds: it fans out to several
// stores and is polled frequently by the dashboard.
const SYSTEM_STATUS_TTL = 5_000;

// GET /system/status — counts of agents/executions/schedules/pipelines.
// (Source still carried the pre-cache store reads inside the try block —
// dead duplicate work from interleaved diff lines; removed so the stores
// are read only inside the cached() compute callback.)
router.get('/system/status', (req, res) => {
  try {
    const status = cached('system:status', SYSTEM_STATUS_TTL, () => {
      const agents = manager.getAllAgents();
      const activeExecutions = manager.getActiveExecutions();
      const schedules = scheduler.getSchedules();
      const pipelines = pipeline.getAllPipelines();
      const activePipelines = pipeline.getActivePipelines();
      return {
        agents: {
          total: agents.length,
          active: agents.filter((a) => a.status === 'active').length,
          inactive: agents.filter((a) => a.status === 'inactive').length,
        },
        executions: {
          active: activeExecutions.length,
          today: manager.getDailyExecutionCount(),
          list: activeExecutions,
        },
        schedules: {
          total: schedules.length,
          active: schedules.filter((s) => s.active).length,
        },
        pipelines: {
          total: pipelines.length,
          active: pipelines.filter((p) => p.status === 'active').length,
          running: activePipelines.length,
        },
      };
    });
    res.json(status);
  } catch (err) {
    res.status(500).json({ error: err.message });
  }
});
// Memoized `claude --version` output; the binary is shelled out to at most
// once per process lifetime ('N/A' when the CLI is missing or times out).
let claudeVersionCache = null;

/**
 * GET /system/info — static environment info: server/node/claude versions,
 * platform and uptime. The Claude CLI version is resolved lazily on the
 * first request and cached for subsequent ones.
 * NOTE(review): the old /system/status response body had been left
 * interleaved in this handler (diff residue) — removed.
 */
router.get('/system/info', (req, res) => {
  try {
    if (claudeVersionCache === null) {
      try {
        claudeVersionCache = execSync(`${getBinPath()} --version`, { timeout: 5000 }).toString().trim();
      } catch {
        claudeVersionCache = 'N/A';
      }
    }
    res.json({
      serverVersion: '1.0.0',
      nodeVersion: process.version,
      claudeVersion: claudeVersionCache,
      platform: `${os.platform()} ${os.arch()}`,
      uptime: Math.floor(process.uptime()),
    });
  } catch (err) {
    res.status(500).json({ error: err.message });
  }
});
/**
 * GET /executions/history — paginated execution history with filters.
 * Query params: limit (default 50), offset (default 0), type, status,
 * search (case-insensitive match on agent/pipeline name or task/input).
 * Responds with { items, total } where total is the post-filter count.
 * Fixes: parseInt now gets an explicit radix, and limit/offset are
 * clamped to sane non-negative values. (Old /system/info lines that had
 * been left interleaved here — diff residue — removed.)
 */
router.get('/executions/history', (req, res) => {
  try {
    const limit = Math.max(1, Number.parseInt(req.query.limit, 10) || 50);
    const offset = Math.max(0, Number.parseInt(req.query.offset, 10) || 0);
    const typeFilter = req.query.type || '';
    const statusFilter = req.query.status || '';
    const search = (req.query.search || '').toLowerCase();

    let items = executionsStore.getAll();
    if (typeFilter) items = items.filter((e) => e.type === typeFilter);
    if (statusFilter) items = items.filter((e) => e.status === statusFilter);
    if (search) {
      items = items.filter((e) => {
        const name = (e.agentName || e.pipelineName || '').toLowerCase();
        const task = (e.task || e.input || '').toLowerCase();
        return name.includes(search) || task.includes(search);
      });
    }

    // Newest first; invalid/missing startedAt sorts via NaN (kept as-is).
    items.sort((a, b) => new Date(b.startedAt) - new Date(a.startedAt));
    const total = items.length;
    const paged = items.slice(offset, offset + limit);
    res.json({ items: paged, total });
  } catch (err) {
    res.status(500).json({ error: err.message });
  }
});
/**
 * GET /executions/history/:id — full detail of a single persisted
 * execution; 404 when the id is unknown.
 */
router.get('/executions/history/:id', (req, res) => {
  try {
    const record = executionsStore.getById(req.params.id);
    if (record === null) {
      return res.status(404).json({ error: 'Execução não encontrada' });
    }
    res.json(record);
  } catch (err) {
    res.status(500).json({ error: err.message });
  }
});
/**
 * DELETE /executions/history/:id — removes one execution record.
 * 204 on success, 404 when the id does not exist.
 */
router.delete('/executions/history/:id', (req, res) => {
  try {
    if (!executionsStore.delete(req.params.id)) {
      return res.status(404).json({ error: 'Execução não encontrada' });
    }
    res.status(204).send();
  } catch (err) {
    res.status(500).json({ error: err.message });
  }
});
/**
 * DELETE /executions/history — wipes the whole execution history by
 * saving an empty collection. Always 204 unless the store throws.
 * Fix: the handler was missing its closing `});` (truncated) — restored.
 */
router.delete('/executions/history', (req, res) => {
  try {
    executionsStore.save([]);
    res.status(204).send();
  } catch (err) {
    res.status(500).json({ error: err.message });
  }
});

View File

@@ -1,15 +1,10 @@
import { readFileSync, writeFileSync, existsSync, mkdirSync, statSync } from 'fs';
import { readFileSync, writeFileSync, existsSync, mkdirSync } from 'fs';
import { dirname } from 'path';
import { fileURLToPath } from 'url';
import { v4 as uuidv4 } from 'uuid';
const __dirname = dirname(fileURLToPath(import.meta.url));
const DATA_DIR = `${__dirname}/../../data`;
const AGENTS_FILE = `${DATA_DIR}/agents.json`;
const TASKS_FILE = `${DATA_DIR}/tasks.json`;
const PIPELINES_FILE = `${DATA_DIR}/pipelines.json`;
const SCHEDULES_FILE = `${DATA_DIR}/schedules.json`;
const SETTINGS_FILE = `${DATA_DIR}/settings.json`;
const DEFAULT_SETTINGS = {
defaultModel: 'claude-sonnet-4-6',
@@ -17,124 +12,177 @@ const DEFAULT_SETTINGS = {
maxConcurrent: 5,
};
const writeLocks = new Map();
const fileCache = new Map();
const DEBOUNCE_MS = 300;
const allStores = [];
function ensureDataDir() {
if (!existsSync(DATA_DIR)) {
mkdirSync(DATA_DIR, { recursive: true });
}
// Creates the data directory (and any missing parents) on first use.
function ensureDir() {
  if (existsSync(DATA_DIR)) return;
  mkdirSync(DATA_DIR, { recursive: true });
}
function getCacheMtime(filePath) {
const cached = fileCache.get(filePath);
if (!cached) return null;
return cached.mtime;
}
function loadFile(filePath, defaultValue = []) {
ensureDataDir();
if (!existsSync(filePath)) {
writeFileSync(filePath, JSON.stringify(defaultValue, null, 2), 'utf8');
fileCache.set(filePath, { data: defaultValue, mtime: Date.now() });
return JSON.parse(JSON.stringify(defaultValue));
}
/**
 * Reads and parses a JSON file, falling back to `fallback` when the file
 * is missing, unreadable, or contains invalid JSON. Never throws.
 * (Old `loadFile` lines that had been left interleaved inside this
 * function — diff residue — removed.)
 *
 * @param {string} path - File to read.
 * @param {*} fallback - Value returned on any failure.
 * @returns {*} Parsed JSON content or `fallback`.
 */
function readJson(path, fallback) {
  try {
    if (!existsSync(path)) return fallback;
    return JSON.parse(readFileSync(path, 'utf8'));
  } catch {
    return fallback;
  }
}
function saveFile(filePath, data) {
ensureDataDir();
const prev = writeLocks.get(filePath) || Promise.resolve();
const next = prev.then(() => {
writeFileSync(filePath, JSON.stringify(data, null, 2), 'utf8');
const stat = statSync(filePath);
fileCache.set(filePath, { data: JSON.parse(JSON.stringify(data)), mtime: stat.mtimeMs });
}).catch(() => {});
writeLocks.set(filePath, next);
return next;
// Serializes `data` as pretty-printed JSON and writes it synchronously,
// making sure the data directory exists first.
function writeJson(path, data) {
  ensureDir();
  const serialized = JSON.stringify(data, null, 2);
  writeFileSync(path, serialized, 'utf8');
}
// Deep-copies a value so store consumers can never mutate the in-memory
// cache through a returned reference.
function clone(v) {
  return structuredClone(v);
}
/**
 * Creates a JSON-file-backed collection store with an in-memory cache and
 * debounced persistence: mutations update memory immediately and schedule
 * a single disk write after DEBOUNCE_MS, coalescing bursts of writes.
 * Every value handed to callers is a deep clone, so external code can
 * never mutate the cache. The store registers itself in `allStores` so
 * flushAllStores() can force pending writes at shutdown.
 * (Old pre-refactor store lines that had been left interleaved here —
 * diff residue — removed.)
 *
 * @param {string} filePath - Absolute path of the backing JSON file.
 * @returns {object} Store API: getAll, getById, create, update, delete, save, flush.
 */
function createStore(filePath) {
  let mem = null;     // in-memory array of records; null until first access
  let dirty = false;  // true when mem has changes not yet written to disk
  let timer = null;   // pending debounce timer handle

  // Lazily loads the backing file on first access.
  function boot() {
    if (mem !== null) return;
    ensureDir();
    mem = readJson(filePath, []);
  }

  // Marks the store dirty and schedules (at most one) debounced write.
  function touch() {
    dirty = true;
    if (timer) return;
    timer = setTimeout(() => {
      timer = null;
      if (dirty) {
        writeJson(filePath, mem);
        dirty = false;
      }
    }, DEBOUNCE_MS);
  }

  const store = {
    /** @returns {Array<object>} Deep copy of every record. */
    getAll() {
      boot();
      return clone(mem);
    },

    /** @returns {object|null} Deep copy of the record, or null when absent. */
    getById(id) {
      boot();
      const item = mem.find((i) => i.id === id);
      return item ? clone(item) : null;
    },

    /** Inserts a record with a generated id and created/updated timestamps. */
    create(data) {
      boot();
      const item = {
        id: uuidv4(),
        ...data,
        created_at: new Date().toISOString(),
        updated_at: new Date().toISOString(),
      };
      mem.push(item);
      touch();
      return clone(item);
    },

    /** Shallow-merges `data` into the record; the id is never overwritten. */
    update(id, data) {
      boot();
      const i = mem.findIndex((x) => x.id === id);
      if (i === -1) return null;
      mem[i] = { ...mem[i], ...data, id, updated_at: new Date().toISOString() };
      touch();
      return clone(mem[i]);
    },

    /** @returns {boolean} true when a record was found and removed. */
    delete(id) {
      boot();
      const i = mem.findIndex((x) => x.id === id);
      if (i === -1) return false;
      mem.splice(i, 1);
      touch();
      return true;
    },

    /**
     * Replaces the whole collection. Fixes two defects of the previous
     * version: boot() runs first (so calling save() with a non-array
     * before any read can no longer persist `null`), and the incoming
     * array is cloned so later mutation by the caller cannot corrupt
     * the cache.
     */
    save(items) {
      boot();
      mem = Array.isArray(items) ? clone(items) : mem;
      touch();
    },

    /** Cancels any pending debounce and writes synchronously when dirty. */
    flush() {
      if (timer) {
        clearTimeout(timer);
        timer = null;
      }
      if (mem !== null && dirty) {
        writeJson(filePath, mem);
        dirty = false;
      }
    },
  };

  allStores.push(store);
  return store;
}
/**
 * Creates the singleton settings store: a single JSON object (not a
 * collection) merged over DEFAULT_SETTINGS, with the same in-memory
 * cache + debounced-write scheme as createStore. Registers itself in
 * `allStores` for flushAllStores().
 * (Old pre-refactor lines that had been left interleaved here — diff
 * residue — removed.)
 *
 * @param {string} filePath - Absolute path of the settings JSON file.
 * @returns {object} Store API: get, save, flush.
 */
function createSettingsStore(filePath) {
  let mem = null;     // settings object; null until first access
  let dirty = false;  // true when mem differs from what is on disk
  let timer = null;   // pending debounce timer handle

  // Lazily loads settings, layering the file's content over the defaults
  // so newly-added default keys are always present.
  function boot() {
    if (mem !== null) return;
    ensureDir();
    mem = { ...DEFAULT_SETTINGS, ...readJson(filePath, DEFAULT_SETTINGS) };
  }

  // Marks the settings dirty and schedules (at most one) debounced write.
  function touch() {
    dirty = true;
    if (timer) return;
    timer = setTimeout(() => {
      timer = null;
      if (dirty) {
        writeJson(filePath, mem);
        dirty = false;
      }
    }, DEBOUNCE_MS);
  }

  const store = {
    /** @returns {object} Deep copy of the current settings. */
    get() {
      boot();
      return clone(mem);
    },

    /** Shallow-merges `data` into the settings and returns the result. */
    save(data) {
      boot();
      mem = { ...mem, ...data };
      touch();
      return clone(mem);
    },

    /** Cancels any pending debounce and writes synchronously when dirty. */
    flush() {
      if (timer) {
        clearTimeout(timer);
        timer = null;
      }
      if (mem !== null && dirty) {
        writeJson(filePath, mem);
        dirty = false;
      }
    },
  };

  allStores.push(store);
  return store;
}
/**
 * Forces every registered store to write pending (debounced) changes to
 * disk immediately — called during graceful shutdown so no mutation made
 * inside the debounce window is lost.
 */
export function flushAllStores() {
  for (const s of allStores) s.flush();
}

// One JSON-backed store per entity type, all sharing the data directory.
// Fix: the pre-refactor export declarations (AGENTS_FILE-style constants)
// had been left in place above these (diff residue), producing duplicate
// `export const` identifiers — removed.
export const agentsStore = createStore(`${DATA_DIR}/agents.json`);
export const tasksStore = createStore(`${DATA_DIR}/tasks.json`);
export const pipelinesStore = createStore(`${DATA_DIR}/pipelines.json`);
export const schedulesStore = createStore(`${DATA_DIR}/schedules.json`);
export const executionsStore = createStore(`${DATA_DIR}/executions.json`);
export const settingsStore = createSettingsStore(`${DATA_DIR}/settings.json`);