run jobs sequentially per target, parallel across local + nodes
All checks were successful
Build and Push Docker Image / build (push) Successful in 19s

"run all" now groups pending jobs by target (local or node), runs them one by
one within each group, but runs different targets in parallel. single "run"
button still fires immediately.

Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
This commit is contained in:
2026-03-04 17:44:42 +01:00
parent aaaa5ff402
commit 41476a2f9b

View File

@@ -9,6 +9,32 @@ import { log, error as logError } from '../lib/log';
const app = new Hono();
// ─── Sequential queue per target ─────────────────────────────────────────────
// Keys of targets ('local' or 'node-<id>') whose queue is currently draining.
// Used to make sure at most one sequential drain loop runs per target.
const runningTargets = new Set<string>();
/**
 * Map a job's node id to the key used to serialize execution per target.
 * Jobs sharing a key run one at a time; distinct keys run in parallel.
 *
 * @param nodeId - Remote node id, or null for the local target.
 * @returns `'node-<id>'` for a remote node, `'local'` otherwise.
 */
function targetKey(nodeId: number | null): string {
  // Explicit null check: a truthiness test would misfile node id 0 as 'local'.
  return nodeId != null ? `node-${nodeId}` : 'local';
}
/** Drain a batch of jobs one at a time on a single target (local or one node). */
async function runSequential(jobs: Job[]): Promise<void> {
  const queueKey = targetKey(jobs[0]?.node_id ?? null);
  // Another drain loop already owns this target — let it finish.
  if (runningTargets.has(queueKey)) {
    return;
  }
  runningTargets.add(queueKey);
  try {
    for (const queued of jobs) {
      // The job may have been cancelled while it sat in the queue; only run
      // rows that are still pending at this instant.
      const row = getDb()
        .prepare('SELECT status FROM jobs WHERE id = ?')
        .get(queued.id) as { status: string } | undefined;
      if (row?.status !== 'pending') continue;
      try {
        await runJob(queued);
      } catch (err) {
        // One failure must not stop the rest of the queue.
        logError(`Job ${queued.id} failed:`, err);
      }
    }
  } finally {
    // Always release the target, even if iteration throws unexpectedly.
    runningTargets.delete(queueKey);
  }
}
// ─── SSE state ────────────────────────────────────────────────────────────────
// Connected server-sent-events subscribers; each callback receives an encoded
// event payload string. NOTE(review): the broadcast side is outside this view.
const jobListeners = new Set<(data: string) => void>();
@@ -74,7 +100,16 @@ app.get('/', (c) => {
/**
 * POST /start — launch every pending job, serialized per target.
 *
 * Jobs are grouped by target (local vs each node); each group drains
 * sequentially via runSequential while distinct groups run in parallel.
 * Responds immediately with the number of jobs queued.
 */
app.post('/start', (c) => {
  const db = getDb();
  const pending = db.prepare("SELECT * FROM jobs WHERE status = 'pending' ORDER BY created_at").all() as Job[];
  // Group by target (local vs each node) — run sequentially within each group,
  // parallel across groups. Do NOT also fire runJob() per job here: that would
  // start every job concurrently and defeat the per-target queue (the old
  // fire-all loop has been removed; runSequential re-checks status itself).
  const groups = new Map<string, Job[]>();
  for (const job of pending) {
    const key = targetKey(job.node_id);
    if (!groups.has(key)) groups.set(key, []);
    groups.get(key)!.push(job);
  }
  for (const jobs of groups.values()) {
    // Fire-and-forget: failures are logged, not surfaced to the HTTP caller.
    runSequential(jobs).catch((err) => logError('Queue failed:', err));
  }
  return c.json({ ok: true, started: pending.length });
});