feat: añadir limpieza/retención de response_queue (borrado duro)
Co-authored-by: aider (openrouter/openai/gpt-5) <aider@aider.chat>
pull/1/head
parent
a5daba241b
commit
3f9280eb1a
@ -0,0 +1,124 @@
|
||||
import { describe, test, expect, beforeAll, afterAll, beforeEach } from 'bun:test';
|
||||
import { Database } from 'bun:sqlite';
|
||||
import { initializeDatabase } from '../../../src/db';
|
||||
import { ResponseQueue } from '../../../src/services/response-queue';
|
||||
|
||||
let testDb: Database;
|
||||
let originalDbInstance: Database;
|
||||
|
||||
function toIso(dt: Date): string {
|
||||
return dt.toISOString().replace('T', ' ').replace('Z', '');
|
||||
}
|
||||
|
||||
describe('ResponseQueue cleanup/retention', () => {
|
||||
beforeAll(() => {
|
||||
testDb = new Database(':memory:');
|
||||
initializeDatabase(testDb);
|
||||
originalDbInstance = (ResponseQueue as any).dbInstance;
|
||||
(ResponseQueue as any).dbInstance = testDb;
|
||||
});
|
||||
|
||||
afterAll(() => {
|
||||
(ResponseQueue as any).dbInstance = originalDbInstance;
|
||||
testDb.close();
|
||||
});
|
||||
|
||||
beforeEach(() => {
|
||||
testDb.exec('DELETE FROM response_queue');
|
||||
});
|
||||
|
||||
test('does not delete queued or processing items regardless of age', async () => {
|
||||
const old = toIso(new Date(2000, 0, 1));
|
||||
testDb.prepare(`INSERT INTO response_queue (recipient, message, status, updated_at) VALUES (?,?,?,?)`).run('u1','m1','queued', old);
|
||||
testDb.prepare(`INSERT INTO response_queue (recipient, message, status, updated_at) VALUES (?,?,?,?)`).run('u2','m2','processing', old);
|
||||
testDb.prepare(`INSERT INTO response_queue (recipient, message, status) VALUES (?,?,?)`).run('u3','m3','queued');
|
||||
|
||||
const res = await (ResponseQueue as any).runCleanupOnce(new Date());
|
||||
expect(res.totalDeleted).toBe(0);
|
||||
|
||||
const counts = testDb.query(`SELECT status, COUNT(*) as c FROM response_queue GROUP BY status ORDER BY status`).all() as any[];
|
||||
const map = Object.fromEntries(counts.map(r => [r.status, r.c]));
|
||||
expect(map['queued']).toBe(2);
|
||||
expect(map['processing']).toBe(1);
|
||||
});
|
||||
|
||||
test('deletes sent older than 14 days but keeps recent', async () => {
|
||||
const now = new Date();
|
||||
const days14Ago = new Date(now.getTime() - 14 * 24 * 60 * 60 * 1000);
|
||||
const days13Ago = new Date(now.getTime() - 13 * 24 * 60 * 60 * 1000);
|
||||
const thresholdExact = toIso(days14Ago); // exact boundary
|
||||
|
||||
// exactly at threshold (should NOT delete because comparison is strict <)
|
||||
testDb.prepare(`INSERT INTO response_queue (recipient, message, status, updated_at) VALUES (?,?,?,?)`)
|
||||
.run('u1','m1','sent', thresholdExact);
|
||||
// older than threshold
|
||||
testDb.prepare(`INSERT INTO response_queue (recipient, message, status, updated_at) VALUES (?,?,?,?)`)
|
||||
.run('u2','m2','sent', toIso(new Date(days14Ago.getTime() - 1000)));
|
||||
// newer than threshold
|
||||
testDb.prepare(`INSERT INTO response_queue (recipient, message, status, updated_at) VALUES (?,?,?,?)`)
|
||||
.run('u3','m3','sent', toIso(days13Ago));
|
||||
|
||||
const res = await (ResponseQueue as any).runCleanupOnce(now);
|
||||
expect(res.deletedSent).toBe(1);
|
||||
|
||||
const rows = testDb.query(`SELECT status, updated_at FROM response_queue WHERE status='sent' ORDER BY updated_at`).all() as any[];
|
||||
expect(rows.length).toBe(2);
|
||||
expect(rows[0].updated_at).toBe(thresholdExact);
|
||||
expect(rows[1].updated_at).toBe(toIso(days13Ago));
|
||||
});
|
||||
|
||||
test('deletes failed older than 30 days but keeps newer', async () => {
|
||||
const now = new Date();
|
||||
const days30Ago = new Date(now.getTime() - 30 * 24 * 60 * 60 * 1000);
|
||||
const days29Ago = new Date(now.getTime() - 29 * 24 * 60 * 60 * 1000);
|
||||
|
||||
testDb.prepare(`INSERT INTO response_queue (recipient, message, status, updated_at) VALUES (?,?,?,?)`)
|
||||
.run('u1','m1','failed', toIso(new Date(days30Ago.getTime() - 1000)));
|
||||
testDb.prepare(`INSERT INTO response_queue (recipient, message, status, updated_at) VALUES (?,?,?,?)`)
|
||||
.run('u2','m2','failed', toIso(days29Ago));
|
||||
|
||||
const res = await (ResponseQueue as any).runCleanupOnce(now);
|
||||
expect(res.deletedFailed).toBe(1);
|
||||
|
||||
const rows = testDb.query(`SELECT COUNT(*) as c FROM response_queue WHERE status='failed'`).get() as any;
|
||||
expect(rows.c).toBe(1);
|
||||
});
|
||||
|
||||
test('batch deletes large sets in multiple passes', async () => {
|
||||
(ResponseQueue as any).CLEANUP_BATCH = 500; // reduce for test
|
||||
const old = toIso(new Date(2000, 0, 1));
|
||||
const total = 1200;
|
||||
|
||||
const insert = testDb.prepare(`INSERT INTO response_queue (recipient, message, status, updated_at) VALUES (?,?,?,?)`);
|
||||
testDb.transaction(() => {
|
||||
for (let i = 0; i < total; i++) {
|
||||
insert.run(`u${i}`, `m${i}`, 'sent', old);
|
||||
}
|
||||
})();
|
||||
|
||||
const res = await (ResponseQueue as any).runCleanupOnce(new Date());
|
||||
expect(res.deletedSent).toBe(total);
|
||||
const count = testDb.query(`SELECT COUNT(*) as c FROM response_queue WHERE status='sent'`).get() as any;
|
||||
expect(count.c).toBe(0);
|
||||
});
|
||||
|
||||
test('concurrent cleanup calls do not overlap', async () => {
|
||||
const old = toIso(new Date(2000, 0, 1));
|
||||
for (let i = 0; i < 50; i++) {
|
||||
testDb.prepare(`INSERT INTO response_queue (recipient, message, status, updated_at) VALUES (?,?,?,?)`)
|
||||
.run(`u${i}`, `m${i}`, 'sent', old);
|
||||
}
|
||||
|
||||
// Trigger two cleanups concurrently
|
||||
const [r1, r2] = await Promise.all([
|
||||
(ResponseQueue as any).runCleanupOnce(new Date()),
|
||||
(ResponseQueue as any).runCleanupOnce(new Date()),
|
||||
]);
|
||||
|
||||
const total = (r1.totalDeleted || 0) + (r2.totalDeleted || 0);
|
||||
expect(total).toBe(50); // no double-deletes
|
||||
|
||||
const remain = testDb.query(`SELECT COUNT(*) as c FROM response_queue`).get() as any;
|
||||
expect(remain.c).toBe(0);
|
||||
});
|
||||
});
|
||||
Loading…
Reference in New Issue