refactor progress
This commit is contained in:
368
packages/queue/src/__tests__/CronScheduler.test.tsx
Normal file
368
packages/queue/src/__tests__/CronScheduler.test.tsx
Normal file
@@ -0,0 +1,368 @@
|
||||
/*
|
||||
* Copyright (C) 2026 Fluxer Contributors
|
||||
*
|
||||
* This file is part of Fluxer.
|
||||
*
|
||||
* Fluxer is free software: you can redistribute it and/or modify
|
||||
* it under the terms of the GNU Affero General Public License as published by
|
||||
* the Free Software Foundation, either version 3 of the License, or
|
||||
* (at your option) any later version.
|
||||
*
|
||||
* Fluxer is distributed in the hope that it will be useful,
|
||||
* but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
* GNU Affero General Public License for more details.
|
||||
*
|
||||
* You should have received a copy of the GNU Affero General Public License
|
||||
* along with Fluxer. If not, see <https://www.gnu.org/licenses/>.
|
||||
*/
|
||||
|
||||
import * as fs from 'node:fs/promises';
|
||||
import type {LoggerFactory} from '@fluxer/logger/src/LoggerInterface';
|
||||
import {createMockLogger} from '@fluxer/logger/src/mock';
|
||||
import {CronScheduler, type QueueEngineClient} from '@fluxer/queue/src/cron/CronScheduler';
|
||||
import type {JsonValue} from '@fluxer/queue/src/types/JsonTypes';
|
||||
import type {QueueConfig} from '@fluxer/queue/src/types/QueueConfig';
|
||||
import {afterEach, beforeEach, describe, expect, it, vi} from 'vitest';
|
||||
|
||||
const testRoot = `/tmp/fluxer-cron-scheduler-test-${Date.now()}`;
|
||||
|
||||
/**
 * Captures a single `enqueue(...)` invocation recorded by the mock queue
 * engine, so tests can assert on exactly what the scheduler submitted.
 */
interface EnqueueCall {
	/** Task identifier the scheduler asked the engine to run. */
	taskType: string;
	/** JSON payload forwarded with the job. */
	payload: JsonValue;
	/** Optional scheduling priority forwarded to the engine. */
	priority?: number;
	/** Epoch-ms timestamp the job should run at; null/undefined for "now". */
	runAtMs?: number | null;
	/** Retry budget forwarded to the engine. */
	maxAttempts?: number;
	/** Idempotency key, when deduplication was requested. */
	deduplicationId?: string | null;
}
|
||||
|
||||
function createTestConfig(overrides: Partial<QueueConfig> = {}): QueueConfig {
|
||||
return {
|
||||
dataDir: testRoot,
|
||||
snapshotEveryMs: 60000,
|
||||
snapshotAfterOps: 100000,
|
||||
snapshotZstdLevel: 3,
|
||||
defaultVisibilityTimeoutMs: 30000,
|
||||
visibilityTimeoutBackoffMs: 10000,
|
||||
maxReceiveBatch: 100,
|
||||
commandBuffer: 8192,
|
||||
...overrides,
|
||||
};
|
||||
}
|
||||
|
||||
function createLoggerFactory(): LoggerFactory {
|
||||
const mockLogger = createMockLogger();
|
||||
return () => mockLogger;
|
||||
}
|
||||
|
||||
function createMockQueueEngine(): QueueEngineClient & {calls: Array<EnqueueCall>} {
|
||||
const calls: Array<EnqueueCall> = [];
|
||||
let jobIdCounter = 0;
|
||||
|
||||
return {
|
||||
calls,
|
||||
async enqueue(taskType, payload, priority, runAtMs, maxAttempts, deduplicationId) {
|
||||
calls.push({taskType, payload, priority, runAtMs, maxAttempts, deduplicationId});
|
||||
return {job: {id: `job-${++jobIdCounter}`}, enqueued: true};
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
describe('CronScheduler', () => {
|
||||
let scheduler: CronScheduler;
|
||||
let config: QueueConfig;
|
||||
let mockQueueEngine: QueueEngineClient & {calls: Array<EnqueueCall>};
|
||||
|
||||
beforeEach(async () => {
|
||||
vi.useFakeTimers();
|
||||
await fs.rm(testRoot, {recursive: true, force: true});
|
||||
await fs.mkdir(testRoot, {recursive: true});
|
||||
config = createTestConfig();
|
||||
mockQueueEngine = createMockQueueEngine();
|
||||
scheduler = new CronScheduler(config, mockQueueEngine, createLoggerFactory());
|
||||
await scheduler.start();
|
||||
});
|
||||
|
||||
afterEach(async () => {
|
||||
vi.useRealTimers();
|
||||
await scheduler.stop();
|
||||
await fs.rm(testRoot, {recursive: true, force: true});
|
||||
});
|
||||
|
||||
describe('upsert', () => {
|
||||
it('should create a new schedule', async () => {
|
||||
const schedule = await scheduler.upsert('test-cron', 'test-task', {key: 'value'}, '* * * * *', true);
|
||||
|
||||
expect(schedule.id).toBe('test-cron');
|
||||
expect(schedule.taskType).toBe('test-task');
|
||||
expect(schedule.cronExpression).toBe('* * * * *');
|
||||
expect(schedule.enabled).toBe(true);
|
||||
expect(schedule.nextRunMs).not.toBeNull();
|
||||
});
|
||||
|
||||
it('should update an existing schedule', async () => {
|
||||
await scheduler.upsert('test-cron', 'task-v1', {}, '* * * * *', true);
|
||||
const updated = await scheduler.upsert('test-cron', 'task-v2', {updated: true}, '0 * * * *', true);
|
||||
|
||||
expect(updated.taskType).toBe('task-v2');
|
||||
expect(updated.cronExpression).toBe('0 * * * *');
|
||||
|
||||
const list = scheduler.list();
|
||||
expect(list).toHaveLength(1);
|
||||
});
|
||||
|
||||
it('should preserve lastRunMs when updating', async () => {
|
||||
await scheduler.upsert('test-cron', 'test-task', {}, '* * * * *', true);
|
||||
|
||||
vi.advanceTimersByTime(120000);
|
||||
|
||||
const before = scheduler.get('test-cron');
|
||||
const lastRunMs = before?.lastRunMs;
|
||||
|
||||
const updated = await scheduler.upsert('test-cron', 'test-task', {updated: true}, '* * * * *', true);
|
||||
|
||||
expect(updated.lastRunMs).toBe(lastRunMs);
|
||||
});
|
||||
|
||||
it('should not rewrite an unchanged schedule', async () => {
|
||||
const original = await scheduler.upsert('test-cron', 'test-task', {unchanged: true}, '* * * * *', true);
|
||||
const originalUpdatedAtMs = original.updatedAtMs;
|
||||
|
||||
vi.advanceTimersByTime(1000);
|
||||
|
||||
const unchanged = await scheduler.upsert('test-cron', 'test-task', {unchanged: true}, '* * * * *', true);
|
||||
|
||||
expect(unchanged).toBe(original);
|
||||
expect(unchanged.updatedAtMs).toBe(originalUpdatedAtMs);
|
||||
expect(scheduler.list()).toHaveLength(1);
|
||||
});
|
||||
|
||||
it('should throw error for invalid cron expression', async () => {
|
||||
await expect(scheduler.upsert('test-cron', 'test-task', {}, 'invalid cron', true)).rejects.toThrow(
|
||||
'Invalid cron expression',
|
||||
);
|
||||
});
|
||||
|
||||
it('should set nextRunMs to null for disabled schedules', async () => {
|
||||
const schedule = await scheduler.upsert('test-cron', 'test-task', {}, '* * * * *', false);
|
||||
|
||||
expect(schedule.enabled).toBe(false);
|
||||
expect(schedule.nextRunMs).toBeNull();
|
||||
});
|
||||
});
|
||||
|
||||
describe('delete', () => {
|
||||
it('should delete an existing schedule', async () => {
|
||||
await scheduler.upsert('test-cron', 'test-task', {}, '* * * * *', true);
|
||||
|
||||
const result = await scheduler.delete('test-cron');
|
||||
|
||||
expect(result).toBe(true);
|
||||
expect(scheduler.get('test-cron')).toBeNull();
|
||||
});
|
||||
|
||||
it('should return false when deleting non-existent schedule', async () => {
|
||||
const result = await scheduler.delete('non-existent');
|
||||
|
||||
expect(result).toBe(false);
|
||||
});
|
||||
});
|
||||
|
||||
describe('get', () => {
|
||||
it('should return a schedule by id', async () => {
|
||||
await scheduler.upsert('test-cron', 'test-task', {}, '* * * * *', true);
|
||||
|
||||
const schedule = scheduler.get('test-cron');
|
||||
|
||||
expect(schedule).not.toBeNull();
|
||||
expect(schedule?.id).toBe('test-cron');
|
||||
});
|
||||
|
||||
it('should return null for non-existent schedule', () => {
|
||||
const schedule = scheduler.get('non-existent');
|
||||
|
||||
expect(schedule).toBeNull();
|
||||
});
|
||||
});
|
||||
|
||||
describe('list', () => {
|
||||
it('should return empty array when no schedules exist', () => {
|
||||
const schedules = scheduler.list();
|
||||
|
||||
expect(schedules).toHaveLength(0);
|
||||
});
|
||||
|
||||
it('should return all schedules', async () => {
|
||||
await scheduler.upsert('cron-1', 'task-1', {}, '* * * * *', true);
|
||||
await scheduler.upsert('cron-2', 'task-2', {}, '0 * * * *', true);
|
||||
await scheduler.upsert('cron-3', 'task-3', {}, '0 0 * * *', false);
|
||||
|
||||
const schedules = scheduler.list();
|
||||
|
||||
expect(schedules).toHaveLength(3);
|
||||
});
|
||||
});
|
||||
|
||||
describe('getStats', () => {
|
||||
it('should return correct counts', async () => {
|
||||
await scheduler.upsert('cron-1', 'task-1', {}, '0 0 1 1 *', true);
|
||||
await scheduler.upsert('cron-2', 'task-2', {}, '0 0 2 1 *', true);
|
||||
await scheduler.upsert('cron-3', 'task-3', {}, '0 0 3 1 *', false);
|
||||
await scheduler.upsert('cron-4', 'task-4', {}, '0 0 4 1 *', false);
|
||||
|
||||
const stats = scheduler.getStats();
|
||||
|
||||
expect(stats.total).toBe(4);
|
||||
expect(stats.enabled).toBe(2);
|
||||
expect(stats.disabled).toBe(2);
|
||||
});
|
||||
|
||||
it('should return zero counts for empty scheduler', () => {
|
||||
const stats = scheduler.getStats();
|
||||
|
||||
expect(stats.total).toBe(0);
|
||||
expect(stats.enabled).toBe(0);
|
||||
expect(stats.disabled).toBe(0);
|
||||
});
|
||||
});
|
||||
|
||||
describe('cron execution', () => {
|
||||
it('should enqueue job when cron fires', async () => {
|
||||
await scheduler.upsert('test-cron', 'test-task', {message: 'hello'}, '* * * * *', true);
|
||||
|
||||
vi.advanceTimersByTime(120000);
|
||||
|
||||
expect(mockQueueEngine.calls.length).toBeGreaterThan(0);
|
||||
expect(mockQueueEngine.calls[0].taskType).toBe('test-task');
|
||||
});
|
||||
|
||||
it('should not enqueue job for disabled schedule', async () => {
|
||||
await scheduler.upsert('test-cron', 'test-task', {}, '* * * * *', false);
|
||||
|
||||
vi.advanceTimersByTime(120000);
|
||||
|
||||
expect(mockQueueEngine.calls).toHaveLength(0);
|
||||
});
|
||||
|
||||
it('should update lastRunMs after execution', async () => {
|
||||
await scheduler.upsert('test-cron', 'test-task', {}, '* * * * *', true);
|
||||
|
||||
const before = scheduler.get('test-cron');
|
||||
expect(before?.lastRunMs).toBeNull();
|
||||
|
||||
await vi.advanceTimersByTimeAsync(120000);
|
||||
|
||||
const after = scheduler.get('test-cron');
|
||||
expect(after?.lastRunMs).not.toBeNull();
|
||||
});
|
||||
|
||||
it('should update nextRunMs after execution', async () => {
|
||||
await scheduler.upsert('test-cron', 'test-task', {}, '* * * * *', true);
|
||||
|
||||
const before = scheduler.get('test-cron');
|
||||
const nextRunBefore = before?.nextRunMs;
|
||||
|
||||
await vi.advanceTimersByTimeAsync(120000);
|
||||
|
||||
const after = scheduler.get('test-cron');
|
||||
expect(after?.nextRunMs).toBeGreaterThan(nextRunBefore!);
|
||||
});
|
||||
|
||||
it('should handle multiple schedules', async () => {
|
||||
await scheduler.upsert('cron-1', 'task-1', {}, '* * * * *', true);
|
||||
await scheduler.upsert('cron-2', 'task-2', {}, '* * * * *', true);
|
||||
|
||||
await vi.advanceTimersByTimeAsync(120000);
|
||||
|
||||
const taskTypes = mockQueueEngine.calls.map((c) => c.taskType);
|
||||
expect(taskTypes).toContain('task-1');
|
||||
expect(taskTypes).toContain('task-2');
|
||||
});
|
||||
});
|
||||
|
||||
describe('cron expressions', () => {
|
||||
it('should support standard cron expressions', async () => {
|
||||
await scheduler.upsert('minutely', 'task', {}, '* * * * *', true);
|
||||
await scheduler.upsert('hourly', 'task', {}, '0 * * * *', true);
|
||||
await scheduler.upsert('daily', 'task', {}, '0 0 * * *', true);
|
||||
await scheduler.upsert('weekly', 'task', {}, '0 0 * * 0', true);
|
||||
await scheduler.upsert('monthly', 'task', {}, '0 0 1 * *', true);
|
||||
|
||||
const schedules = scheduler.list();
|
||||
expect(schedules).toHaveLength(5);
|
||||
schedules.forEach((s) => {
|
||||
expect(s.nextRunMs).not.toBeNull();
|
||||
});
|
||||
});
|
||||
|
||||
it('should support specific minute patterns', async () => {
|
||||
await scheduler.upsert('every-5-min', 'task', {}, '*/5 * * * *', true);
|
||||
await scheduler.upsert('at-15-30', 'task', {}, '15,30 * * * *', true);
|
||||
await scheduler.upsert('range', 'task', {}, '0-10 * * * *', true);
|
||||
|
||||
const schedules = scheduler.list();
|
||||
expect(schedules).toHaveLength(3);
|
||||
schedules.forEach((s) => {
|
||||
expect(s.nextRunMs).not.toBeNull();
|
||||
});
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
/**
 * Verifies that schedules (including lastRunMs bookkeeping) survive a full
 * stop/start cycle by constructing a second scheduler over the same dataDir.
 */
describe('CronScheduler persistence', () => {
	let config: QueueConfig;
	let mockQueueEngine: QueueEngineClient & {calls: Array<EnqueueCall>};

	beforeEach(async () => {
		vi.useFakeTimers();
		await fs.rm(testRoot, {recursive: true, force: true});
		await fs.mkdir(testRoot, {recursive: true});
		// NOTE(review): snapshotEveryMs is lowered to 1s — presumably so state
		// reaches disk quickly under fake timers; confirm against CronScheduler.
		config = createTestConfig({snapshotEveryMs: 1000});
		mockQueueEngine = createMockQueueEngine();
	});

	afterEach(async () => {
		// Schedulers are stopped inside each test; only timers and the data
		// directory need cleanup here.
		vi.useRealTimers();
		await fs.rm(testRoot, {recursive: true, force: true});
	});

	it('should persist and restore schedules', async () => {
		const scheduler1 = new CronScheduler(config, mockQueueEngine, createLoggerFactory());
		await scheduler1.start();

		await scheduler1.upsert('persist-test', 'test-task', {persistent: true}, '* * * * *', true);
		await scheduler1.stop();

		// A fresh scheduler over the same dataDir must see the stored schedule.
		const scheduler2 = new CronScheduler(config, createMockQueueEngine(), createLoggerFactory());
		await scheduler2.start();

		const schedule = scheduler2.get('persist-test');
		expect(schedule).not.toBeNull();
		expect(schedule?.taskType).toBe('test-task');
		expect(schedule?.cronExpression).toBe('* * * * *');

		await scheduler2.stop();
	});

	it('should persist lastRunMs', async () => {
		const scheduler1 = new CronScheduler(config, mockQueueEngine, createLoggerFactory());
		await scheduler1.start();

		await scheduler1.upsert('persist-test', 'test-task', {}, '* * * * *', true);
		// Async advance: lets the cron fire and the async bookkeeping settle.
		await vi.advanceTimersByTimeAsync(120000);

		const before = scheduler1.get('persist-test');
		const lastRunMs = before?.lastRunMs;
		expect(lastRunMs).not.toBeNull();

		await scheduler1.stop();

		const scheduler2 = new CronScheduler(config, createMockQueueEngine(), createLoggerFactory());
		await scheduler2.start();

		const after = scheduler2.get('persist-test');
		expect(after?.lastRunMs).toBe(lastRunMs);

		await scheduler2.stop();
	});
});
|
||||
304
packages/queue/src/__tests__/DelayQueue.test.tsx
Normal file
304
packages/queue/src/__tests__/DelayQueue.test.tsx
Normal file
@@ -0,0 +1,304 @@
|
||||
/*
|
||||
* Copyright (C) 2026 Fluxer Contributors
|
||||
*
|
||||
* This file is part of Fluxer.
|
||||
*
|
||||
* Fluxer is free software: you can redistribute it and/or modify
|
||||
* it under the terms of the GNU Affero General Public License as published by
|
||||
* the Free Software Foundation, either version 3 of the License, or
|
||||
* (at your option) any later version.
|
||||
*
|
||||
* Fluxer is distributed in the hope that it will be useful,
|
||||
* but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
* GNU Affero General Public License for more details.
|
||||
*
|
||||
* You should have received a copy of the GNU Affero General Public License
|
||||
* along with Fluxer. If not, see <https://www.gnu.org/licenses/>.
|
||||
*/
|
||||
|
||||
import {DelayQueue} from '@fluxer/queue/src/engine/DelayQueue';
|
||||
import {afterEach, beforeEach, describe, expect, it, vi} from 'vitest';
|
||||
|
||||
/**
 * Minimal payload stored in the DelayQueue under test; `id` doubles as the
 * dedup key (see the suite's keyExtractor and removeByKey/hasByKey tests).
 */
interface TestItem {
	id: string;
	data: string;
}
|
||||
|
||||
/**
 * Tests for DelayQueue<T>: a deadline-ordered queue with key-based dedup.
 * Fake timers control the clock; advancing time makes deadlines "expire".
 */
describe('DelayQueue', () => {
	let queue: DelayQueue<TestItem>;
	// Items are keyed by id — pushing the same id again replaces the entry.
	const keyExtractor = (item: TestItem) => item.id;

	beforeEach(() => {
		queue = new DelayQueue<TestItem>(keyExtractor);
		vi.useFakeTimers();
	});

	afterEach(() => {
		vi.useRealTimers();
	});

	describe('basic operations', () => {
		it('should start empty', () => {
			expect(queue.isEmpty).toBe(true);
			expect(queue.size).toBe(0);
		});

		it('should push items with deadlines', () => {
			const now = Date.now();
			queue.push({id: 'item-1', data: 'test'}, now + 1000);

			expect(queue.isEmpty).toBe(false);
			expect(queue.size).toBe(1);
		});

		it('should clear all items', () => {
			const now = Date.now();
			queue.push({id: 'item-1', data: 'test1'}, now + 1000);
			queue.push({id: 'item-2', data: 'test2'}, now + 2000);

			queue.clear();

			expect(queue.isEmpty).toBe(true);
			expect(queue.size).toBe(0);
		});
	});

	describe('popExpired', () => {
		it('should return empty array when no items are expired', () => {
			const now = Date.now();
			queue.push({id: 'item-1', data: 'test'}, now + 10000);

			const expired = queue.popExpired();

			// Unexpired items stay queued.
			expect(expired).toHaveLength(0);
			expect(queue.size).toBe(1);
		});

		it('should pop expired items', () => {
			const now = Date.now();
			queue.push({id: 'item-1', data: 'test1'}, now + 100);
			queue.push({id: 'item-2', data: 'test2'}, now + 200);
			queue.push({id: 'item-3', data: 'test3'}, now + 10000);

			// 250ms later: items 1 and 2 are past deadline, item 3 is not.
			vi.advanceTimersByTime(250);

			const expired = queue.popExpired();

			expect(expired).toHaveLength(2);
			expect(expired.map((i) => i.id)).toContain('item-1');
			expect(expired.map((i) => i.id)).toContain('item-2');
			expect(queue.size).toBe(1);
		});

		it('should pop items in deadline order', () => {
			const now = Date.now();
			// Inserted out of order on purpose; pop order must follow deadlines.
			queue.push({id: 'item-3', data: 'third'}, now + 300);
			queue.push({id: 'item-1', data: 'first'}, now + 100);
			queue.push({id: 'item-2', data: 'second'}, now + 200);

			vi.advanceTimersByTime(350);

			const expired = queue.popExpired();

			expect(expired).toHaveLength(3);
			expect(expired[0].id).toBe('item-1');
			expect(expired[1].id).toBe('item-2');
			expect(expired[2].id).toBe('item-3');
		});

		it('should include items with deadline equal to now', () => {
			const now = Date.now();
			// Boundary case: deadline == now counts as expired (inclusive).
			queue.push({id: 'item-1', data: 'test'}, now);

			const expired = queue.popExpired();

			expect(expired).toHaveLength(1);
			expect(expired[0].id).toBe('item-1');
		});
	});

	describe('remove operations', () => {
		it('should remove item by reference', () => {
			const now = Date.now();
			const item = {id: 'item-1', data: 'test'};
			queue.push(item, now + 1000);

			const removed = queue.remove(item);

			expect(removed).toBe(true);
			expect(queue.isEmpty).toBe(true);
		});

		it('should remove item by key', () => {
			const now = Date.now();
			queue.push({id: 'item-1', data: 'test'}, now + 1000);

			const removed = queue.removeByKey('item-1');

			expect(removed).toBe(true);
			expect(queue.isEmpty).toBe(true);
		});

		it('should return false when removing non-existent item', () => {
			const now = Date.now();
			queue.push({id: 'item-1', data: 'test'}, now + 1000);

			const removed = queue.removeByKey('non-existent');

			expect(removed).toBe(false);
			expect(queue.size).toBe(1);
		});

		it('should update item when pushing with same key', () => {
			const now = Date.now();
			// Second push with the same key replaces both payload and deadline.
			queue.push({id: 'item-1', data: 'original'}, now + 1000);
			queue.push({id: 'item-1', data: 'updated'}, now + 2000);

			expect(queue.size).toBe(1);

			// Past the original deadline but before the replacement's: no pops.
			vi.advanceTimersByTime(1500);
			const expired1 = queue.popExpired();
			expect(expired1).toHaveLength(0);

			vi.advanceTimersByTime(1000);
			const expired2 = queue.popExpired();
			expect(expired2).toHaveLength(1);
			expect(expired2[0].data).toBe('updated');
		});
	});

	describe('has operations', () => {
		it('should return true for existing item', () => {
			const now = Date.now();
			const item = {id: 'item-1', data: 'test'};
			queue.push(item, now + 1000);

			expect(queue.has(item)).toBe(true);
			expect(queue.hasByKey('item-1')).toBe(true);
		});

		it('should return false for non-existent item', () => {
			const now = Date.now();
			queue.push({id: 'item-1', data: 'test'}, now + 1000);

			expect(queue.has({id: 'item-2', data: 'test'})).toBe(false);
			expect(queue.hasByKey('item-2')).toBe(false);
		});

		it('should return false after item is removed', () => {
			const now = Date.now();
			const item = {id: 'item-1', data: 'test'};
			queue.push(item, now + 1000);
			queue.remove(item);

			expect(queue.has(item)).toBe(false);
			expect(queue.hasByKey('item-1')).toBe(false);
		});
	});

	describe('nextDelay', () => {
		it('should return null for empty queue', () => {
			expect(queue.nextDelay()).toBeNull();
		});

		it('should return delay until next deadline', () => {
			const now = Date.now();
			queue.push({id: 'item-1', data: 'test'}, now + 5000);

			const delay = queue.nextDelay();

			expect(delay).toBe(5000);
		});

		it('should return 0 for expired items', () => {
			const now = Date.now();
			queue.push({id: 'item-1', data: 'test'}, now + 100);

			vi.advanceTimersByTime(200);

			const delay = queue.nextDelay();

			// Clamped at 0 — never negative for already-expired items.
			expect(delay).toBe(0);
		});

		it('should return delay to earliest deadline', () => {
			const now = Date.now();
			queue.push({id: 'item-2', data: 'later'}, now + 10000);
			queue.push({id: 'item-1', data: 'sooner'}, now + 3000);

			const delay = queue.nextDelay();

			expect(delay).toBe(3000);
		});
	});

	describe('toArray', () => {
		it('should return copy of internal items', () => {
			const now = Date.now();
			queue.push({id: 'item-1', data: 'test1'}, now + 1000);
			queue.push({id: 'item-2', data: 'test2'}, now + 2000);

			const arr = queue.toArray();

			// Entries expose both the stored item and its deadline.
			expect(arr).toHaveLength(2);
			expect(arr[0].item.id).toBe('item-1');
			expect(arr[0].deadlineMs).toBe(now + 1000);
			expect(arr[1].item.id).toBe('item-2');
			expect(arr[1].deadlineMs).toBe(now + 2000);
		});

		it('should return items sorted by deadline', () => {
			const now = Date.now();
			queue.push({id: 'item-3', data: 'third'}, now + 3000);
			queue.push({id: 'item-1', data: 'first'}, now + 1000);
			queue.push({id: 'item-2', data: 'second'}, now + 2000);

			const arr = queue.toArray();

			expect(arr[0].item.id).toBe('item-1');
			expect(arr[1].item.id).toBe('item-2');
			expect(arr[2].item.id).toBe('item-3');
		});
	});

	describe('edge cases', () => {
		it('should handle items with same deadline', () => {
			const now = Date.now();
			const deadline = now + 1000;
			queue.push({id: 'item-1', data: 'first'}, deadline);
			queue.push({id: 'item-2', data: 'second'}, deadline);
			queue.push({id: 'item-3', data: 'third'}, deadline);

			vi.advanceTimersByTime(1100);

			const expired = queue.popExpired();

			expect(expired).toHaveLength(3);
		});

		it('should handle negative deadline (already expired)', () => {
			const now = Date.now();
			// Deadline in the past must be immediately poppable.
			queue.push({id: 'item-1', data: 'test'}, now - 1000);

			const expired = queue.popExpired();

			expect(expired).toHaveLength(1);
		});

		it('should handle large number of items', () => {
			const now = Date.now();
			// Deadlines at 10ms increments: after 5000ms exactly half have expired.
			for (let i = 0; i < 1000; i++) {
				queue.push({id: `item-${i}`, data: `data-${i}`}, now + (i + 1) * 10);
			}

			expect(queue.size).toBe(1000);

			vi.advanceTimersByTime(5000);
			const expired = queue.popExpired();

			expect(expired).toHaveLength(500);
			expect(queue.size).toBe(500);
		});
	});
});
|
||||
294
packages/queue/src/__tests__/PriorityQueue.test.tsx
Normal file
294
packages/queue/src/__tests__/PriorityQueue.test.tsx
Normal file
@@ -0,0 +1,294 @@
|
||||
/*
|
||||
* Copyright (C) 2026 Fluxer Contributors
|
||||
*
|
||||
* This file is part of Fluxer.
|
||||
*
|
||||
* Fluxer is free software: you can redistribute it and/or modify
|
||||
* it under the terms of the GNU Affero General Public License as published by
|
||||
* the Free Software Foundation, either version 3 of the License, or
|
||||
* (at your option) any later version.
|
||||
*
|
||||
* Fluxer is distributed in the hope that it will be useful,
|
||||
* but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
* GNU Affero General Public License for more details.
|
||||
*
|
||||
* You should have received a copy of the GNU Affero General Public License
|
||||
* along with Fluxer. If not, see <https://www.gnu.org/licenses/>.
|
||||
*/
|
||||
|
||||
import {PriorityQueue} from '@fluxer/queue/src/engine/PriorityQueue';
|
||||
import {createJobID, type ReadyItem} from '@fluxer/queue/src/types/JobTypes';
|
||||
import {beforeEach, describe, expect, it} from 'vitest';
|
||||
|
||||
function createReadyItem(
|
||||
jobId: string,
|
||||
priority: number,
|
||||
runAtMs: number = 1000,
|
||||
createdAtMs: number = 1000,
|
||||
sequence: number = 0,
|
||||
): ReadyItem {
|
||||
return {
|
||||
jobId: createJobID(jobId),
|
||||
priority,
|
||||
runAtMs,
|
||||
createdAtMs,
|
||||
sequence,
|
||||
};
|
||||
}
|
||||
|
||||
/**
 * Tests for PriorityQueue of ReadyItems. Ordering contract exercised below:
 * higher priority first, then earlier runAtMs, then earlier createdAtMs,
 * then lower sequence as the final tiebreaker.
 */
describe('PriorityQueue', () => {
	let queue: PriorityQueue;

	beforeEach(() => {
		queue = new PriorityQueue();
	});

	describe('basic operations', () => {
		it('should start empty', () => {
			expect(queue.isEmpty).toBe(true);
			expect(queue.size).toBe(0);
			expect(queue.peek()).toBeUndefined();
			expect(queue.pop()).toBeUndefined();
		});

		it('should push and pop a single item', () => {
			const item = createReadyItem('job-1', 5);
			queue.push(item);

			expect(queue.isEmpty).toBe(false);
			expect(queue.size).toBe(1);
			// peek must not remove the item.
			expect(queue.peek()).toEqual(item);

			const popped = queue.pop();
			expect(popped).toEqual(item);
			expect(queue.isEmpty).toBe(true);
		});

		it('should push multiple items and maintain size', () => {
			queue.push(createReadyItem('job-1', 1));
			queue.push(createReadyItem('job-2', 2));
			queue.push(createReadyItem('job-3', 3));

			expect(queue.size).toBe(3);
		});

		it('should clear all items', () => {
			queue.push(createReadyItem('job-1', 1));
			queue.push(createReadyItem('job-2', 2));

			queue.clear();

			expect(queue.isEmpty).toBe(true);
			expect(queue.size).toBe(0);
		});
	});

	describe('priority ordering', () => {
		it('should pop highest priority first', () => {
			queue.push(createReadyItem('low', 1));
			queue.push(createReadyItem('high', 10));
			queue.push(createReadyItem('medium', 5));

			expect(queue.pop()?.jobId).toBe('high');
			expect(queue.pop()?.jobId).toBe('medium');
			expect(queue.pop()?.jobId).toBe('low');
		});

		it('should order by runAtMs when priorities are equal', () => {
			const baseTime = 1000;
			queue.push(createReadyItem('later', 5, baseTime + 200));
			queue.push(createReadyItem('earlier', 5, baseTime + 100));
			queue.push(createReadyItem('earliest', 5, baseTime));

			// Equal priority: earlier runAtMs wins.
			expect(queue.pop()?.jobId).toBe('earliest');
			expect(queue.pop()?.jobId).toBe('earlier');
			expect(queue.pop()?.jobId).toBe('later');
		});

		it('should order by createdAtMs when priority and runAtMs are equal', () => {
			const baseTime = 1000;
			queue.push(createReadyItem('third', 5, baseTime, baseTime + 200));
			queue.push(createReadyItem('first', 5, baseTime, baseTime));
			queue.push(createReadyItem('second', 5, baseTime, baseTime + 100));

			expect(queue.pop()?.jobId).toBe('first');
			expect(queue.pop()?.jobId).toBe('second');
			expect(queue.pop()?.jobId).toBe('third');
		});

		it('should order by sequence when all other fields are equal', () => {
			const baseTime = 1000;
			queue.push(createReadyItem('third', 5, baseTime, baseTime, 3));
			queue.push(createReadyItem('first', 5, baseTime, baseTime, 1));
			queue.push(createReadyItem('second', 5, baseTime, baseTime, 2));

			// sequence is the final tiebreaker, giving stable FIFO behavior.
			expect(queue.pop()?.jobId).toBe('first');
			expect(queue.pop()?.jobId).toBe('second');
			expect(queue.pop()?.jobId).toBe('third');
		});

		it('should maintain heap property after multiple insertions', () => {
			const priorities = [3, 1, 4, 1, 5, 9, 2, 6, 5, 3];
			priorities.forEach((p, i) => {
				queue.push(createReadyItem(`job-${i}`, p, 1000, 1000, i));
			});

			// Popping everything must yield priorities in descending order.
			const sorted = [...priorities].sort((a, b) => b - a);
			const popped: Array<number> = [];

			while (!queue.isEmpty) {
				const item = queue.pop();
				if (item) {
					popped.push(item.priority);
				}
			}

			expect(popped).toEqual(sorted);
		});
	});

	describe('remove operation', () => {
		it('should remove an existing item', () => {
			queue.push(createReadyItem('job-1', 1));
			queue.push(createReadyItem('job-2', 2));
			queue.push(createReadyItem('job-3', 3));

			const removed = queue.remove(createJobID('job-2'));

			expect(removed).toBe(true);
			expect(queue.size).toBe(2);
			expect(queue.has(createJobID('job-2'))).toBe(false);
		});

		it('should return false when removing non-existent item', () => {
			queue.push(createReadyItem('job-1', 1));

			const removed = queue.remove(createJobID('non-existent'));

			expect(removed).toBe(false);
			expect(queue.size).toBe(1);
		});

		it('should maintain heap property after removal', () => {
			queue.push(createReadyItem('job-1', 1));
			queue.push(createReadyItem('job-2', 5));
			queue.push(createReadyItem('job-3', 3));
			queue.push(createReadyItem('job-4', 7));
			queue.push(createReadyItem('job-5', 2));

			// Remove a middle-priority element, then verify descending pop order.
			queue.remove(createJobID('job-2'));

			expect(queue.pop()?.jobId).toBe('job-4');
			expect(queue.pop()?.jobId).toBe('job-3');
			expect(queue.pop()?.jobId).toBe('job-5');
			expect(queue.pop()?.jobId).toBe('job-1');
		});

		it('should handle removing the only item', () => {
			queue.push(createReadyItem('job-1', 1));

			const removed = queue.remove(createJobID('job-1'));

			expect(removed).toBe(true);
			expect(queue.isEmpty).toBe(true);
		});

		it('should handle removing from the front of the queue', () => {
			queue.push(createReadyItem('job-1', 10));
			queue.push(createReadyItem('job-2', 5));
			queue.push(createReadyItem('job-3', 1));

			// Removing the root is the trickiest heap case.
			queue.remove(createJobID('job-1'));

			expect(queue.peek()?.jobId).toBe('job-2');
		});
	});

	describe('has operation', () => {
		it('should return true for existing item', () => {
			queue.push(createReadyItem('job-1', 1));

			expect(queue.has(createJobID('job-1'))).toBe(true);
		});

		it('should return false for non-existent item', () => {
			queue.push(createReadyItem('job-1', 1));

			expect(queue.has(createJobID('job-2'))).toBe(false);
		});

		it('should return false after item is removed', () => {
			queue.push(createReadyItem('job-1', 1));
			queue.remove(createJobID('job-1'));

			expect(queue.has(createJobID('job-1'))).toBe(false);
		});
	});

	describe('toArray and fromArray', () => {
		it('should convert queue to array', () => {
			const items = [createReadyItem('job-1', 1), createReadyItem('job-2', 2), createReadyItem('job-3', 3)];

			items.forEach((item) => queue.push(item));

			const arr = queue.toArray();

			// Array contents are checked set-wise; no order is asserted here.
			expect(arr.length).toBe(3);
			items.forEach((item) => {
				expect(arr.some((a) => a.jobId === item.jobId)).toBe(true);
			});
		});

		it('should create queue from array', () => {
			const items = [createReadyItem('job-1', 1), createReadyItem('job-2', 5), createReadyItem('job-3', 3)];

			const newQueue = PriorityQueue.fromArray(items);

			expect(newQueue.size).toBe(3);
			expect(newQueue.pop()?.jobId).toBe('job-2');
			expect(newQueue.pop()?.jobId).toBe('job-3');
			expect(newQueue.pop()?.jobId).toBe('job-1');
		});

		it('should create empty queue from empty array', () => {
			const newQueue = PriorityQueue.fromArray([]);

			expect(newQueue.isEmpty).toBe(true);
		});
	});

	describe('edge cases', () => {
		it('should handle items with same priority correctly', () => {
			for (let i = 0; i < 10; i++) {
				queue.push(createReadyItem(`job-${i}`, 5, 1000, 1000, i));
			}

			// Ties on everything but sequence must pop in ascending sequence.
			let lastSequence = -1;
			while (!queue.isEmpty) {
				const item = queue.pop();
				if (item) {
					expect(item.sequence).toBeGreaterThan(lastSequence);
					lastSequence = item.sequence;
				}
			}
		});

		it('should handle large number of items', () => {
			for (let i = 0; i < 1000; i++) {
				queue.push(createReadyItem(`job-${i}`, Math.floor(Math.random() * 100)));
			}

			expect(queue.size).toBe(1000);

			// Priorities must come out monotonically non-increasing.
			let lastPriority = Infinity;
			while (!queue.isEmpty) {
				const item = queue.pop();
				if (item) {
					expect(item.priority).toBeLessThanOrEqual(lastPriority);
					lastPriority = item.priority;
				}
			}
		});
	});
});
|
||||
612
packages/queue/src/__tests__/QueueEngine.test.tsx
Normal file
612
packages/queue/src/__tests__/QueueEngine.test.tsx
Normal file
@@ -0,0 +1,612 @@
|
||||
/*
|
||||
* Copyright (C) 2026 Fluxer Contributors
|
||||
*
|
||||
* This file is part of Fluxer.
|
||||
*
|
||||
* Fluxer is free software: you can redistribute it and/or modify
|
||||
* it under the terms of the GNU Affero General Public License as published by
|
||||
* the Free Software Foundation, either version 3 of the License, or
|
||||
* (at your option) any later version.
|
||||
*
|
||||
* Fluxer is distributed in the hope that it will be useful,
|
||||
* but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
* GNU Affero General Public License for more details.
|
||||
*
|
||||
* You should have received a copy of the GNU Affero General Public License
|
||||
* along with Fluxer. If not, see <https://www.gnu.org/licenses/>.
|
||||
*/
|
||||
|
||||
import * as fs from 'node:fs/promises';
|
||||
import type {LoggerFactory} from '@fluxer/logger/src/LoggerInterface';
|
||||
import {createMockLogger} from '@fluxer/logger/src/mock';
|
||||
import {QueueEngine} from '@fluxer/queue/src/engine/QueueEngine';
|
||||
import {JobStatus} from '@fluxer/queue/src/types/JobTypes';
|
||||
import type {QueueConfig} from '@fluxer/queue/src/types/QueueConfig';
|
||||
import {afterEach, beforeEach, describe, expect, it, vi} from 'vitest';
|
||||
|
||||
const testRoot = `/tmp/fluxer-queue-engine-test-${Date.now()}`;
|
||||
|
||||
function createTestConfig(overrides: Partial<QueueConfig> = {}): QueueConfig {
|
||||
return {
|
||||
dataDir: testRoot,
|
||||
snapshotEveryMs: 60000,
|
||||
snapshotAfterOps: 100000,
|
||||
snapshotZstdLevel: 3,
|
||||
defaultVisibilityTimeoutMs: 30000,
|
||||
visibilityTimeoutBackoffMs: 10000,
|
||||
maxReceiveBatch: 100,
|
||||
commandBuffer: 8192,
|
||||
...overrides,
|
||||
};
|
||||
}
|
||||
|
||||
function createLoggerFactory(): LoggerFactory {
|
||||
const mockLogger = createMockLogger();
|
||||
return () => mockLogger;
|
||||
}
|
||||
|
||||
describe('QueueEngine', () => {
|
||||
let engine: QueueEngine;
|
||||
let config: QueueConfig;
|
||||
|
||||
beforeEach(async () => {
|
||||
vi.useFakeTimers();
|
||||
await fs.rm(testRoot, {recursive: true, force: true});
|
||||
config = createTestConfig();
|
||||
engine = new QueueEngine(config, createLoggerFactory());
|
||||
await engine.start();
|
||||
});
|
||||
|
||||
afterEach(async () => {
|
||||
vi.useRealTimers();
|
||||
await engine.stop();
|
||||
await fs.rm(testRoot, {recursive: true, force: true});
|
||||
});
|
||||
|
||||
describe('enqueue', () => {
|
||||
it('should enqueue a job with default values', async () => {
|
||||
const result = await engine.enqueue('test-task', {message: 'hello'});
|
||||
|
||||
expect(result.enqueued).toBe(true);
|
||||
expect(result.job.taskType).toBe('test-task');
|
||||
expect(result.job.priority).toBe(0);
|
||||
expect(result.job.attempts).toBe(0);
|
||||
expect(result.job.maxAttempts).toBe(3);
|
||||
});
|
||||
|
||||
it('should enqueue a job with custom priority', async () => {
|
||||
const result = await engine.enqueue('test-task', {}, 10);
|
||||
|
||||
expect(result.enqueued).toBe(true);
|
||||
expect(result.job.priority).toBe(10);
|
||||
});
|
||||
|
||||
it('should enqueue a scheduled job', async () => {
|
||||
const now = Date.now();
|
||||
const runAt = now + 60000;
|
||||
const result = await engine.enqueue('test-task', {}, 0, runAt);
|
||||
|
||||
expect(result.enqueued).toBe(true);
|
||||
expect(result.job.runAtMs).toBe(runAt);
|
||||
|
||||
const stats = engine.getStats();
|
||||
expect(stats.scheduled).toBe(1);
|
||||
expect(stats.ready).toBe(0);
|
||||
});
|
||||
|
||||
it('should enqueue a job with custom max attempts', async () => {
|
||||
const result = await engine.enqueue('test-task', {}, 0, null, 5);
|
||||
|
||||
expect(result.enqueued).toBe(true);
|
||||
expect(result.job.maxAttempts).toBe(5);
|
||||
});
|
||||
|
||||
it('should clamp max attempts to valid range', async () => {
|
||||
const resultLow = await engine.enqueue('test-task', {}, 0, null, 0);
|
||||
const resultHigh = await engine.enqueue('test-task', {}, 0, null, 9999);
|
||||
|
||||
expect(resultLow.job.maxAttempts).toBe(1);
|
||||
expect(resultHigh.job.maxAttempts).toBe(1000);
|
||||
});
|
||||
|
||||
it('should handle deduplication', async () => {
|
||||
const result1 = await engine.enqueue('test-task', {}, 0, null, 3, 'unique-key');
|
||||
const result2 = await engine.enqueue('test-task', {}, 0, null, 3, 'unique-key');
|
||||
|
||||
expect(result1.enqueued).toBe(true);
|
||||
expect(result2.enqueued).toBe(false);
|
||||
expect(result2.job.id).toBe(result1.job.id);
|
||||
});
|
||||
|
||||
it('should allow re-enqueue with same deduplication key after job completes', async () => {
|
||||
const result1 = await engine.enqueue('test-task', {}, 0, null, 3, 'unique-key');
|
||||
expect(result1.enqueued).toBe(true);
|
||||
|
||||
const jobs = await engine.dequeue(null, 1, 0, 5000);
|
||||
expect(jobs).toHaveLength(1);
|
||||
|
||||
await engine.ack(jobs[0].receipt);
|
||||
|
||||
const result2 = await engine.enqueue('test-task', {}, 0, null, 3, 'unique-key');
|
||||
expect(result2.enqueued).toBe(true);
|
||||
expect(result2.job.id).not.toBe(result1.job.id);
|
||||
});
|
||||
});
|
||||
|
||||
describe('dequeue', () => {
|
||||
it('should return empty array when queue is empty', async () => {
|
||||
const jobs = await engine.dequeue(null, 10, 0, 5000);
|
||||
|
||||
expect(jobs).toHaveLength(0);
|
||||
});
|
||||
|
||||
it('should dequeue a single job', async () => {
|
||||
await engine.enqueue('test-task', {message: 'hello'});
|
||||
|
||||
const jobs = await engine.dequeue(null, 10, 0, 5000);
|
||||
|
||||
expect(jobs).toHaveLength(1);
|
||||
expect(jobs[0].job.taskType).toBe('test-task');
|
||||
expect(jobs[0].receipt).toBeTruthy();
|
||||
});
|
||||
|
||||
it('should dequeue multiple jobs respecting limit', async () => {
|
||||
for (let i = 0; i < 5; i++) {
|
||||
await engine.enqueue('test-task', {index: i});
|
||||
}
|
||||
|
||||
const jobs = await engine.dequeue(null, 3, 0, 5000);
|
||||
|
||||
expect(jobs).toHaveLength(3);
|
||||
});
|
||||
|
||||
it('should filter by task type', async () => {
|
||||
await engine.enqueue('type-a', {});
|
||||
await engine.enqueue('type-b', {});
|
||||
await engine.enqueue('type-a', {});
|
||||
|
||||
const jobs = await engine.dequeue(['type-a'], 10, 0, 5000);
|
||||
|
||||
expect(jobs).toHaveLength(2);
|
||||
jobs.forEach((job) => {
|
||||
expect(job.job.taskType).toBe('type-a');
|
||||
});
|
||||
});
|
||||
|
||||
it('should increment attempt count on dequeue', async () => {
|
||||
await engine.enqueue('test-task', {});
|
||||
|
||||
const jobs = await engine.dequeue(null, 1, 0, 5000);
|
||||
|
||||
expect(jobs[0].job.attempts).toBe(1);
|
||||
});
|
||||
|
||||
it('should set visibility deadline', async () => {
|
||||
const visibilityTimeout = 10000;
|
||||
await engine.enqueue('test-task', {});
|
||||
|
||||
const jobs = await engine.dequeue(null, 1, 0, visibilityTimeout);
|
||||
const now = Date.now();
|
||||
|
||||
expect(jobs[0].visibilityDeadlineMs).toBeGreaterThanOrEqual(now);
|
||||
expect(jobs[0].visibilityDeadlineMs).toBeLessThanOrEqual(now + visibilityTimeout + 100);
|
||||
});
|
||||
|
||||
it('should dequeue jobs in priority order', async () => {
|
||||
await engine.enqueue('low-priority', {}, 1);
|
||||
await engine.enqueue('high-priority', {}, 10);
|
||||
await engine.enqueue('medium-priority', {}, 5);
|
||||
|
||||
const jobs = await engine.dequeue(null, 3, 0, 5000);
|
||||
|
||||
expect(jobs[0].job.taskType).toBe('high-priority');
|
||||
expect(jobs[1].job.taskType).toBe('medium-priority');
|
||||
expect(jobs[2].job.taskType).toBe('low-priority');
|
||||
});
|
||||
|
||||
it('should not return scheduled jobs before their run time', async () => {
|
||||
const now = Date.now();
|
||||
await engine.enqueue('scheduled-task', {}, 0, now + 60000);
|
||||
|
||||
const jobs = await engine.dequeue(null, 10, 0, 5000);
|
||||
|
||||
expect(jobs).toHaveLength(0);
|
||||
});
|
||||
|
||||
it('should return scheduled jobs after their run time', async () => {
|
||||
const now = Date.now();
|
||||
await engine.enqueue('scheduled-task', {}, 0, now + 1000);
|
||||
|
||||
vi.advanceTimersByTime(2000);
|
||||
|
||||
const jobs = await engine.dequeue(null, 10, 0, 5000);
|
||||
|
||||
expect(jobs).toHaveLength(1);
|
||||
expect(jobs[0].job.taskType).toBe('scheduled-task');
|
||||
});
|
||||
});
|
||||
|
||||
describe('ack', () => {
|
||||
it('should acknowledge a job and remove it from the queue', async () => {
|
||||
await engine.enqueue('test-task', {});
|
||||
const jobs = await engine.dequeue(null, 1, 0, 5000);
|
||||
|
||||
const result = await engine.ack(jobs[0].receipt);
|
||||
|
||||
expect(result).toBe(true);
|
||||
|
||||
const stats = engine.getStats();
|
||||
expect(stats.ready).toBe(0);
|
||||
expect(stats.processing).toBe(0);
|
||||
});
|
||||
|
||||
it('should return false for invalid receipt', async () => {
|
||||
const result = await engine.ack('invalid-receipt');
|
||||
|
||||
expect(result).toBe(false);
|
||||
});
|
||||
|
||||
it('should clear deduplication index on ack', async () => {
|
||||
await engine.enqueue('test-task', {}, 0, null, 3, 'unique-key');
|
||||
const jobs = await engine.dequeue(null, 1, 0, 5000);
|
||||
await engine.ack(jobs[0].receipt);
|
||||
|
||||
const result = await engine.enqueue('test-task', {}, 0, null, 3, 'unique-key');
|
||||
|
||||
expect(result.enqueued).toBe(true);
|
||||
});
|
||||
});
|
||||
|
||||
describe('nack', () => {
|
||||
it('should nack a job and schedule it for retry', async () => {
|
||||
await engine.enqueue('test-task', {}, 0, null, 3);
|
||||
const jobs = await engine.dequeue(null, 1, 0, 5000);
|
||||
|
||||
const result = await engine.nack(jobs[0].receipt, 'processing error');
|
||||
|
||||
expect(result).toBe(true);
|
||||
|
||||
const stats = engine.getStats();
|
||||
expect(stats.processing).toBe(0);
|
||||
expect(stats.scheduled).toBe(1);
|
||||
});
|
||||
|
||||
it('should return false for invalid receipt', async () => {
|
||||
const result = await engine.nack('invalid-receipt');
|
||||
|
||||
expect(result).toBe(false);
|
||||
});
|
||||
|
||||
it('should move job to dead letter after max attempts', async () => {
|
||||
await engine.enqueue('test-task', {}, 0, null, 1);
|
||||
const jobs = await engine.dequeue(null, 1, 0, 5000);
|
||||
|
||||
await engine.nack(jobs[0].receipt, 'failed');
|
||||
|
||||
const stats = engine.getStats();
|
||||
expect(stats.deadLetter).toBe(1);
|
||||
});
|
||||
|
||||
it('should store error message on job', async () => {
|
||||
await engine.enqueue('test-task', {}, 0, null, 1);
|
||||
const jobs = await engine.dequeue(null, 1, 0, 5000);
|
||||
|
||||
await engine.nack(jobs[0].receipt, 'custom error');
|
||||
|
||||
const jobRecord = engine.getJob(jobs[0].job.id);
|
||||
expect(jobRecord?.job.error).toBe('custom error');
|
||||
});
|
||||
|
||||
it('should schedule retry with exponential backoff', async () => {
|
||||
await engine.enqueue('test-task', {}, 0, null, 5);
|
||||
|
||||
const jobs1 = await engine.dequeue(null, 1, 0, 5000);
|
||||
await engine.nack(jobs1[0].receipt);
|
||||
|
||||
const jobRecord = engine.getJob(jobs1[0].job.id);
|
||||
expect(jobRecord?.status).toBe(JobStatus.Scheduled);
|
||||
});
|
||||
});
|
||||
|
||||
describe('changeVisibility', () => {
|
||||
it('should extend visibility timeout', async () => {
|
||||
await engine.enqueue('test-task', {});
|
||||
const jobs = await engine.dequeue(null, 1, 0, 5000);
|
||||
const originalDeadline = jobs[0].visibilityDeadlineMs;
|
||||
|
||||
const result = await engine.changeVisibility(jobs[0].receipt, 60000);
|
||||
|
||||
expect(result).toBe(true);
|
||||
|
||||
const jobRecord = engine.getJob(jobs[0].job.id);
|
||||
expect(jobRecord?.visibilityDeadlineMs).toBeGreaterThan(originalDeadline);
|
||||
});
|
||||
|
||||
it('should return false for invalid receipt', async () => {
|
||||
const result = await engine.changeVisibility('invalid-receipt', 60000);
|
||||
|
||||
expect(result).toBe(false);
|
||||
});
|
||||
|
||||
it('should return false for non-inflight job', async () => {
|
||||
await engine.enqueue('test-task', {});
|
||||
const jobs = await engine.dequeue(null, 1, 0, 5000);
|
||||
await engine.ack(jobs[0].receipt);
|
||||
|
||||
const result = await engine.changeVisibility(jobs[0].receipt, 60000);
|
||||
|
||||
expect(result).toBe(false);
|
||||
});
|
||||
});
|
||||
|
||||
describe('retryJob', () => {
|
||||
it('should retry a dead letter job', async () => {
|
||||
await engine.enqueue('test-task', {}, 0, null, 1);
|
||||
const jobs = await engine.dequeue(null, 1, 0, 5000);
|
||||
await engine.nack(jobs[0].receipt);
|
||||
|
||||
const stats1 = engine.getStats();
|
||||
expect(stats1.deadLetter).toBe(1);
|
||||
|
||||
const retried = await engine.retryJob(jobs[0].job.id);
|
||||
|
||||
expect(retried).not.toBeNull();
|
||||
expect(retried?.attempts).toBe(0);
|
||||
|
||||
const stats2 = engine.getStats();
|
||||
expect(stats2.deadLetter).toBe(0);
|
||||
expect(stats2.ready).toBe(1);
|
||||
});
|
||||
|
||||
it('should return null for non-dead-letter job', async () => {
|
||||
await engine.enqueue('test-task', {});
|
||||
|
||||
const jobs = await engine.dequeue(null, 1, 0, 5000);
|
||||
const retried = await engine.retryJob(jobs[0].job.id);
|
||||
|
||||
expect(retried).toBeNull();
|
||||
});
|
||||
|
||||
it('should return null for non-existent job', async () => {
|
||||
const retried = await engine.retryJob('non-existent-id');
|
||||
|
||||
expect(retried).toBeNull();
|
||||
});
|
||||
});
|
||||
|
||||
describe('deleteJob', () => {
|
||||
it('should delete a ready job', async () => {
|
||||
const {job} = await engine.enqueue('test-task', {});
|
||||
|
||||
const result = await engine.deleteJob(job.id);
|
||||
|
||||
expect(result).toBe(true);
|
||||
expect(engine.getJob(job.id)).toBeNull();
|
||||
});
|
||||
|
||||
it('should delete a scheduled job', async () => {
|
||||
const now = Date.now();
|
||||
const {job} = await engine.enqueue('test-task', {}, 0, now + 60000);
|
||||
|
||||
const result = await engine.deleteJob(job.id);
|
||||
|
||||
expect(result).toBe(true);
|
||||
expect(engine.getJob(job.id)).toBeNull();
|
||||
});
|
||||
|
||||
it('should delete an inflight job', async () => {
|
||||
await engine.enqueue('test-task', {});
|
||||
const jobs = await engine.dequeue(null, 1, 0, 5000);
|
||||
|
||||
const result = await engine.deleteJob(jobs[0].job.id);
|
||||
|
||||
expect(result).toBe(true);
|
||||
expect(engine.getJob(jobs[0].job.id)).toBeNull();
|
||||
});
|
||||
|
||||
it('should delete a dead letter job', async () => {
|
||||
await engine.enqueue('test-task', {}, 0, null, 1);
|
||||
const jobs = await engine.dequeue(null, 1, 0, 5000);
|
||||
await engine.nack(jobs[0].receipt);
|
||||
|
||||
const result = await engine.deleteJob(jobs[0].job.id);
|
||||
|
||||
expect(result).toBe(true);
|
||||
expect(engine.getJob(jobs[0].job.id)).toBeNull();
|
||||
});
|
||||
|
||||
it('should return false for non-existent job', async () => {
|
||||
const result = await engine.deleteJob('non-existent');
|
||||
|
||||
expect(result).toBe(false);
|
||||
});
|
||||
|
||||
it('should clear deduplication index on delete', async () => {
|
||||
const {job} = await engine.enqueue('test-task', {}, 0, null, 3, 'unique-key');
|
||||
await engine.deleteJob(job.id);
|
||||
|
||||
const result = await engine.enqueue('test-task', {}, 0, null, 3, 'unique-key');
|
||||
|
||||
expect(result.enqueued).toBe(true);
|
||||
});
|
||||
});
|
||||
|
||||
describe('getStats', () => {
|
||||
it('should return correct counts', async () => {
|
||||
const now = Date.now();
|
||||
|
||||
await engine.enqueue('ready-1', {});
|
||||
await engine.enqueue('ready-2', {});
|
||||
await engine.enqueue('scheduled-1', {}, 0, now + 60000);
|
||||
await engine.enqueue('to-process', {});
|
||||
await engine.enqueue('to-deadletter', {}, 0, null, 1);
|
||||
|
||||
const jobs = await engine.dequeue(['to-process'], 1, 0, 5000);
|
||||
expect(jobs).toHaveLength(1);
|
||||
|
||||
const dlJobs = await engine.dequeue(['to-deadletter'], 1, 0, 5000);
|
||||
expect(dlJobs).toHaveLength(1);
|
||||
await engine.nack(dlJobs[0].receipt);
|
||||
|
||||
const stats = engine.getStats();
|
||||
|
||||
expect(stats.ready).toBe(2);
|
||||
expect(stats.scheduled).toBe(1);
|
||||
expect(stats.processing).toBe(1);
|
||||
expect(stats.deadLetter).toBe(1);
|
||||
});
|
||||
});
|
||||
|
||||
describe('getJob', () => {
|
||||
it('should return job record', async () => {
|
||||
const {job} = await engine.enqueue('test-task', {message: 'hello'});
|
||||
|
||||
const record = engine.getJob(job.id);
|
||||
|
||||
expect(record).not.toBeNull();
|
||||
expect(record?.job.id).toBe(job.id);
|
||||
expect(record?.status).toBe(JobStatus.Ready);
|
||||
});
|
||||
|
||||
it('should return null for non-existent job', async () => {
|
||||
const record = engine.getJob('non-existent');
|
||||
|
||||
expect(record).toBeNull();
|
||||
});
|
||||
});
|
||||
|
||||
describe('resetState', () => {
|
||||
it('should clear all state', async () => {
|
||||
await engine.enqueue('task-1', {});
|
||||
await engine.enqueue('task-2', {});
|
||||
await engine.enqueue('task-3', {}, 0, Date.now() + 60000);
|
||||
|
||||
await engine.resetState();
|
||||
|
||||
const stats = engine.getStats();
|
||||
expect(stats.ready).toBe(0);
|
||||
expect(stats.scheduled).toBe(0);
|
||||
expect(stats.processing).toBe(0);
|
||||
expect(stats.deadLetter).toBe(0);
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
describe('QueueEngine visibility timeout', () => {
|
||||
let engine: QueueEngine;
|
||||
let config: QueueConfig;
|
||||
|
||||
beforeEach(async () => {
|
||||
vi.useFakeTimers();
|
||||
await fs.rm(testRoot, {recursive: true, force: true});
|
||||
config = createTestConfig({
|
||||
defaultVisibilityTimeoutMs: 5000,
|
||||
visibilityTimeoutBackoffMs: 1000,
|
||||
});
|
||||
engine = new QueueEngine(config, createLoggerFactory());
|
||||
await engine.start();
|
||||
});
|
||||
|
||||
afterEach(async () => {
|
||||
vi.useRealTimers();
|
||||
await engine.stop();
|
||||
await fs.rm(testRoot, {recursive: true, force: true});
|
||||
});
|
||||
|
||||
it('should return job to ready queue after visibility timeout expires', async () => {
|
||||
await engine.enqueue('test-task', {}, 0, null, 3);
|
||||
const jobs = await engine.dequeue(null, 1, 0, 5000);
|
||||
expect(jobs).toHaveLength(1);
|
||||
|
||||
await vi.advanceTimersByTimeAsync(6000);
|
||||
|
||||
const stats = engine.getStats();
|
||||
expect(stats.processing).toBe(0);
|
||||
|
||||
const jobRecord = engine.getJob(jobs[0].job.id);
|
||||
expect(jobRecord).not.toBeNull();
|
||||
expect(jobRecord?.status).toBe(JobStatus.Ready);
|
||||
expect(jobRecord?.job.error).toBe('visibility timeout');
|
||||
expect(jobRecord?.job.attempts).toBe(1);
|
||||
});
|
||||
|
||||
it('should move job to dead letter after max attempts with visibility timeout', async () => {
|
||||
await engine.enqueue('test-task', {}, 0, null, 1);
|
||||
|
||||
const jobs = await engine.dequeue(null, 1, 0, 5000);
|
||||
expect(jobs).toHaveLength(1);
|
||||
|
||||
await vi.advanceTimersByTimeAsync(6000);
|
||||
|
||||
const stats = engine.getStats();
|
||||
expect(stats.deadLetter).toBe(1);
|
||||
});
|
||||
});
|
||||
|
||||
describe('QueueEngine concurrency', () => {
|
||||
let engine: QueueEngine;
|
||||
let config: QueueConfig;
|
||||
|
||||
beforeEach(async () => {
|
||||
await fs.rm(testRoot, {recursive: true, force: true});
|
||||
config = createTestConfig();
|
||||
engine = new QueueEngine(config, createLoggerFactory());
|
||||
await engine.start();
|
||||
});
|
||||
|
||||
afterEach(async () => {
|
||||
await engine.stop();
|
||||
await fs.rm(testRoot, {recursive: true, force: true});
|
||||
});
|
||||
|
||||
it('should handle concurrent enqueue operations', async () => {
|
||||
const promises: Array<Promise<{job: {id: string}; enqueued: boolean}>> = [];
|
||||
for (let i = 0; i < 100; i++) {
|
||||
promises.push(engine.enqueue(`task-${i}`, {index: i}));
|
||||
}
|
||||
|
||||
const results = await Promise.all(promises);
|
||||
|
||||
expect(results.every((r) => r.enqueued)).toBe(true);
|
||||
|
||||
const stats = engine.getStats();
|
||||
expect(stats.ready).toBe(100);
|
||||
});
|
||||
|
||||
it('should handle concurrent dequeue operations', async () => {
|
||||
for (let i = 0; i < 50; i++) {
|
||||
await engine.enqueue(`task-${i}`, {index: i});
|
||||
}
|
||||
|
||||
const dequeuePromises: Array<Promise<Array<{job: {id: string}}>>> = [];
|
||||
for (let i = 0; i < 10; i++) {
|
||||
dequeuePromises.push(engine.dequeue(null, 10, 0, 5000));
|
||||
}
|
||||
|
||||
const results = await Promise.all(dequeuePromises);
|
||||
const allJobs = results.flat();
|
||||
|
||||
expect(allJobs.length).toBe(50);
|
||||
|
||||
const jobIds = new Set(allJobs.map((j) => j.job.id));
|
||||
expect(jobIds.size).toBe(50);
|
||||
});
|
||||
|
||||
it('should handle mixed enqueue and dequeue operations', async () => {
|
||||
const operations: Array<Promise<unknown>> = [];
|
||||
|
||||
for (let i = 0; i < 50; i++) {
|
||||
operations.push(engine.enqueue(`task-${i}`, {index: i}));
|
||||
}
|
||||
|
||||
for (let i = 0; i < 10; i++) {
|
||||
operations.push(engine.dequeue(null, 5, 0, 5000));
|
||||
}
|
||||
|
||||
await Promise.all(operations);
|
||||
|
||||
const stats = engine.getStats();
|
||||
expect(stats.ready + stats.processing).toBe(50);
|
||||
});
|
||||
});
|
||||
Reference in New Issue
Block a user